
replace remaining delphi.midas.cs.cmu.edu with delphi.cmu.edu #66

Merged 1 commit on Apr 20, 2020
2 changes: 1 addition & 1 deletion src/acquisition/cdcp/cdc_upload.py
@@ -6,7 +6,7 @@
Reads zip/csv files from CDC and stores page hit counts in the database.

Files can be uploaded at:
-https://delphi.midas.cs.cmu.edu/~automation/public/cdc_upload/
+https://delphi.cmu.edu/~automation/public/cdc_upload/

When someone uploads a new file, two things happen:
1. the uploaded file is moved to /common/cdc_stage
2 changes: 1 addition & 1 deletion src/acquisition/flusurv/flusurv.py
@@ -79,7 +79,7 @@ def fetch_json(path, payload, call_count=1):
"""Send a request to the server and return the parsed JSON response."""

# it's polite to self-identify this "bot"
-delphi_url = 'https://delphi.midas.cs.cmu.edu/index.html'
+delphi_url = 'https://delphi.cmu.edu/index.html'
user_agent = 'Mozilla/5.0 (compatible; delphibot/1.0; +%s)' % delphi_url

# the FluSurv AMF server
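
The User-Agent built in fetch_json above is a general courtesy pattern for scrapers: the string embeds a URL where a server operator can find out who is crawling them and how to get in touch. A minimal sketch of the same pattern with requests, where the target endpoint is a hypothetical placeholder and not part of this PR:

import requests

# build a self-identifying "bot" User-Agent, as fetch_json does above
delphi_url = 'https://delphi.cmu.edu/index.html'
user_agent = 'Mozilla/5.0 (compatible; delphibot/1.0; +%s)' % delphi_url

# hypothetical endpoint, for illustration only
resp = requests.get('https://example.org/data.json',
                    headers={'User-Agent': user_agent})
resp.raise_for_status()
print(resp.json())
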
2 changes: 1 addition & 1 deletion src/acquisition/fluview/fluview.py
@@ -168,7 +168,7 @@ def save_latest(path=None):
sess = requests.session()
sess.headers.update({
# it's polite to self-identify this "bot"
-'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
+'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
})

# get metadata
2 changes: 1 addition & 1 deletion src/acquisition/norostat/norostat_raw.py
@@ -25,7 +25,7 @@
def fetch_content(norostat_datatable_url="https://www.cdc.gov/norovirus/reporting/norostat/data-table.html"):
"""Download NoroSTAT data-table. Returns the html content."""
headers = {
-'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
+'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
}
resp = requests.get(norostat_datatable_url, headers=headers)
expect_value_eq(resp.status_code, 200,
40 changes: 20 additions & 20 deletions src/acquisition/wiki/master.php
@@ -4,7 +4,7 @@
The job server for wiki scraping. Any number of clients (wiki_download.py
instances) fetch jobs from, and upload results to, this server. A simple
dashboard is available from dashboard.php, visible here:
-https://delphi.midas.cs.cmu.edu/~automation/public/wiki/
+https://delphi.cmu.edu/~automation/public/wiki/

See wiki.py for many more details.
*/
@@ -17,25 +17,25 @@
$dbPort = 3306;
$dbName = 'epidata';
$dbh = mysql_connect("{$dbHost}:{$dbPort}", $dbUser, $dbPass);
if(!$dbh) {
  http_response_code(500);
  echo 'db problem';
}
mysql_select_db($dbName, $dbh);
if(isset($_REQUEST['get'])) {
  $type = 0;
  if(isset($_REQUEST['type'])) {
    $type = intval($_REQUEST['type']);
  }
  mysql_query("UPDATE wiki_raw SET `status` = 0 WHERE `status` = 1 and date_add(`datetime`, interval 10 minute) < now()");
  $result = mysql_query("SELECT `id`, `name`, `hash` FROM wiki_raw WHERE `status` = {$type} ORDER BY rand() ASC LIMIT 1");
  if($row = mysql_fetch_array($result)) {
    mysql_query("UPDATE wiki_raw SET `status` = 1, `datetime` = now() WHERE `id` = {$row['id']}");
    echo "{\"id\": {$row['id']}, \"name\": \"{$row['name']}\", \"hash\": \"{$row['hash']}\"}";
  } else {
    http_response_code(201);
    echo "no jobs";
  }
} elseif(isset($_REQUEST['put']) && isset($_REQUEST['hmac'])) {
  if(hash_hmac('sha256', $_REQUEST['put'], $hmacSecret) === $_REQUEST['hmac']) {
    $obj = json_decode($_REQUEST['put']);
@@ -47,7 +47,7 @@
    echo 'wrong hmac';
  }
} else {
  http_response_code(400);
  echo 'bad request';
}
?>
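
For uploads, master.php accepts a put payload only when its hmac parameter matches hash_hmac('sha256', ...) recomputed over the payload with the server's shared secret. A client therefore signs its JSON payload with the same HMAC-SHA256 before posting. A minimal sketch of the client side, where the secret value and the payload fields are hypothetical:

import hashlib
import hmac
import json

import requests

MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'
HMAC_SECRET = b'shared-secret'  # hypothetical; the real value lives in the server's config

# hypothetical result payload; the server only checks the signature here
payload = json.dumps({'id': 123, 'status': 2})

# PHP's hash_hmac('sha256', ...) returns lowercase hex, which hexdigest() matches
signature = hmac.new(HMAC_SECRET, payload.encode('utf-8'), hashlib.sha256).hexdigest()

resp = requests.post(MASTER_URL, data={'put': payload, 'hmac': signature})
print(resp.status_code, resp.text)
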
2 changes: 1 addition & 1 deletion src/acquisition/wiki/wiki_download.py
@@ -53,7 +53,7 @@


VERSION = 10
-MASTER_URL = 'https://delphi.midas.cs.cmu.edu/~automation/public/wiki/master.php'
+MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'

def text(data_string):
return str(data_string.decode('utf-8'))
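
Reading master.php together with this client, the fetch side of the protocol is: send a get request (optionally with a type), then expect either a one-job JSON object or HTTP 201 with "no jobs" when the queue is empty. A sketch of that handshake, inferred from the server code above rather than taken from this diff:

import requests

MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'

def get_job(job_type=0):
    """Ask the job server for one pending job of the given type."""
    resp = requests.get(MASTER_URL, params={'get': '', 'type': job_type})
    if resp.status_code == 201:
        return None  # server replied "no jobs"
    resp.raise_for_status()
    # e.g. {"id": 1, "name": "...", "hash": "..."}
    return resp.json()

job = get_job()
if job is None:
    print('nothing to do')
else:
    print('got job %d: %s' % (job['id'], job['name']))
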