Skip to content

Commit 44ee559

Browse files
authored
Merge pull request #66 from pedritom-amzn/covid-doc
replace remaining delphi.midas.cs.cmu.edu with delphi.cmu.edu
2 parents 260a657 + 29899cf commit 44ee559

File tree

6 files changed

+25
-25
lines changed

6 files changed

+25
-25
lines changed

src/acquisition/cdcp/cdc_upload.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
Reads zip/csv files from CDC and stores page hit counts in the database.
77
88
Files can be uploaded at:
9-
https://delphi.midas.cs.cmu.edu/~automation/public/cdc_upload/
9+
https://delphi.cmu.edu/~automation/public/cdc_upload/
1010
1111
When someone uploads a new file, two things happen:
1212
1. the uploaded file is moved to /common/cdc_stage

src/acquisition/flusurv/flusurv.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def fetch_json(path, payload, call_count=1):
7979
"""Send a request to the server and return the parsed JSON response."""
8080

8181
# it's polite to self-identify this "bot"
82-
delphi_url = 'https://delphi.midas.cs.cmu.edu/index.html'
82+
delphi_url = 'https://delphi.cmu.edu/index.html'
8383
user_agent = 'Mozilla/5.0 (compatible; delphibot/1.0; +%s)' % delphi_url
8484

8585
# the FluSurv AMF server

src/acquisition/fluview/fluview.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ def save_latest(path=None):
168168
sess = requests.session()
169169
sess.headers.update({
170170
# it's polite to self-identify this "bot"
171-
'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
171+
'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
172172
})
173173

174174
# get metatdata

src/acquisition/norostat/norostat_raw.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525
def fetch_content(norostat_datatable_url="https://www.cdc.gov/norovirus/reporting/norostat/data-table.html"):
2626
"""Download NoroSTAT data-table. Returns the html content."""
2727
headers = {
28-
'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
28+
'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
2929
}
3030
resp = requests.get(norostat_datatable_url, headers=headers)
3131
expect_value_eq(resp.status_code, 200,

src/acquisition/wiki/master.php

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
The job server for wiki scraping. Any number of clients (wiki_download.py
55
instances) fetch jobs from, and upload results to, this server. A simple
66
dashboard is available from dashboard.php, visible here:
7-
https://delphi.midas.cs.cmu.edu/~automation/public/wiki/
7+
https://delphi.cmu.edu/~automation/public/wiki/
88
99
See wiki.py for many more details.
1010
*/
@@ -17,25 +17,25 @@
1717
$dbPort = 3306;
1818
$dbName = 'epidata';
1919
$dbh = mysql_connect("{$dbHost}:{$dbPort}", $dbUser, $dbPass);
20-
if(!$dbh) {
21-
http_response_code(500);
22-
echo 'db problem';
23-
}
24-
mysql_select_db($dbName, $dbh);
25-
if(isset($_REQUEST['get'])) {
26-
$type = 0;
27-
if(isset($_REQUEST['type'])) {
28-
$type = intval($_REQUEST['type']);
20+
if(!$dbh) {
21+
http_response_code(500);
22+
echo 'db problem';
23+
}
24+
mysql_select_db($dbName, $dbh);
25+
if(isset($_REQUEST['get'])) {
26+
$type = 0;
27+
if(isset($_REQUEST['type'])) {
28+
$type = intval($_REQUEST['type']);
2929
}
3030
mysql_query("UPDATE wiki_raw SET `status` = 0 WHERE `status` = 1 and date_add(`datetime`, interval 10 minute) < now()");
3131
$result = mysql_query("SELECT `id`, `name`, `hash` FROM wiki_raw WHERE `status` = {$type} ORDER BY rand() ASC LIMIT 1");
3232
if($row = mysql_fetch_array($result)) {
33-
mysql_query("UPDATE wiki_raw SET `status` = 1, `datetime` = now() WHERE `id` = {$row['id']}");
34-
echo "{\"id\": {$row['id']}, \"name\": \"{$row['name']}\", \"hash\": \"{$row['hash']}\"}";
35-
} else {
36-
http_response_code(201);
37-
echo "no jobs";
38-
}
33+
mysql_query("UPDATE wiki_raw SET `status` = 1, `datetime` = now() WHERE `id` = {$row['id']}");
34+
echo "{\"id\": {$row['id']}, \"name\": \"{$row['name']}\", \"hash\": \"{$row['hash']}\"}";
35+
} else {
36+
http_response_code(201);
37+
echo "no jobs";
38+
}
3939
} elseif(isset($_REQUEST['put']) && isset($_REQUEST['hmac'])) {
4040
if(hash_hmac('sha256', $_REQUEST['put'], $hmacSecret) === $_REQUEST['hmac']) {
4141
$obj = json_decode($_REQUEST['put']);
@@ -47,7 +47,7 @@
4747
echo 'wrong hmac';
4848
}
4949
} else {
50-
http_response_code(400);
51-
echo 'bad request';
52-
}
53-
?>
50+
http_response_code(400);
51+
echo 'bad request';
52+
}
53+
?>

src/acquisition/wiki/wiki_download.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353

5454

5555
VERSION = 10
56-
MASTER_URL = 'https://delphi.midas.cs.cmu.edu/~automation/public/wiki/master.php'
56+
MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'
5757

5858
def text(data_string):
5959
return str(data_string.decode('utf-8'))

0 commit comments

Comments (0)