diff --git a/src/acquisition/cdcp/cdc_upload.py b/src/acquisition/cdcp/cdc_upload.py
index 6238dbf3e..3178ff895 100644
--- a/src/acquisition/cdcp/cdc_upload.py
+++ b/src/acquisition/cdcp/cdc_upload.py
@@ -6,7 +6,7 @@
 Reads zip/csv files from CDC and stores page hit counts in the database.
 
 Files can be uploaded at:
-https://delphi.midas.cs.cmu.edu/~automation/public/cdc_upload/
+https://delphi.cmu.edu/~automation/public/cdc_upload/
 
 When someone uploads a new file, two things happen:
   1. the uploaded file is moved to /common/cdc_stage
diff --git a/src/acquisition/flusurv/flusurv.py b/src/acquisition/flusurv/flusurv.py
index 125888b81..d0c08dfa8 100644
--- a/src/acquisition/flusurv/flusurv.py
+++ b/src/acquisition/flusurv/flusurv.py
@@ -79,7 +79,7 @@ def fetch_json(path, payload, call_count=1):
   """Send a request to the server and return the parsed JSON response."""
 
   # it's polite to self-identify this "bot"
-  delphi_url = 'https://delphi.midas.cs.cmu.edu/index.html'
+  delphi_url = 'https://delphi.cmu.edu/index.html'
   user_agent = 'Mozilla/5.0 (compatible; delphibot/1.0; +%s)' % delphi_url
 
   # the FluSurv AMF server
diff --git a/src/acquisition/fluview/fluview.py b/src/acquisition/fluview/fluview.py
index 0704a3068..d723cbc59 100644
--- a/src/acquisition/fluview/fluview.py
+++ b/src/acquisition/fluview/fluview.py
@@ -168,7 +168,7 @@ def save_latest(path=None):
   sess = requests.session()
   sess.headers.update({
     # it's polite to self-identify this "bot"
-    'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
+    'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
   })
 
   # get metatdata
diff --git a/src/acquisition/norostat/norostat_raw.py b/src/acquisition/norostat/norostat_raw.py
index bad0f4fae..582de9684 100644
--- a/src/acquisition/norostat/norostat_raw.py
+++ b/src/acquisition/norostat/norostat_raw.py
@@ -25,7 +25,7 @@
 def fetch_content(norostat_datatable_url="https://www.cdc.gov/norovirus/reporting/norostat/data-table.html"):
   """Download NoroSTAT data-table. Returns the html content."""
   headers = {
-      'User-Agent': 'delphibot/1.0 (+https://delphi.midas.cs.cmu.edu/)',
+      'User-Agent': 'delphibot/1.0 (+https://delphi.cmu.edu/)',
   }
   resp = requests.get(norostat_datatable_url, headers=headers)
   expect_value_eq(resp.status_code, 200,
diff --git a/src/acquisition/wiki/master.php b/src/acquisition/wiki/master.php
index c53d96e85..99ace0cd4 100644
--- a/src/acquisition/wiki/master.php
+++ b/src/acquisition/wiki/master.php
@@ -4,7 +4,7 @@ The job server for wiki scraping. Any number of clients (wiki_download.py
 instances) fetch jobs from, and upload results to, this server.
 
 A simple dashboard is available from dashboard.php, visible here:
-https://delphi.midas.cs.cmu.edu/~automation/public/wiki/
+https://delphi.cmu.edu/~automation/public/wiki/
 
 See wiki.py for many more details.
 */
@@ -17,25 +17,25 @@
 $dbPort = 3306;
 $dbName = 'epidata';
 $dbh = mysql_connect("{$dbHost}:{$dbPort}", $dbUser, $dbPass);
-if(!$dbh) {
-  http_response_code(500);
-  echo 'db problem';
-}
-mysql_select_db($dbName, $dbh);
-if(isset($_REQUEST['get'])) {
-  $type = 0;
-  if(isset($_REQUEST['type'])) {
-    $type = intval($_REQUEST['type']);
+if(!$dbh) {
+  http_response_code(500);
+  echo 'db problem';
+}
+mysql_select_db($dbName, $dbh);
+if(isset($_REQUEST['get'])) {
+  $type = 0;
+  if(isset($_REQUEST['type'])) {
+    $type = intval($_REQUEST['type']);
   }
   mysql_query("UPDATE wiki_raw SET `status` = 0 WHERE `status` = 1 and date_add(`datetime`, interval 10 minute) < now()");
   $result = mysql_query("SELECT `id`, `name`, `hash` FROM wiki_raw WHERE `status` = {$type} ORDER BY rand() ASC LIMIT 1");
   if($row = mysql_fetch_array($result)) {
-    mysql_query("UPDATE wiki_raw SET `status` = 1, `datetime` = now() WHERE `id` = {$row['id']}");
-    echo "{\"id\": {$row['id']}, \"name\": \"{$row['name']}\", \"hash\": \"{$row['hash']}\"}";
-  } else {
-    http_response_code(201);
-    echo "no jobs";
-  }
+    mysql_query("UPDATE wiki_raw SET `status` = 1, `datetime` = now() WHERE `id` = {$row['id']}");
+    echo "{\"id\": {$row['id']}, \"name\": \"{$row['name']}\", \"hash\": \"{$row['hash']}\"}";
+  } else {
+    http_response_code(201);
+    echo "no jobs";
+  }
 } elseif(isset($_REQUEST['put']) && isset($_REQUEST['hmac'])) {
   if(hash_hmac('sha256', $_REQUEST['put'], $hmacSecret) === $_REQUEST['hmac']) {
     $obj = json_decode($_REQUEST['put']);
@@ -47,7 +47,7 @@
     echo 'wrong hmac';
   }
 } else {
-  http_response_code(400);
-  echo 'bad request';
-}
-?>
+  http_response_code(400);
+  echo 'bad request';
+}
+?>
diff --git a/src/acquisition/wiki/wiki_download.py b/src/acquisition/wiki/wiki_download.py
index dd6b49be5..a5140c775 100644
--- a/src/acquisition/wiki/wiki_download.py
+++ b/src/acquisition/wiki/wiki_download.py
@@ -53,7 +53,7 @@
 
 VERSION = 10
 
-MASTER_URL = 'https://delphi.midas.cs.cmu.edu/~automation/public/wiki/master.php'
+MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'
 
 def text(data_string):
   return str(data_string.decode('utf-8'))
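Most of these hunks change the same thing: the URL embedded in the self-identifying "bot" User-Agent that the acquisition scripts attach to outgoing requests. A minimal sketch of that pattern, assuming the `requests` library; the constant and function names here are illustrative, not taken from the patch:

```python
# Sketch of the self-identifying "bot" User-Agent pattern the hunks above
# update; DELPHI_URL and fetch_html are illustrative names, not repo code.
import requests

DELPHI_URL = 'https://delphi.cmu.edu/index.html'
USER_AGENT = 'Mozilla/5.0 (compatible; delphibot/1.0; +%s)' % DELPHI_URL


def fetch_html(url):
  """Fetch a page, politely identifying this "bot" in the User-Agent header."""
  resp = requests.get(url, headers={'User-Agent': USER_AGENT})
  resp.raise_for_status()
  return resp.text
```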
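The master.php hunks show the server side of the wiki job queue: a `get` request hands out one pending job as JSON (`id`, `name`, `hash`) and answers 201 with "no jobs" when the queue is empty, and a `put` is accepted only when it carries a matching HMAC-SHA256 signature. A rough sketch of the client half of that exchange, assuming the `requests` library; the secret value and the shape of the uploaded payload are assumptions, not taken from the patch:

```python
# Sketch of a client (in the spirit of wiki_download.py) talking to master.php:
# fetch a job, then upload a result signed with the shared HMAC secret.
# HMAC_SECRET and the payload passed to put_result are assumptions.
import hashlib
import hmac
import json

import requests

MASTER_URL = 'https://delphi.cmu.edu/~automation/public/wiki/master.php'
HMAC_SECRET = b'changeme'  # shared secret; placeholder value


def get_job(job_type=0):
  """Ask the job server for a pending job; returns None when the queue is empty."""
  resp = requests.get(MASTER_URL, params={'get': '', 'type': job_type})
  if resp.status_code == 201:
    return None  # master.php answers 201 / "no jobs" when nothing is pending
  return json.loads(resp.text)


def put_result(payload):
  """Upload a result, signed with HMAC-SHA256 as master.php expects."""
  put = json.dumps(payload)
  sig = hmac.new(HMAC_SECRET, put.encode('utf-8'), hashlib.sha256).hexdigest()
  resp = requests.post(MASTER_URL, data={'put': put, 'hmac': sig})
  return resp.status_code
```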