@@ -43,12 +43,6 @@
 import requests
 import yaml

-api_token = os.environ.get("GITHUB_TOKEN")
-if api_token is not None:
-    GITHUB_API_HEADERS = {"Authorization": f"Bearer {api_token}"}
-else:
-    GITHUB_API_HEADERS = {}
-

 class Preprocessors:
     """
@@ -164,37 +158,39 @@ def maintainers_add_info(context):
         Given the active maintainers defined in the yaml file, it fetches
         the GitHub user information for them.
         """
-        timestamp = time.time()
-
-        cache_file = pathlib.Path("maintainers.json")
-        if cache_file.is_file():
-            with open(cache_file) as f:
-                context["maintainers"] = json.load(f)
-            # refresh cache after 1 hour
-            if (timestamp - context["maintainers"]["timestamp"]) < 3_600:
-                return context
-
-        context["maintainers"]["timestamp"] = timestamp
-
         repeated = set(context["maintainers"]["active"]) & set(
             context["maintainers"]["inactive"]
         )
         if repeated:
             raise ValueError(f"Maintainers {repeated} are both active and inactive")

-        for kind in ("active", "inactive"):
-            context["maintainers"][f"{kind}_with_github_info"] = []
-            for user in context["maintainers"][kind]:
-                resp = requests.get(
-                    f"https://github.com/api/users/{user}", headers=GITHUB_API_HEADERS
+        maintainers_info = {}
+        for user in (
+            context["maintainers"]["active"] + context["maintainers"]["inactive"]
+        ):
+            resp = requests.get(f"https://github.com/api/users/{user}")
+            if resp.status_code == 403:
+                sys.stderr.write(
+                    "WARN: GitHub API quota exceeded when fetching maintainers\n"
+                )
+                # if we exceed github api quota, we use the github info
+                # of maintainers saved with the website
+                resp_bkp = requests.get(
+                    context["main"]["production_url"] + "maintainers.json"
                 )
-                if context["ignore_io_errors"] and resp.status_code == 403:
-                    return context
-                resp.raise_for_status()
-                context["maintainers"][f"{kind}_with_github_info"].append(resp.json())
+                resp_bkp.raise_for_status()
+                maintainers_info = resp_bkp.json()
+                break

-        with open(cache_file, "w") as f:
-            json.dump(context["maintainers"], f)
+            resp.raise_for_status()
+            maintainers_info[user] = resp.json()
+
+        context["maintainers"]["github_info"] = maintainers_info
+
+        # save the data fetched from github to use it in case we exceed
+        # github api quota in the future
+        with open(pathlib.Path(context["target_path"]) / "maintainers.json", "w") as f:
+            json.dump(maintainers_info, f)

         return context

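The same fetch-then-fall-back shape recurs in the releases and PDEP hunks below. A minimal standalone sketch of the pattern, assuming a hypothetical fetch_with_fallback helper (the commit inlines this logic in each preprocessor rather than factoring it out):

import json
import pathlib
import sys

import requests


def fetch_with_fallback(api_url, backup_url, cache_path):
    """Fetch JSON from the GitHub API; on HTTP 403 (quota exceeded),
    fall back to the copy previously published with the production
    website. Persist whichever copy was obtained next to the built
    site, so production always serves a recent backup.
    """
    resp = requests.get(api_url)
    if resp.status_code == 403:
        sys.stderr.write(f"WARN: GitHub API quota exceeded for {api_url}\n")
        resp = requests.get(backup_url)
    resp.raise_for_status()
    data = resp.json()
    with open(pathlib.Path(cache_path), "w") as f:
        json.dump(data, f)
    return data
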
@@ -203,15 +199,20 @@ def home_add_releases(context):
         context["releases"] = []

         github_repo_url = context["main"]["github_repo_url"]
-        resp = requests.get(
-            f"https://github.com/api/repos/{github_repo_url}/releases",
-            headers=GITHUB_API_HEADERS,
-        )
-        if context["ignore_io_errors"] and resp.status_code == 403:
-            return context
-        resp.raise_for_status()
+        resp = requests.get(f"https://github.com/api/repos/{github_repo_url}/releases")
+        if resp.status_code == 403:
+            sys.stderr.write("WARN: GitHub API quota exceeded when fetching releases\n")
+            resp_bkp = requests.get(context["main"]["production_url"] + "releases.json")
+            resp_bkp.raise_for_status()
+            releases = resp_bkp.json()
+        else:
+            resp.raise_for_status()
+            releases = resp.json()
+
+        with open(pathlib.Path(context["target_path"]) / "releases.json", "w") as f:
+            json.dump(releases, f, default=datetime.datetime.isoformat)

-        for release in resp.json():
+        for release in releases:
             if release["prerelease"]:
                 continue
             published = datetime.datetime.strptime(
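One detail worth noting in the hunk above: json.dump cannot serialize datetime objects, so the backup file is written with default=datetime.datetime.isoformat, a hook that json calls on any value it does not know how to encode. A quick illustration (the release values are invented):

import datetime
import json

release = {"name": "2.0.0", "published": datetime.datetime(2023, 4, 3)}
# json.dumps(release) alone raises TypeError: Object of type datetime is not JSON serializable
print(json.dumps(release, default=datetime.datetime.isoformat))
# {"name": "2.0.0", "published": "2023-04-03T00:00:00"}
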
@@ -229,6 +230,7 @@ def home_add_releases(context):
                     ),
                 }
             )
+
         return context

     @staticmethod
@@ -273,15 +275,22 @@ def roadmap_pdeps(context):
         github_repo_url = context["main"]["github_repo_url"]
         resp = requests.get(
             "https://github.com/api/search/issues?"
-            f"q=is:pr is:open label:PDEP repo:{github_repo_url}",
-            headers=GITHUB_API_HEADERS,
+            f"q=is:pr is:open label:PDEP repo:{github_repo_url}"
         )
-        if context["ignore_io_errors"] and resp.status_code == 403:
-            return context
-        resp.raise_for_status()
+        if resp.status_code == 403:
+            sys.stderr.write("WARN: GitHub API quota exceeded when fetching pdeps\n")
+            resp_bkp = requests.get(context["main"]["production_url"] + "pdeps.json")
+            resp_bkp.raise_for_status()
+            pdeps = resp_bkp.json()
+        else:
+            resp.raise_for_status()
+            pdeps = resp.json()

-        for pdep in resp.json()["items"]:
-            context["pdeps"]["under_discussion"].append(
+        with open(pathlib.Path(context["target_path"]) / "pdeps.json", "w") as f:
+            json.dump(pdeps, f)
+
+        for pdep in pdeps["items"]:
+            context["pdeps"]["Under discussion"].append(
                 {"title": pdep["title"], "url": pdep["url"]}
             )

@@ -314,7 +323,7 @@ def get_callable(obj_as_str: str) -> object:
     return obj


-def get_context(config_fname: str, ignore_io_errors: bool, **kwargs):
+def get_context(config_fname: str, **kwargs):
     """
     Load the config yaml as the base context, and enrich it with the
     information added by the context preprocessors defined in the file.
@@ -323,7 +332,6 @@ def get_context(config_fname: str, ignore_io_errors: bool, **kwargs):
         context = yaml.safe_load(f)

     context["source_path"] = os.path.dirname(config_fname)
-    context["ignore_io_errors"] = ignore_io_errors
     context.update(kwargs)

     preprocessors = (
@@ -361,7 +369,9 @@ def extend_base_template(content: str, base_template: str) -> str:


 def main(
-    source_path: str, target_path: str, base_url: str, ignore_io_errors: bool
+    source_path: str,
+    target_path: str,
+    base_url: str,
 ) -> int:
     """
     Copy every file in the source directory to the target directory.
@@ -375,7 +385,7 @@ def main(
     os.makedirs(target_path, exist_ok=True)

     sys.stderr.write("Generating context...\n")
-    context = get_context(config_fname, ignore_io_errors, base_url=base_url)
+    context = get_context(config_fname, base_url=base_url, target_path=target_path)
     sys.stderr.write("Context generated\n")

     templates_path = os.path.join(source_path, context["main"]["templates_path"])
@@ -419,15 +429,5 @@ def main(
     parser.add_argument(
         "--base-url", default="", help="base url where the website is served from"
     )
-    parser.add_argument(
-        "--ignore-io-errors",
-        action="store_true",
-        help="do not fail if errors happen when fetching "
-        "data from http sources, and those fail "
-        "(mostly useful to allow GitHub quota errors "
-        "when running the script locally)",
-    )
     args = parser.parse_args()
-    sys.exit(
-        main(args.source_path, args.target_path, args.base_url, args.ignore_io_errors)
-    )
+    sys.exit(main(args.source_path, args.target_path, args.base_url))
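
For context on why the 403 branches matter: without a GITHUB_TOKEN, requests to the GitHub API share the unauthenticated per-IP quota (60 requests per hour at the time of writing), which a full site build can exhaust. The remaining quota can be inspected through the rate_limit endpoint, which does not itself count against it; a quick check, for illustration:

import requests

resp = requests.get("https://github.com/api/rate_limit")
resp.raise_for_status()
print(resp.json()["resources"]["core"])  # e.g. {"limit": 60, "remaining": 57, ...}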