
Commit 052c556

Merge pull request #224 from IdentityPython/ft-more_typing
more typing
2 parents c454257 + d5f1903 commit 052c556

9 files changed: +258 −193 lines changed


src/pyff/api.py

Lines changed: 74 additions & 55 deletions
@@ -2,7 +2,7 @@
 import threading
 from datetime import datetime, timedelta
 from json import dumps
-from typing import Any, Iterable, List, Mapping
+from typing import Any, Dict, Generator, Iterable, List, Mapping, Optional, Tuple

 import pkg_resources
 import pyramid.httpexceptions as exc
@@ -13,6 +13,7 @@
 from lxml import etree
 from pyramid.config import Configurator
 from pyramid.events import NewRequest
+from pyramid.request import Request
 from pyramid.response import Response
 from six import b
 from six.moves.urllib_parse import quote_plus
@@ -22,27 +23,29 @@
 from pyff.logs import get_log
 from pyff.pipes import plumbing
 from pyff.repo import MDRepository
-from pyff.resource import Resource, ResourceInfo
+from pyff.resource import Resource
 from pyff.samlmd import entity_display_name
-from pyff.utils import b2u, dumptree, duration2timedelta, hash_id, json_serializer, utc_now
+from pyff.utils import b2u, dumptree, hash_id, json_serializer, utc_now

 log = get_log(__name__)


 class NoCache(object):
-    def __init__(self):
+    """ Dummy implementation for when caching isn't enabled """
+
+    def __init__(self) -> None:
         pass

-    def __getitem__(self, item):
+    def __getitem__(self, item: Any) -> None:
         return None

-    def __setitem__(self, instance, value):
+    def __setitem__(self, instance: Any, value: Any) -> Any:
         return value


-def robots_handler(request):
+def robots_handler(request: Request) -> Response:
     """
-    Impelements robots.txt
+    Implements robots.txt

     :param request: the HTTP request
     :return: robots.txt
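
The NoCache object documented above is a stand-in used when caching is disabled: writes are accepted but never stored, and every lookup misses. A minimal usage sketch (the cache key and value below are illustrative, not from pyFF):

    # Illustrative sketch: a NoCache-style dummy cache accepts writes but never
    # retains them, so every read is a miss.
    from typing import Any

    class DummyCache(object):
        """ Dummy implementation for when caching isn't enabled """

        def __getitem__(self, item: Any) -> None:
            return None

        def __setitem__(self, instance: Any, value: Any) -> Any:
            return value

    cache = DummyCache()
    cache['entities'] = '<EntitiesDescriptor/>'  # value is dropped
    assert cache['entities'] is None             # lookups always miss
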
@@ -55,7 +58,7 @@ def robots_handler(request):
     )


-def status_handler(request):
+def status_handler(request: Request) -> Response:
     """
     Implements the /api/status endpoint

@@ -80,34 +83,38 @@ def status_handler(request):


 class MediaAccept(object):
-    def __init__(self, accept):
+    def __init__(self, accept: str):
         self._type = AcceptableType(accept)

-    def has_key(self, key):
+    def has_key(self, key: Any) -> bool: # Literal[True]:
         return True

-    def get(self, item):
+    def get(self, item: Any) -> Any:
         return self._type.matches(item)

-    def __contains__(self, item):
+    def __contains__(self, item: Any) -> Any:
         return self._type.matches(item)

-    def __str__(self):
+    def __str__(self) -> str:
         return str(self._type)


 xml_types = ('text/xml', 'application/xml', 'application/samlmetadata+xml')


-def _is_xml_type(accepter):
+def _is_xml_type(accepter: MediaAccept) -> bool:
     return any([x in accepter for x in xml_types])


-def _is_xml(data):
+def _is_xml(data: Any) -> bool:
     return isinstance(data, (etree._Element, etree._ElementTree))


-def _fmt(data, accepter):
+def _fmt(data: Any, accepter: MediaAccept) -> Tuple[str, str]:
+    """
+    Format data according to the accepted content type of the requester.
+    Return data as string (either XML or json) and a content-type.
+    """
     if data is None or len(data) == 0:
         return "", 'text/plain'
     if _is_xml(data) and _is_xml_type(accepter):
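
The annotated _fmt now advertises a Tuple[str, str] return: the serialized payload plus its content type. A simplified, self-contained sketch of that contract, using a stand-in for MediaAccept rather than the real AcceptableType-backed class:

    # Illustrative sketch: the (payload, content-type) contract of _fmt, with a
    # simplified stand-in for MediaAccept (the real class wraps AcceptableType).
    import json
    from typing import Any, Tuple

    xml_types = ('text/xml', 'application/xml', 'application/samlmetadata+xml')

    class StubAccept(object):
        def __init__(self, accept: str) -> None:
            self._accept = accept

        def __contains__(self, item: Any) -> bool:
            return item == self._accept

    def fmt(data: Any, accepter: StubAccept) -> Tuple[str, str]:
        if data is None or len(data) == 0:
            return "", 'text/plain'
        if any(x in accepter for x in xml_types):
            return '<EntitiesDescriptor/>', 'application/samlmetadata+xml'
        return json.dumps(data), 'application/json'

    print(fmt({'entityID': 'https://idp.example.com'}, StubAccept('application/json')))
    # ('{"entityID": "https://idp.example.com"}', 'application/json')
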
@@ -127,7 +134,7 @@ def call(entry: str) -> None:
     return None


-def request_handler(request):
+def request_handler(request: Request) -> Response:
     """
     The main GET request handler for pyFF. Implements caching and forwards the request to process_handler

@@ -146,7 +153,7 @@ def request_handler(request):
     return r


-def process_handler(request):
+def process_handler(request: Request) -> Response:
     """
     The main request handler for pyFF. Implements API call hooks and content negotiation.

@@ -155,7 +162,8 @@ def process_handler(request):
     """
     _ctypes = {'xml': 'application/samlmetadata+xml;application/xml;text/xml', 'json': 'application/json'}

-    def _d(x, do_split=True):
+    def _d(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]:
+        """ Split a path into a base component and an extension. """
         if x is not None:
             x = x.strip()
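
The new signature of _d makes the split explicit: an optional path in, an optional base plus optional extension out. The hunk does not show the full body, so the sketch below is an assumption about the extension handling, kept only to the Tuple contract shown above:

    # Illustrative sketch only: the extension handling below is an assumption;
    # the hunk only shows the new signature and the strip() of the input.
    from typing import Optional, Tuple

    def split_path(x: Optional[str], do_split: bool = True) -> Tuple[Optional[str], Optional[str]]:
        """ Split a path into a base component and an extension. """
        if x is not None:
            x = x.strip()
        if do_split and x and '.' in x:
            base, _, ext = x.rpartition('.')
            if ext in ('xml', 'json'):
                return base, ext
        return x, None

    assert split_path('/entities.json') == ('/entities', 'json')
    assert split_path('/entities') == ('/entities', None)
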

@@ -170,7 +178,7 @@ def _d(x, do_split=True):

         return x, None

-    log.debug(request)
+    log.debug(f'Processing request: {request}')

     if request.matchdict is None:
         raise exc.exception_response(400)
@@ -182,18 +190,18 @@ def _d(x, do_split=True):
         pass

     entry = request.matchdict.get('entry', 'request')
-    path = list(request.matchdict.get('path', []))
+    path_elem = list(request.matchdict.get('path', []))
     match = request.params.get('q', request.params.get('query', None))

     # Enable matching on scope.
     match = match.split('@').pop() if match and not match.endswith('@') else match
     log.debug("match={}".format(match))

-    if 0 == len(path):
-        path = ['entities']
+    if not path_elem:
+        path_elem = ['entities']

-    alias = path.pop(0)
-    path = '/'.join(path)
+    alias = path_elem.pop(0)
+    path = '/'.join(path_elem)

     # Ugly workaround bc WSGI drops double-slashes.
     path = path.replace(':/', '://')
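
The rename from path to path_elem keeps the list of path segments distinct from the joined string built afterwards. A sketch of that handling for a hypothetical matchdict path:

    # Illustrative sketch: alias/path handling for a hypothetical matchdict path.
    path_elem = list(('entities', 'https:', 'idp.example.com'))

    if not path_elem:
        path_elem = ['entities']

    alias = path_elem.pop(0)
    path = '/'.join(path_elem)

    # Ugly workaround bc WSGI drops double-slashes.
    path = path.replace(':/', '://')

    assert alias == 'entities'
    assert path == 'https://idp.example.com'
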
@@ -226,23 +234,31 @@ def _d(x, do_split=True):
     accept = str(request.accept).split(',')[0]
     valid_accept = accept and not ('application/*' in accept or 'text/*' in accept or '*/*' in accept)

-    path_no_extension, extension = _d(path, True)
-    accept_from_extension = _ctypes.get(extension, accept)
+    new_path: Optional[str] = path
+    path_no_extension, extension = _d(new_path, True)
+    accept_from_extension = accept
+    if extension:
+        accept_from_extension = _ctypes.get(extension, accept)

     if policy == 'extension':
-        path = path_no_extension
+        new_path = path_no_extension
         if not valid_accept:
             accept = accept_from_extension
     elif policy == 'adaptive':
         if not valid_accept:
-            path = path_no_extension
+            new_path = path_no_extension
             accept = accept_from_extension

-    if pfx and path:
-        q = "{%s}%s" % (pfx, path)
-        path = "/%s/%s" % (alias, path)
+    if not accept:
+        log.warning('Could not determine accepted response type')
+        raise exc.exception_response(400)
+
+    q: Optional[str]
+    if pfx and new_path:
+        q = f'{{{pfx}}}{new_path}'
+        new_path = f'/{alias}/{new_path}'
     else:
-        q = path
+        q = new_path

     try:
         accepter = MediaAccept(accept)
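
The added block refuses requests whose accepted response type cannot be determined, after the 'extension' and 'adaptive' policies have had their chance to fill it in from the file extension. A condensed sketch of that negotiation (the helper name and argument list are illustrative, not part of pyFF):

    # Illustrative sketch: condensed view of the policy handling shown above.
    from typing import Optional, Tuple

    def negotiate(path: str, accept: str, extension: Optional[str],
                  accept_from_extension: str, policy: str) -> Tuple[str, str]:
        path_no_extension = path.rsplit('.', 1)[0] if extension else path
        valid_accept = bool(accept) and not any(
            w in accept for w in ('application/*', 'text/*', '*/*')
        )
        if policy == 'extension':
            path = path_no_extension
            if not valid_accept:
                accept = accept_from_extension
        elif policy == 'adaptive':
            if not valid_accept:
                path = path_no_extension
                accept = accept_from_extension
        return path, accept

    print(negotiate('entities.json', '*/*', 'json', 'application/json', 'extension'))
    # ('entities', 'application/json') -- the extension wins over a wildcard Accept
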
@@ -254,18 +270,19 @@ def _d(x, do_split=True):
                 'url': request.current_route_url(),
                 'select': q,
                 'match': match.lower() if match else match,
-                'path': path,
+                'path': new_path,
                 'stats': {},
             }

             r = p.process(request.registry.md, state=state, raise_exceptions=True, scheduler=request.registry.scheduler)
-            log.debug(r)
+            log.debug(f'Plumbing process result: {r}')
             if r is None:
                 r = []

             response = Response()
-            response.headers.update(state.get('headers', {}))
-            ctype = state.get('headers').get('Content-Type', None)
+            _headers = state.get('headers', {})
+            response.headers.update(_headers)
+            ctype = _headers.get('Content-Type', None)
             if not ctype:
                 r, t = _fmt(r, accepter)
                 ctype = t
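
Binding state.get('headers', {}) to a local both avoids a second dictionary lookup and removes the chained .get() that would raise AttributeError if the pipeline ever produced a state without a 'headers' key:

    # Illustrative sketch: a state dict without a 'headers' key.
    state = {}

    # Old pattern: state.get('headers').get('Content-Type') raises AttributeError
    # because state.get('headers') is None. The new local stays safe:
    _headers = state.get('headers', {})
    ctype = _headers.get('Content-Type', None)
    assert ctype is None
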
@@ -280,20 +297,20 @@ def _d(x, do_split=True):
         import traceback

         log.debug(traceback.format_exc())
-        log.warning(ex)
+        log.warning(f'Exception from processing pipeline: {ex}')
         raise exc.exception_response(409)
     except BaseException as ex:
         import traceback

         log.debug(traceback.format_exc())
-        log.error(ex)
+        log.error(f'Exception from processing pipeline: {ex}')
         raise exc.exception_response(500)

     if request.method == 'GET':
         raise exc.exception_response(404)


-def webfinger_handler(request):
+def webfinger_handler(request: Request) -> Response:
     """An implementation the webfinger protocol
     (http://tools.ietf.org/html/draft-ietf-appsawg-webfinger-12)
     in order to provide information about up and downstream metadata available at
@@ -324,7 +341,7 @@ def webfinger_handler(request):
            "subject": "http://reep.refeds.org:8080"
        }

-    Depending on which version of pyFF your're running and the configuration you
+    Depending on which version of pyFF you're running and the configuration you
     may also see downstream metadata listed using the 'role' attribute to the link
     elements.
     """
@@ -335,11 +352,11 @@ def webfinger_handler(request):
     if resource is None:
         resource = request.host_url

-    jrd = dict()
-    dt = datetime.now() + duration2timedelta("PT1H")
+    jrd: Dict[str, Any] = dict()
+    dt = datetime.now() + timedelta(hours=1)
     jrd['expires'] = dt.isoformat()
     jrd['subject'] = request.host_url
-    links = list()
+    links: List[Dict[str, Any]] = list()
     jrd['links'] = links

     _dflt_rels = {
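
Replacing duration2timedelta("PT1H") with timedelta(hours=1) drops a pyFF helper in favour of the standard library for a fixed one-hour expiry. A sketch of the resulting JRD skeleton (the subject value is taken from the docstring example above):

    # Illustrative sketch: the fixed "PT1H" expiry expressed with the stdlib.
    from datetime import datetime, timedelta
    from typing import Any, Dict

    jrd: Dict[str, Any] = dict()
    dt = datetime.now() + timedelta(hours=1)   # same one-hour offset as "PT1H"
    jrd['expires'] = dt.isoformat()
    jrd['subject'] = 'http://reep.refeds.org:8080'
    print(jrd)
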
@@ -352,7 +369,7 @@
     else:
         rel = [rel]

-    def _links(url, title=None):
+    def _links(url: str, title: Any = None) -> None:
         if url.startswith('/'):
             url = url.lstrip('/')
         for r in rel:
@@ -381,7 +398,7 @@ def _links(url, title=None):
     return response


-def resources_handler(request):
+def resources_handler(request: Request) -> Response:
     """
     Implements the /api/resources endpoint

@@ -409,7 +426,7 @@ def _info(r: Resource) -> Mapping[str, Any]:
     return response


-def pipeline_handler(request):
+def pipeline_handler(request: Request) -> Response:
     """
     Implements the /api/pipeline endpoint

@@ -422,7 +439,7 @@ def pipeline_handler(request):
     return response


-def search_handler(request):
+def search_handler(request: Request) -> Response:
     """
     Implements the /api/search endpoint

@@ -438,7 +455,7 @@ def search_handler(request):
     log.debug("match={}".format(match))
     store = request.registry.md.store

-    def _response():
+    def _response() -> Generator[bytes, bytes, None]:
         yield b('[')
         in_loop = False
         entities = store.search(query=match.lower(), entity_filter=entity_filter)
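
_response is now annotated as Generator[bytes, bytes, None]: it yields byte chunks, the declared send type is unused, and it returns nothing. A self-contained sketch of the same streaming pattern (the entity dicts below are illustrative):

    # Illustrative sketch: same annotation and streaming shape as _response.
    from json import dumps
    from typing import Any, Generator, Iterable, Mapping

    def stream_entities(entities: Iterable[Mapping[str, Any]]) -> Generator[bytes, bytes, None]:
        yield b'['
        in_loop = False
        for e in entities:
            if in_loop:
                yield b','
            yield dumps(e).encode()
            in_loop = True
        yield b']'

    print(b''.join(stream_entities([{'title': 'Example IdP'}, {'title': 'Example SP'}])))
    # b'[{"title": "Example IdP"},{"title": "Example SP"}]'
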
@@ -454,8 +471,8 @@ def _response():
     return response


-def add_cors_headers_response_callback(event):
-    def cors_headers(request, response):
+def add_cors_headers_response_callback(event: NewRequest) -> None:
+    def cors_headers(request: Request, response: Response) -> None:
         response.headers.update(
             {
                 'Access-Control-Allow-Origin': '*',
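
The typed callback hooks into Pyramid's NewRequest event and attaches a per-request response callback. The registration line in the sketch below is an assumption about how such a subscriber is typically wired up; it is not part of this hunk:

    # Illustrative sketch: registering a CORS callback for every new request.
    # The Configurator wiring here is an assumption, not shown in this hunk.
    from pyramid.config import Configurator
    from pyramid.events import NewRequest
    from pyramid.request import Request
    from pyramid.response import Response

    def add_cors_headers(event: NewRequest) -> None:
        def cors_headers(request: Request, response: Response) -> None:
            response.headers.update({'Access-Control-Allow-Origin': '*'})

        event.request.add_response_callback(cors_headers)

    config = Configurator()
    config.add_subscriber(add_cors_headers, NewRequest)
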
@@ -469,7 +486,7 @@ def cors_headers(request, response):
     event.request.add_response_callback(cors_headers)


-def launch_memory_usage_server(port=9002):
+def launch_memory_usage_server(port: int = 9002) -> None:
     import cherrypy
     import dowser

@@ -479,7 +496,7 @@ def launch_memory_usage_server(port=9002):
     cherrypy.engine.start()


-def mkapp(*args, **kwargs):
+def mkapp(*args: Any, **kwargs: Any) -> Any:
     md = kwargs.pop('md', None)
     if md is None:
         md = MDRepository()
@@ -501,7 +518,9 @@ def mkapp(*args, **kwargs):
     for mn in config.modules:
         importlib.import_module(mn)

-    pipeline = args or None
+    pipeline = None
+    if args:
+        pipeline = list(args)
     if pipeline is None and config.pipeline:
         pipeline = [config.pipeline]
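
The old pipeline = args or None yielded a tuple (or None), while the fallback a few lines later builds a list; converting with list(args) keeps the type consistent. A sketch of the difference (the pipeline filename is illustrative):

    # Illustrative sketch: args is a tuple, so `args or None` returned a tuple,
    # while the explicit conversion always yields a list (matching
    # [config.pipeline] in the fallback branch).
    def old_style(*args):
        return args or None

    def new_style(*args):
        pipeline = None
        if args:
            pipeline = list(args)
        return pipeline

    assert old_style('mdq.fd') == ('mdq.fd',)
    assert new_style('mdq.fd') == ['mdq.fd']
    assert old_style() is None and new_style() is None
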
