
Commit b4273f2

fix urlparse using six in tests and schema_salad core; no automated fixers
1 parent df685ea commit b4273f2

7 files changed (+53, -41 lines)
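Every file in this commit follows the same pattern: the Python 2-only urlparse module is replaced by six.moves.urllib.parse, which resolves to urlparse on Python 2 and urllib.parse on Python 3, so each call site only changes its module prefix. A minimal sketch of the before/after (the URL is illustrative):

# Python 2 only:
#     import urlparse
#     split = urlparse.urlsplit("https://w3id.org/cwl/salad#Record")

# Portable on Python 2 and 3 via six:
from six.moves.urllib import parse

split = parse.urlsplit("https://w3id.org/cwl/salad#Record")
print(split.scheme, split.netloc, split.path, split.fragment)
# https w3id.org /cwl/salad Record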

schema_salad/jsonld_context.py

Lines changed: 5 additions & 4 deletions
@@ -4,6 +4,8 @@
 import json
 import ruamel.yaml as yaml
 import six
+# import urlparse
+from six.moves.urllib import parse
 try:
     from ruamel.yaml import CSafeLoader as SafeLoader
 except ImportError:
@@ -18,7 +20,6 @@
 from rdflib import Graph, URIRef
 import rdflib.namespace
 from rdflib.namespace import RDF, RDFS
-import urlparse
 import logging
 from schema_salad.utils import aslist
 from typing import (cast, Any, Dict, Iterable, List, Optional, Text, Tuple,
@@ -36,7 +37,7 @@ def pred(datatype,  # type: Dict[str, Union[Dict, str]]
          namespaces  # type: Dict[str, rdflib.namespace.Namespace]
          ):
     # type: (...) -> Union[Dict, Text]
-    split = urlparse.urlsplit(name)
+    split = parse.urlsplit(name)

     vee = None  # type: Optional[Union[str, Text]]

@@ -104,7 +105,7 @@ def process_type(t,  # type: Dict[str, Any]
         classnode = URIRef(recordname)
         g.add((classnode, RDF.type, RDFS.Class))

-        split = urlparse.urlsplit(recordname)
+        split = parse.urlsplit(recordname)
         predicate = recordname
         if t.get("inVocab", True):
             if split.scheme:
@@ -220,7 +221,7 @@ def makerdf(workflow,  # type: Union[str, Text]
            url = v
        if url == "@id":
            idfields.append(k)
-        doc_url, frg = urlparse.urldefrag(url)
+        doc_url, frg = parse.urldefrag(url)
        if "/" in frg:
            p = frg.split("/")[0]
            prefixes[p] = u"%s#%s/" % (doc_url, p)
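For context, the two parse helpers this file now calls behave as follows; this is only an illustration of the functions being swapped in, not project code, and the URLs are made up:

from six.moves.urllib import parse

# urlsplit: structured access to the pieces pred() and process_type() inspect
split = parse.urlsplit("https://w3id.org/cwl/salad#JsonldPredicate")
# split.scheme == "https", split.fragment == "JsonldPredicate"

# urldefrag: document URL vs. fragment, as makerdf() uses to build prefixes
doc_url, frg = parse.urldefrag("https://w3id.org/cwl/salad#Schema/type")
# doc_url == "https://w3id.org/cwl/salad", frg == "Schema/type"
p = frg.split("/")[0]
prefix = u"%s#%s/" % (doc_url, p)  # "https://w3id.org/cwl/salad#Schema/"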

schema_salad/main.py

Lines changed: 5 additions & 3 deletions
@@ -6,7 +6,9 @@
 import traceback
 import json
 import os
-import urlparse
+# import urlparse
+from six.moves.urllib import parse
+

 import pkg_resources  # part of setuptools

@@ -110,7 +112,7 @@ def main(argsl=None):  # type: (List[str]) -> int
     # Load schema document and resolve refs

     schema_uri = args.schema
-    if not urlparse.urlparse(schema_uri)[0]:
+    if not parse.urlparse(schema_uri)[0]:
         schema_uri = "file://" + os.path.abspath(schema_uri)
     schema_raw_doc = metaschema_loader.fetch(schema_uri)

@@ -207,7 +209,7 @@ def main(argsl=None):  # type: (List[str]) -> int
     # Load target document and resolve refs
     try:
         uri = args.document
-        if not urlparse.urlparse(uri)[0]:
+        if not parse.urlparse(uri)[0]:
             doc = "file://" + os.path.abspath(uri)
         document, doc_metadata = document_loader.resolve_ref(uri)
     except (validate.ValidationException, RuntimeError) as e:
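Both hunks in main() use the same idiom: an argument with no URI scheme is treated as a local path and promoted to a file:// URI. A hedged sketch of that check (as_uri is a hypothetical helper, not a function in main.py):

import os
from six.moves.urllib import parse

def as_uri(ref):
    # An empty scheme (element [0] of the 6-tuple) means a bare filesystem path.
    if not parse.urlparse(ref)[0]:
        return "file://" + os.path.abspath(ref)
    return ref

as_uri("schemas/metaschema.yml")      # -> "file:///<cwd>/schemas/metaschema.yml"
as_uri("https://example.com/s.yml")   # -> unchanged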

schema_salad/makedoc.py

Lines changed: 13 additions & 10 deletions
@@ -1,20 +1,23 @@
 from __future__ import absolute_import
+
 import mistune
-from . import schema
+import argparse
 import json
 import os
 import copy
 import re
 import sys
 from StringIO import StringIO
 import logging
-import urlparse
+
+from . import schema
 from schema_salad.utils import add_dictlist, aslist
-import re
-import argparse
-from typing import cast, Any, Dict, IO, List, Optional, Set, Text, Union
+
 import six
 from six.moves import range
+from six.moves.urllib import parse
+
+from typing import cast, Any, Dict, IO, List, Optional, Set, Text, Union

 _logger = logging.getLogger("salad")

@@ -38,7 +41,7 @@ def has_types(items):  # type: (Any) -> List[basestring]


 def linkto(item):  # type: (Text) -> Text
-    _, frg = urlparse.urldefrag(item)
+    _, frg = parse.urldefrag(item)
     return "[%s](#%s)" % (frg, to_id(frg))


@@ -206,8 +209,8 @@ def __init__(self, toc, j, renderlist, redirects, primitiveType):
                     if tp not in self.uses:
                         self.uses[tp] = []
                     if (t["name"], f["name"]) not in self.uses[tp]:
-                        _, frg1 = urlparse.urldefrag(t["name"])
-                        _, frg2 = urlparse.urldefrag(f["name"])
+                        _, frg1 = parse.urldefrag(t["name"])
+                        _, frg2 = parse.urldefrag(f["name"])
                         self.uses[tp].append((frg1, frg2))
                     if tp not in basicTypes and tp not in self.record_refs[t["name"]]:
                         self.record_refs[t["name"]].append(tp)
@@ -268,7 +271,7 @@ def typefmt(self,
         elif str(tp) in basicTypes:
             return """<a href="%s">%s</a>""" % (self.primitiveType, schema.avro_name(str(tp)))
         else:
-            _, frg = urlparse.urldefrag(tp)
+            _, frg = parse.urldefrag(tp)
             if frg is not '':
                 tp = frg
             return """<a href="#%s">%s</a>""" % (to_id(tp), tp)
@@ -327,7 +330,7 @@ def extendsfrom(item, ex):
                     lines.append(l)
                 f["doc"] = "\n".join(lines)

-                _, frg = urlparse.urldefrag(f["name"])
+                _, frg = parse.urldefrag(f["name"])
                 num = self.toc.add_entry(depth, frg)
                 doc = "%s %s %s\n" % (("#" * depth), num, frg)
             else:
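All of the makedoc call sites only want the fragment of a record or field URI, either for a Markdown anchor or a table-of-contents entry. A rough sketch of the linkto() idea after the switch (to_id() is the project's slugifier and is not reproduced here; lower() stands in for it, and the URL is invented):

from six.moves.urllib import parse

def linkto_sketch(item):
    _, frg = parse.urldefrag(item)
    return "[%s](#%s)" % (frg, frg.lower())   # the real code calls to_id(frg)

linkto_sketch("https://w3id.org/cwl/salad#RecordField")
# -> "[RecordField](#recordfield)"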

schema_salad/ref_resolver.py

Lines changed: 18 additions & 15 deletions
@@ -5,11 +5,15 @@
 import hashlib
 import logging
 import collections
+
+import six
+from six.moves import range
 import urllib
-import urlparse
+# import urlparse
+from six.moves.urllib import parse
+
 import re
 import copy
-import urllib
 from StringIO import StringIO

 from . import validate
@@ -29,8 +33,7 @@
 import xml.sax
 from typing import (cast, Any, AnyStr, Callable, Dict, List, Iterable,
                     Optional, Set, Text, Tuple, TypeVar, Union)
-import six
-from six.moves import range
+

 _logger = logging.getLogger("salad")
 ContextType = Dict[six.text_type, Union[Dict, six.text_type, Iterable[six.text_type]]]
@@ -54,7 +57,7 @@ def file_uri(path, split_frag=False):  # type: (str, bool) -> str
     return "file://%s%s" % (urlpath, frag)

 def uri_file_path(url):  # type: (str) -> str
-    split = urlparse.urlsplit(url)
+    split = parse.urlsplit(url)
     if split.scheme == "file":
         return urllib.url2pathname(
             str(split.path)) + ("#" + urllib.unquote(str(split.fragment))
@@ -126,7 +129,7 @@ def fetch_text(self, url):
         if url in self.cache:
             return self.cache[url]

-        split = urlparse.urlsplit(url)
+        split = parse.urlsplit(url)
         scheme, path = split.scheme, split.path

         if scheme in [u'http', u'https'] and self.session is not None:
@@ -156,7 +159,7 @@ def check_exists(self, url):  # type: (Text) -> bool
         if url in self.cache:
             return True

-        split = urlparse.urlsplit(url)
+        split = parse.urlsplit(url)
         scheme, path = split.scheme, split.path

         if scheme in [u'http', u'https'] and self.session is not None:
@@ -172,7 +175,7 @@ def check_exists(self, url):  # type: (Text) -> bool
             raise ValueError('Unsupported scheme in url: %s' % url)

     def urljoin(self, base_url, url):  # type: (Text, Text) -> Text
-        return urlparse.urljoin(base_url, url)
+        return parse.urljoin(base_url, url)

 class Loader(object):
     def __init__(self,
@@ -187,7 +190,7 @@ def __init__(self,
                  ):
         # type: (...) -> None

-        normalize = lambda url: urlparse.urlsplit(url).geturl()
+        normalize = lambda url: parse.urlsplit(url).geturl()
         if idx is not None:
             self.idx = idx
         else:
@@ -273,20 +276,20 @@ def expand_url(self,
            if prefix in self.vocab:
                url = self.vocab[prefix] + url[len(prefix) + 1:]

-        split = urlparse.urlsplit(url)
+        split = parse.urlsplit(url)

        if (bool(split.scheme) or url.startswith(u"$(")
                or url.startswith(u"${")):
            pass
        elif scoped_id and not bool(split.fragment):
-            splitbase = urlparse.urlsplit(base_url)
+            splitbase = parse.urlsplit(base_url)
            frg = u""
            if bool(splitbase.fragment):
                frg = splitbase.fragment + u"/" + split.path
            else:
                frg = split.path
            pt = splitbase.path if splitbase.path != '' else "/"
-            url = urlparse.urlunsplit(
+            url = parse.urlunsplit(
                (splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg))
        elif scoped_ref is not None and not split.fragment:
            pass
@@ -493,7 +496,7 @@ def resolve_ref(self,
            doc_url = url
        else:
            # Load structured document
-            doc_url, frg = urlparse.urldefrag(url)
+            doc_url, frg = parse.urldefrag(url)
            if doc_url in self.idx and (not mixin):
                # If the base document is in the index, it was already loaded,
                # so if we didn't find the reference earlier then it must not
@@ -869,7 +872,7 @@ def fetch(self, url, inject_ids=True):  # type: (Text, bool) -> Any

     def validate_scoped(self, field, link, docid):
         # type: (Text, Text, Text) -> Text
-        split = urlparse.urlsplit(docid)
+        split = parse.urlsplit(docid)
         sp = split.fragment.split(u"/")
         n = self.scoped_ref_fields[field]
         while n > 0 and len(sp) > 0:
@@ -878,7 +881,7 @@ def validate_scoped(self, field, link, docid):
         tried = []
         while True:
             sp.append(link)
-            url = urlparse.urlunsplit((
+            url = parse.urlunsplit((
                 split.scheme, split.netloc, split.path, split.query,
                 u"/".join(sp)))
             tried.append(url)
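Most of the ref_resolver changes are one-for-one renames; the heaviest call site is expand_url(), which scopes a bare identifier under the base document's fragment by splitting the base URL and reassembling it with urlunsplit. A simplified sketch under assumed inputs (base_url and name are illustrative values, not taken from the project):

from six.moves.urllib import parse

base_url = "https://example.com/schemas/base.yml#Parent"
name = "child"

splitbase = parse.urlsplit(base_url)
# Append the new name to the base fragment if one exists, else start a fragment.
frg = splitbase.fragment + u"/" + name if splitbase.fragment else name
pt = splitbase.path if splitbase.path != '' else "/"
url = parse.urlunsplit((splitbase.scheme, splitbase.netloc, pt,
                        splitbase.query, frg))
# url == "https://example.com/schemas/base.yml#Parent/child"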

schema_salad/schema.py

Lines changed: 4 additions & 3 deletions
@@ -9,9 +9,10 @@
 import avro.schema
 from . import validate
 import json
-import urlparse
-import os
+# import urlparse
+from six.moves.urllib import parse
 import six
+import os
 AvroSchemaFromJSONData = avro.schema.make_avsc_object
 # AvroSchemaFromJSONData=avro.schema.SchemaFromJSONData
 from avro.schema import Names, SchemaParseException
@@ -378,7 +379,7 @@ def replace_type(items, spec, loader, found):


 def avro_name(url):  # type: (AnyStr) -> AnyStr
-    doc_url, frg = urlparse.urldefrag(url)
+    doc_url, frg = parse.urldefrag(url)
     if frg != '':
         if '/' in frg:
             return frg[frg.rindex('/') + 1:]
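avro_name() only needs the last component of the fragment as the Avro name; a condensed sketch of that logic after the switch (the URL is made up):

from six.moves.urllib import parse

doc_url, frg = parse.urldefrag("https://w3id.org/cwl/salad#SaladRecordSchema/fields")
name = frg[frg.rindex('/') + 1:] if '/' in frg else frg
# name == "fields"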

schema_salad/tests/test_fetch.py

Lines changed: 5 additions & 4 deletions
@@ -9,7 +9,8 @@
 import ruamel.yaml as yaml
 import json
 import os
-import urlparse
+# import urlparse
+from six.moves.urllib import parse

 class TestFetcher(unittest.TestCase):
     def test_fetcher(self):
@@ -32,14 +33,14 @@ def check_exists(self, url):  # type: (unicode) -> bool
                 return False

             def urljoin(self, base, url):
-                urlsp = urlparse.urlsplit(url)
+                urlsp = parse.urlsplit(url)
                 if urlsp.scheme:
                     return url
-                basesp = urlparse.urlsplit(base)
+                basesp = parse.urlsplit(base)

                 if basesp.scheme == "keep":
                     return base + "/" + url
-                return urlparse.urljoin(base, url)
+                return parse.urljoin(base, url)

         loader = schema_salad.ref_resolver.Loader({}, fetcher_constructor=TestFetcher)
         self.assertEqual({"hello": "foo"}, loader.resolve_ref("foo.txt")[0])
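The test fetcher overrides urljoin() so that absolute URLs pass through, the test-only "keep" scheme is joined by plain string concatenation, and everything else falls back to the standard join. A standalone rendering of that logic (the base values below are illustrative):

from six.moves.urllib import parse

def fetcher_urljoin(base, url):
    if parse.urlsplit(url).scheme:
        return url                      # already absolute
    if parse.urlsplit(base).scheme == "keep":
        return base + "/" + url         # joined by hand for the fake scheme
    return parse.urljoin(base, url)     # normal relative resolution

fetcher_urljoin("http://example.com/dir/a.yml", "b.yml")   # -> "http://example.com/dir/b.yml"
fetcher_urljoin("keep:abc123", "hello.txt")                # -> "keep:abc123/hello.txt"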

schema_salad/validate.py

Lines changed: 3 additions & 2 deletions
@@ -3,7 +3,8 @@
 import avro.schema
 from avro.schema import Schema
 import sys
-import urlparse
+# import urlparse
+from six.moves.urllib import parse
 import re
 import logging

@@ -300,7 +301,7 @@ def validate_ex(expected_schema,  # type: Schema
         if (d not in identifiers and strict) and (
                 d not in foreign_properties and strict_foreign_properties) and not raise_ex:
             return False
-        split = urlparse.urlsplit(d)
+        split = parse.urlsplit(d)
         if split.scheme:
             err = sl.makeError(u"unrecognized extension field `%s`%s."
                                " Did you include "

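In validate_ex() the split is only used to ask whether an unknown field name carries a URI scheme, which marks it as a namespaced extension field rather than a plain typo. A tiny sketch (the field name is invented):

from six.moves.urllib import parse

d = "http://example.com/ns#customField"
if parse.urlsplit(d).scheme:
    print("unrecognized extension field `%s`" % d)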