Skip to content

Commit b94bb26

Browse files
committed
run pyupgrade --py38-plus
1 parent d479ce5 commit b94bb26

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

49 files changed

+184
-228
lines changed

docs/conf.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
# -*- coding: utf-8 -*-
21
#
32
# python-scrapinghub documentation build configuration file, created by
43
# sphinx-quickstart on Fri Mar 24 12:28:40 2017.
@@ -58,9 +57,9 @@
5857
master_doc = 'index'
5958

6059
# General information about the project.
61-
project = u'scrapinghub'
62-
copyright = u'2010-{}, Scrapinghub'.format(YEAR)
63-
author = u'Scrapinghub'
60+
project = 'scrapinghub'
61+
copyright = f'2010-{YEAR}, Scrapinghub'
62+
author = 'Scrapinghub'
6463

6564
# The version info for the project you're documenting, acts as replacement for
6665
# |version| and |release|, also used in various other places throughout the
@@ -140,8 +139,8 @@
140139
# (source start file, target name, title,
141140
# author, documentclass [howto, manual, or own class]).
142141
latex_documents = [
143-
(master_doc, 'python-scrapinghub.tex', u'python-scrapinghub Documentation',
144-
u'Pablo Hoffman, Daniel Graña', 'manual'),
142+
(master_doc, 'python-scrapinghub.tex', 'python-scrapinghub Documentation',
143+
'Pablo Hoffman, Daniel Graña', 'manual'),
145144
]
146145

147146

@@ -150,7 +149,7 @@
150149
# One entry per manual page. List of tuples
151150
# (source start file, name, description, authors, manual section).
152151
man_pages = [
153-
(master_doc, 'python-scrapinghub', u'python-scrapinghub Documentation',
152+
(master_doc, 'python-scrapinghub', 'python-scrapinghub Documentation',
154153
[author], 1)
155154
]
156155

@@ -161,7 +160,7 @@
161160
# (source start file, target name, title, author,
162161
# dir menu entry, description, category)
163162
texinfo_documents = [
164-
(master_doc, 'python-scrapinghub', u'python-scrapinghub Documentation',
163+
(master_doc, 'python-scrapinghub', 'python-scrapinghub Documentation',
165164
author, 'python-scrapinghub', 'One line description of project.',
166165
'Miscellaneous'),
167166
]
@@ -189,7 +188,7 @@ def handle_item(fieldarg, content):
189188
# inconsistencies later when references are resolved
190189
fieldtype = types.pop(fieldarg)
191190
if len(fieldtype) == 1 and isinstance(fieldtype[0], nodes.Text):
192-
typename = u''.join(n.astext() for n in fieldtype)
191+
typename = ''.join(n.astext() for n in fieldtype)
193192
par.extend(self.make_xrefs(self.typerolename, domain, typename,
194193
addnodes.literal_emphasis))
195194
else:

scrapinghub/client/__init__.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,18 +16,18 @@ class Connection(_Connection):
1616

1717
@_wrap_http_errors
1818
def _request(self, *args, **kwargs):
19-
return super(Connection, self)._request(*args, **kwargs)
19+
return super()._request(*args, **kwargs)
2020

2121

2222
class HubstorageClient(_HubstorageClient):
2323

2424
@_wrap_http_errors
2525
def request(self, *args, **kwargs):
26-
return super(HubstorageClient, self).request(*args, **kwargs)
26+
return super().request(*args, **kwargs)
2727

2828

29-
class ScrapinghubClient(object):
30-
"""Main class to work with Scrapinghub API.
29+
class ScrapinghubClient:
30+
r"""Main class to work with Scrapinghub API.
3131
3232
:param auth: (optional) Scrapinghub APIKEY or other SH auth credentials.
3333
If not provided, it will read, respectively, from

scrapinghub/client/activity.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
from __future__ import absolute_import
2-
31
from .proxy import _Proxy
42
from .utils import parse_job_key, update_kwargs
53

scrapinghub/client/collections.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
from __future__ import absolute_import
2-
31
from six import string_types
42
from six.moves import collections_abc
53

@@ -96,7 +94,7 @@ def list(self):
9694
return list(self.iter())
9795

9896

99-
class Collection(object):
97+
class Collection:
10098
"""Representation of a project collection object.
10199
102100
Not a public constructor: use :class:`Collections` instance to get a
@@ -157,7 +155,7 @@ def __init__(self, client, collections, type_, name):
157155
self._origin = _Collection(type_, name, collections._origin)
158156

159157
def get(self, key, **params):
160-
"""Get item from collection by key.
158+
r"""Get item from collection by key.
161159
162160
:param key: string item key.
163161
:param \*\*params: (optional) additional query params for the request.
@@ -184,7 +182,7 @@ def delete(self, keys):
184182
185183
The method returns ``None`` (original method returns an empty generator).
186184
"""
187-
if (not isinstance(keys, string_types) and
185+
if (not isinstance(keys, str) and
188186
not isinstance(keys, collections_abc.Iterable)):
189187
raise ValueError("You should provide string key or iterable "
190188
"object providing string keys")
@@ -209,7 +207,7 @@ def count(self, *args, **kwargs):
209207

210208
def iter(self, key=None, prefix=None, prefixcount=None, startts=None,
211209
endts=None, requests_params=None, **params):
212-
"""A method to iterate through collection items.
210+
r"""A method to iterate through collection items.
213211
214212
:param key: a string key or a list of keys to filter with.
215213
:param prefix: a string prefix to filter items.
@@ -230,7 +228,7 @@ def iter(self, key=None, prefix=None, prefixcount=None, startts=None,
230228

231229
def list(self, key=None, prefix=None, prefixcount=None, startts=None,
232230
endts=None, requests_params=None, **params):
233-
"""Convenient shortcut to list iter results.
231+
r"""Convenient shortcut to list iter results.
234232
235233
Please note that :meth:`list` method can use a lot of memory and for a
236234
large amount of logs it's recommended to iterate through it

scrapinghub/client/exceptions.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
# -*- coding: utf-8 -*-
2-
from __future__ import absolute_import
31
from functools import wraps
42

53
from requests import HTTPError
@@ -29,7 +27,7 @@ def __init__(self, message=None, http_error=None):
2927
self.http_error = http_error
3028
if not message:
3129
message = _get_http_error_msg(http_error)
32-
super(ScrapinghubAPIError, self).__init__(message)
30+
super().__init__(message)
3331

3432

3533
class BadRequest(ScrapinghubAPIError):

scrapinghub/client/frontiers.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
from __future__ import absolute_import
21
from functools import partial
32
from collections import defaultdict
43

@@ -15,7 +14,7 @@ class _HSFrontier(_Frontier):
1514
"""Modified hubstorage Frontier with newcount per slot."""
1615

1716
def __init__(self, *args, **kwargs):
18-
super(_HSFrontier, self).__init__(*args, **kwargs)
17+
super().__init__(*args, **kwargs)
1918
self.newcount = defaultdict(int)
2019

2120
def _get_writer(self, frontier, slot):
@@ -84,7 +83,7 @@ class Frontiers(_Proxy):
8483
>>> project.frontiers.close()
8584
"""
8685
def __init__(self, *args, **kwargs):
87-
super(Frontiers, self).__init__(*args, **kwargs)
86+
super().__init__(*args, **kwargs)
8887

8988
def get(self, name):
9089
"""Get a frontier by name.
@@ -125,7 +124,7 @@ def close(self):
125124
self._origin.close()
126125

127126

128-
class Frontier(object):
127+
class Frontier:
129128
"""Representation of a frontier object.
130129
131130
Not a public constructor: use :class:`Frontiers` instance to get a
@@ -201,7 +200,7 @@ def newcount(self):
201200
if frontier == self.key)
202201

203202

204-
class FrontierSlot(object):
203+
class FrontierSlot:
205204
"""Representation of a frontier slot object.
206205
207206
Not a public constructor: use :class:`Frontier` instance to get a
@@ -295,7 +294,7 @@ def newcount(self):
295294
return newcount_values.get((self._frontier.key, self.key), 0)
296295

297296

298-
class FrontierSlotFingerprints(object):
297+
class FrontierSlotFingerprints:
299298
"""Representation of request fingerprints collection stored in slot."""
300299

301300
def __init__(self, slot):
@@ -311,13 +310,13 @@ def add(self, fps):
311310
origin = self._frontier._frontiers._origin
312311
writer = origin._get_writer(self._frontier.key, self.key)
313312
fps = list(fps) if not isinstance(fps, list) else fps
314-
if not all(isinstance(fp, string_types) for fp in fps):
313+
if not all(isinstance(fp, str) for fp in fps):
315314
raise ValueError('Fingerprint should be of a string type')
316315
for fp in fps:
317316
writer.write({'fp': fp})
318317

319318
def iter(self, **params):
320-
"""Iterate through fingerprints in the slot.
319+
r"""Iterate through fingerprints in the slot.
321320
322321
:param \*\*params: (optional) additional query params for the request.
323322
:return: an iterator over fingerprints.
@@ -329,7 +328,7 @@ def iter(self, **params):
329328
yield fp.get('fp')
330329

331330
def list(self, **params):
332-
"""List fingerprints in the slot.
331+
r"""List fingerprints in the slot.
333332
334333
:param \*\*params: (optional) additional query params for the request.
335334
:return: a list of fingerprints.
@@ -338,7 +337,7 @@ def list(self, **params):
338337
return list(self.iter(**params))
339338

340339

341-
class FrontierSlotQueue(object):
340+
class FrontierSlotQueue:
342341
"""Representation of request batches queue stored in slot."""
343342

344343
def __init__(self, slot):
@@ -352,7 +351,7 @@ def add(self, fps):
352351
return origin.add(self._frontier.key, self.key, fps)
353352

354353
def iter(self, mincount=None, **params):
355-
"""Iterate through batches in the queue.
354+
r"""Iterate through batches in the queue.
356355
357356
:param mincount: (optional) limit results with min amount of requests.
358357
:param \*\*params: (optional) additional query params for the request.
@@ -366,7 +365,7 @@ def iter(self, mincount=None, **params):
366365
return origin.apiget(path, params=params)
367366

368367
def list(self, mincount=None, **params):
369-
"""List request batches in the queue.
368+
r"""List request batches in the queue.
370369
371370
:param mincount: (optional) limit results with min amount of requests.
372371
:param \*\*params: (optional) additional query params for the request.

scrapinghub/client/items.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
from __future__ import absolute_import
2-
31
import sys
42

53
from .proxy import _ItemsResourceProxy, _DownloadableProxyMixin
@@ -84,10 +82,10 @@ def _modify_iter_params(self, params):
8482
:return: a dict with updated set of params.
8583
:rtype: :class:`dict`
8684
"""
87-
params = super(Items, self)._modify_iter_params(params)
85+
params = super()._modify_iter_params(params)
8886
offset = params.pop('offset', None)
8987
if offset:
90-
params['start'] = '{}/{}'.format(self.key, offset)
88+
params['start'] = f'{self.key}/{offset}'
9189
return params
9290

9391
def list_iter(self, chunksize=1000, *args, **kwargs):

scrapinghub/client/jobs.py

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
from __future__ import absolute_import
2-
31
import json
42

53
from ..hubstorage.job import JobMeta as _JobMeta
@@ -17,7 +15,7 @@
1715
from .utils import get_tags_for_update, parse_job_key, update_kwargs
1816

1917

20-
class Jobs(object):
18+
class Jobs:
2119
"""Class representing a collection of jobs for a project/spider.
2220
2321
Not a public constructor: use :class:`~scrapinghub.client.projects.Project`
@@ -45,7 +43,7 @@ def __init__(self, client, project_id, spider=None):
4543

4644
def count(self, spider=None, state=None, has_tag=None, lacks_tag=None,
4745
startts=None, endts=None, **params):
48-
"""Count jobs with a given set of filters.
46+
r"""Count jobs with a given set of filters.
4947
5048
:param spider: (optional) filter by spider name.
5149
:param state: (optional) a job state, a string or a list of strings.
@@ -140,7 +138,7 @@ def cancel(self, keys=None, count=None, **params):
140138
def iter(self, count=None, start=None, spider=None, state=None,
141139
has_tag=None, lacks_tag=None, startts=None, endts=None,
142140
meta=None, **params):
143-
"""Iterate over jobs collection for a given set of params.
141+
r"""Iterate over jobs collection for a given set of params.
144142
145143
:param count: (optional) limit amount of returned jobs.
146144
:param start: (optional) number of jobs to skip in the beginning.
@@ -211,7 +209,7 @@ def iter(self, count=None, start=None, spider=None, state=None,
211209
def list(self, count=None, start=None, spider=None, state=None,
212210
has_tag=None, lacks_tag=None, startts=None, endts=None,
213211
meta=None, **params):
214-
"""Convenient shortcut to list iter results.
212+
r"""Convenient shortcut to list iter results.
215213
216214
:param count: (optional) limit amount of returned jobs.
217215
:param start: (optional) number of jobs to skip in the beginning.
@@ -250,7 +248,7 @@ def list(self, count=None, start=None, spider=None, state=None,
250248
def run(self, spider=None, units=None, priority=None, meta=None,
251249
add_tag=None, job_args=None, job_settings=None, cmd_args=None,
252250
environment=None, **params):
253-
"""Schedule a new job and returns its job key.
251+
r"""Schedule a new job and returns its job key.
254252
255253
:param spider: a spider name string
256254
(not needed if job is scheduled via :attr:`Spider.jobs`).
@@ -329,7 +327,7 @@ def get(self, job_key):
329327
return Job(self._client, str(job_key))
330328

331329
def summary(self, state=None, spider=None, **params):
332-
"""Get jobs summary (optionally by state).
330+
r"""Get jobs summary (optionally by state).
333331
334332
:param state: (optional) a string state to filter jobs.
335333
:param spider: (optional) a spider name (not needed if instantiated
@@ -355,7 +353,7 @@ def summary(self, state=None, spider=None, **params):
355353

356354
def iter_last(self, start=None, start_after=None, count=None,
357355
spider=None, **params):
358-
"""Iterate through last jobs for each spider.
356+
r"""Iterate through last jobs for each spider.
359357
360358
:param start: (optional)
361359
:param start_after: (optional)
@@ -445,7 +443,7 @@ def update_tags(self, add=None, remove=None, spider=None):
445443
return result['count']
446444

447445

448-
class Job(object):
446+
class Job:
449447
"""Class representing a job object.
450448
451449
Not a public constructor: use :class:`~scrapinghub.client.ScrapinghubClient`
@@ -510,7 +508,7 @@ def close_writers(self):
510508
self._job.close_writers()
511509

512510
def start(self, **params):
513-
"""Move job to running state.
511+
r"""Move job to running state.
514512
515513
:param \*\*params: (optional) keyword meta parameters to update.
516514
:return: a previous string job state.
@@ -524,7 +522,7 @@ def start(self, **params):
524522
return self.update(state='running', **params)
525523

526524
def finish(self, **params):
527-
"""Move running job to finished state.
525+
r"""Move running job to finished state.
528526
529527
:param \*\*params: (optional) keyword meta parameters to update.
530528
:return: a previous string job state.
@@ -538,7 +536,7 @@ def finish(self, **params):
538536
return self.update(state='finished', **params)
539537

540538
def delete(self, **params):
541-
"""Mark finished job for deletion.
539+
r"""Mark finished job for deletion.
542540
543541
:param \*\*params: (optional) keyword meta parameters to update.
544542
:return: a previous string job state.
@@ -552,7 +550,7 @@ def delete(self, **params):
552550
return self.update(state='deleted', **params)
553551

554552
def update(self, state, **params):
555-
"""Update job state.
553+
r"""Update job state.
556554
557555
:param state: a new job state.
558556
:param \*\*params: (optional) keyword meta parameters to update.
@@ -568,7 +566,7 @@ def update(self, state, **params):
568566
job = next(self._project.jobq.update(self, state=state, **params))
569567
return job['prevstate']
570568
except StopIteration:
571-
raise NotFound("Job {} doesn't exist".format(self.key))
569+
raise NotFound(f"Job {self.key} doesn't exist")
572570

573571
def cancel(self):
574572
"""Schedule a running job for cancellation.

0 commit comments

Comments (0)