Skip to content

Commit e77cbd0

Browse files
committed
Apply the new code style to fetcher docstrings
Ensure that the newly added files' docstrings adhere to the recently adopted code style guideline (#1232). Small code style improvements in comments and imports. Signed-off-by: Teodora Sechkova <[email protected]>
1 parent 5d3133d commit e77cbd0

File tree

2 files changed

+64
-70
lines changed

2 files changed

+64
-70
lines changed

tuf/client/fetcher.py

Lines changed: 20 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,53 +1,38 @@
1-
"""
2-
<Program Name>
3-
fetcher.py
4-
5-
<Author>
6-
Teodora Sechkova <[email protected]>
7-
8-
<Started>
9-
December 14, 2020
1+
# Copyright 2021, New York University and the TUF contributors
2+
# SPDX-License-Identifier: MIT OR Apache-2.0
103

11-
<Copyright>
12-
See LICENSE-MIT OR LICENSE for licensing information.
13-
14-
<Purpose>
15-
Provides an interface for network IO abstraction.
4+
"""Provides an interface for network IO abstraction.
165
"""
176

7+
# Imports
188
import abc
199

20-
10+
# Classes
2111
class FetcherInterface():
12+
"""Defines an interface for abstract network download.
13+
14+
By providing a concrete implementation of the abstract interface,
15+
users of the framework can plug-in their preferred/customized
16+
network stack.
2217
"""
23-
<Purpose>
24-
Defines an interface for abstract network download which can be implemented
25-
for a variety of network libraries and configurations.
26-
"""
27-
__metaclass__ = abc.ABCMeta
2818

19+
__metaclass__ = abc.ABCMeta
2920

3021
@abc.abstractmethod
3122
def fetch(self, url, required_length):
32-
"""
33-
<Purpose>
34-
Fetches the contents of HTTP/HTTPS url from a remote server up to
35-
required_length and returns a bytes iterator.
23+
"""Fetches the contents of HTTP/HTTPS url from a remote server.
3624
37-
<Arguments>
38-
url:
39-
A URL string that represents the location of the file.
25+
Ensures the length of the downloaded data is up to 'required_length'.
4026
41-
required_length:
42-
An integer value representing the length of the file in bytes.
27+
Arguments:
28+
url: A URL string that represents a file location.
29+
required_length: An integer value representing the file length in bytes.
4330
44-
<Exceptions>
45-
tuf.exceptions.SlowRetrievalError, if a timeout occurs while receiving
46-
data from a server
31+
Raises:
32+
tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving data.
33+
tuf.exceptions.FetcherHTTPError: An HTTP error code is received.
4734
48-
tuf.exceptions.FetcherHTTPError, if an HTTP error code was received
49-
<Returns>
35+
Returns:
5036
A bytes iterator
5137
"""
5238
raise NotImplementedError # pragma: no cover
53-

tuf/requests_fetcher.py

Lines changed: 44 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,59 +1,69 @@
1-
"""
2-
<Program Name>
3-
fetcher.py
4-
5-
<Author>
6-
Teodora Sechkova <[email protected]>
7-
8-
<Started>
9-
December 14, 2020
10-
11-
<Copyright>
12-
See LICENSE-MIT OR LICENSE for licensing information.
1+
# Copyright 2021, New York University and the TUF contributors
2+
# SPDX-License-Identifier: MIT OR Apache-2.0
133

14-
<Purpose>
15-
Provides an implementation of FetcherInterface using the requests HTTP
4+
"""Provides an implementation of FetcherInterface using the Requests HTTP
165
library.
176
"""
187

8+
# Imports
199
import requests
2010
import six
2111
import logging
2212
import time
2313

2414
import urllib3.exceptions
15+
2516
import tuf.exceptions
2617
import tuf.settings
27-
import tuf.client.fetcher
2818

29-
logger = logging.getLogger(__name__)
19+
from tuf.client.fetcher import FetcherInterface
3020

21+
# Globals
22+
logger = logging.getLogger(__name__)
3123

32-
class RequestsFetcher(tuf.client.fetcher.FetcherInterface):
33-
"""
34-
<Purpose>
35-
A concrete implementation of FetcherInterface based on the Requests
24+
# Classes
25+
class RequestsFetcher(FetcherInterface):
26+
"""A concrete implementation of FetcherInterface based on the Requests
3627
library.
28+
29+
Attributes:
30+
_sessions: A dictionary of Requests.Session objects storing a separate
31+
session per scheme+hostname combination.
3732
"""
3833

3934
def __init__(self):
4035
# From http://docs.python-requests.org/en/master/user/advanced/#session-objects:
4136
#
42-
# "The Session object allows you to persist certain parameters across requests.
43-
# It also persists cookies across all requests made from the Session instance,
44-
# and will use urllib3's connection pooling. So if you're making several
45-
# requests to the same host, the underlying TCP connection will be reused,
46-
# which can result in a significant performance increase (see HTTP persistent
47-
# connection)."
37+
# "The Session object allows you to persist certain parameters across
38+
# requests. It also persists cookies across all requests made from the
39+
# Session instance, and will use urllib3's connection pooling. So if you're
40+
# making several requests to the same host, the underlying TCP connection
41+
# will be reused, which can result in a significant performance increase
42+
# (see HTTP persistent connection)."
4843
#
49-
# NOTE: We use a separate requests.Session per scheme+hostname combination, in
50-
# order to reuse connections to the same hostname to improve efficiency, but
51-
# avoiding sharing state between different hosts-scheme combinations to
44+
# NOTE: We use a separate requests.Session per scheme+hostname combination,
45+
# in order to reuse connections to the same hostname to improve efficiency,
46+
# but avoiding sharing state between different hosts-scheme combinations to
5247
# minimize subtle security issues. Some cookies may not be HTTP-safe.
5348
self._sessions = {}
5449

5550

5651
def fetch(self, url, required_length):
52+
"""Fetches the contents of HTTP/HTTPS url from a remote server.
53+
54+
Ensures the length of the downloaded data is up to 'required_length'.
55+
56+
Arguments:
57+
url: A URL string that represents a file location.
58+
required_length: An integer value representing the file length in bytes.
59+
60+
Raises:
61+
tuf.exceptions.SlowRetrievalError: A timeout occurs while receiving data.
62+
tuf.exceptions.FetcherHTTPError: An HTTP error code is received.
63+
64+
Returns:
65+
A bytes iterator
66+
"""
5767
# Get a customized session for each new schema+hostname combination.
5868
session = self._get_session(url)
5969

@@ -81,10 +91,10 @@ def chunks():
8191
bytes_received = 0
8292
while True:
8393
# We download a fixed chunk of data in every round. This is so that we
84-
# can defend against slow retrieval attacks. Furthermore, we do not wish
85-
# to download an extremely large file in one shot.
86-
# Before beginning the round, sleep (if set) for a short amount of time
87-
# so that the CPU is not hogged in the while loop.
94+
# can defend against slow retrieval attacks. Furthermore, we do not
95+
# wish to download an extremely large file in one shot.
96+
# Before beginning the round, sleep (if set) for a short amount of
97+
# time so that the CPU is not hogged in the while loop.
8898
if tuf.settings.SLEEP_BEFORE_ROUND:
8999
time.sleep(tuf.settings.SLEEP_BEFORE_ROUND)
90100

@@ -121,8 +131,7 @@ def chunks():
121131

122132

123133
def _get_session(self, url):
124-
"""
125-
Returns a different customized requests.Session per schema+hostname
134+
"""Returns a different customized requests.Session per schema+hostname
126135
combination.
127136
"""
128137
# Use a different requests.Session per schema+hostname combination, to

0 commit comments

Comments
 (0)