attempt an optimistic localhost-only search for "==" requirements #2114

Closed · wants to merge 3 commits
34 changes: 27 additions & 7 deletions pip/index.py
@@ -279,6 +279,23 @@ def _validate_secure_origin(self, logger, location):
)

def find_requirement(self, req, upgrade):
+        if any(op == '==' for op, ver in req.req.specs):
+            # if the version is pinned-down by a ==, do an optimistic search
+            # for a satisfactory package on the local filesystem.
+            try:
+                result = self._find_requirement(
+                    req, upgrade, network_allowed=False,
+                )
+            except DistributionNotFound:
+                result = None
+
+            if result is not None:
+                return result
+
+        # otherwise, do the full network search
+        return self._find_requirement(req, upgrade, network_allowed=True)
+
+    def _find_requirement(self, req, upgrade, network_allowed):

def mkurl_pypi_url(url):
loc = posixpath.join(url, url_name)
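
A note on the pin check above: req.req here is a pkg_resources.Requirement (as in pip at the time), whose specs attribute is a list of (operator, version) tuples, so the offline pass only runs for requirements pinned with ==. A minimal standalone sketch of that check, using a made-up package name that is not part of the diff:

    from pkg_resources import Requirement

    def is_pinned(requirement):
        # True when any specifier uses '==', mirroring the any(...) test above.
        return any(op == '==' for op, ver in requirement.specs)

    # 'example-pkg' is an illustrative name only.
    assert is_pinned(Requirement.parse('example-pkg==1.0'))
    assert not is_pinned(Requirement.parse('example-pkg>=1.0'))
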
@@ -302,7 +319,7 @@ def mkurl_pypi_url(url):
trusted=True,
)

-        page = self._get_page(main_index_url, req)
+        page = self._get_page(main_index_url, req, network_allowed)
if page is None and PyPI.netloc not in str(main_index_url):
warnings.warn(
"Failed to find %r at %s. It is suggested to upgrade "
@@ -348,7 +365,7 @@ def mkurl_pypi_url(url):
)
)
page_versions = []
-        for page in self._get_pages(locations, req):
+        for page in self._get_pages(locations, req, network_allowed):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
@@ -533,7 +550,7 @@ def _find_url_name(self, index_url, url_name, req):
# Vaguely part of the PyPI API... weird but true.
# FIXME: bad to modify this?
index_url.url += '/'
-        page = self._get_page(index_url, req)
+        page = self._get_page(index_url, req, network_allowed=True)
if page is None:
logger.critical('Cannot fetch index base URL %s', index_url)
return
@@ -547,7 +564,7 @@ def _find_url_name(self, index_url, url_name, req):
return base
return None

-    def _get_pages(self, locations, req):
+    def _get_pages(self, locations, req, network_allowed):
"""
Yields (page, page_url) from the given locations, skipping
locations that have errors, and adding download/homepage links
@@ -561,7 +578,7 @@ def _get_pages(self, locations, req):
continue
seen.add(location)

-            page = self._get_page(location, req)
+            page = self._get_page(location, req, network_allowed)
if page is None:
continue

@@ -777,8 +794,11 @@ def _egg_info_matches(self, egg_info, search_name, link):
else:
return None

-    def _get_page(self, link, req):
-        return HTMLPage.get_page(link, req, session=self.session)
+    def _get_page(self, link, req, network_allowed):
+        if network_allowed or link.url.startswith('file:'):
+            return HTMLPage.get_page(link, req, session=self.session)
+        else:
+            return HTMLPage('', 'fake://' + link.url)


class HTMLPage(object):
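
The gate added to _get_page is what makes the first pass local-only: with network_allowed=False, only file: links are fetched, and every other index URL gets back an empty placeholder page built from the link's URL (the 'fake://' HTMLPage), so the offline search simply finds no candidates there rather than touching the network. A standalone sketch of that decision, with illustrative URLs:

    def should_fetch(url, network_allowed):
        # Fetch when the network is allowed, or when the link points at the
        # local filesystem via a file: URL.
        return network_allowed or url.startswith('file:')

    assert should_fetch('https://pypi.python.org/simple/pip/', network_allowed=True)
    assert should_fetch('file:///srv/wheelhouse/', network_allowed=False)
    assert not should_fetch('https://pypi.python.org/simple/pip/', network_allowed=False)
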
16 changes: 8 additions & 8 deletions tests/unit/test_finder.py
@@ -296,15 +296,15 @@ def test_finder_priority_nonegg_over_eggfragments():

finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)

assert link.url.endswith('tar.gz')

links.reverse()
finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)

assert link.url.endswith('tar.gz')
@@ -326,14 +326,14 @@ def test_finder_only_installs_stable_releases(data):
links = ["https://foo/bar-1.0.tar.gz", "https://foo/bar-2.0b1.tar.gz"]
finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-1.0.tar.gz"

links.reverse()
finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-1.0.tar.gz"

@@ -362,7 +362,7 @@ def test_finder_installs_pre_releases(data):
session=PipSession(),
)

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-2.0b1.tar.gz"

@@ -373,7 +373,7 @@ def test_finder_installs_pre_releases(data):
session=PipSession(),
)

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-2.0b1.tar.gz"

@@ -404,14 +404,14 @@ def test_finder_installs_pre_releases_with_version_spec():

finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-2.0b1.tar.gz"

links.reverse()
finder = PackageFinder(links, [], session=PipSession())

-    with patch.object(finder, "_get_pages", lambda x, y: []):
+    with patch.object(finder, "_get_pages", lambda x, y, z: []):
link = finder.find_requirement(req, False)
assert link.url == "https://foo/bar-2.0b1.tar.gz"
