diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index f891388f4f..4d236b576b 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -24,8 +24,7 @@ from securesystemslib.storage import FilesystemBackend, StorageBackendInterface from securesystemslib.util import persist_temp_file -import tuf.exceptions -import tuf.formats +from tuf import exceptions, formats from tuf.api.serialization import ( MetadataDeserializer, MetadataSerializer, @@ -266,10 +265,10 @@ def verify( ) if not signatures_for_keyid: - raise tuf.exceptions.Error(f"no signature for key {key['keyid']}.") + raise exceptions.Error(f"no signature for key {key['keyid']}.") if len(signatures_for_keyid) > 1: - raise tuf.exceptions.Error( + raise exceptions.Error( f"{len(signatures_for_keyid)} signatures for key " f"{key['keyid']}, not sure which one to verify." ) @@ -337,7 +336,7 @@ def _common_fields_from_dict(signed_dict: Mapping[str, Any]) -> list: # Convert 'expires' TUF metadata string to a datetime object, which is # what the constructor expects and what we store. The inverse operation # is implemented in '_common_fields_to_dict'. - expires = tuf.formats.expiry_string_to_datetime(expires_str) + expires = formats.expiry_string_to_datetime(expires_str) return [_type, version, spec_version, expires] def _common_fields_to_dict(self) -> Dict[str, Any]: diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 1897b1f599..8084dffa64 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -54,12 +54,13 @@ # The client first imports the 'updater.py' module, the only module the # client is required to import. The client will utilize a single class # from this module. - import tuf.client.updater + from tuf.client.updater import Updater # The only other module the client interacts with is 'tuf.settings'. The # client accesses this module solely to set the repository directory. # This directory will hold the files downloaded from a remote repository. 
- tuf.settings.repositories_directory = 'local-repository' + from tuf import settings + settings.repositories_directory = 'local-repository' # Next, the client creates a dictionary object containing the repository # mirrors. The client may download content from any one of these mirrors. @@ -82,7 +83,7 @@ # is called with two arguments. The first argument assigns a name to this # particular updater and the second argument the repository mirrors defined # above. - updater = tuf.client.updater.Updater('updater', repository_mirrors) + updater = Updater('updater', repository_mirrors) # The client next calls the refresh() method to ensure it has the latest # copies of the metadata files. @@ -127,38 +128,39 @@ import time import fnmatch import copy +import six import warnings import io +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import hash as sslib_hash +from securesystemslib import keys as sslib_keys +from securesystemslib import util as sslib_util + import tuf -import tuf.download -import tuf.requests_fetcher -import tuf.formats -import tuf.settings -import tuf.keydb -import tuf.log -import tuf.mirrors -import tuf.roledb -import tuf.sig -import tuf.exceptions - -import securesystemslib.exceptions -import securesystemslib.hash -import securesystemslib.keys -import securesystemslib.util -import six +from tuf import download +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import log # pylint: disable=unused-import +from tuf import mirrors +from tuf import roledb +from tuf import settings +from tuf import sig +from tuf import requests_fetcher # The Timestamp role does not have signed metadata about it; otherwise we # would need an infinite regress of metadata. Therefore, we use some # default, but sane, upper file length for its metadata. 
-DEFAULT_TIMESTAMP_UPPERLENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH +DEFAULT_TIMESTAMP_UPPERLENGTH = settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH # The Root role may be updated without knowing its version number if # top-level metadata cannot be safely downloaded (e.g., keys may have been # revoked, thus requiring a new Root file that includes the updated keys) # and 'unsafely_update_root_if_necessary' is True. # We use some default, but sane, upper file length for its metadata. -DEFAULT_ROOT_UPPERLENGTH = tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH +DEFAULT_ROOT_UPPERLENGTH = settings.DEFAULT_ROOT_REQUIRED_LENGTH # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -198,21 +200,21 @@ def __init__(self, map_file): # Is 'map_file' a path? If not, raise # 'securesystemslib.exceptions.FormatError'. The actual content of the map # file is validated later on in this method. - securesystemslib.formats.PATH_SCHEMA.check_match(map_file) + sslib_formats.PATH_SCHEMA.check_match(map_file) # A dictionary mapping repositories to TUF updaters. self.repository_names_to_updaters = {} try: # The map file dictionary that associates targets with repositories. - self.map_file = securesystemslib.util.load_json_file(map_file) + self.map_file = sslib_util.load_json_file(map_file) - except (securesystemslib.exceptions.Error) as e: - raise tuf.exceptions.Error('Cannot load the map file: ' + str(e)) + except (sslib_exceptions.Error) as e: + raise exceptions.Error('Cannot load the map file: ' + str(e)) # Raise securesystemslib.exceptions.FormatError if the map file is # improperly formatted. - tuf.formats.MAPFILE_SCHEMA.check_match(self.map_file) + formats.MAPFILE_SCHEMA.check_match(self.map_file) # Save the "repositories" entry of the map file, with the following # example format: @@ -263,14 +265,14 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True): # Is the argument properly formatted? 
If not, raise # 'tuf.exceptions.FormatError'. - tuf.formats.RELPATH_SCHEMA.check_match(target_filename) + formats.RELPATH_SCHEMA.check_match(target_filename) # TAP 4 requires that the following attributes be present in mappings: # "paths", "repositories", "terminating", and "threshold". - tuf.formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) + formats.MAPPING_SCHEMA.check_match(self.map_file['mapping']) # Set the top-level directory containing the metadata for each repository. - repositories_directory = tuf.settings.repositories_directory + repositories_directory = settings.repositories_directory # Verify that the required local directories exist for each repository. self._verify_metadata_directories(repositories_directory) @@ -309,14 +311,14 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True): continue else: - raise tuf.exceptions.UnknownTargetError('The repositories in the' + raise exceptions.UnknownTargetError('The repositories in the' ' mapping do not agree on the target, or none of them have' ' signed for the target, and "terminating" was set to True.') # If we are here, it means either there were no mappings, or none of the # mappings provided the target. 
logger.debug('Did not find valid targetinfo for ' + repr(target_filename)) - raise tuf.exceptions.UnknownTargetError('The repositories in the map' + raise exceptions.UnknownTargetError('The repositories in the map' ' file do not agree on the target, or none of them have signed' ' for the target.') @@ -336,7 +338,7 @@ def _verify_metadata_directories(self, repositories_directory): repository_name) if not os.path.isdir(repository_directory): - raise tuf.exceptions.Error('The metadata directory' + raise exceptions.Error('The metadata directory' ' for ' + repr(repository_name) + ' must exist' ' at ' + repr(repository_directory)) @@ -348,7 +350,7 @@ def _verify_metadata_directories(self, repositories_directory): repository_directory, 'metadata', 'current', 'root.json') if not os.path.isfile(root_file): - raise tuf.exceptions.Error( + raise exceptions.Error( 'The Root file must exist at ' + repr(root_file)) else: @@ -372,7 +374,7 @@ def _matching_targetinfo( targetinfo, updater = self._update_from_repository( repository_name, target_filename) - except (tuf.exceptions.UnknownTargetError, tuf.exceptions.Error): + except (exceptions.UnknownTargetError, exceptions.Error): continue valid_targetinfo[updater] = targetinfo @@ -486,7 +488,7 @@ def get_updater(self, repository_name): # Are the arguments properly formatted? If not, raise # 'tuf.exceptions.FormatError'. - tuf.formats.NAME_SCHEMA.check_match(repository_name) + formats.NAME_SCHEMA.check_match(repository_name) updater = self.repository_names_to_updaters.get(repository_name) @@ -499,10 +501,10 @@ def get_updater(self, repository_name): # Create repository mirrors object needed by the # tuf.client.updater.Updater(). Each 'repository_name' can have more # than one mirror. 
- mirrors = {} + repo_mirrors = {} for url in self.repository_names_to_mirrors[repository_name]: - mirrors[url] = { + repo_mirrors[url] = { 'url_prefix': url, 'metadata_path': 'metadata', 'targets_path': 'targets'} @@ -511,7 +513,7 @@ def get_updater(self, repository_name): # NOTE: State (e.g., keys) should NOT be shared across different # updater instances. logger.debug('Adding updater for ' + repr(repository_name)) - updater = tuf.client.updater.Updater(repository_name, mirrors) + updater = Updater(repository_name, repo_mirrors) except Exception: return None @@ -535,7 +537,7 @@ def _update_from_repository(self, repository_name, target_filename): updater = self.get_updater(repository_name) if not updater: - raise tuf.exceptions.Error( + raise exceptions.Error( 'Cannot load updater for ' + repr(repository_name)) else: @@ -687,8 +689,8 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # number of objects and object types and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mistmatch. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - tuf.formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + formats.MIRRORDICT_SCHEMA.check_match(repository_mirrors) # Save the validated arguments. self.repository_name = repository_name @@ -697,7 +699,7 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Initialize Updater with an externally provided 'fetcher' implementing # the network download. By default tuf.fetcher.RequestsFetcher is used. if fetcher is None: - self.fetcher = tuf.requests_fetcher.RequestsFetcher() + self.fetcher = requests_fetcher.RequestsFetcher() else: self.fetcher = fetcher @@ -731,25 +733,25 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): self.consistent_snapshot = False # Ensure the repository metadata directory has been set. 
- if tuf.settings.repositories_directory is None: - raise tuf.exceptions.RepositoryError('The TUF update client' + if settings.repositories_directory is None: + raise exceptions.RepositoryError('The TUF update client' ' module must specify the directory containing the local repository' ' files. "tuf.settings.repositories_directory" MUST be set.') # Set the path for the current set of metadata files. - repositories_directory = tuf.settings.repositories_directory + repositories_directory = settings.repositories_directory repository_directory = os.path.join(repositories_directory, self.repository_name) # raise MissingLocalRepository if the repo does not exist at all. if not os.path.exists(repository_directory): - raise tuf.exceptions.MissingLocalRepositoryError('Local repository ' + + raise exceptions.MissingLocalRepositoryError('Local repository ' + repr(repository_directory) + ' does not exist.') current_path = os.path.join(repository_directory, 'metadata', 'current') # Ensure the current path is valid/exists before saving it. if not os.path.exists(current_path): - raise tuf.exceptions.RepositoryError('Missing' + raise exceptions.RepositoryError('Missing' ' ' + repr(current_path) + '. This path must exist and, at a minimum,' ' contain the Root metadata file.') @@ -760,20 +762,20 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None): # Ensure the previous path is valid/exists. if not os.path.exists(previous_path): - raise tuf.exceptions.RepositoryError('Missing ' + repr(previous_path) + '.' + raise exceptions.RepositoryError('Missing ' + repr(previous_path) + '.' ' This path MUST exist.') self.metadata_directory['previous'] = previous_path # Load current and previous metadata. 
for metadata_set in ['current', 'previous']: - for metadata_role in tuf.roledb.TOP_LEVEL_ROLES: + for metadata_role in roledb.TOP_LEVEL_ROLES: self._load_metadata_from_file(metadata_set, metadata_role) # Raise an exception if the repository is missing the required 'root' # metadata. if 'root' not in self.metadata['current']: - raise tuf.exceptions.RepositoryError('No root of trust!' + raise exceptions.RepositoryError('No root of trust!' ' Could not find the "root.json" file.') @@ -829,7 +831,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Ensure we have a valid metadata set. if metadata_set not in ['current', 'previous']: - raise securesystemslib.exceptions.Error( + raise sslib_exceptions.Error( 'Invalid metadata set: ' + repr(metadata_set)) # Save and construct the full metadata path. @@ -842,17 +844,17 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): # Load the file. The loaded object should conform to # 'tuf.formats.SIGNABLE_SCHEMA'. try: - metadata_signable = securesystemslib.util.load_json_file( + metadata_signable = sslib_util.load_json_file( metadata_filepath) # Although the metadata file may exist locally, it may not # be a valid json file. On the next refresh cycle, it will be # updated as required. If Root if cannot be loaded from disk # successfully, an exception should be raised by the caller. - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: return - tuf.formats.check_signable_object_format(metadata_signable) + formats.check_signable_object_format(metadata_signable) # Extract the 'signed' role object from 'metadata_signable'. metadata_object = metadata_signable['signed'] @@ -911,10 +913,10 @@ def _rebuild_key_and_role_db(self): # metadata files for delegated roles are also not loaded when the # repository is first instantiated. Due to this setup, reloading delegated # roles is not required here. 
- tuf.keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], + keydb.create_keydb_from_root_metadata(self.metadata['current']['root'], self.repository_name) - tuf.roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], + roledb.create_roledb_from_root_metadata(self.metadata['current']['root'], self.repository_name) @@ -964,14 +966,14 @@ def _import_delegations(self, parent_role): # We specify the keyid to ensure that it's the correct keyid # for the key. try: - key, _ = securesystemslib.keys.format_metadata_to_key(keyinfo, keyid) + key, _ = sslib_keys.format_metadata_to_key(keyinfo, keyid) - tuf.keydb.add_key(key, repository_name=self.repository_name) + keydb.add_key(key, repository_name=self.repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass - except (securesystemslib.exceptions.FormatError, securesystemslib.exceptions.Error): + except (sslib_exceptions.FormatError, sslib_exceptions.Error): logger.warning('Invalid key: ' + repr(keyid) + '. Aborting role ' + 'delegation for parent role \'' + parent_role + '\'.') raise @@ -983,13 +985,13 @@ def _import_delegations(self, parent_role): # Add the roles to the role database. for roleinfo in roles_info: try: - # NOTE: tuf.roledb.add_role will take care of the case where rolename + # NOTE: roledb.add_role will take care of the case where rolename # is None. rolename = roleinfo.get('name') logger.debug('Adding delegated role: ' + str(rolename) + '.') - tuf.roledb.add_role(rolename, roleinfo, self.repository_name) + roledb.add_role(rolename, roleinfo, self.repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: logger.warning('Role already exists: ' + rolename) except Exception: @@ -1050,7 +1052,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): # number of objects and object types, and that all dict # keys are properly named. 
# Raise 'securesystemslib.exceptions.FormatError' if the check fail. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match( + sslib_formats.BOOLEAN_SCHEMA.check_match( unsafely_update_root_if_necessary) # Update the top-level metadata. The _update_metadata_if_changed() and @@ -1062,7 +1064,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): try: self._ensure_not_expired(root_metadata, 'root') - except tuf.exceptions.ExpiredMetadataError: + except exceptions.ExpiredMetadataError: # Raise 'tuf.exceptions.NoWorkingMirrorError' if a valid (not # expired, properly signed, and valid metadata) 'root.json' cannot be # installed. @@ -1138,7 +1140,7 @@ def neither_403_nor_404(mirror_error): # Following the spec, try downloading the N+1th root for a certain maximum # number of times. lower_bound = current_root_metadata['version'] + 1 - upper_bound = lower_bound + tuf.settings.MAX_NUMBER_ROOT_ROTATIONS + upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS # Try downloading the next root. for next_version in range(lower_bound, upper_bound): @@ -1148,7 +1150,7 @@ def neither_403_nor_404(mirror_error): version=next_version) # When we run into HTTP 403/404 error from ALL mirrors, break out of # loop, because the next root metadata file is most likely missing. - except tuf.exceptions.NoWorkingMirrorError as exception: + except exceptions.NoWorkingMirrorError as exception: for mirror_error in exception.mirror_errors.values(): # Otherwise, reraise the error, because it is not a simple HTTP # error. 
@@ -1204,12 +1206,12 @@ def _check_hashes(self, file_object, trusted_hashes): # Verify each hash, raise an exception if any hash fails to verify for algorithm, trusted_hash in six.iteritems(trusted_hashes): - digest_object = securesystemslib.hash.digest_fileobject(file_object, + digest_object = sslib_hash.digest_fileobject(file_object, algorithm) computed_hash = digest_object.hexdigest() if trusted_hash != computed_hash: - raise securesystemslib.exceptions.BadHashError(trusted_hash, + raise sslib_exceptions.BadHashError(trusted_hash, computed_hash) else: @@ -1225,7 +1227,7 @@ def _check_file_length(self, file_object, trusted_file_length): Non-public method that ensures the length of 'file_object' is strictly equal to 'trusted_file_length'. This is a deliberately redundant implementation designed to complement - tuf.download._check_downloaded_length(). + download._check_downloaded_length(). file_object: @@ -1254,7 +1256,7 @@ def _check_file_length(self, file_object, trusted_file_length): # ensures that a downloaded file strictly matches a known, or trusted, # file length. if observed_length != trusted_file_length: - raise tuf.exceptions.DownloadLengthMismatchError(trusted_file_length, + raise exceptions.DownloadLengthMismatchError(trusted_file_length, observed_length) else: @@ -1313,7 +1315,7 @@ def _get_target_file(self, target_filepath, file_length, file_hashes, dirname, basename = os.path.split(target_filepath) target_filepath = os.path.join(dirname, target_digest + '.' 
+ basename) - file_mirrors = tuf.mirrors.get_list_of_mirrors('target', target_filepath, + file_mirrors = mirrors.get_list_of_mirrors('target', target_filepath, self.mirrors) # file_mirror (URL): error (Exception) @@ -1322,7 +1324,7 @@ def _get_target_file(self, target_filepath, file_length, file_hashes, for file_mirror in file_mirrors: try: - file_object = tuf.download.safe_download(file_mirror, + file_object = download.safe_download(file_mirror, file_length, self.fetcher) # Verify 'file_object' against the expected length and hashes. @@ -1341,7 +1343,7 @@ def _get_target_file(self, target_filepath, file_length, file_hashes, logger.debug('Failed to update ' + repr(target_filepath) + ' from' ' all mirrors: ' + repr(file_mirror_errors)) - raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors) + raise exceptions.NoWorkingMirrorError(file_mirror_errors) @@ -1356,7 +1358,7 @@ def _verify_root_self_signed(self, signable): keyids = signable['signed']['roles']['root']['keyids'] keys = signable['signed']['keys'] signatures = signable['signatures'] - signed = securesystemslib.formats.encode_canonical( + signed = sslib_formats.encode_canonical( signable['signed']).encode('utf-8') verified_sig_keyids = set() @@ -1373,7 +1375,7 @@ def _verify_root_self_signed(self, signable): # The ANYKEY_SCHEMA check in verify_signature expects the keydict to # include a keyid key['keyid'] = keyid - valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) + valid_sig = sslib_keys.verify_signature(key, signature, signed) if valid_sig: verified_sig_keyids.add(keyid) @@ -1431,15 +1433,15 @@ def _verify_metadata_file(self, metadata_file_object, metadata = metadata_file_object.read().decode('utf-8') try: - metadata_signable = securesystemslib.util.load_json_string(metadata) + metadata_signable = sslib_util.load_json_string(metadata) except Exception as exception: - raise tuf.exceptions.InvalidMetadataJSONError(exception) + raise 
exceptions.InvalidMetadataJSONError(exception) else: # Ensure the loaded 'metadata_signable' is properly formatted. Raise # 'securesystemslib.exceptions.FormatError' if not. - tuf.formats.check_signable_object_format(metadata_signable) + formats.check_signable_object_format(metadata_signable) # Is 'metadata_signable' expired? self._ensure_not_expired(metadata_signable['signed'], metadata_role) @@ -1449,11 +1451,11 @@ def _verify_metadata_file(self, metadata_file_object, # metadata. # Verify the signature on the downloaded metadata object. - valid = tuf.sig.verify(metadata_signable, metadata_role, + valid = sig.verify(metadata_signable, metadata_role, self.repository_name) if not valid: - raise securesystemslib.exceptions.BadSignatureError(metadata_role) + raise sslib_exceptions.BadSignatureError(metadata_role) # For root metadata, verify the downloaded root metadata object with the # new threshold of new signatures contained within the downloaded root @@ -1467,7 +1469,7 @@ def _verify_metadata_file(self, metadata_file_object, if valid and metadata_role == 'root': valid = self._verify_root_self_signed(metadata_signable) if not valid: - raise securesystemslib.exceptions.BadSignatureError(metadata_role) + raise sslib_exceptions.BadSignatureError(metadata_role) @@ -1511,7 +1513,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, A file object containing the metadata. 
""" - file_mirrors = tuf.mirrors.get_list_of_mirrors('meta', remote_filename, + file_mirrors = mirrors.get_list_of_mirrors('meta', remote_filename, self.mirrors) # file_mirror (URL): error (Exception) @@ -1520,7 +1522,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, for file_mirror in file_mirrors: try: - file_object = tuf.download.unsafe_download(file_mirror, + file_object = download.unsafe_download(file_mirror, upperbound_filelength, self.fetcher) file_object.seek(0) @@ -1528,7 +1530,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, # 'file_object' is also verified if decompressed above (i.e., the # uncompressed version). metadata_signable = \ - securesystemslib.util.load_json_string(file_object.read().decode('utf-8')) + sslib_util.load_json_string(file_object.read().decode('utf-8')) # Determine if the specification version number is supported. It is # assumed that "spec_version" is in (major.minor.fix) format, (for @@ -1547,7 +1549,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, code_spec_minor_version = int(code_spec_version_split[1]) if metadata_spec_major_version != code_spec_major_version: - raise tuf.exceptions.UnsupportedSpecificationError( + raise exceptions.UnsupportedSpecificationError( 'Downloaded metadata that specifies an unsupported ' 'spec_version. This code supports major version number: ' + repr(code_spec_major_version) + '; however, the obtained ' @@ -1563,7 +1565,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, ". The update will continue as the major versions match.") except (ValueError, TypeError) as error: - six.raise_from(securesystemslib.exceptions.FormatError('Improperly' + six.raise_from(sslib_exceptions.FormatError('Improperly' ' formatted spec_version, which must be in major.minor.fix format'), error) @@ -1576,7 +1578,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, # Verify that the downloaded version matches the version expected by # the caller. 
if version_downloaded != expected_version: - raise tuf.exceptions.BadVersionNumberError('Downloaded' + raise exceptions.BadVersionNumberError('Downloaded' ' version number: ' + repr(version_downloaded) + '. Version' ' number MUST be: ' + repr(expected_version)) @@ -1594,7 +1596,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, self.metadata['current'][metadata_role]['version'] if version_downloaded < current_version: - raise tuf.exceptions.ReplayedMetadataError(metadata_role, + raise exceptions.ReplayedMetadataError(metadata_role, version_downloaded, current_version) except KeyError: @@ -1619,7 +1621,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, else: logger.debug('Failed to update ' + repr(remote_filename) + ' from all' ' mirrors: ' + repr(file_mirror_errors)) - raise tuf.exceptions.NoWorkingMirrorError(file_mirror_errors) + raise exceptions.NoWorkingMirrorError(file_mirror_errors) @@ -1703,7 +1705,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): current_filepath = os.path.join(self.metadata_directory['current'], metadata_filename) current_filepath = os.path.abspath(current_filepath) - securesystemslib.util.ensure_parent_dir(current_filepath) + sslib_util.ensure_parent_dir(current_filepath) previous_filepath = os.path.join(self.metadata_directory['previous'], metadata_filename) @@ -1711,15 +1713,15 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None): if os.path.exists(current_filepath): # Previous metadata might not exist, say when delegations are added. - securesystemslib.util.ensure_parent_dir(previous_filepath) + sslib_util.ensure_parent_dir(previous_filepath) shutil.move(current_filepath, previous_filepath) # Next, move the verified updated metadata file to the 'current' directory. 
metadata_file_object.seek(0) metadata_signable = \ - securesystemslib.util.load_json_string(metadata_file_object.read().decode('utf-8')) + sslib_util.load_json_string(metadata_file_object.read().decode('utf-8')) - securesystemslib.util.persist_temp_file(metadata_file_object, current_filepath) + sslib_util.persist_temp_file(metadata_file_object, current_filepath) # Extract the metadata object so we can store it to the metadata store. # 'current_metadata_object' set to 'None' if there is not an object @@ -1810,7 +1812,7 @@ def _update_metadata_if_changed(self, metadata_role, # Ensure the referenced metadata has been loaded. The 'root' role may be # updated without having 'snapshot' available. if referenced_metadata not in self.metadata['current']: - raise tuf.exceptions.RepositoryError('Cannot update' + raise exceptions.RepositoryError('Cannot update' ' ' + repr(metadata_role) + ' because ' + referenced_metadata + ' is' ' missing.') @@ -1852,11 +1854,11 @@ def _update_metadata_if_changed(self, metadata_role, # expected role. Note: The Timestamp role is not updated via this # function. if metadata_role == 'snapshot': - upperbound_filelength = tuf.settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH + upperbound_filelength = settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH # The metadata is considered Targets (or delegated Targets metadata). else: - upperbound_filelength = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH + upperbound_filelength = settings.DEFAULT_TARGETS_REQUIRED_LENGTH try: self._update_metadata(metadata_role, upperbound_filelength, @@ -2009,7 +2011,7 @@ def _update_versioninfo(self, metadata_filename): # client's copy of snapshot.json. 
try: timestamp_version_number = self.metadata['current']['snapshot']['version'] - trusted_versioninfo = tuf.formats.make_versioninfo( + trusted_versioninfo = formats.make_versioninfo( timestamp_version_number) except KeyError: @@ -2025,7 +2027,7 @@ def _update_versioninfo(self, metadata_filename): targets_version_number = \ self.metadata['current'][metadata_filename[:-len('.json')]]['version'] trusted_versioninfo = \ - tuf.formats.make_versioninfo(targets_version_number) + formats.make_versioninfo(targets_version_number) except KeyError: trusted_versioninfo = \ @@ -2150,9 +2152,8 @@ def _update_fileinfo(self, metadata_filename): # Extract the file information from the actual file and save it # to the fileinfo store. - file_length, hashes = securesystemslib.util.get_file_details( - current_filepath) - metadata_fileinfo = tuf.formats.make_targets_fileinfo(file_length, hashes) + file_length, hashes = sslib_util.get_file_details(current_filepath) + metadata_fileinfo = formats.make_targets_fileinfo(file_length, hashes) self.fileinfo[metadata_filename] = metadata_fileinfo @@ -2196,7 +2197,7 @@ def _move_current_to_previous(self, metadata_role): # Move the current path to the previous path. if os.path.exists(current_filepath): - securesystemslib.util.ensure_parent_dir(previous_filepath) + sslib_util.ensure_parent_dir(previous_filepath) os.rename(current_filepath, previous_filepath) @@ -2237,7 +2238,7 @@ def _delete_metadata(self, metadata_role): # Remove knowledge of the role. if metadata_role in self.metadata['current']: del self.metadata['current'][metadata_role] - tuf.roledb.remove_role(metadata_role, self.repository_name) + roledb.remove_role(metadata_role, self.repository_name) @@ -2273,15 +2274,15 @@ def _ensure_not_expired(self, metadata_object, metadata_rolename): # Extract the expiration time. Convert it to a unix timestamp and compare it # against the current time.time() (also in Unix/POSIX time format, although # with microseconds attached.) 
- expires_datetime = tuf.formats.expiry_string_to_datetime( + expires_datetime = formats.expiry_string_to_datetime( metadata_object['expires']) - expires_timestamp = tuf.formats.datetime_to_unix_timestamp(expires_datetime) + expires_timestamp = formats.datetime_to_unix_timestamp(expires_datetime) current_time = int(time.time()) if expires_timestamp <= current_time: message = 'Metadata '+repr(metadata_rolename)+' expired on ' + \ expires_datetime.ctime() + ' (UTC).' - raise tuf.exceptions.ExpiredMetadataError(message) + raise exceptions.ExpiredMetadataError(message) @@ -2341,8 +2342,8 @@ def all_targets(self): # Fetch the targets of the delegated roles. get_rolenames returns # all roles available on the repository. delegated_targets = [] - for role in tuf.roledb.get_rolenames(self.repository_name): - if role in tuf.roledb.TOP_LEVEL_ROLES: + for role in roledb.get_rolenames(self.repository_name): + if role in roledb.TOP_LEVEL_ROLES: continue else: @@ -2477,8 +2478,8 @@ def _targets_of_role(self, rolename, targets=None, skip_refresh=False): targets_of_role = list(targets) logger.debug('Getting targets of role: ' + repr(rolename) + '.') - if not tuf.roledb.role_exists(rolename, self.repository_name): - raise tuf.exceptions.UnknownRoleError(rolename) + if not roledb.role_exists(rolename, self.repository_name): + raise exceptions.UnknownRoleError(rolename) # We do not need to worry about the target paths being trusted because # this is enforced before any new metadata is accepted. @@ -2557,7 +2558,7 @@ def targets_of_role(self, rolename='targets'): # Does 'rolename' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.RELPATH_SCHEMA.check_match(rolename) + formats.RELPATH_SCHEMA.check_match(rolename) # If we've been given a delegated targets role, we don't know how to # validate it without knowing what the delegating role is -- there could @@ -2579,8 +2580,8 @@ def targets_of_role(self, rolename='targets'): self._refresh_targets_metadata(refresh_all_delegated_roles=True) - if not tuf.roledb.role_exists(rolename, self.repository_name): - raise tuf.exceptions.UnknownRoleError(rolename) + if not roledb.role_exists(rolename, self.repository_name): + raise exceptions.UnknownRoleError(rolename) return self._targets_of_role(rolename, skip_refresh=True) @@ -2623,12 +2624,12 @@ def get_one_valid_targetinfo(self, target_filepath): # Does 'target_filepath' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(target_filepath) + formats.RELPATH_SCHEMA.check_match(target_filepath) target_filepath = target_filepath.replace('\\', '/') if target_filepath.startswith('/'): - raise tuf.exceptions.FormatError('The requested target file cannot' + raise exceptions.FormatError('The requested target file cannot' ' contain a leading path separator: ' + repr(target_filepath)) # Get target by looking at roles in order of priority tags. @@ -2636,7 +2637,7 @@ def get_one_valid_targetinfo(self, target_filepath): # Raise an exception if the target information could not be retrieved. if target is None: - raise tuf.exceptions.UnknownTargetError(repr(target_filepath) + ' not' + raise exceptions.UnknownTargetError(repr(target_filepath) + ' not' ' found.') # Otherwise, return the found target. 
@@ -2681,7 +2682,7 @@ def _preorder_depth_first_walk(self, target_filepath): current_metadata = self.metadata['current'] role_names = ['targets'] visited_role_names = set() - number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS + number_of_delegations = settings.MAX_NUMBER_OF_DELEGATIONS # Ensure the client has the most up-to-date version of 'targets.json'. # Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata @@ -2755,7 +2756,7 @@ def _preorder_depth_first_walk(self, target_filepath): if target is None and number_of_delegations == 0 and len(role_names) > 0: logger.debug(repr(len(role_names)) + ' roles left to visit, ' + 'but allowed to visit at most ' + - repr(tuf.settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') + repr(settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.') return target @@ -2890,7 +2891,7 @@ def _visit_child_role(self, child_role, target_filepath): # 'role_name' should have been validated when it was downloaded. # The 'paths' or 'path_hash_prefixes' fields should not be missing, # so we raise a format error here in case they are both missing. - raise securesystemslib.exceptions.FormatError(repr(child_role_name) + ' ' + raise sslib_exceptions.FormatError(repr(child_role_name) + ' ' 'has neither a "paths" nor "path_hash_prefixes". At least' ' one of these attributes must be present.') @@ -2931,7 +2932,7 @@ def _get_target_hash(self, target_filepath, hash_function='sha256'): # Calculate the hash of the filepath to determine which bin to find the # target. The client currently assumes the repository (i.e., repository # tool) uses 'hash_function' to generate hashes and UTF-8. 
- digest_object = securesystemslib.hash.digest(hash_function) + digest_object = sslib_hash.digest(hash_function) encoded_target_filepath = target_filepath.encode('utf-8') digest_object.update(encoded_target_filepath) target_filepath_hash = digest_object.hexdigest() @@ -2969,11 +2970,11 @@ def remove_obsolete_targets(self, destination_directory): # Does 'destination_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Iterate the rolenames and verify whether the 'previous' directory # contains a target no longer found in 'current'. - for role in tuf.roledb.get_rolenames(self.repository_name): + for role in roledb.get_rolenames(self.repository_name): if role.startswith('targets'): if role in self.metadata['previous'] and self.metadata['previous'][role] != None: for target in self.metadata['previous'][role]['targets']: @@ -3058,8 +3059,8 @@ def updated_targets(self, targets, destination_directory): # Do the arguments have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.TARGETINFOS_SCHEMA.check_match(targets) - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + formats.TARGETINFOS_SCHEMA.check_match(targets) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Keep track of the target objects and filepaths of updated targets. 
# Return 'updated_targets' and use 'updated_targetpaths' to avoid @@ -3086,11 +3087,11 @@ def updated_targets(self, targets, destination_directory): for algorithm, digest in six.iteritems(target['fileinfo']['hashes']): digest_object = None try: - digest_object = securesystemslib.hash.digest_filename(target_filepath, + digest_object = sslib_hash.digest_filename(target_filepath, algorithm=algorithm) # This exception would occur if the target does not exist locally. - except securesystemslib.exceptions.StorageError: + except sslib_exceptions.StorageError: updated_targets.append(target) updated_targetpaths.append(target_filepath) break @@ -3156,8 +3157,8 @@ def download_target(self, target, destination_directory, # number of objects and object types, and that all dict # keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fail. - tuf.formats.TARGETINFO_SCHEMA.check_match(target) - securesystemslib.formats.PATH_SCHEMA.check_match(destination_directory) + formats.TARGETINFO_SCHEMA.check_match(target) + sslib_formats.PATH_SCHEMA.check_match(destination_directory) # Extract the target file information. 
target_filepath = target['filepath'] @@ -3192,4 +3193,4 @@ def download_target(self, target, destination_directory, target_file_object = self._get_target_file(target_filepath, trusted_length, trusted_hashes, prefix_filename_with_hash) - securesystemslib.util.persist_temp_file(target_file_object, destination) + sslib_util.persist_temp_file(target_file_object, destination) diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index e3269b088b..df1c17c212 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -37,21 +37,22 @@ import shutil import tempfile import json +import six -import tuf -import tuf.formats -import tuf.keydb -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.repository_lib as repo_lib -import tuf.repository_tool -import securesystemslib -import securesystemslib.util -import securesystemslib.keys +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import storage as sslib_storage +from securesystemslib import util as sslib_util -import six +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import log # pylint: disable=unused-import +from tuf import repository_lib as repo_lib +from tuf import roledb +from tuf import sig from tuf.repository_tool import Targets from tuf.repository_lib import _check_role_keys @@ -72,7 +73,8 @@ import_rsa_privatekey_from_file) from securesystemslib.keys import ( - format_keyval_to_metadata) + format_keyval_to_metadata, + format_metadata_to_key) from securesystemslib.interface import ( generate_and_write_rsa_keypair, @@ -194,11 +196,11 @@ def __init__(self, project_name, metadata_directory, targets_directory, # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly # formatted. 
- securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(file_prefix) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(project_name) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.ANY_STRING_SCHEMA.check_match(file_prefix) + sslib_formats.NAME_SCHEMA.check_match(repository_name) self.metadata_directory = metadata_directory self.targets_directory = targets_directory @@ -249,14 +251,14 @@ def write(self, write_partial=False): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_partial) + sslib_formats.BOOLEAN_SCHEMA.check_match(write_partial) - # At this point the tuf.keydb and tuf.roledb stores must be fully + # At this point the keydb and roledb stores must be fully # populated, otherwise write() throwns a 'tuf.Repository' exception if # any of the project roles are missing signatures, keys, etc. # Write the metadata files of all the delegated roles of the project. - delegated_rolenames = tuf.roledb.get_delegated_rolenames(self.project_name, + delegated_rolenames = roledb.get_delegated_rolenames(self.project_name, self.repository_name) for delegated_rolename in delegated_rolenames: @@ -266,7 +268,7 @@ def write(self, write_partial=False): # Ensure the parent directories of 'metadata_filepath' exist, otherwise an # IO exception is raised if 'metadata_filepath' is written to a # sub-directory. 
- securesystemslib.util.ensure_parent_dir(delegated_filename) + sslib_util.ensure_parent_dir(delegated_filename) _generate_and_write_metadata(delegated_rolename, delegated_filename, write_partial, self.targets_directory, prefix=self.prefix, @@ -310,7 +312,7 @@ def add_verification_key(self, key, expires=None): securesystemslib.exceptions.Error, if the project already contains a key. - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are updated. + The role's entries in 'keydb' and 'roledb' are updated. None @@ -321,7 +323,7 @@ def add_verification_key(self, key, expires=None): # more than one key. # TODO: Add condition check for the requirement stated above. if len(self.keys) > 0: - raise securesystemslib.exceptions.Error("This project already contains a key.") + raise sslib_exceptions.Error("This project already contains a key.") super(Project, self).add_verification_key(key, expires) @@ -369,7 +371,7 @@ def status(self): filenames['targets'] = os.path.join(metadata_directory, self.project_name) # Delegated roles. 
- delegated_roles = tuf.roledb.get_delegated_rolenames(self.project_name, + delegated_roles = roledb.get_delegated_rolenames(self.project_name, self.repository_name) insufficient_keys = [] insufficient_signatures = [] @@ -378,7 +380,7 @@ def status(self): try: _check_role_keys(delegated_role, self.repository_name) - except tuf.exceptions.InsufficientKeysError: + except exceptions.InsufficientKeysError: insufficient_keys.append(delegated_role) continue @@ -388,7 +390,7 @@ def status(self): repository_name=self.repository_name) self._log_status(delegated_role, signable[0], self.repository_name) - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: insufficient_signatures.append(delegated_role) if len(insufficient_keys): @@ -407,7 +409,7 @@ def status(self): try: _check_role_keys(self.rolename, self.repository_name) - except tuf.exceptions.InsufficientKeysError as e: + except exceptions.InsufficientKeysError as e: logger.info(str(e)) return @@ -417,7 +419,7 @@ def status(self): self.repository_name) self._log_status(self.project_name, signable, self.repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: # This error is raised if the metadata has insufficient signatures to # meet the threshold. self._log_status(self.project_name, e.signable, self.repository_name) @@ -436,7 +438,7 @@ def _log_status(self, rolename, signable, repository_name): 'rolename'. """ - status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) message = repr(rolename) + ' role contains ' +\ repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) +\ @@ -463,7 +465,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # Retrieve the roleinfo of 'rolename' to extract the needed metadata # attributes, such as version number, expiration, etc. 
- roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) metadata = generate_targets_metadata(targets_directory, roleinfo['paths'], roleinfo['version'], roleinfo['expires'], roleinfo['delegations'], @@ -489,7 +491,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename, repository_name) temp_signable['signatures'].extend(roleinfo['signatures']) - status = tuf.sig.get_signature_status(temp_signable, rolename, + status = sig.get_signature_status(temp_signable, rolename, repository_name) if len(status['good_sigs']) == 0: metadata['version'] = metadata['version'] + 1 @@ -498,7 +500,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # non-partial write() else: - if tuf.sig.verify(signable, rolename, repository_name): + if sig.verify(signable, rolename, repository_name): metadata['version'] = metadata['version'] + 1 signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], metadata_filename, repository_name) @@ -506,16 +508,16 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # Write the metadata to file if contains a threshold of signatures. signable['signatures'].extend(roleinfo['signatures']) - if tuf.sig.verify(signable, rolename, repository_name) or write_partial: + if sig.verify(signable, rolename, repository_name) or write_partial: repo_lib._remove_invalid_and_duplicate_signatures(signable, repository_name) - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() filename = repo_lib.write_metadata_file(signable, metadata_filename, metadata['version'], False, storage_backend) # 'signable' contains an invalid threshold of signatures. 
else: message = 'Not enough signatures for ' + repr(metadata_filename) - raise securesystemslib.exceptions.Error(message, signable) + raise sslib_exceptions.Error(message, signable) return signable, filename @@ -586,13 +588,13 @@ def create_new_project(project_name, metadata_directory, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) # Do the same for the location in the repo and the project name, we must # ensure they are valid pathnames. - securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(location_in_repository) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(project_name) + sslib_formats.ANY_STRING_SCHEMA.check_match(location_in_repository) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # for the targets directory we do the same, but first, let's find out what # layout the user needs, layout_type is a variable that is usually set to @@ -608,10 +610,10 @@ def create_new_project(project_name, metadata_directory, layout_type = 'repo-like' if targets_directory is not None: - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) if key is not None: - securesystemslib.formats.KEY_SCHEMA.check_match(key) + sslib_formats.KEY_SCHEMA.check_match(key) # Set the metadata and targets directories. These directories # are created if they do not exist. @@ -719,10 +721,10 @@ def _save_project_configuration(metadata_directory, targets_directory, """ # Schema check for the arguments. 
- securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(prefix) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.RELPATH_SCHEMA.check_match(project_name) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(prefix) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + formats.RELPATH_SCHEMA.check_match(project_name) cfg_file_directory = metadata_directory @@ -749,7 +751,7 @@ def _save_project_configuration(metadata_directory, targets_directory, # Build a dictionary containing the actual keys. for key in public_keys: - key_info = tuf.keydb.get_key(key) + key_info = keydb.get_key(key) key_metadata = format_keyval_to_metadata(key_info['keytype'], key_info['scheme'], key_info['keyval']) project_config['public_keys'][key] = key_metadata @@ -800,15 +802,15 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Does 'repository_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(project_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(project_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Do the same for the prefix - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(prefix) + sslib_formats.ANY_STRING_SCHEMA.check_match(prefix) # Clear the role and key databases since we are loading in a new project. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) + roledb.clear_roledb(clear_all=True) + keydb.clear_keydb(clear_all=True) # Locate metadata filepaths and targets filepath. project_directory = os.path.abspath(project_directory) @@ -816,8 +818,8 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Load the cfg file and the project. 
config_filename = os.path.join(project_directory, PROJECT_FILENAME) - project_configuration = securesystemslib.util.load_json_file(config_filename) - tuf.formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) + project_configuration = sslib_util.load_json_file(config_filename) + formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) targets_directory = os.path.join(project_directory, project_configuration['targets_location']) @@ -855,16 +857,16 @@ def load_project(project_directory, prefix='', new_targets_location=None, keydict = project_configuration['public_keys'] for keyid in keydict: - key, junk = securesystemslib.keys.format_metadata_to_key(keydict[keyid]) + key, junk = format_metadata_to_key(keydict[keyid]) project.add_verification_key(key) # Load the project's metadata. targets_metadata_path = os.path.join(project_directory, metadata_directory, project_filename) - signable = securesystemslib.util.load_json_file(targets_metadata_path) + signable = sslib_util.load_json_file(targets_metadata_path) try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + formats.check_signable_object_format(signable) + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -877,7 +879,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, project.add_signature(signature) # Update roledb.py containing the loaded project attributes. 
- roleinfo = tuf.roledb.get_roleinfo(project_name, repository_name) + roleinfo = roledb.get_roleinfo(project_name, repository_name) roleinfo['signatures'].extend(signable['signatures']) roleinfo['version'] = targets_metadata['version'] roleinfo['paths'] = targets_metadata['targets'] @@ -890,12 +892,12 @@ def load_project(project_directory, prefix='', new_targets_location=None, repository_name=repository_name): roleinfo['partial_loaded'] = True - tuf.roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, repository_name=repository_name) for key_metadata in targets_metadata['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) - tuf.keydb.add_key(key_object, repository_name=repository_name) + key_object, junk = format_metadata_to_key(key_metadata) + keydb.add_key(key_object, repository_name=repository_name) for role in targets_metadata['delegations']['roles']: rolename = role['name'] @@ -904,7 +906,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, 'signing_keyids': [], 'signatures': [], 'partial_loaded':False, 'delegations': {'keys':{}, 'roles':[]} } - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) + roledb.add_role(rolename, roleinfo, repository_name=repository_name) # Load the delegated metadata and generate their fileinfo. targets_objects = {} @@ -932,7 +934,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, continue signable = None - signable = securesystemslib.util.load_json_file(metadata_path) + signable = sslib_util.load_json_file(metadata_path) # Strip the prefix from the local working copy, it will be added again # when the targets metadata is written to disk. 
@@ -940,7 +942,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, metadata_object = _strip_prefix_from_targets_metadata(metadata_object, prefix) - roleinfo = tuf.roledb.get_roleinfo(metadata_name, repository_name) + roleinfo = roledb.get_roleinfo(metadata_name, repository_name) roleinfo['signatures'].extend(signable['signatures']) roleinfo['version'] = metadata_object['version'] roleinfo['expires'] = metadata_object['expires'] @@ -957,7 +959,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, roleinfo['partial_loaded'] = True - tuf.roledb.update_roleinfo(metadata_name, roleinfo, + roledb.update_roleinfo(metadata_name, roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Append to list of elements to avoid reloading repeated metadata. @@ -972,12 +974,12 @@ def load_project(project_directory, prefix='', new_targets_location=None, # Add the keys specified in the delegations field of the Targets role. for key_metadata in metadata_object['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, junk = format_metadata_to_key(key_metadata) try: - tuf.keydb.add_key(key_object, repository_name=repository_name) + keydb.add_key(key_object, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass for role in metadata_object['delegations']['roles']: @@ -988,7 +990,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) + roledb.add_role(rolename, roleinfo, repository_name=repository_name) if new_prefix: project.prefix = new_prefix diff --git a/tuf/download.py b/tuf/download.py index 2d946ef891..6b56ba2569 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -32,16 +32,16 @@ from __future__ import 
unicode_literals import logging +import six import timeit import tempfile -import securesystemslib -import securesystemslib.util -import six +import securesystemslib # pylint: disable=unused-import +from securesystemslib import formats as sslib_formats -import tuf -import tuf.exceptions -import tuf.formats +from tuf import exceptions +from tuf import formats +from tuf import settings # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -53,7 +53,7 @@ def safe_download(url, required_length, fetcher): Given the 'url' and 'required_length' of the desired file, open a connection to 'url', download it, and return the contents of the file. Also ensure the length of the downloaded file matches 'required_length' exactly. - tuf.download.unsafe_download() may be called if an upper download limit is + download.unsafe_download() may be called if an upper download limit is preferred. @@ -86,8 +86,8 @@ def safe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.URL_SCHEMA.check_match(url) - tuf.formats.LENGTH_SCHEMA.check_match(required_length) + sslib_formats.URL_SCHEMA.check_match(url) + formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True) @@ -101,7 +101,7 @@ def unsafe_download(url, required_length, fetcher): Given the 'url' and 'required_length' of the desired file, open a connection to 'url', download it, and return the contents of the file. Also ensure the length of the downloaded file is up to 'required_length', and no larger. - tuf.download.safe_download() may be called if an exact download limit is + download.safe_download() may be called if an exact download limit is preferred. @@ -134,8 +134,8 @@ def unsafe_download(url, required_length, fetcher): # Do all of the arguments have the appropriate format? 
 # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.URL_SCHEMA.check_match(url) - tuf.formats.LENGTH_SCHEMA.check_match(required_length) + sslib_formats.URL_SCHEMA.check_match(url) + formats.LENGTH_SCHEMA.check_match(required_length) return _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=False) @@ -208,15 +208,14 @@ def _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True): seconds_spent_receiving = stop_time - start_time average_download_speed = number_of_bytes_received / seconds_spent_receiving - if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: logger.debug('The average download speed dropped below the minimum' - ' average download speed set in tuf.settings.py. Stopping the' - ' download!') + ' average download speed set in settings. Stopping the download!') break else: logger.debug('The average download speed has not dipped below the' - ' minimum average download speed set in tuf.settings.py.') + ' minimum average download speed set in settings.') # Does the total number of downloaded bytes match the required length? _check_downloaded_length(number_of_bytes_received, required_length, @@ -273,7 +272,7 @@ def _check_downloaded_length(total_downloaded, required_length, tuf.exceptions.SlowRetrievalError, if the total downloaded was done in less than the acceptable download speed (as set in - tuf.settings.py). + tuf.settings). None. @@ -296,24 +295,24 @@ # If the average download speed is below a certain threshold, we flag # this as a possible slow-retrieval attack. 
logger.debug('Average download speed: ' + repr(average_download_speed)) - logger.debug('Minimum average download speed: ' + repr(tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED)) + logger.debug('Minimum average download speed: ' + repr(settings.MIN_AVERAGE_DOWNLOAD_SPEED)) - if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: - raise tuf.exceptions.SlowRetrievalError(average_download_speed) + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: + raise exceptions.SlowRetrievalError(average_download_speed) else: logger.debug('Good average download speed: ' + repr(average_download_speed) + ' bytes per second') - raise tuf.exceptions.DownloadLengthMismatchError(required_length, total_downloaded) + raise exceptions.DownloadLengthMismatchError(required_length, total_downloaded) else: # We specifically disabled strict checking of required length, but we # will log a warning anyway. This is useful when we wish to download the # Timestamp or Root metadata, for which we have no signed metadata; so, # we must guess a reasonable required_length for it. 
- if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED: - raise tuf.exceptions.SlowRetrievalError(average_download_speed) + if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED: + raise exceptions.SlowRetrievalError(average_download_speed) else: logger.debug('Good average download speed: ' + diff --git a/tuf/formats.py b/tuf/formats.py index dc51ba9c98..9527b51223 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -69,10 +69,12 @@ import time import copy -import securesystemslib.formats -import securesystemslib.schema as SCHEMA +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import schema as SCHEMA import tuf +from tuf import exceptions import six @@ -133,9 +135,9 @@ ROLE_SCHEMA = SCHEMA.Object( object_name = 'ROLE_SCHEMA', name = SCHEMA.Optional(ROLENAME_SCHEMA), - keyids = securesystemslib.formats.KEYIDS_SCHEMA, + keyids = sslib_formats.KEYIDS_SCHEMA, threshold = THRESHOLD_SCHEMA, - terminating = SCHEMA.Optional(securesystemslib.formats.BOOLEAN_SCHEMA), + terminating = SCHEMA.Optional(sslib_formats.BOOLEAN_SCHEMA), paths = SCHEMA.Optional(RELPATHS_SCHEMA), path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA)) @@ -150,13 +152,13 @@ # repository (corresponding to the repository belonging to named repository in # the dictionary key) ROLEDICTDB_SCHEMA = SCHEMA.DictOf( - key_schema = securesystemslib.formats.NAME_SCHEMA, + key_schema = sslib_formats.NAME_SCHEMA, value_schema = ROLEDICT_SCHEMA) # Command argument list, as used by the CLI tool. # Example: {'keytype': ed25519, 'expires': 365,} COMMAND_SCHEMA = SCHEMA.DictOf( - key_schema = securesystemslib.formats.NAME_SCHEMA, + key_schema = sslib_formats.NAME_SCHEMA, value_schema = SCHEMA.Any()) # A dictionary holding version information. @@ -274,7 +276,7 @@ # A dict of repository names to mirrors. 
REPO_NAMES_TO_MIRRORS_SCHEMA = SCHEMA.DictOf( key_schema = NAME_SCHEMA, - value_schema = SCHEMA.ListOf(securesystemslib.formats.URL_SCHEMA)) + value_schema = SCHEMA.ListOf(sslib_formats.URL_SCHEMA)) # An object containing the map file's "mapping" attribute. MAPPING_SCHEMA = SCHEMA.ListOf(SCHEMA.Object( @@ -333,7 +335,7 @@ previous_threshold = SCHEMA.Optional(THRESHOLD_SCHEMA), version = SCHEMA.Optional(METADATAVERSION_SCHEMA), expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA), - signatures = SCHEMA.Optional(securesystemslib.formats.SIGNATURES_SCHEMA), + signatures = SCHEMA.Optional(sslib_formats.SIGNATURES_SCHEMA), paths = SCHEMA.Optional(SCHEMA.OneOf([RELPATHS_SCHEMA, PATH_FILEINFO_SCHEMA])), path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA), delegations = SCHEMA.Optional(DELEGATIONS_SCHEMA), @@ -343,7 +345,7 @@ SIGNABLE_SCHEMA = SCHEMA.Object( object_name = 'SIGNABLE_SCHEMA', signed = SCHEMA.Any(), - signatures = SCHEMA.ListOf(securesystemslib.formats.SIGNATURE_SCHEMA)) + signatures = SCHEMA.ListOf(sslib_formats.SIGNATURE_SCHEMA)) # Root role: indicates root keys and top-level roles. 
ROOT_SCHEMA = SCHEMA.Object( @@ -372,7 +374,7 @@ object_name = 'SNAPSHOT_SCHEMA', _type = SCHEMA.String('snapshot'), version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, spec_version = SPECIFICATION_VERSION_SCHEMA, meta = FILEINFODICT_SCHEMA) @@ -382,7 +384,7 @@ _type = SCHEMA.String('timestamp'), spec_version = SPECIFICATION_VERSION_SCHEMA, version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, meta = FILEINFODICT_SCHEMA) @@ -391,10 +393,10 @@ object_name = 'PROJECT_CFG_SCHEMA', project_name = SCHEMA.AnyString(), layout_type = SCHEMA.OneOf([SCHEMA.String('repo-like'), SCHEMA.String('flat')]), - targets_location = securesystemslib.formats.PATH_SCHEMA, - metadata_location = securesystemslib.formats.PATH_SCHEMA, - prefix = securesystemslib.formats.PATH_SCHEMA, - public_keys = securesystemslib.formats.KEYDICT_SCHEMA, + targets_location = sslib_formats.PATH_SCHEMA, + metadata_location = sslib_formats.PATH_SCHEMA, + prefix = sslib_formats.PATH_SCHEMA, + public_keys = sslib_formats.KEYDICT_SCHEMA, threshold = SCHEMA.Integer(lo = 0, hi = 2) ) @@ -402,7 +404,7 @@ # such as a url, the path of the directory metadata files, etc. MIRROR_SCHEMA = SCHEMA.Object( object_name = 'MIRROR_SCHEMA', - url_prefix = securesystemslib.formats.URL_SCHEMA, + url_prefix = sslib_formats.URL_SCHEMA, metadata_path = SCHEMA.Optional(RELPATH_SCHEMA), targets_path = SCHEMA.Optional(RELPATH_SCHEMA), confined_target_dirs = SCHEMA.Optional(RELPATHS_SCHEMA), @@ -422,7 +424,7 @@ object_name = 'MIRRORLIST_SCHEMA', _type = SCHEMA.String('mirrors'), version = METADATAVERSION_SCHEMA, - expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + expires = sslib_formats.ISO8601_DATETIME_SCHEMA, mirrors = SCHEMA.ListOf(MIRROR_SCHEMA)) # Any of the role schemas (e.g., TIMESTAMP_SCHEMA, SNAPSHOT_SCHEMA, etc.) 
@@ -440,14 +442,14 @@ general = SCHEMA.Object( object_name = '[general]', transfer_module = SCHEMA.String('scp'), - metadata_path = securesystemslib.formats.PATH_SCHEMA, - targets_directory = securesystemslib.formats.PATH_SCHEMA), + metadata_path = sslib_formats.PATH_SCHEMA, + targets_directory = sslib_formats.PATH_SCHEMA), scp=SCHEMA.Object( object_name = '[scp]', - host = securesystemslib.formats.URL_SCHEMA, - user = securesystemslib.formats.NAME_SCHEMA, - identity_file = securesystemslib.formats.PATH_SCHEMA, - remote_directory = securesystemslib.formats.PATH_SCHEMA)) + host = sslib_formats.URL_SCHEMA, + user = sslib_formats.NAME_SCHEMA, + identity_file = sslib_formats.PATH_SCHEMA, + remote_directory = sslib_formats.PATH_SCHEMA)) # The format of the resulting "receive config dict" after extraction from the # receive configuration file (i.e., receive.cfg). The receive config file @@ -457,11 +459,11 @@ RECEIVECONFIG_SCHEMA = SCHEMA.Object( object_name = 'RECEIVECONFIG_SCHEMA', general=SCHEMA.Object( object_name = '[general]', - pushroots = SCHEMA.ListOf(securesystemslib.formats.PATH_SCHEMA), - repository_directory = securesystemslib.formats.PATH_SCHEMA, - metadata_directory = securesystemslib.formats.PATH_SCHEMA, - targets_directory = securesystemslib.formats.PATH_SCHEMA, - backup_directory = securesystemslib.formats.PATH_SCHEMA)) + pushroots = SCHEMA.ListOf(sslib_formats.PATH_SCHEMA), + repository_directory = sslib_formats.PATH_SCHEMA, + metadata_directory = sslib_formats.PATH_SCHEMA, + targets_directory = sslib_formats.PATH_SCHEMA, + backup_directory = sslib_formats.PATH_SCHEMA)) @@ -631,12 +633,12 @@ def expiry_string_to_datetime(expires): """ # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expires) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expires) try: return datetime.datetime.strptime(expires, "%Y-%m-%dT%H:%M:%SZ") except ValueError as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Failed to parse ' + repr(expires) + ' as an expiry time'), error) @@ -674,7 +676,7 @@ def datetime_to_unix_timestamp(datetime_object): # Raise 'securesystemslib.exceptions.FormatError' if not. if not isinstance(datetime_object, datetime.datetime): message = repr(datetime_object) + ' is not a datetime.datetime() object.' - raise securesystemslib.exceptions.FormatError(message) + raise sslib_exceptions.FormatError(message) unix_timestamp = calendar.timegm(datetime_object.timetuple()) @@ -713,7 +715,7 @@ def unix_timestamp_to_datetime(unix_timestamp): # Is 'unix_timestamp' properly formatted? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp) + sslib_formats.UNIX_TIMESTAMP_SCHEMA.check_match(unix_timestamp) # Convert 'unix_timestamp' to a 'time.struct_time', in UTC. The Daylight # Savings Time (DST) flag is set to zero. 
datetime.fromtimestamp() is not @@ -752,7 +754,7 @@ def format_base64(data): return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ') except (TypeError, binascii.Error) as e: - raise securesystemslib.exceptions.FormatError('Invalid base64' + raise sslib_exceptions.FormatError('Invalid base64' ' encoding: ' + str(e)) @@ -781,7 +783,7 @@ def parse_base64(base64_string): if not isinstance(base64_string, six.string_types): message = 'Invalid argument: '+repr(base64_string) - raise securesystemslib.exceptions.FormatError(message) + raise sslib_exceptions.FormatError(message) extra = len(base64_string) % 4 if extra: @@ -792,7 +794,7 @@ def parse_base64(base64_string): return binascii.a2b_base64(base64_string.encode('utf-8')) except (TypeError, binascii.Error) as e: - raise securesystemslib.exceptions.FormatError('Invalid base64' + raise sslib_exceptions.FormatError('Invalid base64' ' encoding: ' + str(e)) @@ -944,7 +946,7 @@ def expected_meta_rolename(meta_rolename): # This check ensures 'meta_rolename' conforms to # 'securesystemslib.formats.NAME_SCHEMA'. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.NAME_SCHEMA.check_match(meta_rolename) + sslib_formats.NAME_SCHEMA.check_match(meta_rolename) return meta_rolename.lower() @@ -989,18 +991,18 @@ def check_signable_object_format(signable): role_type = signable['signed']['_type'] except (KeyError, TypeError) as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Untyped signable object.'), error) try: schema = SCHEMAS_BY_TYPE[role_type] except KeyError as error: - six.raise_from(securesystemslib.exceptions.FormatError( + six.raise_from(sslib_exceptions.FormatError( 'Unrecognized type ' + repr(role_type)), error) if not signable['signatures']: - raise tuf.exceptions.UnsignedMetadataError('Signable object of type ' + + raise exceptions.UnsignedMetadataError('Signable object of type ' + repr(role_type) + ' has no signatures ', signable) # 'securesystemslib.exceptions.FormatError' raised if 'signable' does not diff --git a/tuf/keydb.py b/tuf/keydb.py index bc306bac25..71ef1058a4 100755 --- a/tuf/keydb.py +++ b/tuf/keydb.py @@ -44,10 +44,15 @@ import logging import copy -import tuf.formats +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import keys as sslib_keys + +from tuf import exceptions +from tuf import formats import six -import securesystemslib # List of strings representing the key types supported by TUF. _SUPPORTED_KEY_TYPES = ['rsa', 'ed25519', 'ecdsa-sha2-nistp256'] @@ -99,10 +104,10 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # This check will ensure 'root_metadata' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.ROOT_SCHEMA.check_match(root_metadata) + formats.ROOT_SCHEMA.check_match(root_metadata) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Clear the key database for 'repository_name', or create it if non-existent. if repository_name in _keydb_dict: @@ -122,7 +127,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # format_metadata_to_key() uses the provided keyid as the default keyid. # All other keyids returned are ignored. - key_dict, _ = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_dict, _ = sslib_keys.format_metadata_to_key(key_metadata, keyid) # Make sure to update key_dict['keyid'] to use one of the other valid @@ -133,7 +138,7 @@ def create_keydb_from_root_metadata(root_metadata, repository_name='default'): # Although keyid duplicates should *not* occur (unique dict keys), log a # warning and continue. However, 'key_dict' may have already been # adding to the keydb elsewhere. - except tuf.exceptions.KeyAlreadyExistsError as e: # pragma: no cover + except exceptions.KeyAlreadyExistsError as e: # pragma: no cover logger.warning(e) continue @@ -167,10 +172,10 @@ def create_keydb(repository_name): """ # Is 'repository_name' properly formatted? Raise 'securesystemslib.exceptions.FormatError' if not. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name already exists:' + raise sslib_exceptions.InvalidNameError('Repository name already exists:' ' ' + repr(repository_name)) _keydb_dict[repository_name] = {} @@ -203,14 +208,14 @@ def remove_keydb(repository_name): """ # Is 'repository_name' properly formatted? Raise 'securesystemslib.exceptions.FormatError' if not. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: logger.warning('Repository name does not exist: ' + repr(repository_name)) return if repository_name == 'default': - raise securesystemslib.exceptions.InvalidNameError('Cannot remove the default repository:' + raise sslib_exceptions.InvalidNameError('Cannot remove the default repository:' ' ' + repr(repository_name)) del _keydb_dict[repository_name] @@ -264,30 +269,30 @@ def add_key(key_dict, keyid=None, repository_name='default'): # This check will ensure 'key_dict' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError if the check fails. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key_dict) + sslib_formats.ANYKEY_SCHEMA.check_match(key_dict) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Does 'keyid' have the correct format? if keyid is not None: # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Check if each keyid found in 'key_dict' matches 'keyid'. if keyid != key_dict['keyid']: - raise securesystemslib.exceptions.Error('Incorrect keyid. Got ' + key_dict['keyid'] + ' but expected ' + keyid) + raise sslib_exceptions.Error('Incorrect keyid. Got ' + key_dict['keyid'] + ' but expected ' + keyid) # Ensure 'repository_name' is actually set in the key database. 
if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Check if the keyid belonging to 'key_dict' is not already # available in the key database before returning. keyid = key_dict['keyid'] if keyid in _keydb_dict[repository_name]: - raise tuf.exceptions.KeyAlreadyExistsError('Key: ' + keyid) + raise exceptions.KeyAlreadyExistsError('Key: ' + keyid) _keydb_dict[repository_name][keyid] = copy.deepcopy(key_dict) @@ -329,13 +334,13 @@ def get_key(keyid, repository_name='default'): # This check will ensure 'keyid' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' is the match fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Return the key belonging to 'keyid', if found in the key database. @@ -343,7 +348,7 @@ def get_key(keyid, repository_name='default'): return copy.deepcopy(_keydb_dict[repository_name][keyid]) except KeyError as error: - six.raise_from(tuf.exceptions.UnknownKeyError('Key: ' + keyid), error) + six.raise_from(exceptions.UnknownKeyError('Key: ' + keyid), error) @@ -382,13 +387,13 @@ def remove_key(keyid, repository_name='default'): # This check will ensure 'keyid' has the appropriate number of objects # and object types, and that all dict keys are properly named. 
# Raise 'securesystemslib.exceptions.FormatError' is the match fails. - securesystemslib.formats.KEYID_SCHEMA.check_match(keyid) + sslib_formats.KEYID_SCHEMA.check_match(keyid) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) # Remove the key belonging to 'keyid' if found in the key database. @@ -396,7 +401,7 @@ def remove_key(keyid, repository_name='default'): del _keydb_dict[repository_name][keyid] else: - raise tuf.exceptions.UnknownKeyError('Key: ' + keyid) + raise exceptions.UnknownKeyError('Key: ' + keyid) @@ -431,8 +436,8 @@ def clear_keydb(repository_name='default', clear_all=False): # Do the arguments have the correct format? Raise 'securesystemslib.exceptions.FormatError' if # 'repository_name' is improperly formatted. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(clear_all) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all) global _keydb_dict @@ -441,7 +446,7 @@ def clear_keydb(repository_name='default', clear_all=False): _keydb_dict['default'] = {} if repository_name not in _keydb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist:' + raise sslib_exceptions.InvalidNameError('Repository name does not exist:' ' ' + repr(repository_name)) _keydb_dict[repository_name] = {} diff --git a/tuf/log.py b/tuf/log.py index 8a6a84d20a..368845333e 100755 --- a/tuf/log.py +++ b/tuf/log.py @@ -72,11 +72,12 @@ import logging import time -import tuf -import tuf.settings -import tuf.exceptions +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats + +from tuf import exceptions +from tuf import settings -import securesystemslib.formats # Setting a handler's log level filters only logging messages of that level # (and above). For example, setting the built-in StreamHandler's log level to @@ -125,8 +126,8 @@ # '_DEFAULT_LOG_LEVEL'. The log level of messages handled by 'file_handler' # may be modified with 'set_filehandler_log_level()'. 'settings.LOG_FILENAME' # will be opened in append mode. -if tuf.settings.ENABLE_FILE_LOGGING: - file_handler = logging.FileHandler(tuf.settings.LOG_FILENAME) +if settings.ENABLE_FILE_LOGGING: + file_handler = logging.FileHandler(settings.LOG_FILENAME) file_handler.setLevel(_DEFAULT_FILE_LOG_LEVEL) file_handler.setFormatter(formatter) logger.addHandler(file_handler) @@ -212,7 +213,7 @@ def set_log_level(log_level=_DEFAULT_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) logger.setLevel(log_level) @@ -243,13 +244,13 @@ def set_filehandler_log_level(log_level=_DEFAULT_FILE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) if file_handler: file_handler.setLevel(log_level) else: - raise tuf.exceptions.Error( + raise exceptions.Error( 'File handler has not been set. Enable file logging' ' before attempting to set its log level') @@ -269,7 +270,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): 'log_level' examples: logging.INFO; logging.CRITICAL. - securesystems.exceptions.Error, if the 'log.py' console handler has not + securesystemslib.exceptions.Error, if the 'log.py' console handler has not been set yet with add_console_handler(). @@ -281,7 +282,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) # Assign to the global console_handler object. global console_handler @@ -291,7 +292,7 @@ def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): else: message = 'The console handler has not been set with add_console_handler().' - raise securesystemslib.exceptions.Error(message) + raise sslib_exceptions.Error(message) @@ -320,7 +321,7 @@ def add_console_handler(log_level=_DEFAULT_CONSOLE_LOG_LEVEL): # Does 'log_level' have the correct format? # Raise 'securesystems.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.LOGLEVEL_SCHEMA.check_match(log_level) + sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level) # Assign to the global console_handler object. global console_handler @@ -381,7 +382,7 @@ def remove_console_handler(): -def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME): +def enable_file_logging(log_filename=settings.LOG_FILENAME): """ Log messages to a file (i.e., 'log_filename'). The log level for the file @@ -406,7 +407,7 @@ def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME): """ # Are the arguments properly formatted? - securesystemslib.formats.PATH_SCHEMA.check_match(log_filename) + sslib_formats.PATH_SCHEMA.check_match(log_filename) global file_handler @@ -418,7 +419,7 @@ def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME): logger.addHandler(file_handler) else: - raise tuf.exceptions.Error( + raise exceptions.Error( 'The file handler has already been been set. A new file handler' ' can be set by first calling disable_file_logging()') diff --git a/tuf/mirrors.py b/tuf/mirrors.py index 50d32a3b09..78d5053402 100755 --- a/tuf/mirrors.py +++ b/tuf/mirrors.py @@ -32,10 +32,13 @@ import os -import tuf -import tuf.formats +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib.util import file_in_confined_directories + +from tuf import formats -import securesystemslib import six # The type of file to be downloaded from a repository. The @@ -84,24 +87,16 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): """ # Checking if all the arguments have appropriate format. 
- tuf.formats.RELPATH_SCHEMA.check_match(file_path) - tuf.formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict) - securesystemslib.formats.NAME_SCHEMA.check_match(file_type) + formats.RELPATH_SCHEMA.check_match(file_path) + formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict) + sslib_formats.NAME_SCHEMA.check_match(file_type) # Verify 'file_type' is supported. if file_type not in _SUPPORTED_FILE_TYPES: - raise securesystemslib.exceptions.Error('Invalid file_type argument.' + raise sslib_exceptions.Error('Invalid file_type argument.' ' Supported file types: ' + repr(_SUPPORTED_FILE_TYPES)) path_key = 'metadata_path' if file_type == 'meta' else 'targets_path' - # Reference to 'securesystemslib.util.file_in_confined_directories()' (improve - # readability). This function checks whether a mirror should serve a file to - # the client. A client may be confined to certain paths on a repository - # mirror when fetching target files. This field may be set by the client - # when the repository mirror is added to the 'tuf.client.updater.Updater' - # object. - in_confined_directory = securesystemslib.util.file_in_confined_directories - list_of_mirrors = [] for junk, mirror_info in six.iteritems(mirrors_dict): # Does mirror serve this file type at all? @@ -113,8 +108,9 @@ def get_list_of_mirrors(file_type, file_path, mirrors_dict): if path_key == 'targets_path': full_filepath = os.path.join(path, file_path) confined_target_dirs = mirror_info.get('confined_target_dirs') - # confined_target_dirs is an optional field - if confined_target_dirs and not in_confined_directory(full_filepath, + # confined_target_dirs is optional and can used to confine the client to + # certain paths on a repository mirror when fetching target files. 
+ if confined_target_dirs and not file_in_confined_directories(full_filepath, confined_target_dirs): continue diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 1f64f66413..d3158cb6c9 100644 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -37,24 +37,25 @@ import logging import shutil import json +import six import tempfile -import tuf -import tuf.formats -import tuf.exceptions -import tuf.keydb -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.settings - -import securesystemslib -import securesystemslib.hash -import securesystemslib.interface -import securesystemslib.util -import six +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import hash as sslib_hash +from securesystemslib import interface as sslib_interface +from securesystemslib import keys as sslib_keys +from securesystemslib import util as sslib_util +from securesystemslib import storage as sslib_storage -import securesystemslib.storage +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import log +from tuf import roledb +from tuf import settings +from tuf import sig # See 'log.py' to learn how logging is handled in TUF. @@ -87,7 +88,7 @@ # The algorithm used by the repository to generate the path hash prefixes # of hashed bin delegations. Please see delegate_hashed_bins() -HASH_FUNCTION = tuf.settings.DEFAULT_HASH_ALGORITHM +HASH_FUNCTION = settings.DEFAULT_HASH_ALGORITHM @@ -109,7 +110,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, # Retrieve the roleinfo of 'rolename' to extract the needed metadata # attributes, such as version number, expiration, etc. 
- roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) previous_keyids = roleinfo.get('previous_keyids', []) previous_threshold = roleinfo.get('previous_threshold', 1) signing_keyids = sorted(set(roleinfo['signing_keyids'])) @@ -164,7 +165,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, # Update roledb with the latest delegations info collected during # generate_targets_metadata() - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) @@ -179,16 +180,16 @@ def _generate_and_write_metadata(rolename, metadata_filename, # properly signed). current_version = metadata['version'] if increment_version_number: - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) metadata['version'] = metadata['version'] + 1 roleinfo['version'] = roleinfo['version'] + 1 - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) else: logger.debug('Not incrementing ' + repr(rolename) + '\'s version number.') - if rolename in tuf.roledb.TOP_LEVEL_ROLES and not allow_partially_signed: + if rolename in roledb.TOP_LEVEL_ROLES and not allow_partially_signed: # Verify that the top-level 'rolename' is fully signed. Only a delegated # role should not be written to disk without full verification of its # signature(s), since it can only be considered fully signed depending on @@ -200,7 +201,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, def should_write(): # Root must be signed by its previous keys and threshold. 
if rolename == 'root' and len(previous_keyids) > 0: - if not tuf.sig.verify(signable, rolename, repository_name, + if not sig.verify(signable, rolename, repository_name, previous_threshold, previous_keyids): return False @@ -208,7 +209,7 @@ def should_write(): logger.debug('Root is signed by a threshold of its previous keyids.') # In the normal case, we should write metadata if the threshold is met. - return tuf.sig.verify(signable, rolename, repository_name, + return sig.verify(signable, rolename, repository_name, roleinfo['threshold'], roleinfo['signing_keyids']) @@ -226,13 +227,13 @@ def should_write(): else: # Since new metadata cannot be successfully written, restore the current # version number. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) roleinfo['version'] = current_version - tuf.roledb.update_roleinfo(rolename, roleinfo, + roledb.update_roleinfo(rolename, roleinfo, repository_name=repository_name) # Note that 'signable' is an argument to tuf.UnsignedMetadataError(). - raise tuf.exceptions.UnsignedMetadataError('Not enough' + raise exceptions.UnsignedMetadataError('Not enough' ' signatures for ' + repr(metadata_filename), signable) # 'rolename' is a delegated role or a top-level role that is partially @@ -269,14 +270,14 @@ def _metadata_is_partially_loaded(rolename, signable, repository_name): signatures. If 'rolename' is found to be partially loaded, mark it as partially loaded in - its 'tuf.roledb' roleinfo. This function exists to assist in deciding whether + its 'roledb' roleinfo. This function exists to assist in deciding whether a role's version number should be incremented when write() or write_parital() is called. Return True if 'rolename' was partially loaded, False otherwise. """ # The signature status lists the number of good signatures, including # bad, untrusted, unknown, etc. 
- status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) if len(status['good_sigs']) < status['threshold'] and \ len(status['good_sigs']) >= 0: @@ -296,8 +297,8 @@ def _check_role_keys(rolename, repository_name): """ # Extract the total number of public and private keys of 'rolename' from its - # roleinfo in 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + # roleinfo in 'roledb'. + roleinfo = roledb.get_roleinfo(rolename, repository_name) total_keyids = len(roleinfo['keyids']) threshold = roleinfo['threshold'] total_signatures = len(roleinfo['signatures']) @@ -305,12 +306,12 @@ def _check_role_keys(rolename, repository_name): # Raise an exception for an invalid threshold of public keys. if total_keyids < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' + raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' ' ' + repr(total_keyids) + ' / ' + repr(threshold) + ' public keys.') # Raise an exception for an invalid threshold of signing keys. if total_signatures == 0 and total_signing_keys < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' + raise exceptions.InsufficientKeysError(repr(rolename) + ' role contains' ' ' + repr(total_signing_keys) + ' / ' + repr(threshold) + ' signing keys.') @@ -332,21 +333,21 @@ def _remove_invalid_and_duplicate_signatures(signable, repository_name): signature_keyids = [] for signature in signable['signatures']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') keyid = signature['keyid'] key = None # Remove 'signature' from 'signable' if the listed keyid does not exist - # in 'tuf.keydb'. + # in 'keydb'. 
try: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) - except tuf.exceptions.UnknownKeyError: + except exceptions.UnknownKeyError: signable['signatures'].remove(signature) continue # Remove 'signature' from 'signable' if it is an invalid signature. - if not securesystemslib.keys.verify_signature(key, signature, signed): + if not sslib_keys.verify_signature(key, signature, signed): logger.debug('Removing invalid signature for ' + repr(keyid)) signable['signatures'].remove(signature) @@ -415,14 +416,14 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, logger.debug(repr(metadata_role) + ' does not match' ' supported extension ' + repr(METADATA_EXTENSION)) - if metadata_role in tuf.roledb.TOP_LEVEL_ROLES: + if metadata_role in roledb.TOP_LEVEL_ROLES: logger.debug('Not removing top-level metadata ' + repr(metadata_role)) return - # Delete the metadata file if it does not exist in 'tuf.roledb'. + # Delete the metadata file if it does not exist in 'roledb'. # 'repository_tool.py' might have removed 'metadata_name,' # but its metadata file is not actually deleted yet. Do it now. - if not tuf.roledb.role_exists(metadata_role, repository_name): + if not roledb.role_exists(metadata_role, repository_name): logger.info('Removing outdated metadata: ' + repr(metadata_path)) storage_backend.remove(metadata_path) @@ -498,20 +499,20 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # written. try: # Initialize the key and role metadata of the top-level roles. 
- signable = securesystemslib.util.load_json_file(root_filename) + signable = sslib_util.load_json_file(root_filename) try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + formats.check_signable_object_format(signable) + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) root_metadata = signable['signed'] - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) + keydb.create_keydb_from_root_metadata(root_metadata, repository_name) + roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - # Load Root's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('root', repository_name) + # Load Root's roleinfo and update 'roledb'. + roleinfo = roledb.get_roleinfo('root', repository_name) roleinfo['consistent_snapshot'] = root_metadata['consistent_snapshot'] roleinfo['signatures'] = [] for signature in signable['signatures']: @@ -534,26 +535,26 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], ROOT_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Ensure the 'consistent_snapshot' field is extracted. 
consistent_snapshot = root_metadata['consistent_snapshot'] - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the required' + except sslib_exceptions.StorageError as error: + six.raise_from(exceptions.RepositoryError('Cannot load the required' ' root file: ' + repr(root_filename)), error) # Load 'timestamp.json'. A Timestamp role file without a version number is # always written. try: - signable = securesystemslib.util.load_json_file(timestamp_filename) + signable = sslib_util.load_json_file(timestamp_filename) timestamp_metadata = signable['signed'] for signature in signable['signatures']: repository.timestamp.add_signature(signature, mark_role_as_dirty=False) - # Load Timestamp's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) + # Load Timestamp's roleinfo and update 'roledb'. + roleinfo = roledb.get_roleinfo('timestamp', repository_name) roleinfo['expires'] = timestamp_metadata['expires'] roleinfo['version'] = timestamp_metadata['version'] @@ -566,11 +567,11 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], TIMESTAMP_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the Timestamp ' + except sslib_exceptions.StorageError as error: + six.raise_from(exceptions.RepositoryError('Cannot load the Timestamp ' 'file: ' + repr(timestamp_filename)), error) # Load 'snapshot.json'. A consistent snapshot.json must be calculated if @@ -585,10 +586,10 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): str(snapshot_version) + '.' 
+ basename + METADATA_EXTENSION) try: - signable = securesystemslib.util.load_json_file(snapshot_filename) + signable = sslib_util.load_json_file(snapshot_filename) try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + formats.check_signable_object_format(signable) + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -598,8 +599,8 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for signature in signable['signatures']: repository.snapshot.add_signature(signature, mark_role_as_dirty=False) - # Load Snapshot's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) + # Load Snapshot's roleinfo and update 'roledb'. + roleinfo = roledb.get_roleinfo('snapshot', repository_name) roleinfo['expires'] = snapshot_metadata['expires'] roleinfo['version'] = snapshot_metadata['version'] @@ -612,11 +613,11 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], SNAPSHOT_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Snapshot file ' + except sslib_exceptions.StorageError as error: + six.raise_from(exceptions.RepositoryError('The Snapshot file ' 'cannot be loaded: '+ repr(snapshot_filename)), error) # Load 'targets.json'. 
A consistent snapshot of the Targets role must be @@ -627,10 +628,10 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): targets_filename = os.path.join(dirname, str(targets_version) + '.' + basename) try: - signable = securesystemslib.util.load_json_file(targets_filename) + signable = sslib_util.load_json_file(targets_filename) try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + formats.check_signable_object_format(signable) + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -640,8 +641,8 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): for signature in signable['signatures']: repository.targets.add_signature(signature, mark_role_as_dirty=False) - # Update 'targets.json' in 'tuf.roledb.py' - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + # Update 'targets.json' in 'roledb' + roleinfo = roledb.get_roleinfo('targets', repository_name) roleinfo['paths'] = targets_metadata['targets'] roleinfo['version'] = targets_metadata['version'] roleinfo['expires'] = targets_metadata['expires'] @@ -656,14 +657,14 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], TARGETS_EXPIRES_WARN_SECONDS) - tuf.roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, + roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, repository_name=repository_name) # Add the keys specified in the delegations field of the Targets role. 
for keyid, key_metadata in six.iteritems(targets_metadata['delegations']['keys']): # Use the keyid found in the delegation - key_object, _ = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_object, _ = sslib_keys.format_metadata_to_key(key_metadata, keyid) # Add 'key_object' to the list of recognized keys. Keys may be shared, @@ -673,13 +674,13 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # repository maintainer should have also been made aware of the duplicate # key when it was added. try: - tuf.keydb.add_key(key_object, keyid=None, repository_name=repository_name) + keydb.add_key(key_object, keyid=None, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Targets file ' + except sslib_exceptions.StorageError as error: + six.raise_from(exceptions.RepositoryError('The Targets file ' 'can not be loaded: ' + repr(targets_filename)), error) return repository, consistent_snapshot @@ -698,10 +699,10 @@ def _log_warning_if_expires_soon(rolename, expires_iso8601_timestamp, # unix timestamp, subtract from current time.time() (also in POSIX time) # and compare against 'seconds_remaining_to_warn'. Log a warning message # to console if 'rolename' expires soon. - datetime_object = tuf.formats.expiry_string_to_datetime( + datetime_object = formats.expiry_string_to_datetime( expires_iso8601_timestamp) expires_unix_timestamp = \ - tuf.formats.datetime_to_unix_timestamp(datetime_object) + formats.datetime_to_unix_timestamp(datetime_object) seconds_until_expires = expires_unix_timestamp - int(time.time()) if seconds_until_expires <= seconds_remaining_to_warn: @@ -754,14 +755,14 @@ def import_rsa_privatekey_from_file(filepath, password=None): # prompt for a password if the key file is encrypted and a password isn't # given. 
try: - private_key = securesystemslib.interface.import_rsa_privatekey_from_file( + private_key = sslib_interface.import_rsa_privatekey_from_file( filepath, password) # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. - except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: if password is None: - private_key = securesystemslib.interface.import_rsa_privatekey_from_file( + private_key = sslib_interface.import_rsa_privatekey_from_file( filepath, password, prompt=True) else: @@ -803,7 +804,7 @@ def import_ed25519_privatekey_from_file(filepath, password=None): securesystemslib.exceptions.UnsupportedLibraryError, if 'filepath' cannot be decrypted due to an invalid configuration setting (i.e., invalid - 'tuf.settings.py' setting). + 'tuf.settings' setting). 'password' is used to decrypt the 'filepath' key file. @@ -817,14 +818,14 @@ def import_ed25519_privatekey_from_file(filepath, password=None): # automatically prompt for a password if the key file is encrypted and a # password isn't given. try: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( + private_key = sslib_interface.import_ed25519_privatekey_from_file( filepath, password) # The user might not have given a password for an encrypted private key. # Prompt for a password for convenience. - except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: if password is None: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( + private_key = sslib_interface.import_ed25519_privatekey_from_file( filepath, password, prompt=True) else: @@ -872,7 +873,7 @@ def get_delegated_roles_metadata_filenames(metadata_directory, continue # Skip top-level roles, only interested in delegated roles. 
- if metadata_name in tuf.roledb.TOP_LEVEL_ROLES: + if metadata_name in roledb.TOP_LEVEL_ROLES: continue # Prevent reloading duplicate versions if consistent_snapshot is True @@ -918,7 +919,7 @@ def get_top_level_metadata_filenames(metadata_directory): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) # Store the filepaths of the top-level roles, including the # 'metadata_directory' for each one. @@ -983,19 +984,19 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filename) + sslib_formats.PATH_SCHEMA.check_match(filename) if custom is not None: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) + formats.CUSTOM_SCHEMA.check_match(custom) # Note: 'filehashes' is a dictionary of the form # {'sha256': 1233dfba312, ...}. 'custom' is an optional # dictionary that a client might define to include additional # file information, such as the file's author, version/revision # numbers, etc. - filesize, filehashes = securesystemslib.util.get_file_details(filename, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) + filesize, filehashes = sslib_util.get_file_details(filename, + settings.FILE_HASH_ALGORITHMS, storage_backend) - return tuf.formats.make_targets_fileinfo(filesize, filehashes, custom=custom) + return formats.make_targets_fileinfo(filesize, filehashes, custom=custom) @@ -1037,9 +1038,9 @@ def get_metadata_versioninfo(rolename, repository_name): # Does 'rolename' have the correct format? 
# Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) + roleinfo = roledb.get_roleinfo(rolename, repository_name) versioninfo = {'version': roleinfo['version']} return versioninfo @@ -1116,7 +1117,7 @@ def get_bin_numbers(number_of_bins): # Note: x % y != 0 does not guarantee that y is not a power of 2 for # arbitrary x and y values. However, due to the relationship between # number_of_bins and prefix_count, it is true for them. - raise securesystemslib.exceptions.Error('The "number_of_bins" argument' + raise sslib_exceptions.Error('The "number_of_bins" argument' ' must be a power of 2.') return prefix_length, prefix_count, bin_size @@ -1182,9 +1183,9 @@ def get_target_hash(target_filepath): The hash of 'target_filepath'. """ - tuf.formats.RELPATH_SCHEMA.check_match(target_filepath) + formats.RELPATH_SCHEMA.check_match(target_filepath) - digest_object = securesystemslib.hash.digest(algorithm=HASH_FUNCTION) + digest_object = sslib_hash.digest(algorithm=HASH_FUNCTION) digest_object.update(target_filepath.encode('utf-8')) return digest_object.hexdigest() @@ -1195,7 +1196,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, repository_name='default'): """ - Create the root metadata. 'tuf.roledb.py' and 'tuf.keydb.py' + Create the root metadata. 'roledb' and 'keydb' are read and the information returned by these modules is used to generate the root metadata object. @@ -1224,10 +1225,10 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, securesystemslib.exceptions.Error, if an error is encountered while generating the root metadata object (e.g., a required top-level role not - found in 'tuf.roledb'.) + found in 'roledb'.) - The contents of 'tuf.keydb.py' and 'tuf.roledb.py' are read. 
+ The contents of 'keydb' and 'roledb' are read. A root metadata object, conformant to 'tuf.formats.ROOT_SCHEMA'. @@ -1238,10 +1239,10 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any of the arguments are # improperly formatted. - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + formats.METADATAVERSION_SCHEMA.check_match(version) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # The role and key dictionaries to be saved in the root metadata object. # Conformant to 'ROLEDICT_SCHEMA' and 'KEYDICT_SCHEMA', respectively. @@ -1252,24 +1253,24 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # Extract the role, threshold, and keyid information of the top-level roles, # which Root stores in its metadata. The necessary role metadata is generated # from this information. - for rolename in tuf.roledb.TOP_LEVEL_ROLES: + for rolename in roledb.TOP_LEVEL_ROLES: - # If a top-level role is missing from 'tuf.roledb.py', raise an exception. - if not tuf.roledb.role_exists(rolename, repository_name): - raise securesystemslib.exceptions.Error(repr(rolename) + ' not in' - ' "tuf.roledb".') + # If a top-level role is missing from 'roledb', raise an exception. 
+ if not roledb.role_exists(rolename, repository_name): + raise sslib_exceptions.Error(repr(rolename) + ' not in' + ' "roledb".') # Collect keys from all roles in a list - keyids = tuf.roledb.get_role_keyids(rolename, repository_name) + keyids = roledb.get_role_keyids(rolename, repository_name) for keyid in keyids: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) keylist.append(key) # Generate the authentication information Root establishes for each # top-level role. - role_threshold = tuf.roledb.get_role_threshold(rolename, repository_name) - role_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, + role_threshold = roledb.get_role_threshold(rolename, repository_name) + role_metadata = formats.build_dict_conforming_to_schema( + formats.ROLE_SCHEMA, keyids=keyids, threshold=role_threshold) roledict[rolename] = role_metadata @@ -1285,8 +1286,8 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.ROOT_SCHEMA, version=version, expires=expiration_date, keys=keydict, @@ -1388,32 +1389,32 @@ def generate_targets_metadata(targets_directory, target_files, version, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.PATH_FILEINFO_SCHEMA.check_match(target_files) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + formats.PATH_FILEINFO_SCHEMA.check_match(target_files) + formats.METADATAVERSION_SCHEMA.check_match(version) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo) if write_consistent_targets and use_existing_fileinfo: - raise securesystemslib.exceptions.Error('Cannot support writing consistent' + raise sslib_exceptions.Error('Cannot support writing consistent' ' targets and using existing fileinfo.') if delegations is not None: - tuf.formats.DELEGATIONS_SCHEMA.check_match(delegations) + formats.DELEGATIONS_SCHEMA.check_match(delegations) # If targets role has delegations, collect the up-to-date 'keyids' and # 'threshold' for each role. Update the delegations keys dictionary. 
delegations_keys = [] # Update 'keyids' and 'threshold' for each delegated role for role in delegations['roles']: - role['keyids'] = tuf.roledb.get_role_keyids(role['name'], + role['keyids'] = roledb.get_role_keyids(role['name'], repository_name) - role['threshold'] = tuf.roledb.get_role_threshold(role['name'], + role['threshold'] = roledb.get_role_threshold(role['name'], repository_name) # Collect all delegations keys for generating the delegations keydict for keyid in role['keyids']: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) delegations_keys.append(key) _, delegations['keys'] = keys_to_keydict(delegations_keys) @@ -1431,12 +1432,12 @@ def generate_targets_metadata(targets_directory, target_files, version, # Ensure all fileinfo entries in target_files have a non-empty hashes dict if not fileinfo.get('hashes', None): - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' + raise sslib_exceptions.Error('use_existing_fileinfo option' ' set but no hashes exist in fileinfo for ' + repr(target)) # and a non-empty length if fileinfo.get('length', -1) < 0: - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' + raise sslib_exceptions.Error('use_existing_fileinfo option' ' set but no length exists in fileinfo for ' + repr(target)) filedict[target] = fileinfo @@ -1445,7 +1446,7 @@ def generate_targets_metadata(targets_directory, target_files, version, # Generate the fileinfo dicts by accessing the target files on storage. # Default to accessing files on local storage. 
if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() filedict = _generate_targets_fileinfo(target_files, targets_directory, write_consistent_targets, storage_backend) @@ -1460,15 +1461,15 @@ def generate_targets_metadata(targets_directory, target_files, version, # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. if delegations is not None: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TARGETS_SCHEMA, version=version, expires=expiration_date, targets=filedict, delegations=delegations) else: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TARGETS_SCHEMA, version=version, expires=expiration_date, targets=filedict) @@ -1538,12 +1539,12 @@ def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path, length = None hashes = None if use_length: - length = securesystemslib.util.get_file_length(full_file_path, + length = sslib_util.get_file_length(full_file_path, storage_backend) if use_hashes: - hashes = securesystemslib.util.get_file_hashes(full_file_path, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) + hashes = sslib_util.get_file_hashes(full_file_path, + settings.FILE_HASH_ALGORITHMS, storage_backend) return length, hashes @@ -1620,13 +1621,13 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + formats.METADATAVERSION_SCHEMA.check_match(version) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes) # Snapshot's 'fileinfodict' shall contain the version number of Root, # Targets, and all delegated roles of the repository. @@ -1643,7 +1644,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # Make file info dictionary with make_metadata_fileinfo because # in the tuf spec length and hashes are optional for all # METAFILES in snapshot.json including the top-level targets file. - fileinfodict[TARGETS_FILENAME] = tuf.formats.make_metadata_fileinfo( + fileinfodict[TARGETS_FILENAME] = formats.make_metadata_fileinfo( targets_file_version['version'], length, hashes) # Search the metadata directory and generate the versioninfo of all the role @@ -1666,8 +1667,8 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # in the roledb are included in the Snapshot metadata. Since the # snapshot and timestamp roles are not listed in snapshot.json, do not # list these roles found in the metadata directory. 
- if tuf.roledb.role_exists(rolename, repository_name) and \ - rolename not in tuf.roledb.TOP_LEVEL_ROLES: + if roledb.role_exists(rolename, repository_name) and \ + rolename not in roledb.TOP_LEVEL_ROLES: length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, os.path.join(metadata_directory, metadata_filename), storage_backend) @@ -1675,7 +1676,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, file_version = get_metadata_versioninfo(rolename, repository_name) - fileinfodict[metadata_name] = tuf.formats.make_metadata_fileinfo( + fileinfodict[metadata_name] = formats.make_metadata_fileinfo( file_version['version'], length, hashes) else: @@ -1691,8 +1692,8 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.SNAPSHOT_SCHEMA, version=version, expires=expiration_date, meta=fileinfodict) @@ -1757,12 +1758,12 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- securesystemslib.formats.PATH_SCHEMA.check_match(snapshot_file_path) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes) + sslib_formats.PATH_SCHEMA.check_match(snapshot_file_path) + formats.METADATAVERSION_SCHEMA.check_match(version) + sslib_formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_hashes) snapshot_fileinfo = {} @@ -1773,7 +1774,7 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # Retrieve the versioninfo of the Snapshot metadata file. snapshot_version = get_metadata_versioninfo('snapshot', repository_name) snapshot_fileinfo[snapshot_filename] = \ - tuf.formats.make_metadata_fileinfo(snapshot_version['version'], + formats.make_metadata_fileinfo(snapshot_version['version'], length, hashes) # Generate the timestamp metadata object. @@ -1785,8 +1786,8 @@ def generate_timestamp_metadata(snapshot_file_path, version, expiration_date, # generate_root_metadata, etc. with one function that generates # metadata, possibly rolling that upwards into the calling function. # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, + return formats.build_dict_conforming_to_schema( + formats.TIMESTAMP_SCHEMA, version=version, expires=expiration_date, meta=snapshot_fileinfo) @@ -1800,7 +1801,7 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): Sign a metadata object. If any of the keyids have already signed the file, the old signature is replaced. 
The keys in 'keyids' must already be - loaded in 'tuf.keydb'. + loaded in 'keydb'. metadata_object: @@ -1838,29 +1839,29 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.ANYROLE_SCHEMA.check_match(metadata_object) - securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + formats.ANYROLE_SCHEMA.check_match(metadata_object) + sslib_formats.KEYIDS_SCHEMA.check_match(keyids) + sslib_formats.PATH_SCHEMA.check_match(filename) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Make sure the metadata is in 'signable' format. That is, # it contains a 'signatures' field containing the result # of signing the 'signed' field of 'metadata' with each # keyid of 'keyids'. - signable = tuf.formats.make_signable(metadata_object) + signable = formats.make_signable(metadata_object) # Sign the metadata with each keyid in 'keyids'. 'signable' should have # zero signatures (metadata_object contained none). for keyid in keyids: # Load the signing key. - key = tuf.keydb.get_key(keyid, repository_name=repository_name) + key = keydb.get_key(keyid, repository_name=repository_name) # Generate the signature using the appropriate signing method. 
if key['keytype'] in SUPPORTED_KEY_TYPES: if 'private' in key['keyval']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') try: - signature = securesystemslib.keys.create_signature(key, signed) + signature = sslib_keys.create_signature(key, signed) signable['signatures'].append(signature) except Exception: @@ -1870,14 +1871,14 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): logger.debug('Private key unset. Skipping: ' + repr(keyid)) else: - raise securesystemslib.exceptions.Error('The keydb contains a key with' + raise sslib_exceptions.Error('The keydb contains a key with' ' an invalid key type.' + repr(key['keytype'])) # Raise 'securesystemslib.exceptions.FormatError' if the resulting 'signable' # is not formatted correctly. try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: + formats.check_signable_object_format(signable) + except exceptions.UnsignedMetadataError: # Downgrade the error to a warning because a use case exists where # metadata may be generated unsigned on one machine and signed on another. logger.warning('Unsigned metadata object: ' + repr(signable)) @@ -1936,13 +1937,13 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.SIGNABLE_SCHEMA.check_match(metadata) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version_number) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + formats.SIGNABLE_SCHEMA.check_match(metadata) + sslib_formats.PATH_SCHEMA.check_match(filename) + formats.METADATAVERSION_SCHEMA.check_match(version_number) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() # Generate the actual metadata file content of 'metadata'. Metadata is # saved as JSON and includes formatting, such as indentation and sorted @@ -1976,7 +1977,7 @@ def write_metadata_file(metadata, filename, version_number, consistent_snapshot, # the consistent snapshot and point 'written_filename' to it. logger.debug('Creating a consistent file for ' + repr(filename)) logger.debug('Saving ' + repr(written_consistent_filename)) - securesystemslib.util.persist_temp_file(file_object, + sslib_util.persist_temp_file(file_object, written_consistent_filename, should_close=False) else: @@ -2028,15 +2029,15 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, try: _check_role_keys(rolename, repository_name) - except tuf.exceptions.InsufficientKeysError as e: + except exceptions.InsufficientKeysError as e: logger.info(str(e)) # Do the top-level roles contain a valid threshold of signatures? Top-level # metadata is verified in Root -> Targets -> Snapshot -> Timestamp order. # Verify the metadata of the Root role. 
- dirty_rolenames = tuf.roledb.get_dirty_roles(repository_name) + dirty_rolenames = roledb.get_dirty_roles(repository_name) - root_roleinfo = tuf.roledb.get_roleinfo('root', repository_name) + root_roleinfo = roledb.get_roleinfo('root', repository_name) root_is_dirty = None if 'root' in dirty_rolenames: root_is_dirty = True @@ -2053,17 +2054,17 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, # 'tuf.exceptions.UnsignedMetadataError' raised if metadata contains an # invalid threshold of signatures. log the valid/threshold message, where # valid < threshold. - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('root', e.signable, repository_name) return finally: - tuf.roledb.unmark_dirty(['root'], repository_name) - tuf.roledb.update_roleinfo('root', root_roleinfo, + roledb.unmark_dirty(['root'], repository_name) + roledb.update_roleinfo('root', root_roleinfo, mark_role_as_dirty=root_is_dirty, repository_name=repository_name) # Verify the metadata of the Targets role. - targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + targets_roleinfo = roledb.get_roleinfo('targets', repository_name) targets_is_dirty = None if 'targets' in dirty_rolenames: targets_is_dirty = True @@ -2078,17 +2079,17 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, repository_name=repository_name) _log_status('targets', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('targets', e.signable, repository_name) return finally: - tuf.roledb.unmark_dirty(['targets'], repository_name) - tuf.roledb.update_roleinfo('targets', targets_roleinfo, + roledb.unmark_dirty(['targets'], repository_name) + roledb.update_roleinfo('targets', targets_roleinfo, mark_role_as_dirty=targets_is_dirty, repository_name=repository_name) # Verify the metadata of the snapshot role. 
- snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) + snapshot_roleinfo = roledb.get_roleinfo('snapshot', repository_name) snapshot_is_dirty = None if 'snapshot' in dirty_rolenames: snapshot_is_dirty = True @@ -2104,17 +2105,17 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, filenames, repository_name=repository_name) _log_status('snapshot', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('snapshot', e.signable, repository_name) return finally: - tuf.roledb.unmark_dirty(['snapshot'], repository_name) - tuf.roledb.update_roleinfo('snapshot', snapshot_roleinfo, + roledb.unmark_dirty(['snapshot'], repository_name) + roledb.update_roleinfo('snapshot', snapshot_roleinfo, mark_role_as_dirty=snapshot_is_dirty, repository_name=repository_name) # Verify the metadata of the Timestamp role. - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) + timestamp_roleinfo = roledb.get_roleinfo('timestamp', repository_name) timestamp_is_dirty = None if 'timestamp' in dirty_rolenames: timestamp_is_dirty = True @@ -2130,13 +2131,13 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory, False, filenames, repository_name=repository_name) _log_status('timestamp', signable, repository_name) - except tuf.exceptions.UnsignedMetadataError as e: + except exceptions.UnsignedMetadataError as e: _log_status('timestamp', e.signable, repository_name) return finally: - tuf.roledb.unmark_dirty(['timestamp'], repository_name) - tuf.roledb.update_roleinfo('timestamp', timestamp_roleinfo, + roledb.unmark_dirty(['timestamp'], repository_name) + roledb.update_roleinfo('timestamp', timestamp_roleinfo, mark_role_as_dirty=timestamp_is_dirty, repository_name=repository_name) @@ -2147,7 +2148,7 @@ def _log_status(rolename, signable, repository_name): 'rolename'. 
""" - status = tuf.sig.get_signature_status(signable, rolename, repository_name) + status = sig.get_signature_status(signable, rolename, repository_name) logger.info(repr(rolename) + ' role contains ' + \ repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) + \ @@ -2201,8 +2202,8 @@ def create_tuf_client_directory(repository_directory, client_directory): # This check ensures arguments have the appropriate number of objects and # object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(client_directory) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.PATH_SCHEMA.check_match(client_directory) # Set the absolute path of the Repository's metadata directory. The metadata # directory should be the one served by the Live repository. At a minimum, @@ -2227,7 +2228,7 @@ def create_tuf_client_directory(repository_directory, client_directory): if e.errno == errno.EEXIST: message = 'Cannot create a fresh client metadata directory: ' +\ repr(client_metadata_directory) + '. Already exists.' - raise tuf.exceptions.RepositoryError(message) + raise exceptions.RepositoryError(message) # Testing of non-errno.EEXIST exceptions have been verified on all # supported OSs. An unexpected exception (the '/' directory exists, rather @@ -2269,7 +2270,7 @@ def disable_console_log_messages(): None. 
""" - tuf.log.remove_console_handler() + log.remove_console_handler() @@ -2295,7 +2296,7 @@ def keys_to_keydict(keys): for key in keys: keyid = key['keyid'] - key_metadata_format = securesystemslib.keys.format_keyval_to_metadata( + key_metadata_format = sslib_keys.format_keyval_to_metadata( key['keytype'], key['scheme'], key['keyval']) new_keydict = {keyid: key_metadata_format} diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 1fe6a51e83..9d195da000 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -39,23 +39,21 @@ import tempfile import shutil import json +import six from collections import deque -import tuf -import tuf.formats -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.exceptions -import tuf.repository_lib as repo_lib - -import securesystemslib.keys -import securesystemslib.formats -import securesystemslib.util -import six +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import util as sslib_util +from securesystemslib import storage as sslib_storage -import securesystemslib.storage +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import log +from tuf import repository_lib as repo_lib +from tuf import roledb # Copy API @@ -88,6 +86,7 @@ import_ecdsa_privatekey_from_file) from securesystemslib.keys import ( + format_metadata_to_key, generate_rsa_key, generate_ecdsa_key, generate_ed25519_key, @@ -100,8 +99,8 @@ # Add a console handler so that users are aware of potentially unintended # states, such as multiple roles that share keys. 
-tuf.log.add_console_handler() -tuf.log.set_console_log_level(logging.INFO) +log.add_console_handler() +log.set_console_log_level(logging.INFO) # Recommended RSA key sizes: # https://en.wikipedia.org/wiki/Key_size#Asymmetric_algorithm_key_lengths @@ -230,14 +229,14 @@ def __init__(self, repository_directory, metadata_directory, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.PATH_SCHEMA.check_match(metadata_directory) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length) + sslib_formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes) self._repository_directory = repository_directory self._metadata_directory = metadata_directory @@ -250,10 +249,10 @@ def __init__(self, repository_directory, metadata_directory, self._use_snapshot_hashes = use_snapshot_hashes try: - tuf.roledb.create_roledb(repository_name) - tuf.keydb.create_keydb(repository_name) + roledb.create_roledb(repository_name) + 
keydb.create_keydb(repository_name) - except securesystemslib.exceptions.InvalidNameError: + except sslib_exceptions.InvalidNameError: logger.debug(repr(repository_name) + ' already exists. Overwriting' ' its contents.') @@ -312,9 +311,9 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly # formatted. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) + sslib_formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - # At this point, tuf.keydb and tuf.roledb must be fully populated, + # At this point, keydb and roledb must be fully populated, # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for # the top-level roles. exception if any of the top-level roles are missing # signatures, keys, etc. @@ -328,12 +327,12 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): repo_lib.TIMESTAMP_FILENAME)} snapshot_signable = None - dirty_rolenames = tuf.roledb.get_dirty_roles(self._repository_name) + dirty_rolenames = roledb.get_dirty_roles(self._repository_name) for dirty_rolename in dirty_rolenames: # Ignore top-level roles, they will be generated later in this method. - if dirty_rolename in tuf.roledb.TOP_LEVEL_ROLES: + if dirty_rolename in roledb.TOP_LEVEL_ROLES: continue dirty_filename = os.path.join(self._metadata_directory, @@ -349,7 +348,7 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): # metadata file. _generate_and_write_metadata() raises a # 'securesystemslib.exceptions.Error' exception if the metadata cannot be # written. 
- root_roleinfo = tuf.roledb.get_roleinfo('root', self._repository_name) + root_roleinfo = roledb.get_roleinfo('root', self._repository_name) old_consistent_snapshot = root_roleinfo['consistent_snapshot'] if 'root' in dirty_rolenames or consistent_snapshot != old_consistent_snapshot: repo_lib._generate_and_write_metadata('root', filenames['root'], @@ -384,9 +383,9 @@ def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): use_timestamp_length=self._use_timestamp_length, use_timestamp_hashes=self._use_timestamp_hashes) - tuf.roledb.unmark_dirty(dirty_rolenames, self._repository_name) + roledb.unmark_dirty(dirty_rolenames, self._repository_name) - # Delete the metadata of roles no longer in 'tuf.roledb'. Obsolete roles + # Delete the metadata of roles no longer in 'roledb'. Obsolete roles # may have been revoked and should no longer have their metadata files # available on disk, otherwise loading a repository may unintentionally # load them. @@ -460,7 +459,7 @@ def write(self, rolename, consistent_snapshot=False, increment_version_number=Tr use_existing_fileinfo=use_existing_fileinfo) # Ensure 'rolename' is no longer marked as dirty after the successful write(). - tuf.roledb.unmark_dirty([rolename], self._repository_name) + roledb.unmark_dirty([rolename], self._repository_name) @@ -536,7 +535,7 @@ def dirty_roles(self): None. """ - logger.info('Dirty roles: ' + str(tuf.roledb.get_dirty_roles(self._repository_name))) + logger.info('Dirty roles: ' + str(roledb.get_dirty_roles(self._repository_name))) @@ -560,7 +559,7 @@ def mark_dirty(self, roles): None. """ - tuf.roledb.mark_dirty(roles, self._repository_name) + roledb.mark_dirty(roles, self._repository_name) @@ -584,7 +583,7 @@ def unmark_dirty(self, roles): None. 
""" - tuf.roledb.unmark_dirty(roles, self._repository_name) + roledb.unmark_dirty(roles, self._repository_name) @@ -625,13 +624,13 @@ def get_filepaths_in_directory(files_directory, recursive_walk=False, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(files_directory) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(recursive_walk) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(followlinks) + sslib_formats.PATH_SCHEMA.check_match(files_directory) + sslib_formats.BOOLEAN_SCHEMA.check_match(recursive_walk) + sslib_formats.BOOLEAN_SCHEMA.check_match(followlinks) # Ensure a valid directory is given. if not os.path.isdir(files_directory): - raise securesystemslib.exceptions.Error(repr(files_directory) + ' is not' + raise sslib_exceptions.Error(repr(files_directory) + ' is not' ' a directory.') # A list of the target filepaths found in 'files_directory'. @@ -721,8 +720,7 @@ def add_verification_key(self, key, expires=None): expired. - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are - updated. + The role's entries in 'keydb' and 'roledb' are updated. None. @@ -732,7 +730,7 @@ def add_verification_key(self, key, expires=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) # If 'expires' is unset, choose a default expiration for 'key'. 
By # default, Root, Targets, Snapshot, and Timestamp keys are set to expire @@ -740,28 +738,28 @@ def add_verification_key(self, key, expires=None): if expires is None: if self.rolename == 'root': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) elif self.rolename == 'Targets': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) elif self.rolename == 'Snapshot': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) elif self.rolename == 'Timestamp': expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) else: expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) + formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) # Is 'expires' a datetime.datetime() object? # Raise 'securesystemslib.exceptions.FormatError' if not. if not isinstance(expires, datetime.datetime): - raise securesystemslib.exceptions.FormatError(repr(expires) + ' is not a' + raise sslib_exceptions.FormatError(repr(expires) + ' is not a' ' datetime.datetime() object.') # Truncate the microseconds value to produce a correct schema string @@ -770,10 +768,10 @@ def add_verification_key(self, key, expires=None): # Ensure the expiration has not already passed. 
current_datetime = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) + formats.unix_timestamp_to_datetime(int(time.time())) if expires < current_datetime: - raise securesystemslib.exceptions.Error(repr(key) + ' has already' + raise sslib_exceptions.Error(repr(key) + ' has already' ' expired.') # Update the key's 'expires' entry. @@ -781,29 +779,29 @@ def add_verification_key(self, key, expires=None): key['expires'] = expires # Ensure 'key', which should contain the public portion, is added to - # 'tuf.keydb.py'. Add 'key' to the list of recognized keys. + # 'keydb'. Add 'key' to the list of recognized keys. # Keys may be shared, so do not raise an exception if 'key' has already # been loaded. try: - tuf.keydb.add_key(key, repository_name=self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: logger.warning('Adding a verification key that has already been used.') keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # Save the keyids that are being replaced since certain roles will need to # re-sign metadata with these keys (e.g., root). Use list() to make a copy # of roleinfo['keyids'] to ensure we're modifying distinct lists. previous_keyids = list(roleinfo['keyids']) - # Add 'key' to the role's entry in 'tuf.roledb.py', and avoid duplicates. + # Add 'key' to the role's entry in 'roledb', and avoid duplicates. if keyid not in roleinfo['keyids']: roleinfo['keyids'].append(keyid) roleinfo['previous_keyids'] = previous_keyids - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -833,7 +831,7 @@ def remove_verification_key(self, key): previously added. - Updates the role's 'tuf.roledb.py' entry. + Updates the role's 'roledb' entry. None. 
@@ -843,19 +841,19 @@ def remove_verification_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if keyid in roleinfo['keyids']: roleinfo['keyids'].remove(keyid) - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Verification key not found.') + raise sslib_exceptions.Error('Verification key not found.') @@ -883,7 +881,7 @@ def load_signing_key(self, key): securesystemslib.exceptions.Error, if the private key is not found in 'key'. - Updates the role's 'tuf.keydb.py' and 'tuf.roledb.py' entries. + Updates the role's 'keydb' and 'roledb' entries. None. @@ -893,28 +891,28 @@ def load_signing_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) # Ensure the private portion of the key is available, otherwise signatures # cannot be generated when the metadata file is written to disk. if 'private' not in key['keyval'] or not len(key['keyval']['private']): - raise securesystemslib.exceptions.Error('This is not a private key.') + raise sslib_exceptions.Error('This is not a private key.') # Has the key, with the private portion included, been added to the keydb? # The public version of the key may have been previously added. 
try: - tuf.keydb.add_key(key, repository_name=self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) - except tuf.exceptions.KeyAlreadyExistsError: - tuf.keydb.remove_key(key['keyid'], self._repository_name) - tuf.keydb.add_key(key, repository_name=self._repository_name) + except exceptions.KeyAlreadyExistsError: + keydb.remove_key(key['keyid'], self._repository_name) + keydb.add_key(key, repository_name=self._repository_name) - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'signing_keys' field in 'roledb'. + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if key['keyid'] not in roleinfo['signing_keyids']: roleinfo['signing_keyids'].append(key['keyid']) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -943,7 +941,7 @@ def unload_signing_key(self, key): previously loaded. - Updates the signing keys of the role in 'tuf.roledb.py'. + Updates the signing keys of the role in 'roledb'. None. @@ -953,10 +951,10 @@ def unload_signing_key(self, key): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) + sslib_formats.ANYKEY_SCHEMA.check_match(key) - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'signing_keys' field in 'roledb'. + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # TODO: Should we consider removing keys from keydb that are no longer # associated with any roles? 
There could be many no-longer-used keys @@ -964,11 +962,11 @@ def unload_signing_key(self, key): if key['keyid'] in roleinfo['signing_keyids']: roleinfo['signing_keyids'].remove(key['keyid']) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Signing key not found.') + raise sslib_exceptions.Error('Signing key not found.') @@ -1004,7 +1002,7 @@ def add_signature(self, signature, mark_role_as_dirty=True): Adds 'signature', if not already added, to the role's 'signatures' field - in 'tuf.roledb.py'. + in 'roledb'. None. @@ -1014,10 +1012,10 @@ def add_signature(self, signature, mark_role_as_dirty=True): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) # Ensure the roleinfo contains a 'signatures' field. if 'signatures' not in roleinfo: @@ -1027,7 +1025,7 @@ def add_signature(self, signature, mark_role_as_dirty=True): # added. if signature not in roleinfo['signatures']: roleinfo['signatures'].append(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, + roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, repository_name=self._repository_name) else: @@ -1058,7 +1056,7 @@ def remove_signature(self, signature): added to this role. - Updates the 'signatures' field of the role in 'tuf.roledb.py'. 
+ Updates the 'signatures' field of the role in 'roledb'. None. @@ -1068,18 +1066,18 @@ def remove_signature(self, signature): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if signature in roleinfo['signatures']: roleinfo['signatures'].remove(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Signature not found.') + raise sslib_exceptions.Error('Signature not found.') @@ -1106,7 +1104,7 @@ def signatures(self): 'securesystemslib.formats.SIGNATURES_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) signatures = roleinfo['signatures'] return signatures @@ -1134,7 +1132,7 @@ def keys(self): A list of the role's keyids (i.e., keyids of the keys). """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) keyids = roleinfo['keyids'] return keyids @@ -1187,7 +1185,7 @@ def version(self): 'tuf.formats.VERSION_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) version = roleinfo['version'] return version @@ -1224,7 +1222,7 @@ def version(self, version): Modifies the 'version' attribute of the Repository object and updates the - role's version in 'tuf.roledb.py'. + role's version in 'roledb'. None. 
@@ -1234,12 +1232,12 @@ def version(self, version): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) + formats.METADATAVERSION_SCHEMA.check_match(version) - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) roleinfo['version'] = version - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -1265,7 +1263,7 @@ def threshold(self): 'tuf.formats.THRESHOLD_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) threshold = roleinfo['threshold'] return threshold @@ -1296,7 +1294,7 @@ def threshold(self, threshold): Modifies the threshold attribute of the Repository object and updates - the roles threshold in 'tuf.roledb.py'. + the roles threshold in 'roledb'. None. @@ -1306,13 +1304,13 @@ def threshold(self, threshold): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
- tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) + formats.THRESHOLD_SCHEMA.check_match(threshold) - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) roleinfo['previous_threshold'] = roleinfo['threshold'] roleinfo['threshold'] = threshold - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -1336,10 +1334,10 @@ def expiration(self): The role's expiration datetime, a datetime.datetime() object. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) expires = roleinfo['expires'] - return tuf.formats.expiry_string_to_datetime(expires) + return formats.expiry_string_to_datetime(expires) @@ -1377,7 +1375,7 @@ def expiration(self, datetime_object): # Is 'datetime_object' a datetime.datetime() object? # Raise 'securesystemslib.exceptions.FormatError' if not. if not isinstance(datetime_object, datetime.datetime): - raise securesystemslib.exceptions.FormatError( + raise sslib_exceptions.FormatError( repr(datetime_object) + ' is not a datetime.datetime() object.') # truncate the microseconds value to produce a correct schema string @@ -1386,18 +1384,18 @@ def expiration(self, datetime_object): # Ensure the expiration has not already passed. current_datetime_object = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) + formats.unix_timestamp_to_datetime(int(time.time())) if datetime_object < current_datetime_object: - raise securesystemslib.exceptions.Error(repr(self.rolename) + ' has' + raise sslib_exceptions.Error(repr(self.rolename) + ' has' ' already expired.') - # Update the role's 'expires' entry in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + # Update the role's 'expires' entry in 'roledb'. 
+ roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) expires = datetime_object.isoformat() + 'Z' roleinfo['expires'] = expires - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -1426,7 +1424,7 @@ def signing_keys(self): 'securesystemslib.formats.KEYIDS_SCHEMA'. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) signing_keyids = roleinfo['signing_keyids'] return signing_keyids @@ -1462,7 +1460,7 @@ class Root(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'root' role is added to 'tuf.roledb.py'. + A 'root' role is added to 'roledb'. None. @@ -1477,11 +1475,11 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - tuf.formats.ROLENAME_SCHEMA.check_match(repository_name) + formats.ROLENAME_SCHEMA.check_match(repository_name) # By default, 'snapshot' metadata is set to expire 1 week from the current # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + ROOT_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1489,9 +1487,9 @@ def __init__(self, repository_name): 'signatures': [], 'version': 0, 'consistent_snapshot': False, 'expires': expiration, 'partial_loaded': False} try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) + roledb.add_role(self._rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1530,7 +1528,7 @@ class Timestamp(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'timestamp' role is added to 'tuf.roledb.py'. + A 'timestamp' role is added to 'roledb'. None. 
@@ -1545,11 +1543,11 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # By default, 'timestamp' metadata is set to expire TIMESTAMP_EXPIRATION # seconds from the current time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TIMESTAMP_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1558,9 +1556,9 @@ def __init__(self, repository_name): 'partial_loaded': False} try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) + roledb.add_role(self.rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1593,7 +1591,7 @@ class Snapshot(Metadata): tuf.exceptions.FormatError, if the argument is improperly formatted. - A 'snapshot' role is added to 'tuf.roledb.py'. + A 'snapshot' role is added to 'roledb'. None. @@ -1608,11 +1606,11 @@ def __init__(self, repository_name): # Is 'repository_name' properly formatted? Otherwise, raise a # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # By default, 'snapshot' metadata is set to expire 1 week from the current # time. The expiration may be modified.
- expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + SNAPSHOT_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1621,9 +1619,9 @@ def __init__(self, repository_name): 'partial_loaded': False} try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) + roledb.add_role(self._rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1678,7 +1676,7 @@ class Targets(Metadata): formatted. - Modifies the roleinfo of the targets role in 'tuf.roledb', or creates + Modifies the roleinfo of the targets role in 'roledb', or creates a default one named 'targets'. @@ -1692,12 +1690,12 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(targets_directory) + formats.ROLENAME_SCHEMA.check_match(rolename) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if roleinfo is not None: - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) super(Targets, self).__init__() self._targets_directory = targets_directory @@ -1715,7 +1713,7 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # By default, Targets objects are set to expire 3 months from the current # time. May be later modified. 
- expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TARGETS_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -1727,11 +1725,11 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} - # Add the new role to the 'tuf.roledb'. + # Add the new role to the 'roledb'. try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) + roledb.add_role(self.rolename, roleinfo, self._repository_name) - except tuf.exceptions.RoleAlreadyExistsError: + except exceptions.RoleAlreadyExistsError: pass @@ -1756,7 +1754,7 @@ def __call__(self, rolename): delegated by this Targets object. - Modifies the roleinfo of the targets role in 'tuf.roledb'. + Modifies the roleinfo of the targets role in 'roledb'. The Targets object of 'rolename'. @@ -1766,13 +1764,13 @@ def __call__(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if rolename in self._delegated_roles: return self._delegated_roles[rolename] else: - raise tuf.exceptions.UnknownRoleError(repr(rolename) + ' has' + raise exceptions.UnknownRoleError(repr(rolename) + ' has' ' not been delegated by ' + repr(self.rolename)) @@ -1808,10 +1806,10 @@ def add_delegated_role(self, rolename, targets_object): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
- tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if not isinstance(targets_object, Targets): - raise securesystemslib.exceptions.FormatError(repr(targets_object) + ' is' + raise sslib_exceptions.FormatError(repr(targets_object) + ' is' ' not a Targets object.') @@ -1826,7 +1824,7 @@ def add_delegated_role(self, rolename, targets_object): def remove_delegated_role(self, rolename): """ Remove 'rolename' from this Targets object's list of delegated roles. - This method does not update tuf.roledb and others. + This method does not update roledb and others. rolename: @@ -1848,7 +1846,7 @@ def remove_delegated_role(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) if rolename not in self._delegated_roles: logger.debug(repr(rolename) + ' has not been delegated.') @@ -1883,7 +1881,7 @@ def target_files(self): None. """ - target_files = tuf.roledb.get_roleinfo(self._rolename, + target_files = roledb.get_roleinfo(self._rolename, self._repository_name)['paths'] return target_files @@ -1935,13 +1933,13 @@ def add_paths(self, paths, child_rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATHS_SCHEMA.check_match(paths) - tuf.formats.ROLENAME_SCHEMA.check_match(child_rolename) + sslib_formats.PATHS_SCHEMA.check_match(paths) + formats.ROLENAME_SCHEMA.check_match(child_rolename) # Ensure that 'child_rolename' exists, otherwise it will not have an entry # in the parent role's delegations field. 
- if not tuf.roledb.role_exists(child_rolename, self._repository_name): - raise securesystemslib.exceptions.Error(repr(child_rolename) + ' does' + if not roledb.role_exists(child_rolename, self._repository_name): + raise sslib_exceptions.Error(repr(child_rolename) + ' does' ' not exist.') for path in paths: @@ -1953,7 +1951,7 @@ def add_paths(self, paths, child_rolename): # Get the current role's roleinfo, so that its delegations field can be # updated. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) # Update the delegated paths of 'child_rolename' to add relative paths. for role in roleinfo['delegations']['roles']: @@ -1967,7 +1965,7 @@ def add_paths(self, paths, child_rolename): else: logger.debug(repr(role['name']) + ' does not match child rolename.') - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -2014,7 +2012,7 @@ def add_target(self, filepath, custom=None, fileinfo=None): Adds 'filepath' to this role's list of targets. This role's - 'tuf.roledb.py' entry is also updated. + 'roledb' entry is also updated. None. @@ -2024,19 +2022,19 @@ def add_target(self, filepath, custom=None, fileinfo=None): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.RELPATH_SCHEMA.check_match(filepath) + formats.RELPATH_SCHEMA.check_match(filepath) if fileinfo and custom: - raise securesystemslib.exceptions.Error("Can only take one of" + raise sslib_exceptions.Error("Can only take one of" " custom or fileinfo, not both.") if fileinfo: - tuf.formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) + formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) if custom is None: custom = {} else: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) + formats.CUSTOM_SCHEMA.check_match(custom) # Add 'filepath' (i.e., relative to the targets directory) to the role's # list of targets. 'filepath' will not be verified as an allowed path @@ -2051,8 +2049,8 @@ def add_target(self, filepath, custom=None, fileinfo=None): # later calls to write() will fail. self._check_path(filepath) - # Update the role's 'tuf.roledb.py' entry and avoid duplicates. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + # Update the role's 'roledb' entry and avoid duplicates. + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) if filepath not in roleinfo['paths']: logger.debug('Adding new target: ' + repr(filepath)) @@ -2065,7 +2063,7 @@ def add_target(self, filepath, custom=None, fileinfo=None): else: roleinfo['paths'].update({filepath: {'custom': custom}}) - tuf.roledb.update_roleinfo(self._rolename, roleinfo, + roledb.update_roleinfo(self._rolename, roleinfo, repository_name=self._repository_name) @@ -2104,7 +2102,7 @@ def add_targets(self, list_of_targets): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) + formats.RELPATHS_SCHEMA.check_match(list_of_targets) # Ensure the paths in 'list_of_targets' are relative and use forward slash # as a separator or raise an exception. 
The paths of 'list_of_targets' @@ -2115,8 +2113,8 @@ def add_targets(self, list_of_targets): for target in list_of_targets: self._check_path(target) - # Update this Targets 'tuf.roledb.py' entry. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) + # Update this Targets 'roledb' entry. + roleinfo = roledb.get_roleinfo(self._rolename, self._repository_name) for relative_target in list_of_targets: if relative_target not in roleinfo['paths']: logger.debug('Adding new target: ' + repr(relative_target)) @@ -2124,7 +2122,7 @@ def add_targets(self, list_of_targets): logger.debug('Replacing target: ' + repr(relative_target)) roleinfo['paths'].update({relative_target: {}}) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -2152,7 +2150,7 @@ def remove_target(self, filepath): repository's targets directory, or not found. - Modifies this Targets 'tuf.roledb.py' entry. + Modifies this Targets 'roledb' entry. None. @@ -2162,17 +2160,17 @@ def remove_target(self, filepath): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) + formats.RELPATH_SCHEMA.check_match(filepath) # Remove 'relative_filepath', if found, and update this Targets roleinfo. 
- fileinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + fileinfo = roledb.get_roleinfo(self.rolename, self._repository_name) if filepath in fileinfo['paths']: del fileinfo['paths'][filepath] - tuf.roledb.update_roleinfo(self.rolename, fileinfo, + roledb.update_roleinfo(self.rolename, fileinfo, repository_name=self._repository_name) else: - raise securesystemslib.exceptions.Error('Target file path not found.') + raise sslib_exceptions.Error('Target file path not found.') @@ -2192,16 +2190,16 @@ def clear_targets(self): None. - Modifies this Targets' 'tuf.roledb.py' entry. + Modifies this Targets' 'roledb' entry. None. """ - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) roleinfo['paths'] = {} - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) @@ -2229,7 +2227,7 @@ def get_delegated_rolenames(self): A list of rolenames. """ - return tuf.roledb.get_delegated_rolenames(self.rolename, self._repository_name) + return roledb.get_delegated_rolenames(self.rolename, self._repository_name) @@ -2241,7 +2239,7 @@ def _create_delegated_target(self, rolename, keyids, threshold, paths): expiration is set (3 months from the current time). 
""" - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + TARGETS_EXPIRATION)) expiration = expiration.isoformat() + 'Z' @@ -2267,13 +2265,13 @@ def _update_roledb_delegations(self, keydict, delegations_roleinfo): roles in delegations_roleinfo """ - current_roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + current_roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) current_roleinfo['delegations']['keys'].update(keydict) for roleinfo in delegations_roleinfo: current_roleinfo['delegations']['roles'].append(roleinfo) - tuf.roledb.update_roleinfo(self.rolename, current_roleinfo, + roledb.update_roleinfo(self.rolename, current_roleinfo, repository_name=self._repository_name) @@ -2352,8 +2350,8 @@ def delegate(self, rolename, public_keys, paths, threshold=1, A new Target object is created for 'rolename' that is accessible to the - caller (i.e., targets.). The 'tuf.keydb.py' and - 'tuf.roledb.py' stores are updated with 'public_keys'. + caller (i.e., targets.). The 'keydb' and + 'roledb' stores are updated with 'public_keys'. None. @@ -2363,17 +2361,17 @@ def delegate(self, rolename, public_keys, paths, threshold=1, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) - tuf.formats.RELPATHS_SCHEMA.check_match(paths) - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(terminating) + formats.ROLENAME_SCHEMA.check_match(rolename) + sslib_formats.ANYKEYLIST_SCHEMA.check_match(public_keys) + formats.RELPATHS_SCHEMA.check_match(paths) + formats.THRESHOLD_SCHEMA.check_match(threshold) + sslib_formats.BOOLEAN_SCHEMA.check_match(terminating) if list_of_targets is not None: - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) + formats.RELPATHS_SCHEMA.check_match(list_of_targets) if path_hash_prefixes is not None: - tuf.formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) + formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) # Keep track of the valid keyids (added to the new Targets object) and # their keydicts (added to this Targets delegations). @@ -2446,7 +2444,7 @@ def revoke(self, rolename): Revoke this Targets' 'rolename' delegation. Its 'rolename' attribute is deleted, including the entries in its 'delegations' field and in - 'tuf.roledb'. + 'roledb'. Actual metadata files are not updated, only when repository.write() or repository.write() is called. @@ -2465,7 +2463,7 @@ def revoke(self, rolename): formatted. - The delegations dictionary of 'rolename' is modified, and its 'tuf.roledb' + The delegations dictionary of 'rolename' is modified, and its 'roledb' entry is updated. This Targets' 'rolename' delegation attribute is also deleted. @@ -2477,27 +2475,27 @@ def revoke(self, rolename): # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Remove 'rolename' from this Target's delegations dict. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) + roleinfo = roledb.get_roleinfo(self.rolename, self._repository_name) for role in roleinfo['delegations']['roles']: if role['name'] == rolename: roleinfo['delegations']['roles'].remove(role) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, + roledb.update_roleinfo(self.rolename, roleinfo, repository_name=self._repository_name) - # Remove 'rolename' from 'tuf.roledb.py'. + # Remove 'rolename' from 'roledb'. try: - tuf.roledb.remove_role(rolename, self._repository_name) + roledb.remove_role(rolename, self._repository_name) # Remove the rolename delegation from the current role. For example, the # 'django' role is removed from repository.targets('django'). del self._delegated_roles[rolename] self._parent_targets_object.remove_delegated_role(rolename) - except (tuf.exceptions.UnknownRoleError, KeyError): + except (exceptions.UnknownRoleError, KeyError): pass @@ -2567,9 +2565,9 @@ def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATHS_SCHEMA.check_match(list_of_targets) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + sslib_formats.PATHS_SCHEMA.check_match(list_of_targets) + sslib_formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) prefix_length, prefix_count, bin_size = repo_lib.get_bin_numbers(number_of_bins) @@ -2711,8 +2709,8 @@ def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + sslib_formats.PATH_SCHEMA.check_match(target_filepath) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane @@ -2721,7 +2719,7 @@ def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, # Ensure the Targets object has delegated to hashed bins if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' + raise sslib_exceptions.Error(self.rolename + ' does not have' ' a delegated role ' + bin_name) self._delegated_roles[bin_name].add_target(target_filepath, @@ -2773,8 +2771,8 @@ def remove_target_from_bin(self, target_filepath, # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) + sslib_formats.PATH_SCHEMA.check_match(target_filepath) + formats.NUMBINS_SCHEMA.check_match(number_of_bins) # TODO: check target_filepath is sane? @@ -2783,7 +2781,7 @@ def remove_target_from_bin(self, target_filepath, # Ensure the Targets object has delegated to hashed bins if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' + raise sslib_exceptions.Error(self.rolename + ' does not have' ' a delegated role ' + bin_name) self._delegated_roles[bin_name].remove_target(target_filepath) @@ -2806,7 +2804,7 @@ def delegations(self): tuf.exceptions.UnknownRoleError, if this Targets' rolename - does not exist in 'tuf.roledb'. + does not exist in 'roledb'. None. @@ -2843,14 +2841,14 @@ def _check_path(self, pathname): None. """ - tuf.formats.RELPATH_SCHEMA.check_match(pathname) + formats.RELPATH_SCHEMA.check_match(pathname) if '\\' in pathname: - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) + raise exceptions.InvalidNameError('Path ' + repr(pathname) + ' does not use the forward slash (/) as directory separator.') if pathname.startswith('/'): - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) + raise exceptions.InvalidNameError('Path ' + repr(pathname) + ' starts with a directory separator. All paths should be relative' ' to targets directory.') @@ -2925,11 +2923,11 @@ def create_new_repository(repository_directory, repository_name='default', # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() # Set the repository, metadata, and targets directories. These directories # are created if they do not exist. @@ -3035,11 +3033,11 @@ def load_repository(repository_directory, repository_name='default', # Does 'repository_directory' have the correct format? # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.PATH_SCHEMA.check_match(repository_directory) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() + storage_backend = sslib_storage.FilesystemBackend() repository_directory = os.path.abspath(repository_directory) metadata_directory = os.path.join(repository_directory, @@ -3085,7 +3083,7 @@ def load_repository(repository_directory, repository_name='default', # Store the delegations in the form of delegated-delegating role tuples, # starting from the top-level targets: # [('role1', 'targets'), ('role2', 'targets'), ... 
] - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) + roleinfo = roledb.get_roleinfo('targets', repository_name) for role in roleinfo['delegations']['roles']: delegations.append((role, 'targets')) @@ -3112,9 +3110,9 @@ def load_repository(repository_directory, repository_name='default', signable = None try: - signable = securesystemslib.util.load_json_file(metadata_path) + signable = sslib_util.load_json_file(metadata_path) - except (securesystemslib.exceptions.Error, ValueError, IOError): + except (sslib_exceptions.Error, ValueError, IOError): logger.debug('Tried to load metadata with invalid JSON' ' content: ' + repr(metadata_path)) continue @@ -3167,15 +3165,15 @@ def load_repository(repository_directory, repository_name='default', # The repo may have used hashing algorithms for the generated keyids # that doesn't match the client's set of hash algorithms. Make sure # to only used the repo's selected hashing algorithms. - key_object, keyids = securesystemslib.keys.format_metadata_to_key(key_metadata, + key_object, keyids = format_metadata_to_key(key_metadata, keyid_hash_algorithms=key_metadata['keyid_hash_algorithms']) try: for keyid in keyids: # pragma: no branch key_object['keyid'] = keyid - tuf.keydb.add_key(key_object, keyid=None, + keydb.add_key(key_object, keyid=None, repository_name=repository_name) - except tuf.exceptions.KeyAlreadyExistsError: + except exceptions.KeyAlreadyExistsError: pass return repository @@ -3217,14 +3215,14 @@ def dump_signable_metadata(metadata_filepath): """ # Are the argument properly formatted? - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - signable = securesystemslib.util.load_json_file(metadata_filepath) + signable = sslib_util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? 
- tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + formats.SIGNABLE_SCHEMA.check_match(signable) - return securesystemslib.formats.encode_canonical(signable['signed']) + return sslib_formats.encode_canonical(signable['signed']) @@ -3272,13 +3270,13 @@ def append_signature(signature, metadata_filepath): """ # Are the arguments properly formatted? - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + sslib_formats.SIGNATURE_SCHEMA.check_match(signature) + sslib_formats.PATH_SCHEMA.check_match(metadata_filepath) - signable = securesystemslib.util.load_json_file(metadata_filepath) + signable = sslib_util.load_json_file(metadata_filepath) # Is 'signable' a valid metadata file? - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + formats.SIGNABLE_SCHEMA.check_match(signable) signable['signatures'].append(signature) @@ -3288,7 +3286,7 @@ def append_signature(signature, metadata_filepath): separators=(',', ': '), sort_keys=True).encode('utf-8') file_object.write(written_metadata_content) - securesystemslib.util.persist_temp_file(file_object, metadata_filepath) + sslib_util.persist_temp_file(file_object, metadata_filepath) diff --git a/tuf/requests_fetcher.py b/tuf/requests_fetcher.py index e867e6b82b..25a2f9d0db 100644 --- a/tuf/requests_fetcher.py +++ b/tuf/requests_fetcher.py @@ -10,11 +10,11 @@ import six import logging import time +from urllib3.exceptions import ReadTimeoutError -import urllib3.exceptions - -import tuf.exceptions -import tuf.settings +import tuf +from tuf import exceptions +from tuf import settings from tuf.client.fetcher import FetcherInterface @@ -74,14 +74,14 @@ def fetch(self, url, required_length): # - connect timeout (max delay before first byte is received) # - read (gap) timeout (max delay between bytes received) response = session.get(url, stream=True, - timeout=tuf.settings.SOCKET_TIMEOUT) + timeout=settings.SOCKET_TIMEOUT) # Check response status. 
try: response.raise_for_status() except requests.HTTPError as e: response.close() status = e.response.status_code - raise tuf.exceptions.FetcherHTTPError(str(e), status) + raise exceptions.FetcherHTTPError(str(e), status) # Define a generator function to be returned by fetch. This way the caller @@ -96,11 +96,11 @@ def chunks(): # wish to download an extremely large file in one shot. # Before beginning the round, sleep (if set) for a short amount of # time so that the CPU is not hogged in the while loop. - if tuf.settings.SLEEP_BEFORE_ROUND: - time.sleep(tuf.settings.SLEEP_BEFORE_ROUND) + if settings.SLEEP_BEFORE_ROUND: + time.sleep(settings.SLEEP_BEFORE_ROUND) read_amount = min( - tuf.settings.CHUNK_SIZE, required_length - bytes_received) + settings.CHUNK_SIZE, required_length - bytes_received) # NOTE: This may not handle some servers adding a Content-Encoding # header, which may cause urllib3 to misbehave: @@ -121,8 +121,8 @@ def chunks(): if bytes_received >= required_length: break - except urllib3.exceptions.ReadTimeoutError as e: - raise tuf.exceptions.SlowRetrievalError(str(e)) + except ReadTimeoutError as e: + raise exceptions.SlowRetrievalError(str(e)) finally: response.close() @@ -140,7 +140,7 @@ def _get_session(self, url): parsed_url = six.moves.urllib.parse.urlparse(url) if not parsed_url.scheme or not parsed_url.hostname: - raise tuf.exceptions.URLParsingError( + raise exceptions.URLParsingError( 'Could not get scheme and hostname from URL: ' + url) session_index = parsed_url.scheme + '+' + parsed_url.hostname diff --git a/tuf/roledb.py b/tuf/roledb.py index 37add72e3a..62f83bb8ae 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -52,11 +52,13 @@ import logging import copy -import tuf -import tuf.log -import tuf.formats +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats + +from tuf import exceptions +from tuf import formats -import 
securesystemslib import six # See 'tuf.log' to learn how logging is handled in TUF. @@ -111,10 +113,10 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'): # This check will ensure 'root_metadata' has the appropriate number of objects # and object types, and that all dict keys are properly named. # Raises securesystemslib.exceptions.FormatError. - tuf.formats.ROOT_SCHEMA.check_match(root_metadata) + formats.ROOT_SCHEMA.check_match(root_metadata) # Is 'repository_name' formatted correctly? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -182,13 +184,13 @@ def create_roledb(repository_name): # Is 'repository_name' properly formatted? If not, raise # 'securesystemslib.exceptions.FormatError'. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles if repository_name in _roledb_dict or repository_name in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name' + raise sslib_exceptions.InvalidNameError('Repository name' ' already exists: ' + repr(repository_name)) _roledb_dict[repository_name] = {} @@ -225,7 +227,7 @@ def remove_roledb(repository_name): # Is 'repository_name' properly formatted? If not, raise # 'securesystemslib.exceptions.FormatError'. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles @@ -236,7 +238,7 @@ def remove_roledb(repository_name): return if repository_name == 'default': - raise securesystemslib.exceptions.InvalidNameError('Cannot remove the' + raise sslib_exceptions.InvalidNameError('Cannot remove the' ' default repository: ' + repr(repository_name)) del _roledb_dict[repository_name] @@ -294,13 +296,13 @@ def add_role(rolename, roleinfo, repository_name='default'): # Does 'rolename' have the correct object format? # This check will ensure 'rolename' has the appropriate number of objects # and object types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Does 'roleinfo' have the correct object format? - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) # Is 'repository_name' correctly formatted? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict @@ -308,10 +310,10 @@ def add_role(rolename, roleinfo, repository_name='default'): _validate_rolename(rolename) if repository_name not in _roledb_dict: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) + raise sslib_exceptions.InvalidNameError('Repository name does not exist: ' + repository_name) if rolename in _roledb_dict[repository_name]: - raise tuf.exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) + raise exceptions.RoleAlreadyExistsError('Role already exists: ' + rolename) _roledb_dict[repository_name][rolename] = copy.deepcopy(roleinfo) @@ -378,12 +380,12 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name # Does the arguments have the correct object format? 
# This check will ensure arguments have the appropriate number of objects # and object types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + formats.ROLENAME_SCHEMA.check_match(rolename) + sslib_formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Does 'roleinfo' have the correct object format? - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) + formats.ROLEDB_SCHEMA.check_match(roleinfo) # Raises securesystemslib.exceptions.InvalidNameError. _validate_rolename(rolename) @@ -392,11 +394,11 @@ def update_roleinfo(rolename, roleinfo, mark_role_as_dirty=True, repository_name global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' ' exist: ' + + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if rolename not in _roledb_dict[repository_name]: - raise tuf.exceptions.UnknownRoleError('Role does not exist: ' + rolename) + raise exceptions.UnknownRoleError('Role does not exist: ' + rolename) # Update the global _roledb_dict and _dirty_roles structures so that # the latest 'roleinfo' is available to other modules, and the repository @@ -438,13 +440,13 @@ def get_dirty_roles(repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if not. 
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) return sorted(list(_dirty_roles[repository_name])) @@ -480,14 +482,14 @@ def mark_dirty(roles, repository_name='default'): # Are the arguments properly formatted? If not, raise # securesystemslib.exceptions.FormatError. - securesystemslib.formats.NAMES_SCHEMA.check_match(roles) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAMES_SCHEMA.check_match(roles) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) _dirty_roles[repository_name].update(roles) @@ -523,14 +525,14 @@ def unmark_dirty(roles, repository_name='default'): # Are the arguments properly formatted? If not, raise # securesystemslib.exceptions.FormatError. 
- securesystemslib.formats.NAMES_SCHEMA.check_match(roles) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAMES_SCHEMA.check_match(roles) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not exist: ' + repository_name) for role in roles: @@ -577,7 +579,7 @@ def role_exists(rolename, repository_name='default'): try: _check_rolename(rolename, repository_name) - except tuf.exceptions.UnknownRoleError: + except exceptions.UnknownRoleError: return False return True @@ -624,7 +626,7 @@ def remove_role(rolename, repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -668,13 +670,13 @@ def get_rolenames(repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it is improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) global _roledb_dict global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does' + raise sslib_exceptions.InvalidNameError('Repository name does' ' not' ' exist: ' + repository_name) return list(_roledb_dict[repository_name].keys()) @@ -726,7 +728,7 @@ def get_roleinfo(rolename, repository_name='default'): # Is 'repository_name' properly formatted? 
If not, raise # 'securesystemslib.exceptions.FormatError'. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -779,7 +781,7 @@ def get_role_keyids(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -831,7 +833,7 @@ def get_role_threshold(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -882,7 +884,7 @@ def get_role_paths(rolename, repository_name='default'): # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -942,7 +944,7 @@ def get_delegated_rolenames(rolename, repository_name='default'): # Does 'repository_name' have the correct format? Raise # 'securesystemslib.exceptions.FormatError' if it does not. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.FormatError, # tuf.exceptions.UnknownRoleError, or @@ -995,14 +997,14 @@ def clear_roledb(repository_name='default', clear_all=False): # Do the arguments have the correct format? 
If not, raise # 'securesystemslib.exceptions.FormatError' - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(clear_all) + sslib_formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.BOOLEAN_SCHEMA.check_match(clear_all) global _roledb_dict global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if clear_all: @@ -1030,10 +1032,10 @@ def _check_rolename(rolename, repository_name='default'): # Does 'rolename' have the correct object format? # This check will ensure 'rolename' has the appropriate number of objects # and object types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) + formats.ROLENAME_SCHEMA.check_match(rolename) # Does 'repository_name' have the correct format? - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Raises securesystemslib.exceptions.InvalidNameError. _validate_rolename(rolename) @@ -1042,11 +1044,11 @@ def _check_rolename(rolename, repository_name='default'): global _dirty_roles if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' + raise sslib_exceptions.InvalidNameError('Repository name does not' ' exist: ' + repository_name) if rolename not in _roledb_dict[repository_name]: - raise tuf.exceptions.UnknownRoleError('Role name does not exist: ' + rolename) + raise exceptions.UnknownRoleError('Role name does not exist: ' + rolename) @@ -1059,13 +1061,13 @@ def _validate_rolename(rolename): 'ROLENAME_SCHEMA' prior to calling this function. 
""" if rolename == '': - raise securesystemslib.exceptions.InvalidNameError('Rolename must *not* be' + raise sslib_exceptions.InvalidNameError('Rolename must *not* be' ' an empty string.') if rolename != rolename.strip(): - raise securesystemslib.exceptions.InvalidNameError('Invalid rolename.' + raise sslib_exceptions.InvalidNameError('Invalid rolename.' ' Cannot start or end with whitespace: ' + rolename) if rolename.startswith('/') or rolename.endswith('/'): - raise securesystemslib.exceptions.InvalidNameError('Invalid rolename.' + raise sslib_exceptions.InvalidNameError('Invalid rolename.' ' Cannot start or end with a "/": ' + rolename) diff --git a/tuf/scripts/client.py b/tuf/scripts/client.py index f9d8c9dbaa..c1c7bd7cb2 100755 --- a/tuf/scripts/client.py +++ b/tuf/scripts/client.py @@ -71,10 +71,10 @@ import argparse import logging -import tuf -import tuf.client.updater -import tuf.settings -import tuf.log +from tuf import exceptions +from tuf import log +from tuf import settings +from tuf.client.updater import Updater # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -106,13 +106,13 @@ def update_client(parsed_arguments): """ if not isinstance(parsed_arguments, argparse.Namespace): - raise tuf.exceptions.Error('Invalid namespace object.') + raise exceptions.Error('Invalid namespace object.') else: logger.debug('We have a valid argparse Namespace object.') # Set the local repositories directory containing all of the metadata files. - tuf.settings.repositories_directory = '.' + settings.repositories_directory = '.' # Set the repository mirrors. This dictionary is needed by the Updater # class of updater.py. @@ -121,7 +121,7 @@ def update_client(parsed_arguments): # Create the repository object using the repository name 'repository' # and the repository mirrors defined above. 
- updater = tuf.client.updater.Updater('tufrepo', repository_mirrors) + updater = Updater('tufrepo', repository_mirrors) # The local destination directory to save the target files. destination_directory = './tuftargets' @@ -143,7 +143,7 @@ def update_client(parsed_arguments): try: updater.download_target(target, destination_directory) - except tuf.exceptions.DownloadError: + except exceptions.DownloadError: pass # Remove any files from the destination directory that are no longer being @@ -203,22 +203,22 @@ def parse_arguments(): # Set the logging level. if parsed_arguments.verbose == 5: - tuf.log.set_log_level(logging.CRITICAL) + log.set_log_level(logging.CRITICAL) elif parsed_arguments.verbose == 4: - tuf.log.set_log_level(logging.ERROR) + log.set_log_level(logging.ERROR) elif parsed_arguments.verbose == 3: - tuf.log.set_log_level(logging.WARNING) + log.set_log_level(logging.WARNING) elif parsed_arguments.verbose == 2: - tuf.log.set_log_level(logging.INFO) + log.set_log_level(logging.INFO) elif parsed_arguments.verbose == 1: - tuf.log.set_log_level(logging.DEBUG) + log.set_log_level(logging.DEBUG) else: - tuf.log.set_log_level(logging.NOTSET) + log.set_log_level(logging.NOTSET) # Return the repository mirror containing the metadata and target files. 
return parsed_arguments @@ -235,8 +235,8 @@ def parse_arguments(): try: update_client(arguments) - except (tuf.exceptions.NoWorkingMirrorError, tuf.exceptions.RepositoryError, - tuf.exceptions.FormatError, tuf.exceptions.Error) as e: + except (exceptions.NoWorkingMirrorError, exceptions.RepositoryError, + exceptions.FormatError, exceptions.Error) as e: sys.stderr.write('Error: ' + str(e) + '\n') sys.exit(1) diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py index da1664a86e..5866a8853c 100755 --- a/tuf/scripts/repo.py +++ b/tuf/scripts/repo.py @@ -146,17 +146,22 @@ import shutil import time import fnmatch +import six -import tuf -import tuf.log -import tuf.formats -import tuf.repository_tool as repo_tool +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats as sslib_formats +from securesystemslib import interface as sslib_interface +from securesystemslib import keys as sslib_keys +from securesystemslib import settings as sslib_settings +from securesystemslib import util as sslib_util -# 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, -# which installs the cryptography and pynacl. -import securesystemslib -from securesystemslib import interface -import six +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import log +from tuf import repository_tool as repo_tool +from tuf import roledb # See 'log.py' to learn how logging is handled in TUF. @@ -218,7 +223,7 @@ def process_command_line_arguments(parsed_arguments): # Do we have a valid argparse Namespace? 
if not isinstance(parsed_arguments, argparse.Namespace): - raise tuf.exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) + raise exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) else: logger.debug('We have a valid argparse Namespace.') @@ -266,15 +271,15 @@ def process_command_line_arguments(parsed_arguments): def delegate(parsed_arguments): if not parsed_arguments.delegatee: - raise tuf.exceptions.Error( + raise exceptions.Error( '--delegatee must be set to perform the delegation.') if parsed_arguments.delegatee in ('root', 'snapshot', 'timestamp', 'targets'): - raise tuf.exceptions.Error( + raise exceptions.Error( 'Cannot delegate to the top-level role: ' + repr(parsed_arguments.delegatee)) if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error( + raise exceptions.Error( '--pubkeys must be set to perform the delegation.') public_keys = [] @@ -319,7 +324,7 @@ def delegate(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) @@ -364,7 +369,7 @@ def revoke(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) @@ -388,21 +393,21 @@ def gen_key(parsed_arguments): } if parsed_arguments.key not in SUPPORTED_CLI_KEYTYPES: - tuf.exceptions.Error( + exceptions.Error( 'Invalid key type: ' + repr(parsed_arguments.key) + '. 
Supported' ' key types: ' + repr(SUPPORTED_CLI_KEYTYPES)) elif parsed_arguments.key == ECDSA_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ecdsa_keypair( + keypath = sslib_interface._generate_and_write_ecdsa_keypair( **keygen_kwargs) elif parsed_arguments.key == ED25519_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ed25519_keypair( + keypath = sslib_interface._generate_and_write_ed25519_keypair( **keygen_kwargs) # RSA key.. else: - keypath = securesystemslib.interface._generate_and_write_rsa_keypair( + keypath = sslib_interface._generate_and_write_rsa_keypair( **keygen_kwargs) @@ -415,8 +420,8 @@ def gen_key(parsed_arguments): pubkey_repo_path = os.path.join(parsed_arguments.path, KEYSTORE_DIR, os.path.basename(keypath + '.pub')) - securesystemslib.util.ensure_parent_dir(privkey_repo_path) - securesystemslib.util.ensure_parent_dir(pubkey_repo_path) + sslib_util.ensure_parent_dir(privkey_repo_path) + sslib_util.ensure_parent_dir(pubkey_repo_path) # Move them from the CWD to the repo's keystore. shutil.move(keypath, privkey_repo_path) @@ -436,12 +441,12 @@ def import_privatekey_from_file(keypath, password=None): # worry about leaking sensitive information about the key's location. # However, care should be taken when including the full path in exceptions # and log files. - password = securesystemslib.interface.get_password('Enter a password for' - ' the encrypted key (' + interface.TERM_RED + repr(keypath) + interface.TERM_RED + '): ', + password = sslib_interface.get_password('Enter a password for' + ' the encrypted key (' + sslib_interface.TERM_RED + repr(keypath) + sslib_interface.TERM_RED + '): ', confirm=False) # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) + sslib_formats.PASSWORD_SCHEMA.check_match(password) # Store the encrypted contents of 'filepath' prior to calling the decryption # routine. 
@@ -454,29 +459,29 @@ def import_privatekey_from_file(keypath, password=None): # the derived encryption key from 'password'. Raise # 'securesystemslib.exceptions.CryptoError' if the decryption fails. try: - key_object = securesystemslib.keys.decrypt_key(encrypted_key, password) + key_object = sslib_keys.decrypt_key(encrypted_key, password) - except securesystemslib.exceptions.CryptoError: + except sslib_exceptions.CryptoError: try: logger.debug( 'Decryption failed. Attempting to import a private PEM instead.') - key_object = securesystemslib.keys.import_rsakey_from_private_pem( + key_object = sslib_keys.import_rsakey_from_private_pem( encrypted_key, 'rsassa-pss-sha256', password) - except securesystemslib.exceptions.CryptoError as error: - six.raise_from(tuf.exceptions.Error(repr(keypath) + ' cannot be ' + except sslib_exceptions.CryptoError as error: + six.raise_from(exceptions.Error(repr(keypath) + ' cannot be ' ' imported, possibly because an invalid key file is given or ' ' the decryption password is incorrect.'), error) if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' + raise exceptions.Error('Trying to import an unsupported key' ' type: ' + repr(key_object['keytype'] + '.' ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) else: # Add "keyid_hash_algorithms" so that equal keys with different keyids can # be associated using supported keyid_hash_algorithms. - key_object['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS + key_object['keyid_hash_algorithms'] = sslib_settings.HASH_ALGORITHMS return key_object @@ -485,19 +490,19 @@ def import_privatekey_from_file(keypath, password=None): def import_publickey_from_file(keypath): try: - key_metadata = securesystemslib.util.load_json_file(keypath) + key_metadata = sslib_util.load_json_file(keypath) # An RSA public key is saved to disk in PEM format (not JSON), so the # load_json_file() call above can fail for this reason. 
Try to potentially # load the PEM string in keypath if an exception is raised. - except securesystemslib.exceptions.Error: - key_metadata = securesystemslib.interface.import_rsa_publickey_from_file( + except sslib_exceptions.Error: + key_metadata = sslib_interface.import_rsa_publickey_from_file( keypath) - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, junk = sslib_keys.format_metadata_to_key(key_metadata) if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' + raise exceptions.Error('Trying to import an unsupported key' ' type: ' + repr(key_object['keytype'] + '.' ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) @@ -508,7 +513,7 @@ def import_publickey_from_file(keypath): def add_verification_key(parsed_arguments): if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --trust.') + raise exceptions.Error('--pubkeys must be given with --trust.') repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) @@ -517,7 +522,7 @@ def add_verification_key(parsed_arguments): imported_pubkey = import_publickey_from_file(keypath) if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') + raise exceptions.Error('The given --role is not a top-level role.') elif parsed_arguments.role == 'root': repository.root.add_verification_key(imported_pubkey) @@ -532,7 +537,7 @@ def add_verification_key(parsed_arguments): else: repository.timestamp.add_verification_key(imported_pubkey) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.write('root', consistent_snapshot=consistent_snapshot, increment_version_number=False) @@ -544,7 +549,7 @@ def add_verification_key(parsed_arguments): def 
remove_verification_key(parsed_arguments): if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --distrust.') + raise exceptions.Error('--pubkeys must be given with --distrust.') repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) @@ -554,7 +559,7 @@ def remove_verification_key(parsed_arguments): try: if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') + raise exceptions.Error('The given --role is not a top-level role.') elif parsed_arguments.role == 'root': repository.root.remove_verification_key(imported_pubkey) @@ -574,10 +579,10 @@ def remove_verification_key(parsed_arguments): # securesystemslib.exceptions.FormatError, and the latter is not raised # because a valid key should have been returned by # import_publickey_from_file(). - except securesystemslib.exceptions.Error: + except sslib_exceptions.Error: print(repr(keypath) + ' is not a trusted key. Skipping.') - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.write('root', consistent_snapshot=consistent_snapshot, increment_version_number=False) @@ -591,7 +596,7 @@ def sign_role(parsed_arguments): repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] for keypath in parsed_arguments.sign: @@ -613,17 +618,17 @@ def sign_role(parsed_arguments): else: # TODO: repository_tool.py will be refactored to clean up the following # code, which adds and signs for a non-existent role. 
- if not tuf.roledb.role_exists(parsed_arguments.role): + if not roledb.role_exists(parsed_arguments.role): # Load the private key keydb and set the roleinfo in roledb so that # metadata can be written with repository.write(). - tuf.keydb.remove_key(role_privatekey['keyid'], + keydb.remove_key(role_privatekey['keyid'], repository_name = repository._repository_name) - tuf.keydb.add_key( + keydb.add_key( role_privatekey, repository_name = repository._repository_name) # Set the delegated metadata file to expire in 3 months. - expiration = tuf.formats.unix_timestamp_to_datetime( + expiration = formats.unix_timestamp_to_datetime( int(time.time() + 7889230)) expiration = expiration.isoformat() + 'Z' @@ -634,7 +639,7 @@ def sign_role(parsed_arguments): 'signatures': [], 'version': 1, 'expires': expiration, 'delegations': {'keys': {}, 'roles': []}} - tuf.roledb.add_role(parsed_arguments.role, roleinfo, + roledb.add_role(parsed_arguments.role, roleinfo, repository_name=repository._repository_name) # Generate the Targets object of --role, and add it to the top-level @@ -710,12 +715,11 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, logger.debug(repr(target_path) + ' does not exist. 
Skipping.') else: - securesystemslib.util.ensure_parent_dir( - os.path.join(repo_targets_path, target_path)) + sslib_util.ensure_parent_dir(os.path.join(repo_targets_path, target_path)) shutil.copy(target_path, os.path.join(repo_targets_path, target_path)) - roleinfo = tuf.roledb.get_roleinfo( + roleinfo = roledb.get_roleinfo( parsed_arguments.role, repository_name=repository._repository_name) # It is assumed we have a delegated role, and that the caller has made @@ -728,7 +732,7 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, logger.debug('Replacing target: ' + repr(target_path)) roleinfo['paths'].update({target_path: custom}) - tuf.roledb.update_roleinfo(parsed_arguments.role, roleinfo, + roledb.update_roleinfo(parsed_arguments.role, roleinfo, mark_role_as_dirty=True, repository_name=repository._repository_name) @@ -736,15 +740,15 @@ def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, def remove_target_files_from_metadata(parsed_arguments, repository): if parsed_arguments.role in ('root', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error( + raise exceptions.Error( 'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.' ' It must be "targets" or a delegated rolename.') else: - # NOTE: The following approach of using tuf.roledb to update the target + # NOTE: The following approach of using roledb to update the target # files will be modified in the future when the repository tool's API is # refactored. 
- roleinfo = tuf.roledb.get_roleinfo( + roleinfo = roledb.get_roleinfo( parsed_arguments.role, repository._repository_name) for glob_pattern in parsed_arguments.remove: @@ -757,7 +761,7 @@ def remove_target_files_from_metadata(parsed_arguments, repository): ' given path/glob pattern ' + repr(glob_pattern)) continue - tuf.roledb.update_roleinfo( + roledb.update_roleinfo( parsed_arguments.role, roleinfo, mark_role_as_dirty=True, repository_name=repository._repository_name) @@ -782,7 +786,7 @@ def add_targets(parsed_arguments): add_target_to_repo(parsed_arguments, target_path, repo_targets_path, repository) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] if parsed_arguments.role == 'targets': @@ -831,7 +835,7 @@ def remove_targets(parsed_arguments): # repo.py --init --pw my_password: parsed_arguments.pw = 'my_password' # repo.py --init --pw: The user is prompted for a password, as follows: if not parsed_arguments.pw: - parsed_arguments.pw = securesystemslib.interface.get_password( + parsed_arguments.pw = sslib_interface.get_password( prompt='Enter a password for the top-level role keys: ', confirm=True) targets_private = import_privatekey_from_file( @@ -852,7 +856,7 @@ def remove_targets(parsed_arguments): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - consistent_snapshot = tuf.roledb.get_roleinfo('root', + consistent_snapshot = roledb.get_roleinfo('root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) @@ -903,19 +907,19 @@ def set_top_level_keys(repository, parsed_arguments): # repo.py --init --*_pw my_pw: parsed_arguments.*_pw = 'my_pw' # repo.py --init --*_pw: The user is prompted for a password. 
- securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.root_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, ROOT_KEY_NAME), prompt=(not parsed_arguments.root_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.targets_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), prompt=(not parsed_arguments.targets_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.snapshot_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), prompt=(not parsed_arguments.snapshot_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( + sslib_interface._generate_and_write_ed25519_keypair( password=parsed_arguments.timestamp_pw, filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TIMESTAMP_KEY_NAME), prompt=(not parsed_arguments.timestamp_pw)) @@ -1126,7 +1130,7 @@ def parse_arguments(): logging_levels = [logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] - tuf.log.set_log_level(logging_levels[parsed_args.verbose]) + log.set_log_level(logging_levels[parsed_args.verbose]) return parsed_args @@ -1146,7 +1150,7 @@ def parse_arguments(): try: process_command_line_arguments(arguments) - except (tuf.exceptions.Error) as e: + except (exceptions.Error) as e: sys.stderr.write('Error: ' + str(e) + '\n') sys.exit(1) diff --git a/tuf/sig.py b/tuf/sig.py index 2351e0e381..cc572ae647 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -50,12 +50,15 @@ import logging -import tuf -import tuf.keydb -import tuf.roledb -import tuf.formats +import securesystemslib # pylint: disable=unused-import +from securesystemslib import exceptions as sslib_exceptions +from securesystemslib import formats 
as sslib_formats +from securesystemslib import keys as sslib_keys -import securesystemslib +from tuf import exceptions +from tuf import formats +from tuf import keydb +from tuf import roledb # See 'log.py' to learn how logging is handled in TUF. logger = logging.getLogger(__name__) @@ -70,14 +73,14 @@ def get_signature_status(signable, role=None, repository_name='default', * bad -- Invalid signature * good -- Valid signature from key that is available in 'tuf.keydb', and is - authorized for the passed role as per 'tuf.roledb' (authorization may be + authorized for the passed role as per 'roledb' (authorization may be overwritten by passed 'keyids'). * unknown -- Signature from key that is not available in 'tuf.keydb', or if 'role' is None. * unknown signing schemes -- Signature from key with unknown signing scheme. * untrusted -- Valid signature from key that is available in 'tuf.keydb', - but is not trusted for the passed role as per 'tuf.roledb' or the passed + but is not trusted for the passed role as per 'roledb' or the passed 'keyids'. NOTE: The result may contain duplicate keyids or keyids that reference the @@ -96,7 +99,7 @@ def get_signature_status(signable, role=None, repository_name='default', TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). threshold: - Rather than reference the role's threshold as set in tuf.roledb.py, use + Rather than reference the role's threshold as set in roledb, use the given 'threshold' to calculate the signature status of 'signable'. 'threshold' is an integer value that sets the role's threshold value, or the minimum number of signatures needed for metadata to be considered @@ -105,7 +108,7 @@ def get_signature_status(signable, role=None, repository_name='default', keyids: Similar to the 'threshold' argument, use the supplied list of 'keyids' to calculate the signature status, instead of referencing the keyids - in tuf.roledb.py for 'role'. + in roledb for 'role'. 
securesystemslib.exceptions.FormatError, if 'signable' does not have the @@ -125,17 +128,17 @@ def get_signature_status(signable, role=None, repository_name='default', # arguments have the appropriate number of objects and object types, and that # all dict keys are properly named. Raise # 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + formats.SIGNABLE_SCHEMA.check_match(signable) + sslib_formats.NAME_SCHEMA.check_match(repository_name) if role is not None: - tuf.formats.ROLENAME_SCHEMA.check_match(role) + formats.ROLENAME_SCHEMA.check_match(role) if threshold is not None: - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) + formats.THRESHOLD_SCHEMA.check_match(threshold) if keyids is not None: - securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) + sslib_formats.KEYIDS_SCHEMA.check_match(keyids) # The signature status dictionary returned. signature_status = {} @@ -147,7 +150,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Extract the relevant fields from 'signable' that will allow us to identify # the different classes of keys (i.e., good_sigs, bad_sigs, etc.). - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') + signed = sslib_formats.encode_canonical(signable['signed']).encode('utf-8') signatures = signable['signatures'] # Iterate the signatures and enumerate the signature_status fields. @@ -157,17 +160,17 @@ def get_signature_status(signable, role=None, repository_name='default', # Does the signature use an unrecognized key? try: - key = tuf.keydb.get_key(keyid, repository_name) + key = keydb.get_key(keyid, repository_name) - except tuf.exceptions.UnknownKeyError: + except exceptions.UnknownKeyError: unknown_sigs.append(keyid) continue # Does the signature use an unknown/unsupported signing scheme? 
try: - valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) + valid_sig = sslib_keys.verify_signature(key, signature, signed) - except securesystemslib.exceptions.UnsupportedAlgorithmError: + except sslib_exceptions.UnsupportedAlgorithmError: unknown_signing_schemes.append(keyid) continue @@ -179,7 +182,7 @@ def get_signature_status(signable, role=None, repository_name='default', # Note that if the role is not known, tuf.exceptions.UnknownRoleError # is raised here. if keyids is None: - keyids = tuf.roledb.get_role_keyids(role, repository_name) + keyids = roledb.get_role_keyids(role, repository_name) if keyid not in keyids: untrusted_sigs.append(keyid) @@ -203,7 +206,7 @@ def get_signature_status(signable, role=None, repository_name='default', if threshold is None: # Note that if the role is not known, tuf.exceptions.UnknownRoleError is # raised here. - threshold = tuf.roledb.get_role_threshold( + threshold = roledb.get_role_threshold( role, repository_name=repository_name) else: @@ -233,7 +236,7 @@ def verify(signable, role, repository_name='default', threshold=None, Verify that 'signable' has a valid threshold of authorized signatures identified by unique keyids. The threshold and whether a keyid is authorized is determined by querying the 'threshold' and 'keyids' info for - the passed 'role' in 'tuf.roledb'. Both values can be overwritten by + the passed 'role' in 'roledb'. Both values can be overwritten by passing the 'threshold' or 'keyids' arguments. NOTE: @@ -251,7 +254,7 @@ def verify(signable, role, repository_name='default', threshold=None, TUF role string (e.g. 'root', 'targets', 'snapshot' or timestamp). threshold: - Rather than reference the role's threshold as set in tuf.roledb.py, use + Rather than reference the role's threshold as set in roledb, use the given 'threshold' to calculate the signature status of 'signable'. 
'threshold' is an integer value that sets the role's threshold value, or the minimum number of signatures needed for metadata to be considered @@ -260,7 +263,7 @@ def verify(signable, role, repository_name='default', threshold=None, keyids: Similar to the 'threshold' argument, use the supplied list of 'keyids' to calculate the signature status, instead of referencing the keyids - in tuf.roledb.py for 'role'. + in roledb for 'role'. tuf.exceptions.UnknownRoleError, if 'role' is not recognized. @@ -279,9 +282,9 @@ def verify(signable, role, repository_name='default', threshold=None, role's threshold, False otherwise. """ - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - tuf.formats.ROLENAME_SCHEMA.check_match(role) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) + formats.SIGNABLE_SCHEMA.check_match(signable) + formats.ROLENAME_SCHEMA.check_match(role) + sslib_formats.NAME_SCHEMA.check_match(repository_name) # Retrieve the signature status. tuf.sig.get_signature_status() raises: # tuf.exceptions.UnknownRoleError @@ -298,11 +301,11 @@ def verify(signable, role, repository_name='default', threshold=None, # Note: get_signature_status() is expected to verify that 'threshold' is # not None or <= 0. if threshold is None or threshold <= 0: #pragma: no cover - raise securesystemslib.exceptions.Error("Invalid threshold: " + repr(threshold)) + raise sslib_exceptions.Error("Invalid threshold: " + repr(threshold)) unique_keys = set() for keyid in good_sigs: - key = tuf.keydb.get_key(keyid, repository_name) + key = keydb.get_key(keyid, repository_name) unique_keys.add(key['keyval']['public']) return len(unique_keys) >= threshold @@ -337,7 +340,7 @@ def may_need_new_keys(signature_status): # This check will ensure 'signature_status' has the appropriate number # of objects and object types, and that all dict keys are properly named. # Raise 'securesystemslib.exceptions.FormatError' if the check fails. 
- tuf.formats.SIGNATURESTATUS_SCHEMA.check_match(signature_status) + formats.SIGNATURESTATUS_SCHEMA.check_match(signature_status) unknown = signature_status['unknown_sigs'] untrusted = signature_status['untrusted_sigs'] @@ -391,10 +394,10 @@ def generate_rsa_signature(signed, rsakey_dict): # We need 'signed' in canonical JSON format to generate # the 'method' and 'sig' fields of the signature. - signed = securesystemslib.formats.encode_canonical(signed).encode('utf-8') + signed = sslib_formats.encode_canonical(signed).encode('utf-8') # Generate the RSA signature. # Raises securesystemslib.exceptions.FormatError and TypeError. - signature = securesystemslib.keys.create_signature(rsakey_dict, signed) + signature = sslib_keys.create_signature(rsakey_dict, signed) return signature