From e620b005a8161d682856a09a8deb59a40367f113 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 1 Sep 2016 11:07:02 -0400 Subject: [PATCH 01/20] Add remaining code coverage for repository_lib.py --- tests/test_repository_lib.py | 7 +++++++ tuf/repository_lib.py | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 4d798248d0..107add4cb6 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -961,6 +961,13 @@ def test__load_top_level_metadata(self): shutil.copytree(os.path.join('repository_data', 'repository', 'targets'), targets_directory) + # Add a duplicate signature to the Root file for testing purposes). + root_file = os.path.join(metadata_directory, 'root.json') + signable = tuf.util.load_json_file(os.path.join(metadata_directory, 'root.json')) + signable['signatures'].append(signable['signatures'][0]) + + repo_lib.write_metadata_file(signable, root_file, 8, ['gz'], False) + # Remove compressed metadata so that we can test for loading of a # repository with no compression enabled. for role_file in os.listdir(metadata_directory): diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index ae3fa760f0..643c6c23df 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -464,7 +464,7 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, # metadata extension. metadata_name_extension = metadata_name - for metadata_extension in METADATA_EXTENSIONS: + for metadata_extension in METADATA_EXTENSIONS: #pragma: no branch if metadata_name.endswith(metadata_extension): metadata_name = metadata_name[:-len(metadata_extension)] break @@ -731,7 +731,7 @@ def _load_top_level_metadata(repository, top_level_filenames): # key when it was added. 
try: tuf.keydb.add_key(key_object) - for keyid in keyids: + for keyid in keyids: #pragma: no branch key_object['keyid'] = keyid tuf.keydb.add_key(key_object, keyid=None) From 2876702f48d5c240f1eaf584c8aa42ffb92d7956 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 1 Sep 2016 12:53:28 -0400 Subject: [PATCH 02/20] Add code coverage for keys.extract_pem() --- tests/test_keys.py | 44 +++++++++++++++++++++++++++++++++++++++++--- tuf/keys.py | 4 ++++ 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/tests/test_keys.py b/tests/test_keys.py index a59f79f61f..3bbb1c4928 100755 --- a/tests/test_keys.py +++ b/tests/test_keys.py @@ -99,14 +99,22 @@ def test_format_keyval_to_metadata(self): self.assertRaises(tuf.FormatError, KEYS.format_keyval_to_metadata, 'bad_keytype', keyvalue) + # Test for missing 'public' entry. public = keyvalue['public'] del keyvalue['public'] self.assertRaises(tuf.FormatError, KEYS.format_keyval_to_metadata, keytype, keyvalue) keyvalue['public'] = public - - - + + # Test for missing 'private' entry. + private = keyvalue['private'] + del keyvalue['private'] + self.assertRaises(tuf.FormatError, KEYS.format_keyval_to_metadata, + keytype, keyvalue, private=True) + keyvalue['private'] = private + + + def test_format_rsakey_from_pem(self): pem = self.rsakey_dict['keyval']['public'] rsa_key = KEYS.format_rsakey_from_pem(pem) @@ -327,6 +335,36 @@ def test_decrypt_key(self): KEYS._GENERAL_CRYPTO_LIBRARY = default_general_library + + def test_extract_pem(self): + # Normal case. + private_pem = KEYS.extract_pem(self.rsakey_dict['keyval']['private'], + private_pem=True) + self.assertTrue(tuf.formats.PEMRSA_SCHEMA.matches(private_pem)) + + # Test for an invalid PEM. 
+ pem_header = '-----BEGIN RSA PRIVATE KEY-----' + pem_footer = '-----END RSA PRIVATE KEY-----' + + header_start = private_pem.index(pem_header) + footer_start = private_pem.index(pem_footer, header_start + len(pem_header)) + + missing_header = private_pem[header_start + len(pem_header):footer_start + len(pem_footer)] + missing_footer = private_pem[header_start:footer_start] + #print('missing header: ' + repr(missing_header)) + #print('missing footer: ' + repr(missing_footer)) + + self.assertRaises(tuf.FormatError, KEYS.extract_pem, + 'invalid_pem', private_pem=True) + + self.assertRaises(tuf.FormatError, KEYS.extract_pem, + missing_header, private_pem=True) + + self.assertRaises(tuf.FormatError, KEYS.extract_pem, + missing_footer, private_pem=True) + + + # Run the unit tests. if __name__ == '__main__': unittest.main() diff --git a/tuf/keys.py b/tuf/keys.py index 5d4c3ab422..fc723fc186 100755 --- a/tuf/keys.py +++ b/tuf/keys.py @@ -1128,6 +1128,10 @@ def extract_pem(pem, private_pem=False): pem: A string in PEM format. + private_pem: + Boolean indicating whether 'pem' contains a private key. 'pem' is + expected to begin and end with a private header and footer. + tuf.FormatError, if 'pem' is improperly formatted. From daa79ea6407cf5701e3c535f97392dec874d895c Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 1 Sep 2016 16:39:03 -0400 Subject: [PATCH 03/20] Add code coverage for en(de)cryption functions of pyca_crypto_keys.py --- tests/test_keys.py | 1 - tests/test_pyca_crypto_keys.py | 40 ++++++++++++++++++++++++++++++---- tuf/pyca_crypto_keys.py | 2 +- 3 files changed, 37 insertions(+), 6 deletions(-) diff --git a/tests/test_keys.py b/tests/test_keys.py index 3bbb1c4928..0845457b5f 100755 --- a/tests/test_keys.py +++ b/tests/test_keys.py @@ -15,7 +15,6 @@ Test cases for test_keys.py. - TODO: test case for ed25519 key generation and refactor. 
""" # Help with Python 3 compatibility, where the print statement is a function, an diff --git a/tests/test_pyca_crypto_keys.py b/tests/test_pyca_crypto_keys.py index 790e021320..64ed7a8553 100755 --- a/tests/test_pyca_crypto_keys.py +++ b/tests/test_pyca_crypto_keys.py @@ -145,11 +145,43 @@ def test__decrypt(self): def test_encrypt_key(self): - # Verify that a key argument with a missing private key is rejected. - global public_rsa - + # Normal case. + ed25519_key = {'keytype': 'ed25519', + 'keyid': 'd62247f817883f593cf6c66a5a55292488d457bcf638ae03207dbbba9dbe457d', + 'keyval': {'public': '74addb5ad544a4306b34741bc1175a3613a8d7dc69ff64724243efdec0e301ad', + 'private': '1f26964cc8d4f7ee5f3c5da2fbb7ab35811169573ac367b860a537e47789f8c4'}} + + crypto_keys.encrypt_key(ed25519_key, 'password') + + # Verify that a key with a missing 'private' key is rejected. + del ed25519_key['keyval']['private'] self.assertRaises(tuf.FormatError, crypto_keys.encrypt_key, - public_rsa, 'password') + ed25519_key, 'password') + + + def test__decrypt_key(self): + ed25519_key = {'keytype': 'ed25519', + 'keyid': 'd62247f817883f593cf6c66a5a55292488d457bcf638ae03207dbbba9dbe457d', + 'keyval': {'public': '74addb5ad544a4306b34741bc1175a3613a8d7dc69ff64724243efdec0e301ad', + 'private': '1f26964cc8d4f7ee5f3c5da2fbb7ab35811169573ac367b860a537e47789f8c4'}} + + encrypted_key = crypto_keys.encrypt_key(ed25519_key, 'password') + crypto_keys.encrypt_key(ed25519_key, 'password') + + salt, iterations, hmac, iv, ciphertext = \ + encrypted_key.split(crypto_keys._ENCRYPTION_DELIMITER) + + encrypted_key_invalid_hmac = encrypted_key.replace(hmac, '123abc') + + self.assertRaises(tuf.CryptoError, crypto_keys._decrypt, + encrypted_key_invalid_hmac, 'password') + + + + def test_create_rsa_public_and_private_from_encrypted_pem(self): + self.assertRaises(tuf.CryptoError, + crypto_keys.create_rsa_public_and_private_from_encrypted_pem, + 'bad_encrypted_key', 'password') diff --git a/tuf/pyca_crypto_keys.py 
b/tuf/pyca_crypto_keys.py index 164ca3cfff..724d1b276a 100755 --- a/tuf/pyca_crypto_keys.py +++ b/tuf/pyca_crypto_keys.py @@ -738,7 +738,7 @@ def encrypt_key(key_object, password): tuf.formats.PASSWORD_SCHEMA.check_match(password) # Ensure the private portion of the key is included in 'key_object'. - if not key_object['keyval']['private']: + if 'private' not in key_object['keyval'] or not key_object['keyval']['private']: raise tuf.FormatError('Key object does not contain a private part.') # Derive a key (i.e., an appropriate encryption key and not the From 26016479e9dbaf69922de5ad7d3eef7d3869fde0 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 15 Sep 2016 15:19:27 -0400 Subject: [PATCH 04/20] Add test repository following changes to repository writing and backtracking --- tests/repository_data/client/basic_client.py | 226 ------------------ .../client/metadata/current/empty_file.json | 0 .../client/metadata/current/role1.json | 27 ++- .../client/metadata/current/role1.json.gz | Bin 465 -> 619 bytes .../client/metadata/current/role2.json | 19 ++ .../client/metadata/current/role2.json.gz | Bin 0 -> 302 bytes .../client/metadata/current/root.json | 2 +- .../client/metadata/current/root.json.gz | Bin 1680 -> 1679 bytes .../client/metadata/current/snapshot.json | 13 +- .../client/metadata/current/snapshot.json.gz | Bin 380 -> 397 bytes .../client/metadata/current/targets.json | 4 +- .../client/metadata/current/targets.json.gz | Bin 816 -> 816 bytes .../client/metadata/current/timestamp.json | 6 +- .../client/metadata/current/timestamp.json.gz | Bin 368 -> 367 bytes .../client/metadata/previous/role1.json | 27 ++- .../client/metadata/previous/role1.json.gz | Bin 465 -> 619 bytes .../client/metadata/previous/role2.json | 19 ++ .../client/metadata/previous/role2.json.gz | Bin 0 -> 302 bytes .../client/metadata/previous/root.json | 2 +- .../client/metadata/previous/root.json.gz | Bin 1680 -> 1679 bytes .../client/metadata/previous/snapshot.json | 13 +- 
.../client/metadata/previous/snapshot.json.gz | Bin 380 -> 397 bytes .../client/metadata/previous/targets.json | 4 +- .../client/metadata/previous/targets.json.gz | Bin 816 -> 816 bytes .../client/metadata/previous/timestamp.json | 6 +- .../metadata/previous/timestamp.json.gz | Bin 368 -> 367 bytes tests/repository_data/generate.py | 6 +- .../repository/metadata.staged/role1.json | 27 ++- .../repository/metadata.staged/role1.json.gz | Bin 465 -> 619 bytes .../repository/metadata.staged/role2.json | 19 ++ .../repository/metadata.staged/role2.json.gz | Bin 0 -> 302 bytes .../repository/metadata.staged/root.json | 2 +- .../repository/metadata.staged/root.json.gz | Bin 1680 -> 1679 bytes .../repository/metadata.staged/snapshot.json | 13 +- .../metadata.staged/snapshot.json.gz | Bin 380 -> 397 bytes .../repository/metadata.staged/targets.json | 4 +- .../metadata.staged/targets.json.gz | Bin 816 -> 816 bytes .../repository/metadata.staged/timestamp.json | 6 +- .../metadata.staged/timestamp.json.gz | Bin 368 -> 367 bytes .../repository/metadata/role1.json | 27 ++- .../repository/metadata/role1.json.gz | Bin 465 -> 619 bytes .../repository/metadata/role2.json | 19 ++ .../repository/metadata/role2.json.gz | Bin 0 -> 302 bytes .../repository/metadata/root.json | 2 +- .../repository/metadata/root.json.gz | Bin 1680 -> 1679 bytes .../repository/metadata/snapshot.json | 13 +- .../repository/metadata/snapshot.json.gz | Bin 380 -> 397 bytes .../repository/metadata/targets.json | 4 +- .../repository/metadata/targets.json.gz | Bin 816 -> 816 bytes .../repository/metadata/timestamp.json | 6 +- .../repository/metadata/timestamp.json.gz | Bin 368 -> 367 bytes 51 files changed, 245 insertions(+), 271 deletions(-) delete mode 100755 tests/repository_data/client/basic_client.py delete mode 100644 tests/repository_data/client/metadata/current/empty_file.json create mode 100644 tests/repository_data/client/metadata/current/role2.json create mode 100644 
tests/repository_data/client/metadata/current/role2.json.gz create mode 100644 tests/repository_data/client/metadata/previous/role2.json create mode 100644 tests/repository_data/client/metadata/previous/role2.json.gz create mode 100644 tests/repository_data/repository/metadata.staged/role2.json create mode 100644 tests/repository_data/repository/metadata.staged/role2.json.gz create mode 100644 tests/repository_data/repository/metadata/role2.json create mode 100644 tests/repository_data/repository/metadata/role2.json.gz diff --git a/tests/repository_data/client/basic_client.py b/tests/repository_data/client/basic_client.py deleted file mode 100755 index cec2193d60..0000000000 --- a/tests/repository_data/client/basic_client.py +++ /dev/null @@ -1,226 +0,0 @@ -#!/usr/bin/env python - -""" - - basic_client.py - - - Vladimir Diaz - - - September 2012 - - - See LICENSE for licensing information. - - - Provide a basic TUF client that can update all of the metatada and target - files provided by the user-specified repository mirror. Updated files are - saved to the 'targets' directory in the current working directory. The - repository mirror is specified by the user through the '--repo' command- - line option. - - Normally, a software updater integrating TUF will develop their own costum - client module by importing 'tuf.client.updater', instantiating the required - object, and calling the desired methods to perform an update. This basic - client is provided to users who wish to give TUF a quick test run without - the hassle of writing client code. This module can also used by updaters that - do not need the customization and only require their clients to perform an - update of all the files provided by their repository mirror(s). - - For software updaters that DO require customization, see the 'example_client.py' - script. 
The 'example_client.py' script provides an outline of the client code - that software updaters may develop and then tailor to their specific software - updater or package manager. - - Additional tools for clients running legacy applications will also be made - available. These tools will allow secure software updates using The Update - Framework without the need to modify the original application. - - - $ python basic_client.py --repo http://localhost:8001 - $ python basic_client.py --repo http://localhost:8001 --verbose 3 - - - --verbose: - Set the verbosity level of logging messages. Accepts values 1-5. - - --repo: - Set the repository mirror that will be responding to client requests. - E.g., 'http://locahost:8001'. -""" - -# Help with Python 3 compatibility, where the print statement is a function, an -# implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -import sys -import optparse -import logging - -import tuf -import tuf.formats -import tuf.client.updater -import tuf.log - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger('tuf.basic_client') - - -def update_client(repository_mirror): - """ - - Perform an update of the metadata and target files located at - 'repository_mirror'. Target files are saved to the 'targets' directory - in the current working directory. The current directory must already - include a 'metadata' directory, which in turn must contain the 'current' - and 'previous' directories. At a minimum, these two directories require - the 'root.json' metadata file. - - - repository_mirror: - The URL to the repository mirror hosting the metadata and target - files. E.g., 'http://localhost:8001' - - - tuf.RepositoryError, if 'repository_mirror' is improperly formatted. 
- - - Connects to a repository mirror and updates the metadata files and - any target files. Obsolete targets are also removed locally. - - - None. - """ - - # Does 'repository_mirror' have the correct format? - try: - tuf.formats.URL_SCHEMA.check_match(repository_mirror) - except tuf.FormatError as e: - message = 'The repository mirror supplied is invalid.' - raise tuf.RepositoryError(message) - - # Set the local repository directory containing all of the metadata files. - tuf.conf.repository_directory = '.' - - # Set the repository mirrors. This dictionary is needed by the Updater - # class of updater.py. - repository_mirrors = {'mirror': {'url_prefix': repository_mirror, - 'metadata_path': 'metadata', - 'targets_path': 'targets', - 'confined_target_dirs': ['']}} - - # Create the repository object using the repository name 'repository' - # and the repository mirrors defined above. - updater = tuf.client.updater.Updater('repository', repository_mirrors) - - # The local destination directory to save the target files. - destination_directory = './targets' - - # Refresh the repository's top-level roles, store the target information for - # all the targets tracked, and determine which of these targets have been - # updated. - updater.refresh() - """ - all_targets = updater.all_targets() - updated_targets = updater.updated_targets(all_targets, destination_directory) - - # Download each of these updated targets and save them locally. - for target in updated_targets: - try: - updater.download_target(target, destination_directory) - except tuf.DownloadError as e: - pass - - # Remove any files from the destination directory that are no longer being - # tracked. - updater.remove_obsolete_targets(destination_directory) - """ - - - - -def parse_options(): - """ - - Parse the command-line options and set the logging level - as specified by the user through the --verbose option. - 'basic_client' expects the '--repo' to be set by the user. 
- - Example: - $ python basic_client.py --repo http://localhost:8001 - - If the required option is unset, a parser error is printed - and the scripts exits. - - - None. - - - None. - - - Sets the logging level for TUF logging. - - - The 'options.REPOSITORY_MIRROR' string. - """ - - parser = optparse.OptionParser() - - # Add the options supported by 'basic_client' to the option parser. - parser.add_option('--verbose', dest='VERBOSE', type=int, default=2, - help='Set the verbosity level of logging messages.' - 'The lower the setting, the greater the verbosity.') - - parser.add_option('--repo', dest='REPOSITORY_MIRROR', type='string', - help='Specifiy the repository mirror\'s URL prefix ' - '(e.g., http://www.example.com:8001/tuf/).' - ' The client will download updates from this mirror.') - - options, args = parser.parse_args() - - # Set the logging level. - if options.VERBOSE == 5: - tuf.log.set_log_level(logging.CRITICAL) - elif options.VERBOSE == 4: - tuf.log.set_log_level(logging.ERROR) - elif options.VERBOSE == 3: - tuf.log.set_log_level(logging.WARNING) - elif options.VERBOSE == 2: - tuf.log.set_log_level(logging.INFO) - elif options.VERBOSE == 1: - tuf.log.set_log_level(logging.DEBUG) - else: - tuf.log.set_log_level(logging.NOTSET) - - # Ensure the '--repo' option was set by the user. - if options.REPOSITORY_MIRROR is None: - message = '"--repo" must be set on the command-line.' - parser.error(message) - - # Return the repository mirror containing the metadata and target files. - return options.REPOSITORY_MIRROR - - - -if __name__ == '__main__': - - # Parse the options and set the logging level. - repository_mirror = parse_options() - - # Perform an update of all the files in the 'targets' directory located in - # the current directory. - try: - update_client(repository_mirror) - - except (tuf.NoWorkingMirrorError, tuf.RepositoryError) as e: - sys.stderr.write('Error: '+str(e)+'\n') - sys.exit(1) - - # Successfully updated the client's target files. 
- sys.exit(0) diff --git a/tests/repository_data/client/metadata/current/empty_file.json b/tests/repository_data/client/metadata/current/empty_file.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/repository_data/client/metadata/current/role1.json b/tests/repository_data/client/metadata/current/role1.json index 37b378d3e2..96d921b5eb 100644 --- a/tests/repository_data/client/metadata/current/role1.json +++ b/tests/repository_data/client/metadata/current/role1.json @@ -3,14 +3,35 @@ { "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", "method": "ed25519", - "sig": "98c7634186f7a02b3a56d8204e62b3a1d25225935dc47c720426ef591d09931e071f96f8d47ef3ec814dd7278f05c01190e60386ad03e546869c7aeeb3249703" + "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" } ], "signed": { "_type": "Targets", "delegations": { - "keys": {}, - "roles": [] + "keys": { + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" + } + } + }, + "roles": [ + { + "keyids": [ + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] }, "expires": "2030-01-01T00:00:00Z", "targets": { diff --git a/tests/repository_data/client/metadata/current/role1.json.gz b/tests/repository_data/client/metadata/current/role1.json.gz index 53d950b9bdf648f12a61366d5bb37b1bf079acf7..9c2e24c6b4bc138cc178fc95188a2deb61e510bc 100644 GIT binary patch literal 619 zcmV-x0+jt9iwFRC>e^QV|E*KWZrm^oz56Q+pVO9-NLk8v=aQeM%e2+ia}~KwJXKfb-gCUv^g6 z){xK9Cq%D28*AJQh(iM3lx8kf{rs>oB6F)V6V1!?agw>kCqD0F8kfGCB!ldD>W_zU zOI#?s9sUDv*GiifFCCBF3lF>Fxu3fIRjnmq>{yds8QspLbh93xAMRzYSUzd6UcK>f 
zf3j{)uVLupoNIzoy3i^*1y;yfvIZ1FEnZN0(%yua4WPC}Vp1z8nK+9nQOjE!m+JPv ztkR#4gWO?!0{aQwKQs9k4+q{aB+T#Y8wDqxy5-qh-X!OJUuL5}%ofc&jB>l`%Di;P zVRnhS1ADGEaE-j5`gNol4F3zD}XjWauKg_Uc zT?#8yxkboiTBnkVOc)#!aN#6|awUjSEB4-H*6es53XVDgXy=+Xmis0Kac;J$tZMo$L63De=%Gjb#*|jDbAaNmQ zP8tch2$T`gK~^N0rL-RttWvH1sGC6!yQwREX;*C&uk60bc`T#1l7Fc#zX8(V<;ute F000m*C~*J) literal 465 zcmV;?0WSU@iwFR3sD@Vp|2>k?Zrd;nMc?xkhM&8Qq9}>#cl2pPF;F6v1kI8n_J$Tk z{(Z8Y7LWkzQo6c2^v7-(r{n%C^WUpY5d8_P5HDTfgp@MT6EQ04K3f=-m zj=3f=D;_B)@M8~i+YQeTcOte#T}X_-=l54zB0kA=Z}YU~9@S2^m-%=(Pxp;0vUPFW zUUt2lHm1*u_j=0SUXL5-5y8VZaD6-hJg(#CWLD1H z)+iwFRC>e^QV|22?HZo@DPME5?0p>+ySAIf6RkyVSJpiL?@njdVpXb|M? zmF)ze78*!zhI6lKGxmowPd|f>W?O$$b$u_VHs5;dr6`6WB4kG*8hrrYbwRariES5S zb}l-J-c#^_p-7%nvK&&YBGh(FX0z~4I(=Ohpo&AZFTL2$V#?BSvKTBjO36!#np+#m zHZ0T>zLm@ka%hC8c(k6p03l|JkT_?=Hbwy?m$PGWIe05ExhN4s03ar3TVz%QpM7o! zTObt-lz~kN^Qh|m=GlnOQRgPi=XAU39C63PL8o!aZA6cH;Hkfy$LGVPvgPtvHybX; zwbT8~UViD{wO@-i4m`YpEwcmIF7v*kr>D=0WPWuRi%}75RYE@g0ng)=5VipT0N`PZ Ae*gdg literal 0 HcmV?d00001 diff --git a/tests/repository_data/client/metadata/current/root.json b/tests/repository_data/client/metadata/current/root.json index 94327a115a..aebb36c2e8 100644 --- a/tests/repository_data/client/metadata/current/root.json +++ b/tests/repository_data/client/metadata/current/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": 
"3851d11ed11ea69ab5d873cfd015de79dc856d83e0a060e73d535d705da086c26191e6bc1ed6bbdde9305c3816c1c5885b48cf51c41fedc906a5ebe0e33a6b823145d40bd3e588e77c6bc724b62f4b2ca9700da03e0ba603170bfd365ea1d25ee7f9661848a14f5916869f00f3e03aa4cb468a4de647bbf205b96f9aa8dd408e3e0b1f9d53fe74654dfe139441dfe3651b3473b67bd104d754112e594a9c6ed0127e94b9057322d630f70c93c01d0cd0c2b98f6abdfd2ed7ac7dc5d3e201d191e168992574edfa935bb2a2cbaa67532c7aaddd4582b53a015c11e567d7fe7ba38cc743e7a939b9e7f2e334b48f46bdf4b82b66e639189644998d90a27847e63e8ade170f8c8aa15c8076b0af8032d78870ac18278663eddb08a7eed30c199c97c81d30bdf47d6649c7ab297120b983d9b6a1da648026d552be73bb77a9346f98a3b8db1a583b71bb706c397a3142f8194c80e62a1632152cd2ffd340605325ea39baf60fb30cf574701e5ae07efee75fc51df4f1810f3ce14345c466d25e36a3" + "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" } ], "signed": { diff --git a/tests/repository_data/client/metadata/current/root.json.gz b/tests/repository_data/client/metadata/current/root.json.gz index 4a13f7efd616dff425bc51ec67ad3e32a6f7402b..527b3ca18946d69b3793ca74726f3657f81cde3b 100644 GIT binary patch literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z 
zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK literal 1680 zcmV;B25SwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= 
z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCXe^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i literal 380 zcmV-?0fYV@iwFR3sD@Vp|AkP?ZW}QSyzf^CI=clWQWllpNKP$^f|4k&)6{Etw?%`% z|6c9dIkbmX&~rr04Cieyc0L|Xvi!K{Y#+?$!I-zcjQyss$J!C=*n79n%m5`otquAh z%;@?(L1h<83?!KpIw&CpFN`T;bXjY%yN&O=F5|QjYV{$wcf8AQDvh0sP@Af6{nGaA zsAS(2QUj?Vo>4P4&M|pv$gLNl-!rZ@Zc6V7~0`iRfx|O~QU%UeCH(d_2i{9;an9ZuQssc+1RsK==Tz&nJL~ zZGO5H^`_;{W9(&mQTOqAo=)$Ou|M@PA2-0Qz;$=8oR+@|N14Y>?e^{_NAY35%BKoi zwQuN(d^c(U{4Tgq$sTHLqDi+Yj$J2NgMm3r^7OKUPPaE|8lv= a<)F*_pN@um{huG#;rbgb+g8AQ0ssIZ?!p8B diff --git a/tests/repository_data/client/metadata/current/targets.json b/tests/repository_data/client/metadata/current/targets.json index 8c9337d88c..0620bfe42a 100644 --- a/tests/repository_data/client/metadata/current/targets.json +++ b/tests/repository_data/client/metadata/current/targets.json @@ -3,7 +3,7 @@ { "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", "method": "ed25519", - "sig": "e96f9ca4425a37919dc91d5679c5319150b41f729389d70be7d8c8dc3dda647aa9fd11ca3c6a959c10819d652e516b375caf147721f96af329b54c0720373c06" + "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" } ], "signed": { @@ -23,7 
+23,6 @@ }, "roles": [ { - "backtrack": true, "keyids": [ "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" ], @@ -31,6 +30,7 @@ "paths": [ "/file3.txt" ], + "terminating": false, "threshold": 1 } ] diff --git a/tests/repository_data/client/metadata/current/targets.json.gz b/tests/repository_data/client/metadata/current/targets.json.gz index 44e222812c4df2940e014474c46921f721396e71..e2f9e949af6455e9da30ef1aad29e6702f0a2e9a 100644 GIT binary patch literal 816 zcmV-01JC>)iwFRC>e^QV|D{yhZW}oaefL)w^PKKdBz5a|^l2ACKuJ_=)Q$tsq-as( z-^&?KoVeKCmj%WKL{a47;URx+w!^r8e3I$qtmE)``}bzM{b|4L@Kt~8n^^_{fey7p zYSMd@-kg@;6Gk5zQ%+GNQU^@HoG6I*E~C#8LKTa;s80kz@mvf3^EY>cd$ z?TxkwSuDO!L3_>>0K5YXF-XSloT6BoQbO`6`V!bR)UMPUmZn`KgE|2ZrI$kN0wrRR zR+yS2sZb+(G{~)Etn9fGdx(R9dEIR9?^Z^1hB`AbJWM~Hb%yvw&W}2c3+|!mK_6w> zpPt6maiQ#bn0X5xI_B0i5Chd}C00!0Y6u-MU<*W<4IP+?Bqb+SnYNZom3)n~_TphD z($N%X?Hrz z@^SAK-*48c2mSuMzs%ZzK=28ieO~}RFW+Ambxo_(-juaKaSN2IzUs?3osJ(=GW$F{ z&(Owk)#VTZU%+1|-wv~gbo}sYmCo$zOR6Du?`qD}Qch^JGl=*oG?j93oa^iY#V1t~44Rk6p$FNnCs7q=QlNXLM3pU2v+7J5lIPDA7P2v)qbKOV?~eFPi*PmSjwAKz8B?w)AOvWi?CleufG5x${dzG1^@t!-j#O% literal 816 zcmV-01JC>)iwFR3sD@Vp|D{yhj@vj4ea}|}exB}1yhvidqffgS3@C}31WhJE;wf4b z`S;3>Co?H__hkXwfGGOt@bFMSSL@-teSXpL^*1^Y*w!8y)TwK( zG?%Kj7PVDxIdFo=p%O}psu7!m}wdFDM-+u5^@ z+r!IwNt`9SEhgOpy9QFL1tf>1kXeDGzJ$;K0@Q#ggru&AP*d_sDO;_Qi^>gY>BZAV z&zmRRJs(cnakGEx)|_zOC~&wcx~$p5a9ut=yu-ZJa@T|F);Ha~AGscmuX(pEQ!Zxj zp|{7NX7!|C4ICL0m>7BC-jrM#I)cD#Sfi0O)@InFK^GdA)$Mz^w*GY3*?Spp#bzyE z$5a3Go$>T)46x}U|Fc=?+&M;$kJ$Nt&2JHtQ5@8j>5lPD1sVvQ8NNo=Mm=)d-2TUj~y~W4y0@XD`p=rt?Do!Gg24 zaIfuus6-6K61v3B)r6fG0i~KOxB$ok-itu%ThUYuqcaZPTJFBe^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& literal 368 zcmV-$0gwJ4iwFR3sD@Vp|5cF9j@=*(h4*=ii1lrS4d!Q_V^*!I%78t&)7~FV(yF6T z-n}F@T~sm>93MUo{uo9xpI@%L{CbnwJdIz6as23OG(Y71+`3|%+N{Bc#DrLctqY#f 
zIiM7=C2wtt)U*}!s$Obys2CymMipHJbJ}?qS*F_#l!io*54}IXi4c8`m^+gt=h6y` zDrE|&z&2E6r&v;T&}>P4r(m@fUn^-*?4G76kx8921zGxSk#l{gms=GIefn;gEjI+kD=`z$UE2;r_p| O4(ngMc3Ltj0ssJ;y1kVE diff --git a/tests/repository_data/client/metadata/previous/role1.json b/tests/repository_data/client/metadata/previous/role1.json index 37b378d3e2..96d921b5eb 100644 --- a/tests/repository_data/client/metadata/previous/role1.json +++ b/tests/repository_data/client/metadata/previous/role1.json @@ -3,14 +3,35 @@ { "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", "method": "ed25519", - "sig": "98c7634186f7a02b3a56d8204e62b3a1d25225935dc47c720426ef591d09931e071f96f8d47ef3ec814dd7278f05c01190e60386ad03e546869c7aeeb3249703" + "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" } ], "signed": { "_type": "Targets", "delegations": { - "keys": {}, - "roles": [] + "keys": { + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" + } + } + }, + "roles": [ + { + "keyids": [ + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] }, "expires": "2030-01-01T00:00:00Z", "targets": { diff --git a/tests/repository_data/client/metadata/previous/role1.json.gz b/tests/repository_data/client/metadata/previous/role1.json.gz index 53d950b9bdf648f12a61366d5bb37b1bf079acf7..9c2e24c6b4bc138cc178fc95188a2deb61e510bc 100644 GIT binary patch literal 619 zcmV-x0+jt9iwFRC>e^QV|E*KWZrm^oz56Q+pVO9-NLk8v=aQeM%e2+ia}~KwJXKfb-gCUv^g6 z){xK9Cq%D28*AJQh(iM3lx8kf{rs>oB6F)V6V1!?agw>kCqD0F8kfGCB!ldD>W_zU zOI#?s9sUDv*GiifFCCBF3lF>Fxu3fIRjnmq>{yds8QspLbh93xAMRzYSUzd6UcK>f zf3j{)uVLupoNIzoy3i^*1y;yfvIZ1FEnZN0(%yua4WPC}Vp1z8nK+9nQOjE!m+JPv 
ztkR#4gWO?!0{aQwKQs9k4+q{aB+T#Y8wDqxy5-qh-X!OJUuL5}%ofc&jB>l`%Di;P zVRnhS1ADGEaE-j5`gNol4F3zD}XjWauKg_Uc zT?#8yxkboiTBnkVOc)#!aN#6|awUjSEB4-H*6es53XVDgXy=+Xmis0Kac;J$tZMo$L63De=%Gjb#*|jDbAaNmQ zP8tch2$T`gK~^N0rL-RttWvH1sGC6!yQwREX;*C&uk60bc`T#1l7Fc#zX8(V<;ute F000m*C~*J) literal 465 zcmV;?0WSU@iwFR3sD@Vp|2>k?Zrd;nMc?xkhM&8Qq9}>#cl2pPF;F6v1kI8n_J$Tk z{(Z8Y7LWkzQo6c2^v7-(r{n%C^WUpY5d8_P5HDTfgp@MT6EQ04K3f=-m zj=3f=D;_B)@M8~i+YQeTcOte#T}X_-=l54zB0kA=Z}YU~9@S2^m-%=(Pxp;0vUPFW zUUt2lHm1*u_j=0SUXL5-5y8VZaD6-hJg(#CWLD1H z)+iwFRC>e^QV|22?HZo@DPME5?0p>+ySAIf6RkyVSJpiL?@njdVpXb|M? zmF)ze78*!zhI6lKGxmowPd|f>W?O$$b$u_VHs5;dr6`6WB4kG*8hrrYbwRariES5S zb}l-J-c#^_p-7%nvK&&YBGh(FX0z~4I(=Ohpo&AZFTL2$V#?BSvKTBjO36!#np+#m zHZ0T>zLm@ka%hC8c(k6p03l|JkT_?=Hbwy?m$PGWIe05ExhN4s03ar3TVz%QpM7o! zTObt-lz~kN^Qh|m=GlnOQRgPi=XAU39C63PL8o!aZA6cH;Hkfy$LGVPvgPtvHybX; zwbT8~UViD{wO@-i4m`YpEwcmIF7v*kr>D=0WPWuRi%}75RYE@g0ng)=5VipT0N`PZ Ae*gdg literal 0 HcmV?d00001 diff --git a/tests/repository_data/client/metadata/previous/root.json b/tests/repository_data/client/metadata/previous/root.json index 94327a115a..aebb36c2e8 100644 --- a/tests/repository_data/client/metadata/previous/root.json +++ b/tests/repository_data/client/metadata/previous/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": 
"3851d11ed11ea69ab5d873cfd015de79dc856d83e0a060e73d535d705da086c26191e6bc1ed6bbdde9305c3816c1c5885b48cf51c41fedc906a5ebe0e33a6b823145d40bd3e588e77c6bc724b62f4b2ca9700da03e0ba603170bfd365ea1d25ee7f9661848a14f5916869f00f3e03aa4cb468a4de647bbf205b96f9aa8dd408e3e0b1f9d53fe74654dfe139441dfe3651b3473b67bd104d754112e594a9c6ed0127e94b9057322d630f70c93c01d0cd0c2b98f6abdfd2ed7ac7dc5d3e201d191e168992574edfa935bb2a2cbaa67532c7aaddd4582b53a015c11e567d7fe7ba38cc743e7a939b9e7f2e334b48f46bdf4b82b66e639189644998d90a27847e63e8ade170f8c8aa15c8076b0af8032d78870ac18278663eddb08a7eed30c199c97c81d30bdf47d6649c7ab297120b983d9b6a1da648026d552be73bb77a9346f98a3b8db1a583b71bb706c397a3142f8194c80e62a1632152cd2ffd340605325ea39baf60fb30cf574701e5ae07efee75fc51df4f1810f3ce14345c466d25e36a3" + "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" } ], "signed": { diff --git a/tests/repository_data/client/metadata/previous/root.json.gz b/tests/repository_data/client/metadata/previous/root.json.gz index 4a13f7efd616dff425bc51ec67ad3e32a6f7402b..527b3ca18946d69b3793ca74726f3657f81cde3b 100644 GIT binary patch literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z 
zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK literal 1680 zcmV;B25SwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= 
z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCXe^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i literal 380 zcmV-?0fYV@iwFR3sD@Vp|AkP?ZW}QSyzf^CI=clWQWllpNKP$^f|4k&)6{Etw?%`% z|6c9dIkbmX&~rr04Cieyc0L|Xvi!K{Y#+?$!I-zcjQyss$J!C=*n79n%m5`otquAh z%;@?(L1h<83?!KpIw&CpFN`T;bXjY%yN&O=F5|QjYV{$wcf8AQDvh0sP@Af6{nGaA zsAS(2QUj?Vo>4P4&M|pv$gLNl-!rZ@Zc6V7~0`iRfx|O~QU%UeCH(d_2i{9;an9ZuQssc+1RsK==Tz&nJL~ zZGO5H^`_;{W9(&mQTOqAo=)$Ou|M@PA2-0Qz;$=8oR+@|N14Y>?e^{_NAY35%BKoi zwQuN(d^c(U{4Tgq$sTHLqDi+Yj$J2NgMm3r^7OKUPPaE|8lv= a<)F*_pN@um{huG#;rbgb+g8AQ0ssIZ?!p8B diff --git a/tests/repository_data/client/metadata/previous/targets.json b/tests/repository_data/client/metadata/previous/targets.json index 8c9337d88c..0620bfe42a 100644 --- a/tests/repository_data/client/metadata/previous/targets.json +++ b/tests/repository_data/client/metadata/previous/targets.json @@ -3,7 +3,7 @@ { "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", "method": "ed25519", - "sig": "e96f9ca4425a37919dc91d5679c5319150b41f729389d70be7d8c8dc3dda647aa9fd11ca3c6a959c10819d652e516b375caf147721f96af329b54c0720373c06" + "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" } ], "signed": { @@ 
-23,7 +23,6 @@ }, "roles": [ { - "backtrack": true, "keyids": [ "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" ], @@ -31,6 +30,7 @@ "paths": [ "/file3.txt" ], + "terminating": false, "threshold": 1 } ] diff --git a/tests/repository_data/client/metadata/previous/targets.json.gz b/tests/repository_data/client/metadata/previous/targets.json.gz index 44e222812c4df2940e014474c46921f721396e71..e2f9e949af6455e9da30ef1aad29e6702f0a2e9a 100644 GIT binary patch literal 816 zcmV-01JC>)iwFRC>e^QV|D{yhZW}oaefL)w^PKKdBz5a|^l2ACKuJ_=)Q$tsq-as( z-^&?KoVeKCmj%WKL{a47;URx+w!^r8e3I$qtmE)``}bzM{b|4L@Kt~8n^^_{fey7p zYSMd@-kg@;6Gk5zQ%+GNQU^@HoG6I*E~C#8LKTa;s80kz@mvf3^EY>cd$ z?TxkwSuDO!L3_>>0K5YXF-XSloT6BoQbO`6`V!bR)UMPUmZn`KgE|2ZrI$kN0wrRR zR+yS2sZb+(G{~)Etn9fGdx(R9dEIR9?^Z^1hB`AbJWM~Hb%yvw&W}2c3+|!mK_6w> zpPt6maiQ#bn0X5xI_B0i5Chd}C00!0Y6u-MU<*W<4IP+?Bqb+SnYNZom3)n~_TphD z($N%X?Hrz z@^SAK-*48c2mSuMzs%ZzK=28ieO~}RFW+Ambxo_(-juaKaSN2IzUs?3osJ(=GW$F{ z&(Owk)#VTZU%+1|-wv~gbo}sYmCo$zOR6Du?`qD}Qch^JGl=*oG?j93oa^iY#V1t~44Rk6p$FNnCs7q=QlNXLM3pU2v+7J5lIPDA7P2v)qbKOV?~eFPi*PmSjwAKz8B?w)AOvWi?CleufG5x${dzG1^@t!-j#O% literal 816 zcmV-01JC>)iwFR3sD@Vp|D{yhj@vj4ea}|}exB}1yhvidqffgS3@C}31WhJE;wf4b z`S;3>Co?H__hkXwfGGOt@bFMSSL@-teSXpL^*1^Y*w!8y)TwK( zG?%Kj7PVDxIdFo=p%O}psu7!m}wdFDM-+u5^@ z+r!IwNt`9SEhgOpy9QFL1tf>1kXeDGzJ$;K0@Q#ggru&AP*d_sDO;_Qi^>gY>BZAV z&zmRRJs(cnakGEx)|_zOC~&wcx~$p5a9ut=yu-ZJa@T|F);Ha~AGscmuX(pEQ!Zxj zp|{7NX7!|C4ICL0m>7BC-jrM#I)cD#Sfi0O)@InFK^GdA)$Mz^w*GY3*?Spp#bzyE z$5a3Go$>T)46x}U|Fc=?+&M;$kJ$Nt&2JHtQ5@8j>5lPD1sVvQ8NNo=Mm=)d-2TUj~y~W4y0@XD`p=rt?Do!Gg24 zaIfuus6-6K61v3B)r6fG0i~KOxB$ok-itu%ThUYuqcaZPTJFBe^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& literal 368 zcmV-$0gwJ4iwFR3sD@Vp|5cF9j@=*(h4*=ii1lrS4d!Q_V^*!I%78t&)7~FV(yF6T 
z-n}F@T~sm>93MUo{uo9xpI@%L{CbnwJdIz6as23OG(Y71+`3|%+N{Bc#DrLctqY#f zIiM7=C2wtt)U*}!s$Obys2CymMipHJbJ}?qS*F_#l!io*54}IXi4c8`m^+gt=h6y` zDrE|&z&2E6r&v;T&}>P4r(m@fUn^-*?4G76kx8921zGxSk#l{gms=GIefn;gEjI+kD=`z$UE2;r_p| O4(ngMc3Ltj0ssJ;y1kVE diff --git a/tests/repository_data/generate.py b/tests/repository_data/generate.py index 32a96ea64c..3ea31f9825 100755 --- a/tests/repository_data/generate.py +++ b/tests/repository_data/generate.py @@ -122,6 +122,9 @@ repository.targets.delegate('role1', [delegation_public], [target3_filepath]) repository.targets('role1').load_signing_key(delegation_private) +repository.targets('role1').delegate('role2', [delegation_public], []) +repository.targets('role2').load_signing_key(delegation_private) + # Set the top-level expiration times far into the future so that # they do not expire anytime soon, or else the tests fail. Unit tests may # modify the expiration datetimes (of the copied files), if they wish. @@ -130,6 +133,7 @@ repository.snapshot.expiration = datetime.datetime(2030, 1, 1, 0, 0) repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 0, 0) repository.targets('role1').expiration = datetime.datetime(2030, 1, 1, 0, 0) +repository.targets('role2').expiration = datetime.datetime(2030, 1, 1, 0, 0) # Compress the top-level role metadata so that the unit tests have a # pre-generated example of compressed metadata. @@ -140,7 +144,7 @@ # Create the actual metadata files, which are saved to 'metadata.staged'. if not options.dry_run: - repository.write() + repository.writeall() # Move the staged.metadata to 'metadata' and create the client folder. 
The # client folder, which includes the required directory structure and metadata diff --git a/tests/repository_data/repository/metadata.staged/role1.json b/tests/repository_data/repository/metadata.staged/role1.json index 37b378d3e2..96d921b5eb 100644 --- a/tests/repository_data/repository/metadata.staged/role1.json +++ b/tests/repository_data/repository/metadata.staged/role1.json @@ -3,14 +3,35 @@ { "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", "method": "ed25519", - "sig": "98c7634186f7a02b3a56d8204e62b3a1d25225935dc47c720426ef591d09931e071f96f8d47ef3ec814dd7278f05c01190e60386ad03e546869c7aeeb3249703" + "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" } ], "signed": { "_type": "Targets", "delegations": { - "keys": {}, - "roles": [] + "keys": { + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" + } + } + }, + "roles": [ + { + "keyids": [ + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] }, "expires": "2030-01-01T00:00:00Z", "targets": { diff --git a/tests/repository_data/repository/metadata.staged/role1.json.gz b/tests/repository_data/repository/metadata.staged/role1.json.gz index 53d950b9bdf648f12a61366d5bb37b1bf079acf7..9c2e24c6b4bc138cc178fc95188a2deb61e510bc 100644 GIT binary patch literal 619 zcmV-x0+jt9iwFRC>e^QV|E*KWZrm^oz56Q+pVO9-NLk8v=aQeM%e2+ia}~KwJXKfb-gCUv^g6 z){xK9Cq%D28*AJQh(iM3lx8kf{rs>oB6F)V6V1!?agw>kCqD0F8kfGCB!ldD>W_zU zOI#?s9sUDv*GiifFCCBF3lF>Fxu3fIRjnmq>{yds8QspLbh93xAMRzYSUzd6UcK>f zf3j{)uVLupoNIzoy3i^*1y;yfvIZ1FEnZN0(%yua4WPC}Vp1z8nK+9nQOjE!m+JPv ztkR#4gWO?!0{aQwKQs9k4+q{aB+T#Y8wDqxy5-qh-X!OJUuL5}%ofc&jB>l`%Di;P 
zVRnhS1ADGEaE-j5`gNol4F3zD}XjWauKg_Uc zT?#8yxkboiTBnkVOc)#!aN#6|awUjSEB4-H*6es53XVDgXy=+Xmis0Kac;J$tZMo$L63De=%Gjb#*|jDbAaNmQ zP8tch2$T`gK~^N0rL-RttWvH1sGC6!yQwREX;*C&uk60bc`T#1l7Fc#zX8(V<;ute F000m*C~*J) literal 465 zcmV;?0WSU@iwFR3sD@Vp|2>k?Zrd;nMc?xkhM&8Qq9}>#cl2pPF;F6v1kI8n_J$Tk z{(Z8Y7LWkzQo6c2^v7-(r{n%C^WUpY5d8_P5HDTfgp@MT6EQ04K3f=-m zj=3f=D;_B)@M8~i+YQeTcOte#T}X_-=l54zB0kA=Z}YU~9@S2^m-%=(Pxp;0vUPFW zUUt2lHm1*u_j=0SUXL5-5y8VZaD6-hJg(#CWLD1H z)+iwFRC>e^QV|22?HZo@DPME5?0p>+ySAIf6RkyVSJpiL?@njdVpXb|M? zmF)ze78*!zhI6lKGxmowPd|f>W?O$$b$u_VHs5;dr6`6WB4kG*8hrrYbwRariES5S zb}l-J-c#^_p-7%nvK&&YBGh(FX0z~4I(=Ohpo&AZFTL2$V#?BSvKTBjO36!#np+#m zHZ0T>zLm@ka%hC8c(k6p03l|JkT_?=Hbwy?m$PGWIe05ExhN4s03ar3TVz%QpM7o! zTObt-lz~kN^Qh|m=GlnOQRgPi=XAU39C63PL8o!aZA6cH;Hkfy$LGVPvgPtvHybX; zwbT8~UViD{wO@-i4m`YpEwcmIF7v*kr>D=0WPWuRi%}75RYE@g0ng)=5VipT0N`PZ Ae*gdg literal 0 HcmV?d00001 diff --git a/tests/repository_data/repository/metadata.staged/root.json b/tests/repository_data/repository/metadata.staged/root.json index 94327a115a..aebb36c2e8 100644 --- a/tests/repository_data/repository/metadata.staged/root.json +++ b/tests/repository_data/repository/metadata.staged/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": 
"3851d11ed11ea69ab5d873cfd015de79dc856d83e0a060e73d535d705da086c26191e6bc1ed6bbdde9305c3816c1c5885b48cf51c41fedc906a5ebe0e33a6b823145d40bd3e588e77c6bc724b62f4b2ca9700da03e0ba603170bfd365ea1d25ee7f9661848a14f5916869f00f3e03aa4cb468a4de647bbf205b96f9aa8dd408e3e0b1f9d53fe74654dfe139441dfe3651b3473b67bd104d754112e594a9c6ed0127e94b9057322d630f70c93c01d0cd0c2b98f6abdfd2ed7ac7dc5d3e201d191e168992574edfa935bb2a2cbaa67532c7aaddd4582b53a015c11e567d7fe7ba38cc743e7a939b9e7f2e334b48f46bdf4b82b66e639189644998d90a27847e63e8ade170f8c8aa15c8076b0af8032d78870ac18278663eddb08a7eed30c199c97c81d30bdf47d6649c7ab297120b983d9b6a1da648026d552be73bb77a9346f98a3b8db1a583b71bb706c397a3142f8194c80e62a1632152cd2ffd340605325ea39baf60fb30cf574701e5ae07efee75fc51df4f1810f3ce14345c466d25e36a3" + "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" } ], "signed": { diff --git a/tests/repository_data/repository/metadata.staged/root.json.gz b/tests/repository_data/repository/metadata.staged/root.json.gz index 4a13f7efd616dff425bc51ec67ad3e32a6f7402b..527b3ca18946d69b3793ca74726f3657f81cde3b 100644 GIT binary patch literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 
z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK literal 1680 zcmV;B25SwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a 
zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCXe^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i literal 380 zcmV-?0fYV@iwFR3sD@Vp|AkP?ZW}QSyzf^CI=clWQWllpNKP$^f|4k&)6{Etw?%`% z|6c9dIkbmX&~rr04Cieyc0L|Xvi!K{Y#+?$!I-zcjQyss$J!C=*n79n%m5`otquAh z%;@?(L1h<83?!KpIw&CpFN`T;bXjY%yN&O=F5|QjYV{$wcf8AQDvh0sP@Af6{nGaA zsAS(2QUj?Vo>4P4&M|pv$gLNl-!rZ@Zc6V7~0`iRfx|O~QU%UeCH(d_2i{9;an9ZuQssc+1RsK==Tz&nJL~ zZGO5H^`_;{W9(&mQTOqAo=)$Ou|M@PA2-0Qz;$=8oR+@|N14Y>?e^{_NAY35%BKoi zwQuN(d^c(U{4Tgq$sTHLqDi+Yj$J2NgMm3r^7OKUPPaE|8lv= a<)F*_pN@um{huG#;rbgb+g8AQ0ssIZ?!p8B diff --git a/tests/repository_data/repository/metadata.staged/targets.json b/tests/repository_data/repository/metadata.staged/targets.json index 8c9337d88c..0620bfe42a 100644 --- a/tests/repository_data/repository/metadata.staged/targets.json +++ b/tests/repository_data/repository/metadata.staged/targets.json @@ -3,7 +3,7 @@ { "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", "method": "ed25519", - "sig": "e96f9ca4425a37919dc91d5679c5319150b41f729389d70be7d8c8dc3dda647aa9fd11ca3c6a959c10819d652e516b375caf147721f96af329b54c0720373c06" + "sig": 
"74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" } ], "signed": { @@ -23,7 +23,6 @@ }, "roles": [ { - "backtrack": true, "keyids": [ "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" ], @@ -31,6 +30,7 @@ "paths": [ "/file3.txt" ], + "terminating": false, "threshold": 1 } ] diff --git a/tests/repository_data/repository/metadata.staged/targets.json.gz b/tests/repository_data/repository/metadata.staged/targets.json.gz index 44e222812c4df2940e014474c46921f721396e71..e2f9e949af6455e9da30ef1aad29e6702f0a2e9a 100644 GIT binary patch literal 816 zcmV-01JC>)iwFRC>e^QV|D{yhZW}oaefL)w^PKKdBz5a|^l2ACKuJ_=)Q$tsq-as( z-^&?KoVeKCmj%WKL{a47;URx+w!^r8e3I$qtmE)``}bzM{b|4L@Kt~8n^^_{fey7p zYSMd@-kg@;6Gk5zQ%+GNQU^@HoG6I*E~C#8LKTa;s80kz@mvf3^EY>cd$ z?TxkwSuDO!L3_>>0K5YXF-XSloT6BoQbO`6`V!bR)UMPUmZn`KgE|2ZrI$kN0wrRR zR+yS2sZb+(G{~)Etn9fGdx(R9dEIR9?^Z^1hB`AbJWM~Hb%yvw&W}2c3+|!mK_6w> zpPt6maiQ#bn0X5xI_B0i5Chd}C00!0Y6u-MU<*W<4IP+?Bqb+SnYNZom3)n~_TphD z($N%X?Hrz z@^SAK-*48c2mSuMzs%ZzK=28ieO~}RFW+Ambxo_(-juaKaSN2IzUs?3osJ(=GW$F{ z&(Owk)#VTZU%+1|-wv~gbo}sYmCo$zOR6Du?`qD}Qch^JGl=*oG?j93oa^iY#V1t~44Rk6p$FNnCs7q=QlNXLM3pU2v+7J5lIPDA7P2v)qbKOV?~eFPi*PmSjwAKz8B?w)AOvWi?CleufG5x${dzG1^@t!-j#O% literal 816 zcmV-01JC>)iwFR3sD@Vp|D{yhj@vj4ea}|}exB}1yhvidqffgS3@C}31WhJE;wf4b z`S;3>Co?H__hkXwfGGOt@bFMSSL@-teSXpL^*1^Y*w!8y)TwK( zG?%Kj7PVDxIdFo=p%O}psu7!m}wdFDM-+u5^@ z+r!IwNt`9SEhgOpy9QFL1tf>1kXeDGzJ$;K0@Q#ggru&AP*d_sDO;_Qi^>gY>BZAV z&zmRRJs(cnakGEx)|_zOC~&wcx~$p5a9ut=yu-ZJa@T|F);Ha~AGscmuX(pEQ!Zxj zp|{7NX7!|C4ICL0m>7BC-jrM#I)cD#Sfi0O)@InFK^GdA)$Mz^w*GY3*?Spp#bzyE z$5a3Go$>T)46x}U|Fc=?+&M;$kJ$Nt&2JHtQ5@8j>5lPD1sVvQ8NNo=Mm=)d-2TUj~y~W4y0@XD`p=rt?Do!Gg24 zaIfuus6-6K61v3B)r6fG0i~KOxB$ok-itu%ThUYuqcaZPTJFBe^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK 
N^)DlVi0mo?000M=wl)9& literal 368 zcmV-$0gwJ4iwFR3sD@Vp|5cF9j@=*(h4*=ii1lrS4d!Q_V^*!I%78t&)7~FV(yF6T z-n}F@T~sm>93MUo{uo9xpI@%L{CbnwJdIz6as23OG(Y71+`3|%+N{Bc#DrLctqY#f zIiM7=C2wtt)U*}!s$Obys2CymMipHJbJ}?qS*F_#l!io*54}IXi4c8`m^+gt=h6y` zDrE|&z&2E6r&v;T&}>P4r(m@fUn^-*?4G76kx8921zGxSk#l{gms=GIefn;gEjI+kD=`z$UE2;r_p| O4(ngMc3Ltj0ssJ;y1kVE diff --git a/tests/repository_data/repository/metadata/role1.json b/tests/repository_data/repository/metadata/role1.json index 37b378d3e2..96d921b5eb 100644 --- a/tests/repository_data/repository/metadata/role1.json +++ b/tests/repository_data/repository/metadata/role1.json @@ -3,14 +3,35 @@ { "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", "method": "ed25519", - "sig": "98c7634186f7a02b3a56d8204e62b3a1d25225935dc47c720426ef591d09931e071f96f8d47ef3ec814dd7278f05c01190e60386ad03e546869c7aeeb3249703" + "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" } ], "signed": { "_type": "Targets", "delegations": { - "keys": {}, - "roles": [] + "keys": { + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" + } + } + }, + "roles": [ + { + "keyids": [ + "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + ], + "name": "role2", + "paths": [], + "terminating": false, + "threshold": 1 + } + ] }, "expires": "2030-01-01T00:00:00Z", "targets": { diff --git a/tests/repository_data/repository/metadata/role1.json.gz b/tests/repository_data/repository/metadata/role1.json.gz index 53d950b9bdf648f12a61366d5bb37b1bf079acf7..9c2e24c6b4bc138cc178fc95188a2deb61e510bc 100644 GIT binary patch literal 619 zcmV-x0+jt9iwFRC>e^QV|E*KWZrm^oz56Q+pVO9-NLk8v=aQeM%e2+ia}~KwJXKfb-gCUv^g6 z){xK9Cq%D28*AJQh(iM3lx8kf{rs>oB6F)V6V1!?agw>kCqD0F8kfGCB!ldD>W_zU 
zOI#?s9sUDv*GiifFCCBF3lF>Fxu3fIRjnmq>{yds8QspLbh93xAMRzYSUzd6UcK>f zf3j{)uVLupoNIzoy3i^*1y;yfvIZ1FEnZN0(%yua4WPC}Vp1z8nK+9nQOjE!m+JPv ztkR#4gWO?!0{aQwKQs9k4+q{aB+T#Y8wDqxy5-qh-X!OJUuL5}%ofc&jB>l`%Di;P zVRnhS1ADGEaE-j5`gNol4F3zD}XjWauKg_Uc zT?#8yxkboiTBnkVOc)#!aN#6|awUjSEB4-H*6es53XVDgXy=+Xmis0Kac;J$tZMo$L63De=%Gjb#*|jDbAaNmQ zP8tch2$T`gK~^N0rL-RttWvH1sGC6!yQwREX;*C&uk60bc`T#1l7Fc#zX8(V<;ute F000m*C~*J) literal 465 zcmV;?0WSU@iwFR3sD@Vp|2>k?Zrd;nMc?xkhM&8Qq9}>#cl2pPF;F6v1kI8n_J$Tk z{(Z8Y7LWkzQo6c2^v7-(r{n%C^WUpY5d8_P5HDTfgp@MT6EQ04K3f=-m zj=3f=D;_B)@M8~i+YQeTcOte#T}X_-=l54zB0kA=Z}YU~9@S2^m-%=(Pxp;0vUPFW zUUt2lHm1*u_j=0SUXL5-5y8VZaD6-hJg(#CWLD1H z)+iwFRC>e^QV|22?HZo@DPME5?0p>+ySAIf6RkyVSJpiL?@njdVpXb|M? zmF)ze78*!zhI6lKGxmowPd|f>W?O$$b$u_VHs5;dr6`6WB4kG*8hrrYbwRariES5S zb}l-J-c#^_p-7%nvK&&YBGh(FX0z~4I(=Ohpo&AZFTL2$V#?BSvKTBjO36!#np+#m zHZ0T>zLm@ka%hC8c(k6p03l|JkT_?=Hbwy?m$PGWIe05ExhN4s03ar3TVz%QpM7o! zTObt-lz~kN^Qh|m=GlnOQRgPi=XAU39C63PL8o!aZA6cH;Hkfy$LGVPvgPtvHybX; zwbT8~UViD{wO@-i4m`YpEwcmIF7v*kr>D=0WPWuRi%}75RYE@g0ng)=5VipT0N`PZ Ae*gdg literal 0 HcmV?d00001 diff --git a/tests/repository_data/repository/metadata/root.json b/tests/repository_data/repository/metadata/root.json index 94327a115a..aebb36c2e8 100644 --- a/tests/repository_data/repository/metadata/root.json +++ b/tests/repository_data/repository/metadata/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": 
"3851d11ed11ea69ab5d873cfd015de79dc856d83e0a060e73d535d705da086c26191e6bc1ed6bbdde9305c3816c1c5885b48cf51c41fedc906a5ebe0e33a6b823145d40bd3e588e77c6bc724b62f4b2ca9700da03e0ba603170bfd365ea1d25ee7f9661848a14f5916869f00f3e03aa4cb468a4de647bbf205b96f9aa8dd408e3e0b1f9d53fe74654dfe139441dfe3651b3473b67bd104d754112e594a9c6ed0127e94b9057322d630f70c93c01d0cd0c2b98f6abdfd2ed7ac7dc5d3e201d191e168992574edfa935bb2a2cbaa67532c7aaddd4582b53a015c11e567d7fe7ba38cc743e7a939b9e7f2e334b48f46bdf4b82b66e639189644998d90a27847e63e8ade170f8c8aa15c8076b0af8032d78870ac18278663eddb08a7eed30c199c97c81d30bdf47d6649c7ab297120b983d9b6a1da648026d552be73bb77a9346f98a3b8db1a583b71bb706c397a3142f8194c80e62a1632152cd2ffd340605325ea39baf60fb30cf574701e5ae07efee75fc51df4f1810f3ce14345c466d25e36a3" + "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" } ], "signed": { diff --git a/tests/repository_data/repository/metadata/root.json.gz b/tests/repository_data/repository/metadata/root.json.gz index 4a13f7efd616dff425bc51ec67ad3e32a6f7402b..527b3ca18946d69b3793ca74726f3657f81cde3b 100644 GIT binary patch literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z 
zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK literal 1680 zcmV;B25SwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= 
z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCXe^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i literal 380 zcmV-?0fYV@iwFR3sD@Vp|AkP?ZW}QSyzf^CI=clWQWllpNKP$^f|4k&)6{Etw?%`% z|6c9dIkbmX&~rr04Cieyc0L|Xvi!K{Y#+?$!I-zcjQyss$J!C=*n79n%m5`otquAh z%;@?(L1h<83?!KpIw&CpFN`T;bXjY%yN&O=F5|QjYV{$wcf8AQDvh0sP@Af6{nGaA zsAS(2QUj?Vo>4P4&M|pv$gLNl-!rZ@Zc6V7~0`iRfx|O~QU%UeCH(d_2i{9;an9ZuQssc+1RsK==Tz&nJL~ zZGO5H^`_;{W9(&mQTOqAo=)$Ou|M@PA2-0Qz;$=8oR+@|N14Y>?e^{_NAY35%BKoi zwQuN(d^c(U{4Tgq$sTHLqDi+Yj$J2NgMm3r^7OKUPPaE|8lv= a<)F*_pN@um{huG#;rbgb+g8AQ0ssIZ?!p8B diff --git a/tests/repository_data/repository/metadata/targets.json b/tests/repository_data/repository/metadata/targets.json index 8c9337d88c..0620bfe42a 100644 --- a/tests/repository_data/repository/metadata/targets.json +++ b/tests/repository_data/repository/metadata/targets.json @@ -3,7 +3,7 @@ { "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", "method": "ed25519", - "sig": "e96f9ca4425a37919dc91d5679c5319150b41f729389d70be7d8c8dc3dda647aa9fd11ca3c6a959c10819d652e516b375caf147721f96af329b54c0720373c06" + "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" } ], "signed": { @@ -23,7 +23,6 @@ }, 
"roles": [ { - "backtrack": true, "keyids": [ "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" ], @@ -31,6 +30,7 @@ "paths": [ "/file3.txt" ], + "terminating": false, "threshold": 1 } ] diff --git a/tests/repository_data/repository/metadata/targets.json.gz b/tests/repository_data/repository/metadata/targets.json.gz index 44e222812c4df2940e014474c46921f721396e71..e2f9e949af6455e9da30ef1aad29e6702f0a2e9a 100644 GIT binary patch literal 816 zcmV-01JC>)iwFRC>e^QV|D{yhZW}oaefL)w^PKKdBz5a|^l2ACKuJ_=)Q$tsq-as( z-^&?KoVeKCmj%WKL{a47;URx+w!^r8e3I$qtmE)``}bzM{b|4L@Kt~8n^^_{fey7p zYSMd@-kg@;6Gk5zQ%+GNQU^@HoG6I*E~C#8LKTa;s80kz@mvf3^EY>cd$ z?TxkwSuDO!L3_>>0K5YXF-XSloT6BoQbO`6`V!bR)UMPUmZn`KgE|2ZrI$kN0wrRR zR+yS2sZb+(G{~)Etn9fGdx(R9dEIR9?^Z^1hB`AbJWM~Hb%yvw&W}2c3+|!mK_6w> zpPt6maiQ#bn0X5xI_B0i5Chd}C00!0Y6u-MU<*W<4IP+?Bqb+SnYNZom3)n~_TphD z($N%X?Hrz z@^SAK-*48c2mSuMzs%ZzK=28ieO~}RFW+Ambxo_(-juaKaSN2IzUs?3osJ(=GW$F{ z&(Owk)#VTZU%+1|-wv~gbo}sYmCo$zOR6Du?`qD}Qch^JGl=*oG?j93oa^iY#V1t~44Rk6p$FNnCs7q=QlNXLM3pU2v+7J5lIPDA7P2v)qbKOV?~eFPi*PmSjwAKz8B?w)AOvWi?CleufG5x${dzG1^@t!-j#O% literal 816 zcmV-01JC>)iwFR3sD@Vp|D{yhj@vj4ea}|}exB}1yhvidqffgS3@C}31WhJE;wf4b z`S;3>Co?H__hkXwfGGOt@bFMSSL@-teSXpL^*1^Y*w!8y)TwK( zG?%Kj7PVDxIdFo=p%O}psu7!m}wdFDM-+u5^@ z+r!IwNt`9SEhgOpy9QFL1tf>1kXeDGzJ$;K0@Q#ggru&AP*d_sDO;_Qi^>gY>BZAV z&zmRRJs(cnakGEx)|_zOC~&wcx~$p5a9ut=yu-ZJa@T|F);Ha~AGscmuX(pEQ!Zxj zp|{7NX7!|C4ICL0m>7BC-jrM#I)cD#Sfi0O)@InFK^GdA)$Mz^w*GY3*?Spp#bzyE z$5a3Go$>T)46x}U|Fc=?+&M;$kJ$Nt&2JHtQ5@8j>5lPD1sVvQ8NNo=Mm=)d-2TUj~y~W4y0@XD`p=rt?Do!Gg24 zaIfuus6-6K61v3B)r6fG0i~KOxB$ok-itu%ThUYuqcaZPTJFBe^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& literal 368 zcmV-$0gwJ4iwFR3sD@Vp|5cF9j@=*(h4*=ii1lrS4d!Q_V^*!I%78t&)7~FV(yF6T z-n}F@T~sm>93MUo{uo9xpI@%L{CbnwJdIz6as23OG(Y71+`3|%+N{Bc#DrLctqY#f 
zIiM7=C2wtt)U*}!s$Obys2CymMipHJbJ}?qS*F_#l!io*54}IXi4c8`m^+gt=h6y` zDrE|&z&2E6r&v;T&}>P4r(m@fUn^-*?4G76kx8921zGxSk#l{gms=GIefn;gEjI+kD=`z$UE2;r_p| O4(ngMc3Ltj0ssJ;y1kVE From feb2cd0df48f0dda391e6eb56488da6e2b6a6284 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 15 Sep 2016 15:24:00 -0400 Subject: [PATCH 05/20] Update tests to improve code coverage and following the changes to repository write and backtracking --- tests/test_indefinite_freeze_attack.py | 4 +- tests/test_key_revocation.py | 16 +++---- tests/test_mix_and_match_attack.py | 2 +- tests/test_pyca_crypto_keys.py | 5 ++- tests/test_replay_attack.py | 4 +- tests/test_repository_lib.py | 12 ++--- tests/test_repository_tool.py | 61 +++++++++++++------------- tests/test_slow_retrieval_attack.py | 2 +- tests/test_updater.py | 30 ++++++------- 9 files changed, 66 insertions(+), 70 deletions(-) diff --git a/tests/test_indefinite_freeze_attack.py b/tests/test_indefinite_freeze_attack.py index f86a48271e..f5a0ae2ae7 100755 --- a/tests/test_indefinite_freeze_attack.py +++ b/tests/test_indefinite_freeze_attack.py @@ -300,7 +300,7 @@ def test_with_tuf(self): repository.snapshot.expiration = datetime_object # Now write to the repository. - repository.write() + repository.writeall() # And move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -369,7 +369,7 @@ def test_with_tuf(self): expiry_time = time.time() + 1 datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time)) repository.timestamp.expiration = datetime_object - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_key_revocation.py b/tests/test_key_revocation.py index 5399bf3769..16f1f418de 100755 --- a/tests/test_key_revocation.py +++ b/tests/test_key_revocation.py @@ -207,7 +207,7 @@ def test_timestamp_key_revocation(self): repository.root.load_signing_key(self.role_keys['root']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['snapshot']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. @@ -258,7 +258,7 @@ def test_snapshot_key_revocation(self): # Note: we added Timetamp's key to the Snapshot role. repository.snapshot.load_signing_key(self.role_keys['timestamp']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. @@ -311,7 +311,7 @@ def test_targets_key_revocation(self): repository.targets.load_signing_key(self.role_keys['timestamp']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. @@ -368,8 +368,8 @@ def test_root_key_revocation(self): repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - # Root's version number = 2 after the following write(). - repository.write() + # Root's version number = 2 after the following writeall(). + repository.writeall() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -388,7 +388,7 @@ def test_root_key_revocation(self): # Load the previous Root signing key so that the the client can update # successfully. repository.root.load_signing_key(self.role_keys['root']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -409,7 +409,7 @@ def test_root_key_revocation(self): repository.root.unload_signing_key(self.role_keys['root']['private']) repository.root.remove_verification_key(self.role_keys['snapshot']['public']) repository.root.unload_signing_key(self.role_keys['snapshot']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -421,7 +421,7 @@ def test_root_key_revocation(self): repository.root.remove_verification_key(self.role_keys['targets']['public']) repository.root.unload_signing_key(self.role_keys['targets']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_mix_and_match_attack.py b/tests/test_mix_and_match_attack.py index 9225b43a35..d1644c402a 100755 --- a/tests/test_mix_and_match_attack.py +++ b/tests/test_mix_and_match_attack.py @@ -221,7 +221,7 @@ def test_with_tuf(self): file_object.write('This is role2\'s target file.') repository.targets('role1').add_target(file3_path) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_pyca_crypto_keys.py b/tests/test_pyca_crypto_keys.py index 64ed7a8553..4e33f07f8b 100755 --- a/tests/test_pyca_crypto_keys.py +++ b/tests/test_pyca_crypto_keys.py @@ -122,8 +122,6 @@ def test_verify_rsa_signature(self): # Check for invalid 'signature', 'public_key', and 'data' arguments. self.assertRaises(tuf.FormatError, crypto_keys.verify_rsa_signature, signature, method, public_rsa, 123) - - self.assertRaises(tuf.CryptoError, crypto_keys.verify_rsa_signature, signature, method, 'bad_key', data) @@ -137,6 +135,8 @@ def test_verify_rsa_signature(self): self.assertEqual(False, crypto_keys.verify_rsa_signature(mismatched_signature, method, public_rsa, data)) + + def test__decrypt(self): # Verify that invalid encrypted file is detected. self.assertRaises(tuf.CryptoError, crypto_keys._decrypt, @@ -159,6 +159,7 @@ def test_encrypt_key(self): ed25519_key, 'password') + def test__decrypt_key(self): ed25519_key = {'keytype': 'ed25519', 'keyid': 'd62247f817883f593cf6c66a5a55292488d457bcf638ae03207dbbba9dbe457d', diff --git a/tests/test_replay_attack.py b/tests/test_replay_attack.py index 76b2180bb5..e3cca2175b 100755 --- a/tests/test_replay_attack.py +++ b/tests/test_replay_attack.py @@ -218,7 +218,7 @@ def test_without_tuf(self): # Set an arbitrary expiration so that the repository tool generates a new # version. repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -289,7 +289,7 @@ def test_with_tuf(self): # Set an arbitrary expiration so that the repository tool generates a new # version. repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 107add4cb6..351778dc56 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -900,7 +900,6 @@ def test__generate_and_write_metadata(self): tuf.roledb.add_role('obsolete_role', targets_roleinfo) repo_lib._generate_and_write_metadata('obsolete_role', obsolete_metadata, - True, targets_directory, metadata_directory, consistent_snapshot=False, filenames=None, @@ -979,19 +978,16 @@ def test__load_top_level_metadata(self): repository = repo_tool.create_new_repository(repository_directory) repo_lib._load_top_level_metadata(repository, filenames) - # We partially loaded 'role1' via the top-level Targets role. For the - # purposes of this test case (which only loads top-level metadata and no - # delegated metadata), remove this role to avoid issues with partially - # loaded information (e.g., missing 'version' info, signatures, etc.) - tuf.roledb.remove_role('role1') - # Partially write all top-level roles (we increase the threshold of each # top-level role so that they are flagged as partially written. repository.root.threshold = repository.root.threshold + 1 repository.snapshot.threshold = repository.snapshot.threshold + 1 repository.targets.threshold = repository.targets.threshold + 1 repository.timestamp.threshold = repository.timestamp.threshold + 1 - repository.write(write_partial=True) + repository.write('root', ) + repository.write('snapshot') + repository.write('targets') + repository.write('timestamp') repo_lib._load_top_level_metadata(repository, filenames) diff --git a/tests/test_repository_tool.py b/tests/test_repository_tool.py index 1690057b02..7cce836c16 100755 --- a/tests/test_repository_tool.py +++ b/tests/test_repository_tool.py @@ -112,7 +112,7 @@ def test_init(self): - def test_write_and_write_partial(self): + def test_writeall(self): # Test creation of a TUF repository. # # 1. 
Import public and private keys. @@ -120,9 +120,9 @@ def test_write_and_write_partial(self): # 3. Load signing keys. # 4. Add target files. # 5. Perform delegation. - # 5. write() + # 6. writeall() # - # Copy the target files from 'tuf/tests/repository_data' so that write() + # Copy the target files from 'tuf/tests/repository_data' so that writeall() # has target fileinfo to include in metadata. temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) targets_directory = os.path.join(temporary_directory, 'repository', @@ -186,9 +186,9 @@ def test_write_and_write_partial(self): repository.targets.add_verification_key(targets_pubkey) repository.snapshot.add_verification_key(snapshot_pubkey) - # Verify that repository.write() fails for insufficient threshold + # Verify that repository.writeall() fails for insufficient threshold # of signatures (default threshold = 1). - self.assertRaises(tuf.UnsignedMetadataError, repository.write) + self.assertRaises(tuf.UnsignedMetadataError, repository.writeall) repository.timestamp.add_verification_key(timestamp_pubkey) @@ -198,9 +198,9 @@ def test_write_and_write_partial(self): repository.targets.load_signing_key(targets_privkey) repository.snapshot.load_signing_key(snapshot_privkey) - # Verify that repository.write() fails for insufficient threshold + # Verify that repository.writeall() fails for insufficient threshold # of signatures (default threshold = 1). - self.assertRaises(tuf.UnsignedMetadataError, repository.write) + self.assertRaises(tuf.UnsignedMetadataError, repository.writeall) repository.timestamp.load_signing_key(timestamp_privkey) @@ -218,7 +218,7 @@ def test_write_and_write_partial(self): # (6) Write repository. repository.targets.compressions = ['gz'] - repository.write() + repository.writeall() # Verify that the expected metadata is written. 
for role in ['root.json', 'targets.json', 'snapshot.json', 'timestamp.json']: @@ -239,8 +239,9 @@ def test_write_and_write_partial(self): role1_signable = tuf.util.load_json_file(role1_filepath) tuf.formats.check_signable_object_format(role1_signable) - # Verify that an exception is *not* raised for multiple repository.write(). - repository.write() + # Verify that an exception is *not* raised for multiple + # repository.writeall(). + repository.writeall() # Verify that status() does not raise an exception. repository.status() @@ -308,17 +309,14 @@ def test_write_and_write_partial(self): repository.timestamp.unload_signing_key(root_privkey) repository.timestamp.load_signing_key(timestamp_privkey) - # Verify that a write() fails if a repository is loaded and a change + # Verify that a writeall() fails if a repository is loaded and a change # is made to a role. repo_tool.load_repository(repository_directory) repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 0) - self.assertRaises(tuf.UnsignedMetadataError, repository.write) + self.assertRaises(tuf.UnsignedMetadataError, repository.writeall) - # Verify that a write_partial() is allowed. - repository.write_partial() - - # Next, perform a non-partial write() with consistent snapshots enabled. + # Next, perform a writeall() with consistent snapshots enabled. # Since the timestamp was modified, load its private key. repository.timestamp.load_signing_key(timestamp_privkey) @@ -326,27 +324,28 @@ def test_write_and_write_partial(self): # snapshot modifies the Root metadata, which specifies whether a repository # supports consistent snapshots. Verify that an exception is raised due to # the missing signatures of Root and Snapshot. - self.assertRaises(tuf.UnsignedMetadataError, repository.write, - False, True) + self.assertRaises(tuf.UnsignedMetadataError, repository.writeall, True) # Load the private keys of Root and Snapshot (new version required since - # Root has changed.) 
+ # Root will change to enable consistent snapshots. repository.root.load_signing_key(root_privkey) + repository.targets.load_signing_key(targets_privkey) repository.snapshot.load_signing_key(snapshot_privkey) + repository.targets('role1').load_signing_key(role1_privkey) # Verify that a consistent snapshot can be written and loaded. The # 'targets' and 'role1' roles must be be marked as dirty, otherwise - # write() will not create consistent snapshots for them. + # writeall() will not create consistent snapshots for them. repository.mark_dirty(['targets', 'role1']) - repository.write(consistent_snapshot=True) + repository.writeall(consistent_snapshot=True) # Verify that the newly written consistent snapshot can be loaded # successfully. repo_tool.load_repository(repository_directory) # Test improperly formatted arguments. - self.assertRaises(tuf.FormatError, repository.write, 3, False) - self.assertRaises(tuf.FormatError, repository.write, False, 3) + self.assertRaises(tuf.FormatError, repository.writeall, 3, False) + self.assertRaises(tuf.FormatError, repository.writeall, False, 3) @@ -363,7 +362,7 @@ def test_get_filepaths_in_directory(self): expected_files = ['root.json', 'root.json.gz', 'targets.json', 'targets.json.gz', 'snapshot.json', 'snapshot.json.gz', 'timestamp.json', 'timestamp.json.gz', 'role1.json', - 'role1.json.gz'] + 'role1.json.gz', 'role2.json', 'role2.json.gz'] basenames = [] for filepath in metadata_files: basenames.append(os.path.basename(filepath)) @@ -948,7 +947,7 @@ def test_delegations(self): threshold = 1 self.targets_object.delegate(rolename, public_keys, list_of_targets, - threshold, backtrack=True, + threshold, terminating=False, restricted_paths=None, path_hash_prefixes=None) @@ -1107,7 +1106,7 @@ def test_delegate(self): path_hash_prefixes = ['e3a3', '8fae', 'd543'] self.targets_object.delegate(rolename, public_keys, list_of_targets, - threshold, backtrack=True, + threshold, terminating=False, restricted_paths=restricted_paths, 
path_hash_prefixes=path_hash_prefixes) @@ -1116,7 +1115,7 @@ def test_delegate(self): # Try to delegate to a role that has already been delegated. self.assertRaises(tuf.Error, self.targets_object.delegate, rolename, - public_keys, list_of_targets, threshold, backtrack=True, + public_keys, list_of_targets, threshold, terminating=False, restricted_paths=restricted_paths, path_hash_prefixes=path_hash_prefixes) @@ -1124,13 +1123,13 @@ def test_delegate(self): self.targets_object.revoke(rolename) self.assertRaises(tuf.Error, self.targets_object.delegate, rolename, public_keys, ['non-existent.txt'], threshold, - backtrack=True, restricted_paths=restricted_paths, + terminating=False, restricted_paths=restricted_paths, path_hash_prefixes=path_hash_prefixes) # Test for targets that do not exist under the targets directory. self.assertRaises(tuf.Error, self.targets_object.delegate, rolename, public_keys, list_of_targets, threshold, - backtrack=True, restricted_paths=['non-existent.txt'], + terminating=False, restricted_paths=['non-existent.txt'], path_hash_prefixes=path_hash_prefixes) @@ -1506,7 +1505,7 @@ def test_load_repository(self): # Verify the expected roles have been loaded. See # 'tuf/tests/repository_data/repository/'. expected_roles = \ - ['root', 'targets', 'snapshot', 'timestamp', 'role1'] + ['root', 'targets', 'snapshot', 'timestamp', 'role1', 'role2'] for role in tuf.roledb.get_rolenames(): self.assertTrue(role in expected_roles) @@ -1514,7 +1513,7 @@ def test_load_repository(self): self.assertTrue(len(repository.targets.keys)) self.assertTrue(len(repository.snapshot.keys)) self.assertTrue(len(repository.timestamp.keys)) - self.assertTrue(len(repository.targets('role1').keys)) + self.assertEqual(1, repository.targets('role1').version) # Assumed the targets (tuf/tests/repository_data/) role contains 'file1.txt' # and 'file2.txt'. 
diff --git a/tests/test_slow_retrieval_attack.py b/tests/test_slow_retrieval_attack.py index 0a47c055da..60cd457321 100755 --- a/tests/test_slow_retrieval_attack.py +++ b/tests/test_slow_retrieval_attack.py @@ -186,7 +186,7 @@ def setUp(self): repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_updater.py b/tests/test_updater.py index 88b5a6f270..03a7a5bde5 100755 --- a/tests/test_updater.py +++ b/tests/test_updater.py @@ -744,7 +744,7 @@ def test_3__update_metadata_if_changed(self): repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -816,7 +816,7 @@ def test_4_refresh(self): repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -870,7 +870,7 @@ def test_4__refresh_targets_metadata(self): self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True) # Verify that client's metadata files were refreshed successfully. - self.assertEqual(len(self.repository_updater.metadata['current']), 5) + self.assertEqual(len(self.repository_updater.metadata['current']), 6) # Test for compressed metadata roles. 
self.repository_updater.metadata['current']['snapshot']['meta']['targets.json.gz'] = \ @@ -997,18 +997,18 @@ def test_6_target(self): repository = repo_tool.load_repository(self.repository_directory) - repository.targets.delegate('role2', [self.role_keys['targets']['public']], + repository.targets.delegate('role3', [self.role_keys['targets']['public']], [], restricted_paths=[foo_pattern]) - repository.targets.delegate('role3', [self.role_keys['targets']['public']], + repository.targets.delegate('role4', [self.role_keys['targets']['public']], [foo_package], restricted_paths=[foo_pattern]) repository.targets.load_signing_key(self.role_keys['targets']['private']) - repository.targets('role2').load_signing_key(self.role_keys['targets']['private']) repository.targets('role3').load_signing_key(self.role_keys['targets']['private']) + repository.targets('role4').load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -1027,21 +1027,21 @@ def test_6_target(self): # return a 'tuf.UnknownTargetError' exception. repository = repo_tool.load_repository(self.repository_directory) - repository.targets.revoke('role2') repository.targets.revoke('role3') + repository.targets.revoke('role4') # Ensure we delegate in trusted order (i.e., 'role2' has higher priority.) 
- repository.targets.delegate('role2', [self.role_keys['targets']['public']], - [], backtrack=False, restricted_paths=[foo_pattern]) repository.targets.delegate('role3', [self.role_keys['targets']['public']], + [], terminating=True, restricted_paths=[foo_pattern]) + repository.targets.delegate('role4', [self.role_keys['targets']['public']], [foo_package], restricted_paths=[foo_pattern]) - repository.targets('role2').load_signing_key(self.role_keys['targets']['private']) repository.targets('role3').load_signing_key(self.role_keys['targets']['private']) + repository.targets('role4').load_signing_key(self.role_keys['targets']['private']) repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -1108,7 +1108,7 @@ def test_6_download_target(self): repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write(consistent_snapshot=True) + repository.writeall(consistent_snapshot=True) # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -1233,7 +1233,7 @@ def test_7_updated_targets(self): repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -1278,7 +1278,7 @@ def test_8_remove_obsolete_targets(self): repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) - repository.write() + repository.writeall() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) From 83eb466055430c759f6297150a4f2a3afd2caaa0 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 15 Sep 2016 15:27:19 -0400 Subject: [PATCH 06/20] Add repository.writeall() and repository.write(), and rename backtrack to terminating --- tuf/client/updater.py | 2 +- tuf/formats.py | 6 +- tuf/repository_lib.py | 123 ++++++++++++++++------------------- tuf/repository_tool.py | 142 +++++++++++++++++++++-------------------- 4 files changed, 131 insertions(+), 142 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 76e0a94ecf..fe3f2f8d90 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -2697,7 +2697,7 @@ def _preorder_depth_first_walk(self, target_filepath): for child_role in child_roles: child_role_name = self._visit_child_role(child_role, target_filepath, delegations) - if not child_role['backtrack'] and child_role_name is not None: + if child_role['terminating'] and child_role_name is not None: logger.debug('Adding child role ' + repr(child_role_name)) logger.debug('Not backtracking to other roles.') role_names = [] diff --git a/tuf/formats.py b/tuf/formats.py index 9c257bd902..03dcb84b90 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -408,7 +408,7 @@ name = SCHEMA.Optional(ROLENAME_SCHEMA), keyids = KEYIDS_SCHEMA, threshold = THRESHOLD_SCHEMA, - backtrack = SCHEMA.Optional(BOOLEAN_SCHEMA), + terminating = SCHEMA.Optional(BOOLEAN_SCHEMA), paths = SCHEMA.Optional(RELPATHS_SCHEMA), path_hash_prefixes = 
SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA)) @@ -459,9 +459,9 @@ # tuf.roledb ROLEDB_SCHEMA = SCHEMA.Object( object_name = 'ROLEDB_SCHEMA', - keyids = KEYIDS_SCHEMA, + keyids = SCHEMA.Optional(KEYIDS_SCHEMA), signing_keyids = SCHEMA.Optional(KEYIDS_SCHEMA), - threshold = THRESHOLD_SCHEMA, + threshold = SCHEMA.Optional(THRESHOLD_SCHEMA), version = SCHEMA.Optional(METADATAVERSION_SCHEMA), expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA), signatures = SCHEMA.Optional(SIGNATURES_SCHEMA), diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 643c6c23df..1896edb49a 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -102,18 +102,15 @@ METADATA_EXTENSIONS = ['.json.gz', '.json'] -def _generate_and_write_metadata(rolename, metadata_filename, write_partial, +def _generate_and_write_metadata(rolename, metadata_filename, targets_directory, metadata_directory, consistent_snapshot=False, filenames=None, - compression_algorithms=['gz']): + compression_algorithms=['gz'], + allow_partially_signed=False): """ - Non-public function that can generate and write the metadata of the specified - top-level 'rolename'. It also increments version numbers if: - - 1. write_partial==True and the metadata is the first to be written. - - 2. write_partial=False (i.e., write()), the metadata was not loaded as - partially written, and a write_partial is not needed. + Non-public function that can generate and write the metadata for the + specified 'rolename'. It also increments the version number of 'rolename' if + it is a top-level role. """ metadata = None @@ -169,58 +166,54 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, roleinfo['expires'], roleinfo['delegations'], consistent_snapshot) + + if rolename in ['root', 'targets', 'snapshot', 'timestamp'] and not allow_partially_signed: + # Before writing 'rolename' to disk, increment its version number and + # verify that it is fully signed. 
Only delegated roles should not be + # written to disk without full verification of its signatures, since they + # can only be considered fully signed depending on the delegating role. + roleinfo = tuf.roledb.get_roleinfo(rolename) + metadata['version'] = metadata['version'] + 1 + roleinfo['version'] = roleinfo['version'] + 1 + tuf.roledb.update_roleinfo(rolename, roleinfo) + signable = \ + sign_metadata(metadata, roleinfo['signing_keyids'], metadata_filename) + + if tuf.sig.verify(signable, rolename): + _remove_invalid_and_duplicate_signatures(signable) + filename = write_metadata_file(signable, metadata_filename, + metadata['version'], compression_algorithms, + consistent_snapshot) + + # The root and timestamp files should also be written without a version + # number prepended if 'consistent_snaptshot' is True. Clients may request + # a timestamp and root file without knowing their version numbers. + if rolename == 'root' or rolename == 'timestamp': + write_metadata_file(signable, metadata_filename, metadata['version'], + compression_algorithms, consistent_snapshot=False) + + # 'signable' contains an invalid threshold of signatures. + else: + message = 'Not enough signatures for ' + repr(metadata_filename) + raise tuf.UnsignedMetadataError(message, signable) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename) - - # Check if the version number of 'rolename' may be automatically incremented, - # depending on whether if partial metadata is loaded or if the metadata is - # written with write() / write_partial(). - # Increment the version number if this is the first partial write. 
- if write_partial: - temp_signable = sign_metadata(metadata, [], metadata_filename) - temp_signable['signatures'].extend(roleinfo['signatures']) - status = tuf.sig.get_signature_status(temp_signable, rolename) - if len(status['good_sigs']) == 0: - metadata['version'] = metadata['version'] + 1 - roleinfo = tuf.roledb.get_roleinfo(rolename) - roleinfo['version'] = roleinfo['version'] + 1 - tuf.roledb.update_roleinfo(rolename, roleinfo) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename) - # non-partial write() + # 'rolename' is a delegated role or a top-level role that is partially + # signed, and thus its signatures shouldn't be verified. Its version number + # is also not automatically incremented. else: # If writing a new version of 'rolename,' increment its version number in # both the metadata file and roledb (required so that snapshot references # the latest version). - if tuf.sig.verify(signable, rolename) and not roleinfo['partial_loaded']: - metadata['version'] = metadata['version'] + 1 - roleinfo = tuf.roledb.get_roleinfo(rolename) - roleinfo['version'] = roleinfo['version'] + 1 - tuf.roledb.update_roleinfo(rolename, roleinfo) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], + roleinfo = tuf.roledb.get_roleinfo(rolename) + metadata['version'] = metadata['version'] + 1 + roleinfo['version'] = roleinfo['version'] + 1 + tuf.roledb.update_roleinfo(rolename, roleinfo) + signable = sign_metadata(metadata, roleinfo['signing_keyids'], metadata_filename) - - # Write the metadata to file if it contains a threshold of signatures. 
- signable['signatures'].extend(roleinfo['signatures']) - - if tuf.sig.verify(signable, rolename) or write_partial: _remove_invalid_and_duplicate_signatures(signable) filename = write_metadata_file(signable, metadata_filename, metadata['version'], compression_algorithms, consistent_snapshot) - - # The root and timestamp files should also be written without a version - # number prepended if 'consistent_snaptshot' is True. Clients may request - # a timestamp and root file without knowing their version numbers. - if rolename == 'root' or rolename == 'timestamp': - write_metadata_file(signable, metadata_filename, metadata['version'], - compression_algorithms, consistent_snapshot=False) - - # 'signable' contains an invalid threshold of signatures. - else: - message = 'Not enough signatures for ' + repr(metadata_filename) - raise tuf.UnsignedMetadataError(message, signable) return signable, filename @@ -737,16 +730,7 @@ def _load_top_level_metadata(repository, top_level_filenames): except tuf.KeyAlreadyExistsError: pass - - for role in targets_metadata['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], 'compressions': [''], - 'signing_keyids': [], 'partial_loaded': False, 'paths': {}, - 'signatures': [], 'delegations': {'keys': {}, - 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo) - + else: logger.debug('The Targets file cannot be loaded: ' + repr(targets_filename)) @@ -1515,8 +1499,8 @@ def generate_targets_metadata(targets_directory, target_files, version, """ Generate the targets metadata object. The targets in 'target_files' must - exist at the same path they should on the repo. 'target_files' is a list of - targets. The 'custom' field of the targets metadata is not currently + exist at the same path they should on the repo. 'target_files' is a list + of targets. The 'custom' field of the targets metadata is not currently supported. 
@@ -1555,6 +1539,10 @@ def generate_targets_metadata(targets_directory, target_files, version, The target files are read and file information generated about them. + If 'write_consistent_targets' is True, hard links are created for + the targets in 'target_files'. For example, if 'some_file.txt' is one + of the targets of 'target_files', consistent targets + .some_file.txt, .some_file.txt, etc., are created. A targets metadata object, conformant to 'tuf.formats.TARGETS_SCHEMA'. @@ -1600,7 +1588,6 @@ def generate_targets_metadata(targets_directory, target_files, version, raise tuf.Error(repr(target_path) + ' cannot be read.' ' Unable to generate targets metadata.') - # Add 'custom' if it has been provided. Custom data about the target is # optional and will only be included in metadata (i.e., a 'custom' field in # the target's fileinfo dictionary) if specified here. @@ -2135,7 +2122,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory): # Verify the metadata of the Root role. try: signable, root_filename = \ - _generate_and_write_metadata('root', root_filename, False, + _generate_and_write_metadata('root', root_filename, targets_directory, metadata_directory) _log_status('root', signable) @@ -2148,7 +2135,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory): # Verify the metadata of the Targets role. 
try: signable, targets_filename = \ - _generate_and_write_metadata('targets', targets_filename, False, + _generate_and_write_metadata('targets', targets_filename, targets_directory, metadata_directory) _log_status('targets', signable) @@ -2160,7 +2147,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory): filenames = {'root': root_filename, 'targets': targets_filename} try: signable, snapshot_filename = \ - _generate_and_write_metadata('snapshot', snapshot_filename, False, + _generate_and_write_metadata('snapshot', snapshot_filename, targets_directory, metadata_directory, False, filenames) _log_status('snapshot', signable) @@ -2173,7 +2160,7 @@ def _log_status_of_top_level_roles(targets_directory, metadata_directory): filenames = {'snapshot': snapshot_filename} try: signable, timestamp_filename = \ - _generate_and_write_metadata('timestamp', timestamp_filename, False, + _generate_and_write_metadata('timestamp', timestamp_filename, targets_directory, metadata_directory, False, filenames) _log_status('timestamp', signable) diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index a1d1682a1e..2f7289d932 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -178,23 +178,15 @@ def __init__(self, repository_directory, metadata_directory, targets_directory): - def write(self, write_partial=False, consistent_snapshot=False, - compression_algorithms=['gz']): + def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): """ Write all the JSON Metadata objects to their corresponding files. - write() raises an exception if any of the role metadata to be written to - disk is invalid, such as an insufficient threshold of signatures, missing - private keys, etc. + writeall() raises an exception if any of the role metadata to be written + to disk is invalid, such as an insufficient threshold of signatures, + missing private keys, etc. 
- write_partial: - A boolean indicating whether partial metadata should be written to - disk. Partial metadata may be written to allow multiple maintainters - to independently sign and update role metadata. write() raises an - exception if a metadata role cannot be written due to not having enough - signatures. - consistent_snapshot: A boolean indicating whether written metadata and target files should include a version number in the filename (i.e., @@ -218,17 +210,17 @@ def write(self, write_partial=False, consistent_snapshot=False, None. """ - # Does 'write_partial' have the correct format? + # Do the arguments have the correct format? # Ensure the arguments have the appropriate number of objects and object # types, and that all dict keys are properly named. # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.BOOLEAN_SCHEMA.check_match(write_partial) tuf.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_algorithms) - # At this point the tuf.keydb and tuf.roledb stores must be fully - # populated, otherwise write() throws a 'tuf.UnsignedMetadataError' - # exception if any of the top-level roles are missing signatures, keys, etc. + # At this point, tuf.keydb and tuf.roledb must be fully populated, + # otherwise writeall() throws a 'tuf.UnsignedMetadataError' for the + # top-level roles. exception if any of the top-level roles are missing + # signatures, keys, etc. # Write the metadata files of all the Targets roles that are dirty (i.e., # have been modified via roledb.update_roleinfo()). @@ -252,20 +244,17 @@ def write(self, write_partial=False, consistent_snapshot=False, dirty_rolename + METADATA_EXTENSION) repo_lib._generate_and_write_metadata(dirty_rolename, dirty_filename, - write_partial, self._targets_directory, self._metadata_directory, consistent_snapshot, filenames) - # Metadata should be written in (delegated targets -> root -> - # targets -> snapshot -> timestamp) order. 
- # Begin by generating the 'root.json' metadata file. - # _generate_and_write_metadata() raises a 'tuf.Error' exception if the - # metadata cannot be written. + # Metadata should be written in (delegated targets -> root -> targets -> + # snapshot -> timestamp) order. Begin by generating the 'root.json' + # metadata file. _generate_and_write_metadata() raises a 'tuf.Error' + # exception if the metadata cannot be written. if 'root' in dirty_rolenames or consistent_snapshot: repo_lib._generate_and_write_metadata('root', filenames['root'], - write_partial, self._targets_directory, self._metadata_directory, consistent_snapshot, @@ -274,7 +263,6 @@ def write(self, write_partial=False, consistent_snapshot=False, # Generate the 'targets.json' metadata file. if 'targets' in dirty_rolenames: repo_lib._generate_and_write_metadata('targets', filenames['targets'], - write_partial, self._targets_directory, self._metadata_directory, consistent_snapshot) @@ -283,7 +271,6 @@ def write(self, write_partial=False, consistent_snapshot=False, if 'snapshot' in dirty_rolenames: snapshot_signable, junk = \ repo_lib._generate_and_write_metadata('snapshot', filenames['snapshot'], - write_partial, self._targets_directory, self._metadata_directory, consistent_snapshot, filenames) @@ -291,7 +278,6 @@ def write(self, write_partial=False, consistent_snapshot=False, # Generate the 'timestamp.json' metadata file. if 'timestamp' in dirty_rolenames: repo_lib._generate_and_write_metadata('timestamp', filenames['timestamp'], - write_partial, self._targets_directory, self._metadata_directory, consistent_snapshot, filenames) @@ -307,14 +293,23 @@ def write(self, write_partial=False, consistent_snapshot=False, - def write_partial(self): + def write(self, rolename, consistent_snapshot=False): """ - Write all the JSON metadata to their corresponding files, but allow - metadata files to contain an invalid threshold of signatures. + Write the JSON metadata for 'rolename' to its corresponding file on disk. 
+ Unlike writeall(), write() allows the metadata file to contain an invalid + threshold of signatures. - None. + rolename: + The name of the role to be written to disk. + + consistent_snapshot: + A boolean indicating whether written metadata and target files should + include a version number in the filename (i.e., + .root.json, .targets.json.gz, + .README.json + Example: 13.root.json' None. @@ -326,17 +321,33 @@ def write_partial(self): None. """ - self.write(write_partial=True) + rolename_filename = os.path.join(self._metadata_directory, + rolename + METADATA_EXTENSION) + + filenames = {'root': os.path.join(self._metadata_directory, repo_lib.ROOT_FILENAME), + 'targets': os.path.join(self._metadata_directory, repo_lib.TARGETS_FILENAME), + 'snapshot': os.path.join(self._metadata_directory, repo_lib.SNAPSHOT_FILENAME), + 'timestamp': os.path.join(self._metadata_directory, repo_lib.TIMESTAMP_FILENAME)} + + repo_lib._generate_and_write_metadata(rolename, + rolename_filename, + self._targets_directory, + self._metadata_directory, + consistent_snapshot, + filenames=filenames, + allow_partially_signed=True) + + def status(self): """ Determine the status of the top-level roles, including those delegated by the Targets role. status() checks if each role provides sufficient public and private keys, signatures, and that a valid metadata file is generated - if write() were to be called. Metadata files are temporarily written so + if writeall() were to be called. Metadata files are temporarily written so that file hashes and lengths may be verified, determine if delegated role trust is fully obeyed, and target paths valid according to parent roles. 
status() does not do a simple check for number of threshold keys and @@ -396,7 +407,7 @@ def status(self): continue try: - repo_lib._generate_and_write_metadata(delegated_role, filename, False, + repo_lib._generate_and_write_metadata(delegated_role, filename, targets_directory, metadata_directory) except tuf.UnsignedMetadataError: @@ -432,7 +443,7 @@ def dirty_roles(self): roles printed/logged here. Unlike status(), signatures, public keys, targets, etc. are not verified. status() should be called instead if the caller would like to verify if a valid role file is generated - if write() were to be called. + if writeall() were to be called. None. @@ -739,8 +750,8 @@ def load_signing_key(self, key): key: The role's key, conformant to 'tuf.formats.ANYKEY_SCHEMA'. It must contain the private key, so that role signatures may be generated when - write() or write_partial() is eventually called to generate valid - metadata files. + writeall() or write() is eventually called to generate valid metadata + files. tuf.FormatError, if 'key' is improperly formatted. @@ -2087,7 +2098,7 @@ def get_delegated_rolenames(self): def delegate(self, rolename, public_keys, list_of_targets, threshold=1, - backtrack=True, restricted_paths=None, path_hash_prefixes=None): + terminating=False, restricted_paths=None, path_hash_prefixes=None): """ Create a new delegation, where 'rolename' is a child delegation of this @@ -2118,13 +2129,13 @@ def delegate(self, rolename, public_keys, list_of_targets, threshold=1, threshold: The threshold number of keys of 'rolename'. - backtrack: + terminating: Boolean that indicates whether this role allows the updater client to continue searching for targets (target files it is trusted to list - but has not yet specified) in other delegations. If 'backtrack' is - False and 'updater.target()' does not find 'example_target.tar.gz' in + but has not yet specified) in other delegations. 
If 'terminating' is + True and 'updater.target()' does not find 'example_target.tar.gz' in this role, a 'tuf.UnknownTargetError' exception should be raised. If - 'backtrack' is True (default), and 'target/other_role' is also trusted + 'terminatin' is False (default), and 'target/other_role' is also trusted with 'example_target.tar.gz' and has listed it, updater.target() should backtrack and return the target file specified by 'target/other_role'. @@ -2161,7 +2172,7 @@ def delegate(self, rolename, public_keys, list_of_targets, threshold=1, tuf.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - tuf.formats.BOOLEAN_SCHEMA.check_match(backtrack) + tuf.formats.BOOLEAN_SCHEMA.check_match(terminating) if restricted_paths is not None: tuf.formats.RELPATHS_SCHEMA.check_match(restricted_paths) @@ -2239,7 +2250,7 @@ def delegate(self, rolename, public_keys, list_of_targets, threshold=1, roleinfo = {'name': rolename, 'keyids': roleinfo['keyids'], 'threshold': roleinfo['threshold'], - 'backtrack': backtrack, + 'terminating': terminating, 'paths': list(roleinfo['paths'].keys())} if restricted_paths is not None: @@ -2281,7 +2292,7 @@ def revoke(self, rolename): 'tuf.roledb'. Actual metadata files are not updated, only when repository.write() or - repository.write_partial() is called. + repository.write() is called. >>> >>> @@ -2829,6 +2840,9 @@ def load_repository(repository_directory): tuf.formats.PATH_SCHEMA.check_match(repository_directory) # Load top-level metadata. + #tuf.roledb.clear_roledb(clear_all=True) + #tuf.keydb.clear_keydb(clear_all=True) + repository_directory = os.path.abspath(repository_directory) metadata_directory = os.path.join(repository_directory, METADATA_STAGED_DIRECTORY_NAME) @@ -2901,7 +2915,14 @@ def load_repository(repository_directory): # Extract the metadata attributes of 'metadata_name' and update its # corresponding roleinfo. 
- roleinfo = tuf.roledb.get_roleinfo(metadata_name) + roleinfo = {'name': metadata_name, + 'signing_keyids': [], + 'signatures': [], + 'partial_loaded': False, + 'compressions': [], + 'paths': {}, + } + roleinfo['signatures'].extend(signable['signatures']) roleinfo['version'] = metadata_object['version'] roleinfo['expires'] = metadata_object['expires'] @@ -2912,13 +2933,8 @@ def load_repository(repository_directory): if os.path.exists(metadata_path + '.gz'): roleinfo['compressions'].append('gz') - - # The roleinfo of 'metadata_name' should have been initialized with - # defaults when it was loaded from its parent role. - if repo_lib._metadata_is_partially_loaded(metadata_name, signable, roleinfo): - roleinfo['partial_loaded'] = True - - tuf.roledb.update_roleinfo(metadata_name, roleinfo, mark_role_as_dirty=False) + + tuf.roledb.add_role(metadata_name, roleinfo) loaded_metadata.append(metadata_name) # Generate the Targets objects of the delegated roles of 'metadata_name' @@ -2938,27 +2954,13 @@ def load_repository(repository_directory): # The repository maintainer should have also been made aware of the # duplicate key when it was added. for key_metadata in six.itervalues(metadata_object['delegations']['keys']): - key_object = tuf.keys.format_metadata_to_key(key_metadata) - try: + key_object, junk = tuf.keys.format_metadata_to_key(key_metadata) + try: tuf.keydb.add_key(key_object) except tuf.KeyAlreadyExistsError: pass - - # Add the delegated role's initial roleinfo, to be fully populated - # when its metadata file is next loaded in one of the next iterations. 
- for role in metadata_object['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'compressions': [''], 'signing_keyids': [], - 'signatures': [], - 'paths': {}, - 'partial_loaded': False, - 'delegations': {'keys': {}, - 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo) - + return repository From b5a0cc413b552d69c3bd5a9e41a3923d6c44fcd9 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 15 Sep 2016 16:33:20 -0400 Subject: [PATCH 07/20] Add test repository compressed metadata was incorrectly listed in snapshot.json. List on roles with '.json' extension --- .../client/metadata/current/role1.json.gz | Bin 619 -> 619 bytes .../client/metadata/current/role2.json.gz | Bin 302 -> 302 bytes .../client/metadata/current/root.json | 2 +- .../client/metadata/current/root.json.gz | Bin 1679 -> 1677 bytes .../client/metadata/current/snapshot.json | 10 ++-------- .../client/metadata/current/snapshot.json.gz | Bin 397 -> 387 bytes .../client/metadata/current/targets.json.gz | Bin 816 -> 816 bytes .../client/metadata/current/timestamp.json | 6 +++--- .../client/metadata/current/timestamp.json.gz | Bin 367 -> 366 bytes .../client/metadata/previous/role1.json.gz | Bin 619 -> 619 bytes .../client/metadata/previous/role2.json.gz | Bin 302 -> 302 bytes .../client/metadata/previous/root.json | 2 +- .../client/metadata/previous/root.json.gz | Bin 1679 -> 1677 bytes .../client/metadata/previous/snapshot.json | 10 ++-------- .../client/metadata/previous/snapshot.json.gz | Bin 397 -> 387 bytes .../client/metadata/previous/targets.json.gz | Bin 816 -> 816 bytes .../client/metadata/previous/timestamp.json | 6 +++--- .../metadata/previous/timestamp.json.gz | Bin 367 -> 366 bytes .../repository/metadata.staged/role1.json.gz | Bin 619 -> 619 bytes .../repository/metadata.staged/role2.json.gz | Bin 302 -> 302 bytes .../repository/metadata.staged/root.json | 2 +- 
.../repository/metadata.staged/root.json.gz | Bin 1679 -> 1677 bytes .../repository/metadata.staged/snapshot.json | 10 ++-------- .../metadata.staged/snapshot.json.gz | Bin 397 -> 387 bytes .../metadata.staged/targets.json.gz | Bin 816 -> 816 bytes .../repository/metadata.staged/timestamp.json | 6 +++--- .../metadata.staged/timestamp.json.gz | Bin 367 -> 366 bytes .../repository/metadata/role1.json.gz | Bin 619 -> 619 bytes .../repository/metadata/role2.json.gz | Bin 302 -> 302 bytes .../repository/metadata/root.json | 2 +- .../repository/metadata/root.json.gz | Bin 1679 -> 1677 bytes .../repository/metadata/snapshot.json | 10 ++-------- .../repository/metadata/snapshot.json.gz | Bin 397 -> 387 bytes .../repository/metadata/targets.json.gz | Bin 816 -> 816 bytes .../repository/metadata/timestamp.json | 6 +++--- .../repository/metadata/timestamp.json.gz | Bin 367 -> 366 bytes 36 files changed, 24 insertions(+), 48 deletions(-) diff --git a/tests/repository_data/client/metadata/current/role1.json.gz b/tests/repository_data/client/metadata/current/role1.json.gz index 9c2e24c6b4bc138cc178fc95188a2deb61e510bc..e2fd36f00b5d950d4b88ee2ee8286bc891929560 100644 GIT binary patch delta 16 XcmaFO@|uNRzMF&L4fE}d>?uqDG1&!x delta 16 XcmaFO@|uNRzMF%g;?=E<>?uqDG!g~! 
diff --git a/tests/repository_data/client/metadata/current/role2.json.gz b/tests/repository_data/client/metadata/current/role2.json.gz index a25840d547c493ea8c250d9f76d0a00e6d7859b0..6074f9bdfd6b611f2ac527c62c1ab56896f80073 100644 GIT binary patch delta 16 XcmZ3-w2p~gzMF&L4fE}d?COjFDgXq; delta 16 XcmZ3-w2p~gzMF%g;?=E diff --git a/tests/repository_data/client/metadata/current/root.json b/tests/repository_data/client/metadata/current/root.json index aebb36c2e8..ab8e8dc859 100644 --- a/tests/repository_data/client/metadata/current/root.json +++ b/tests/repository_data/client/metadata/current/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" + "sig": 
"3a5a1d2c4fba47117b1d297517261da9f13d6bfbdfd322bb68d5631ff456dba6446e35cd0d67cf068d1592cd80333f5566b74225dfbdc2aead60ea4ca2f79a4d4542e9d6039d9715404f07ca05145b02a53241ec30a992161777cc9154e9a8fc37cd292f6dd11af4acfc307b8b4ccb3024ad3d5409d24b91b6ae9f542d8813641f0d8d4c5a16d30f471937c2badcffc591f0e32f81755b44e8139d69042213997d459711c482a7bde2e0177ba0079a3d7cf19f825f0619c114dc88ff9eada298b7e524c727a51fbe5b9e59221f0a515931427aa662022738a03c3b1f44953e6a110e0aacbd55c328f9f0bbe97d3f6bde5fe0b3d390b5da3442ea02cc06b7b5daef31f2356283ab197d11f677c57106897b27ca2c2ada87880d906416d9de90ac1593312af726f4a43f9290b19a81d4d092be6408deb53469dabd27f1d4a16d5f306736d483a5f9b4ab820d4e8ffca2f05ba0e501062da11389da137a0aff7c8111e28269a609fe602eb1786d1732f43d6cddbe6c5847241697a7ed5f395879b4" } ], "signed": { diff --git a/tests/repository_data/client/metadata/current/root.json.gz b/tests/repository_data/client/metadata/current/root.json.gz index 527b3ca18946d69b3793ca74726f3657f81cde3b..b4c12d146d9d14442e32c708d04875c6783ab84e 100644 GIT binary patch literal 1677 zcmV;826FiyiwFSh1KU>u|HW3@ZX!t%efL)&UfnJ7erahRii1TUg+lVg@E71VMyIstlqI+8{EfISs;Ki~=&oSfQWq`Ip#5n|Y*{_5M#UzvqqNuSL{)cfCfH9XCs;Q+VPz?g5^F~Rfkk<%M z6JGnEk#YMFDJCXDzX1A-MonNZ48_5od8COAf+6LD<&+8SjrBxEuG06z2x;(%n}2x1`~lBEhfU@8qVF-oT}#(^jzv=Ky(P&5X; z>%4~u99K+2BMB{y4EudGR}%KC0vTS8D&Ic6kuvPrvL|Ep`+8f119kFzeIQNM<;wyn zTc&yT-7Kd0DqYW(KNwu8PIr56-`-x$%CuaEZ2i5=Oi|9j$CpYpi!vPSVN;~9?5`3; z(PxC=x7l4KqWyc-4v5M~+GL@bvA|PZvMn37&9{b@dO+lUe-S ztWqr=?ILxDHx<+#c!&__0W}aUHw38aRfqvA9;i@+F-Qx#i&_cF0*z(0P|kvAsMio) z_P;N$B1R)rT`-{x2XTVV21XkK4GF~shJ!05L3+F;3=}Ik(6C4(7R+-;sKb9hCY%6| zh++aMXj3b(G8&}G0RcgmKogvuZ!{60CaPv;M_dD_;`sGeN5PLkxJk=&_dZ%?N^ zxNjw8^Wi#APrBpVV?JH-V%x!sNp|CE=X87Dy}9TtbbFij&~mul-O$eN`f@kBe^A+M zj>1{o-9D4s=T35W(QODntgYL_HtRMwldL-rJAQI<%64aYxvJgnu4JRlcbhJq_d1QU zCVtYZ@#Tt-r^)cLc~Q(ayTOi$RXb~;^3+SnqR5Itf8-Ke^gFxfJ|5C0d9U$cnqSjR_rbKMUstnPZ<5VAHN4!_7iqt@O3ycg-l);OSzOF( zm&G`)kGD9MjFhO^_q={_QqPaaF26my8)j~*M>HXmY_qA|UkP1bQy0=?{E!tgivIC< 
zmtUuG6c#2wlb4;lX{|kPEsjyXxoV>Au$Z;eM(;V(Zju!vo5}NOX73+H(@sCM){9u< zjeNT*i72)wv8LPhvGjg0+H5w@oo-Ebx{oJvfbX{GYcZZ=^M1Hfbfz=X@mcFGJzf>w zo^=P8UY-<>WV7-KJ;~?yDtT%YZn??SdFOWg*gHO6soJJ&O|s8CdtN6C{P zm~QrvTnw-1#lqOHsM*uS%02h8?Jk-2>q*l3$0R#6Q|r9>@BQ^>Cc%(FKqH`!-%lG8 zJq}iIMI@Yk7}W{}vjhnu3NaX9!t6Ou;5dMUGg#ZdUtN&UFau!hQVtT}h2j{rGAO52 z3_)}3L4OovA;1J@Cdk05fDdOA45y&?m>j*~QeEYX@W)tM<%h}kKGFA?eOQhD1%&ks zX67tk?1wadSE+X+_P+)4;qm!PfvnAH8rJ1s#PQ*#`n5RHWhmEXS^PyLA0D3nBa)vD X^AuM5Bh#zl<>=*iHu5=R0Sy2E&5$XY literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK diff --git a/tests/repository_data/client/metadata/current/snapshot.json 
b/tests/repository_data/client/metadata/current/snapshot.json index c2b2365777..bf565452c9 100644 --- a/tests/repository_data/client/metadata/current/snapshot.json +++ b/tests/repository_data/client/metadata/current/snapshot.json @@ -3,7 +3,7 @@ { "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", "method": "ed25519", - "sig": "1a8e64f88506d11089250e426b8508bdf645d7074897e5fec53fccd2dcccfa69868f6b9bd003cfa999623b41747705f8e0b33b20119ff4a1b3b5819aa4d1340d" + "sig": "9419a135b0c41fe350d712f944047661ddfa2c8b4cb141088976bc789c8ea55aba6efff78dcfa46b11790136281ae649e1e421713fbab47e274e1afd838aca03" } ], "signed": { @@ -13,18 +13,12 @@ "role1.json": { "version": 1 }, - "role1.json.gz": { - "version": 1 - }, "role2.json": { "version": 1 }, - "role2.json.gz": { - "version": 1 - }, "root.json": { "hashes": { - "sha256": "54695388090bfe60edb2342694b7dc930985cd66e30e973cde3c8b71dd548d35" + "sha256": "03843cc3b2a50d363894b2aa26e617466147355487d647abd36aba209e69a6e6" }, "length": 3329, "version": 1 diff --git a/tests/repository_data/client/metadata/current/snapshot.json.gz b/tests/repository_data/client/metadata/current/snapshot.json.gz index 8efedbdbcbbd6e0303c22a82d0fc149a1c8a2ce2..88aa0f12c90f526ce4bca0793f7d405c7e8b082b 100644 GIT binary patch literal 387 zcmV-}0et=+iwFSh1KU>u|BX;xZ`&{oeD|*q^qdu1Cfs*MsT^9$jH?%17 z-%C5`fbC(MKmpY0a6H~?XY72~A9?w4mf7B$FP$;3dX4=iFNa!HD~8qaD6{G zJ%klO`{Mcxp`}1vNt}WlO7NVqR7_B%FLt--eV1jNHbLoq2=I>U_!TS!g#kl#jbISe zNeTQ=h#aLD*IDdauGA_JxSUIhwWVBI77l?cM;V48<-QF}u>z#x0He>q5~&CXc}Rdm zRi{aOA^{%yjF}rdv})$knTOpi5m~LSN!YK;%Sl#?ACG*R$7$J&TX{Ymu9;cy(0y>A z^~t&Wtv+8#x-{Q(j6F|J0(Xz|bbN=5{VC`9umP|Fm)$R>|4-(${6QRf9yedtx2`j? 
z4{FcY?h)wAY{5pyB!*++?xNMojiAy7_zq{LMz8mxCkV$pn~-4w5*jeU~iei>C4 h(U;xdTrPax%QF9CqwdE4{o~SI-T-FEttqAg006FZytM!T literal 397 zcmV;80doEyiwFRC>e^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i diff --git a/tests/repository_data/client/metadata/current/targets.json.gz b/tests/repository_data/client/metadata/current/targets.json.gz index e2f9e949af6455e9da30ef1aad29e6702f0a2e9a..0753ea6945da4785543f5d99659a1b8a641a608d 100644 GIT binary patch delta 16 XcmdnMwtu|5cF9Zrm^ogztU|L+5QFlKRbarH0!G=3e%@k77S{E+u^Q(~yw+=B~=Jz^2I6dWTF z_!O}vXKjkU^;XdJ?xZ%tjuC=uz9LmHr`_)&%XHffrJ?tb53cJsnAs6Gkq%UHg9g51 ziIF6>fF*<&190$#obqN}MIXFw#|l84JlO~YwYR{Md!m*h2lAF;q8eIGA!ACh5@L0k zRTT>~tGEh+^+v<2!}#rVG$LEntqJpedB4jR@%em_dEv`_Pj2MT{ro7+pe6gsg3hzG zPy2j%i1cbcjMVqa_j$T4pI_(O^&cJ0#PhWKJpNTTv0k?sSMUWKLS!w+TNXW+0@$1r zm53oXDvh-SCm`B^=)~facf}I}nB8$bJr@0x>&r5!Z;0tYzvXQ{?_ppA>u{+5e^`h0 MFVhk>B`N{{0R6e^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& diff --git a/tests/repository_data/client/metadata/previous/role1.json.gz b/tests/repository_data/client/metadata/previous/role1.json.gz index 9c2e24c6b4bc138cc178fc95188a2deb61e510bc..e2fd36f00b5d950d4b88ee2ee8286bc891929560 100644 GIT binary patch delta 16 XcmaFO@|uNRzMF&L4fE}d>?uqDG1&!x delta 16 XcmaFO@|uNRzMF%g;?=E<>?uqDG!g~! 
diff --git a/tests/repository_data/client/metadata/previous/role2.json.gz b/tests/repository_data/client/metadata/previous/role2.json.gz index a25840d547c493ea8c250d9f76d0a00e6d7859b0..6074f9bdfd6b611f2ac527c62c1ab56896f80073 100644 GIT binary patch delta 16 XcmZ3-w2p~gzMF&L4fE}d?COjFDgXq; delta 16 XcmZ3-w2p~gzMF%g;?=E diff --git a/tests/repository_data/client/metadata/previous/root.json b/tests/repository_data/client/metadata/previous/root.json index aebb36c2e8..ab8e8dc859 100644 --- a/tests/repository_data/client/metadata/previous/root.json +++ b/tests/repository_data/client/metadata/previous/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" + "sig": 
"3a5a1d2c4fba47117b1d297517261da9f13d6bfbdfd322bb68d5631ff456dba6446e35cd0d67cf068d1592cd80333f5566b74225dfbdc2aead60ea4ca2f79a4d4542e9d6039d9715404f07ca05145b02a53241ec30a992161777cc9154e9a8fc37cd292f6dd11af4acfc307b8b4ccb3024ad3d5409d24b91b6ae9f542d8813641f0d8d4c5a16d30f471937c2badcffc591f0e32f81755b44e8139d69042213997d459711c482a7bde2e0177ba0079a3d7cf19f825f0619c114dc88ff9eada298b7e524c727a51fbe5b9e59221f0a515931427aa662022738a03c3b1f44953e6a110e0aacbd55c328f9f0bbe97d3f6bde5fe0b3d390b5da3442ea02cc06b7b5daef31f2356283ab197d11f677c57106897b27ca2c2ada87880d906416d9de90ac1593312af726f4a43f9290b19a81d4d092be6408deb53469dabd27f1d4a16d5f306736d483a5f9b4ab820d4e8ffca2f05ba0e501062da11389da137a0aff7c8111e28269a609fe602eb1786d1732f43d6cddbe6c5847241697a7ed5f395879b4" } ], "signed": { diff --git a/tests/repository_data/client/metadata/previous/root.json.gz b/tests/repository_data/client/metadata/previous/root.json.gz index 527b3ca18946d69b3793ca74726f3657f81cde3b..b4c12d146d9d14442e32c708d04875c6783ab84e 100644 GIT binary patch literal 1677 zcmV;826FiyiwFSh1KU>u|HW3@ZX!t%efL)&UfnJ7erahRii1TUg+lVg@E71VMyIstlqI+8{EfISs;Ki~=&oSfQWq`Ip#5n|Y*{_5M#UzvqqNuSL{)cfCfH9XCs;Q+VPz?g5^F~Rfkk<%M z6JGnEk#YMFDJCXDzX1A-MonNZ48_5od8COAf+6LD<&+8SjrBxEuG06z2x;(%n}2x1`~lBEhfU@8qVF-oT}#(^jzv=Ky(P&5X; z>%4~u99K+2BMB{y4EudGR}%KC0vTS8D&Ic6kuvPrvL|Ep`+8f119kFzeIQNM<;wyn zTc&yT-7Kd0DqYW(KNwu8PIr56-`-x$%CuaEZ2i5=Oi|9j$CpYpi!vPSVN;~9?5`3; z(PxC=x7l4KqWyc-4v5M~+GL@bvA|PZvMn37&9{b@dO+lUe-S ztWqr=?ILxDHx<+#c!&__0W}aUHw38aRfqvA9;i@+F-Qx#i&_cF0*z(0P|kvAsMio) z_P;N$B1R)rT`-{x2XTVV21XkK4GF~shJ!05L3+F;3=}Ik(6C4(7R+-;sKb9hCY%6| zh++aMXj3b(G8&}G0RcgmKogvuZ!{60CaPv;M_dD_;`sGeN5PLkxJk=&_dZ%?N^ zxNjw8^Wi#APrBpVV?JH-V%x!sNp|CE=X87Dy}9TtbbFij&~mul-O$eN`f@kBe^A+M zj>1{o-9D4s=T35W(QODntgYL_HtRMwldL-rJAQI<%64aYxvJgnu4JRlcbhJq_d1QU zCVtYZ@#Tt-r^)cLc~Q(ayTOi$RXb~;^3+SnqR5Itf8-Ke^gFxfJ|5C0d9U$cnqSjR_rbKMUstnPZ<5VAHN4!_7iqt@O3ycg-l);OSzOF( zm&G`)kGD9MjFhO^_q={_QqPaaF26my8)j~*M>HXmY_qA|UkP1bQy0=?{E!tgivIC< 
zmtUuG6c#2wlb4;lX{|kPEsjyXxoV>Au$Z;eM(;V(Zju!vo5}NOX73+H(@sCM){9u< zjeNT*i72)wv8LPhvGjg0+H5w@oo-Ebx{oJvfbX{GYcZZ=^M1Hfbfz=X@mcFGJzf>w zo^=P8UY-<>WV7-KJ;~?yDtT%YZn??SdFOWg*gHO6soJJ&O|s8CdtN6C{P zm~QrvTnw-1#lqOHsM*uS%02h8?Jk-2>q*l3$0R#6Q|r9>@BQ^>Cc%(FKqH`!-%lG8 zJq}iIMI@Yk7}W{}vjhnu3NaX9!t6Ou;5dMUGg#ZdUtN&UFau!hQVtT}h2j{rGAO52 z3_)}3L4OovA;1J@Cdk05fDdOA45y&?m>j*~QeEYX@W)tM<%h}kKGFA?eOQhD1%&ks zX67tk?1wadSE+X+_P+)4;qm!PfvnAH8rJ1s#PQ*#`n5RHWhmEXS^PyLA0D3nBa)vD X^AuM5Bh#zl<>=*iHu5=R0Sy2E&5$XY literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK diff --git a/tests/repository_data/client/metadata/previous/snapshot.json 
b/tests/repository_data/client/metadata/previous/snapshot.json index c2b2365777..bf565452c9 100644 --- a/tests/repository_data/client/metadata/previous/snapshot.json +++ b/tests/repository_data/client/metadata/previous/snapshot.json @@ -3,7 +3,7 @@ { "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", "method": "ed25519", - "sig": "1a8e64f88506d11089250e426b8508bdf645d7074897e5fec53fccd2dcccfa69868f6b9bd003cfa999623b41747705f8e0b33b20119ff4a1b3b5819aa4d1340d" + "sig": "9419a135b0c41fe350d712f944047661ddfa2c8b4cb141088976bc789c8ea55aba6efff78dcfa46b11790136281ae649e1e421713fbab47e274e1afd838aca03" } ], "signed": { @@ -13,18 +13,12 @@ "role1.json": { "version": 1 }, - "role1.json.gz": { - "version": 1 - }, "role2.json": { "version": 1 }, - "role2.json.gz": { - "version": 1 - }, "root.json": { "hashes": { - "sha256": "54695388090bfe60edb2342694b7dc930985cd66e30e973cde3c8b71dd548d35" + "sha256": "03843cc3b2a50d363894b2aa26e617466147355487d647abd36aba209e69a6e6" }, "length": 3329, "version": 1 diff --git a/tests/repository_data/client/metadata/previous/snapshot.json.gz b/tests/repository_data/client/metadata/previous/snapshot.json.gz index 8efedbdbcbbd6e0303c22a82d0fc149a1c8a2ce2..88aa0f12c90f526ce4bca0793f7d405c7e8b082b 100644 GIT binary patch literal 387 zcmV-}0et=+iwFSh1KU>u|BX;xZ`&{oeD|*q^qdu1Cfs*MsT^9$jH?%17 z-%C5`fbC(MKmpY0a6H~?XY72~A9?w4mf7B$FP$;3dX4=iFNa!HD~8qaD6{G zJ%klO`{Mcxp`}1vNt}WlO7NVqR7_B%FLt--eV1jNHbLoq2=I>U_!TS!g#kl#jbISe zNeTQ=h#aLD*IDdauGA_JxSUIhwWVBI77l?cM;V48<-QF}u>z#x0He>q5~&CXc}Rdm zRi{aOA^{%yjF}rdv})$knTOpi5m~LSN!YK;%Sl#?ACG*R$7$J&TX{Ymu9;cy(0y>A z^~t&Wtv+8#x-{Q(j6F|J0(Xz|bbN=5{VC`9umP|Fm)$R>|4-(${6QRf9yedtx2`j? 
z4{FcY?h)wAY{5pyB!*++?xNMojiAy7_zq{LMz8mxCkV$pn~-4w5*jeU~iei>C4 h(U;xdTrPax%QF9CqwdE4{o~SI-T-FEttqAg006FZytM!T literal 397 zcmV;80doEyiwFRC>e^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i diff --git a/tests/repository_data/client/metadata/previous/targets.json.gz b/tests/repository_data/client/metadata/previous/targets.json.gz index e2f9e949af6455e9da30ef1aad29e6702f0a2e9a..0753ea6945da4785543f5d99659a1b8a641a608d 100644 GIT binary patch delta 16 XcmdnMwtu|5cF9Zrm^ogztU|L+5QFlKRbarH0!G=3e%@k77S{E+u^Q(~yw+=B~=Jz^2I6dWTF z_!O}vXKjkU^;XdJ?xZ%tjuC=uz9LmHr`_)&%XHffrJ?tb53cJsnAs6Gkq%UHg9g51 ziIF6>fF*<&190$#obqN}MIXFw#|l84JlO~YwYR{Md!m*h2lAF;q8eIGA!ACh5@L0k zRTT>~tGEh+^+v<2!}#rVG$LEntqJpedB4jR@%em_dEv`_Pj2MT{ro7+pe6gsg3hzG zPy2j%i1cbcjMVqa_j$T4pI_(O^&cJ0#PhWKJpNTTv0k?sSMUWKLS!w+TNXW+0@$1r zm53oXDvh-SCm`B^=)~facf}I}nB8$bJr@0x>&r5!Z;0tYzvXQ{?_ppA>u{+5e^`h0 MFVhk>B`N{{0R6e^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& diff --git a/tests/repository_data/repository/metadata.staged/role1.json.gz b/tests/repository_data/repository/metadata.staged/role1.json.gz index 9c2e24c6b4bc138cc178fc95188a2deb61e510bc..e2fd36f00b5d950d4b88ee2ee8286bc891929560 100644 GIT binary patch delta 16 XcmaFO@|uNRzMF&L4fE}d>?uqDG1&!x delta 16 XcmaFO@|uNRzMF%g;?=E<>?uqDG!g~! 
diff --git a/tests/repository_data/repository/metadata.staged/role2.json.gz b/tests/repository_data/repository/metadata.staged/role2.json.gz index a25840d547c493ea8c250d9f76d0a00e6d7859b0..6074f9bdfd6b611f2ac527c62c1ab56896f80073 100644 GIT binary patch delta 16 XcmZ3-w2p~gzMF&L4fE}d?COjFDgXq; delta 16 XcmZ3-w2p~gzMF%g;?=E diff --git a/tests/repository_data/repository/metadata.staged/root.json b/tests/repository_data/repository/metadata.staged/root.json index aebb36c2e8..ab8e8dc859 100644 --- a/tests/repository_data/repository/metadata.staged/root.json +++ b/tests/repository_data/repository/metadata.staged/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" + "sig": 
"3a5a1d2c4fba47117b1d297517261da9f13d6bfbdfd322bb68d5631ff456dba6446e35cd0d67cf068d1592cd80333f5566b74225dfbdc2aead60ea4ca2f79a4d4542e9d6039d9715404f07ca05145b02a53241ec30a992161777cc9154e9a8fc37cd292f6dd11af4acfc307b8b4ccb3024ad3d5409d24b91b6ae9f542d8813641f0d8d4c5a16d30f471937c2badcffc591f0e32f81755b44e8139d69042213997d459711c482a7bde2e0177ba0079a3d7cf19f825f0619c114dc88ff9eada298b7e524c727a51fbe5b9e59221f0a515931427aa662022738a03c3b1f44953e6a110e0aacbd55c328f9f0bbe97d3f6bde5fe0b3d390b5da3442ea02cc06b7b5daef31f2356283ab197d11f677c57106897b27ca2c2ada87880d906416d9de90ac1593312af726f4a43f9290b19a81d4d092be6408deb53469dabd27f1d4a16d5f306736d483a5f9b4ab820d4e8ffca2f05ba0e501062da11389da137a0aff7c8111e28269a609fe602eb1786d1732f43d6cddbe6c5847241697a7ed5f395879b4" } ], "signed": { diff --git a/tests/repository_data/repository/metadata.staged/root.json.gz b/tests/repository_data/repository/metadata.staged/root.json.gz index 527b3ca18946d69b3793ca74726f3657f81cde3b..b4c12d146d9d14442e32c708d04875c6783ab84e 100644 GIT binary patch literal 1677 zcmV;826FiyiwFSh1KU>u|HW3@ZX!t%efL)&UfnJ7erahRii1TUg+lVg@E71VMyIstlqI+8{EfISs;Ki~=&oSfQWq`Ip#5n|Y*{_5M#UzvqqNuSL{)cfCfH9XCs;Q+VPz?g5^F~Rfkk<%M z6JGnEk#YMFDJCXDzX1A-MonNZ48_5od8COAf+6LD<&+8SjrBxEuG06z2x;(%n}2x1`~lBEhfU@8qVF-oT}#(^jzv=Ky(P&5X; z>%4~u99K+2BMB{y4EudGR}%KC0vTS8D&Ic6kuvPrvL|Ep`+8f119kFzeIQNM<;wyn zTc&yT-7Kd0DqYW(KNwu8PIr56-`-x$%CuaEZ2i5=Oi|9j$CpYpi!vPSVN;~9?5`3; z(PxC=x7l4KqWyc-4v5M~+GL@bvA|PZvMn37&9{b@dO+lUe-S ztWqr=?ILxDHx<+#c!&__0W}aUHw38aRfqvA9;i@+F-Qx#i&_cF0*z(0P|kvAsMio) z_P;N$B1R)rT`-{x2XTVV21XkK4GF~shJ!05L3+F;3=}Ik(6C4(7R+-;sKb9hCY%6| zh++aMXj3b(G8&}G0RcgmKogvuZ!{60CaPv;M_dD_;`sGeN5PLkxJk=&_dZ%?N^ zxNjw8^Wi#APrBpVV?JH-V%x!sNp|CE=X87Dy}9TtbbFij&~mul-O$eN`f@kBe^A+M zj>1{o-9D4s=T35W(QODntgYL_HtRMwldL-rJAQI<%64aYxvJgnu4JRlcbhJq_d1QU zCVtYZ@#Tt-r^)cLc~Q(ayTOi$RXb~;^3+SnqR5Itf8-Ke^gFxfJ|5C0d9U$cnqSjR_rbKMUstnPZ<5VAHN4!_7iqt@O3ycg-l);OSzOF( zm&G`)kGD9MjFhO^_q={_QqPaaF26my8)j~*M>HXmY_qA|UkP1bQy0=?{E!tgivIC< 
zmtUuG6c#2wlb4;lX{|kPEsjyXxoV>Au$Z;eM(;V(Zju!vo5}NOX73+H(@sCM){9u< zjeNT*i72)wv8LPhvGjg0+H5w@oo-Ebx{oJvfbX{GYcZZ=^M1Hfbfz=X@mcFGJzf>w zo^=P8UY-<>WV7-KJ;~?yDtT%YZn??SdFOWg*gHO6soJJ&O|s8CdtN6C{P zm~QrvTnw-1#lqOHsM*uS%02h8?Jk-2>q*l3$0R#6Q|r9>@BQ^>Cc%(FKqH`!-%lG8 zJq}iIMI@Yk7}W{}vjhnu3NaX9!t6Ou;5dMUGg#ZdUtN&UFau!hQVtT}h2j{rGAO52 z3_)}3L4OovA;1J@Cdk05fDdOA45y&?m>j*~QeEYX@W)tM<%h}kKGFA?eOQhD1%&ks zX67tk?1wadSE+X+_P+)4;qm!PfvnAH8rJ1s#PQ*#`n5RHWhmEXS^PyLA0D3nBa)vD X^AuM5Bh#zl<>=*iHu5=R0Sy2E&5$XY literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK diff --git a/tests/repository_data/repository/metadata.staged/snapshot.json 
b/tests/repository_data/repository/metadata.staged/snapshot.json index c2b2365777..bf565452c9 100644 --- a/tests/repository_data/repository/metadata.staged/snapshot.json +++ b/tests/repository_data/repository/metadata.staged/snapshot.json @@ -3,7 +3,7 @@ { "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", "method": "ed25519", - "sig": "1a8e64f88506d11089250e426b8508bdf645d7074897e5fec53fccd2dcccfa69868f6b9bd003cfa999623b41747705f8e0b33b20119ff4a1b3b5819aa4d1340d" + "sig": "9419a135b0c41fe350d712f944047661ddfa2c8b4cb141088976bc789c8ea55aba6efff78dcfa46b11790136281ae649e1e421713fbab47e274e1afd838aca03" } ], "signed": { @@ -13,18 +13,12 @@ "role1.json": { "version": 1 }, - "role1.json.gz": { - "version": 1 - }, "role2.json": { "version": 1 }, - "role2.json.gz": { - "version": 1 - }, "root.json": { "hashes": { - "sha256": "54695388090bfe60edb2342694b7dc930985cd66e30e973cde3c8b71dd548d35" + "sha256": "03843cc3b2a50d363894b2aa26e617466147355487d647abd36aba209e69a6e6" }, "length": 3329, "version": 1 diff --git a/tests/repository_data/repository/metadata.staged/snapshot.json.gz b/tests/repository_data/repository/metadata.staged/snapshot.json.gz index 8efedbdbcbbd6e0303c22a82d0fc149a1c8a2ce2..88aa0f12c90f526ce4bca0793f7d405c7e8b082b 100644 GIT binary patch literal 387 zcmV-}0et=+iwFSh1KU>u|BX;xZ`&{oeD|*q^qdu1Cfs*MsT^9$jH?%17 z-%C5`fbC(MKmpY0a6H~?XY72~A9?w4mf7B$FP$;3dX4=iFNa!HD~8qaD6{G zJ%klO`{Mcxp`}1vNt}WlO7NVqR7_B%FLt--eV1jNHbLoq2=I>U_!TS!g#kl#jbISe zNeTQ=h#aLD*IDdauGA_JxSUIhwWVBI77l?cM;V48<-QF}u>z#x0He>q5~&CXc}Rdm zRi{aOA^{%yjF}rdv})$knTOpi5m~LSN!YK;%Sl#?ACG*R$7$J&TX{Ymu9;cy(0y>A z^~t&Wtv+8#x-{Q(j6F|J0(Xz|bbN=5{VC`9umP|Fm)$R>|4-(${6QRf9yedtx2`j? 
z4{FcY?h)wAY{5pyB!*++?xNMojiAy7_zq{LMz8mxCkV$pn~-4w5*jeU~iei>C4 h(U;xdTrPax%QF9CqwdE4{o~SI-T-FEttqAg006FZytM!T literal 397 zcmV;80doEyiwFRC>e^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i diff --git a/tests/repository_data/repository/metadata.staged/targets.json.gz b/tests/repository_data/repository/metadata.staged/targets.json.gz index e2f9e949af6455e9da30ef1aad29e6702f0a2e9a..0753ea6945da4785543f5d99659a1b8a641a608d 100644 GIT binary patch delta 16 XcmdnMwtu|5cF9Zrm^ogztU|L+5QFlKRbarH0!G=3e%@k77S{E+u^Q(~yw+=B~=Jz^2I6dWTF z_!O}vXKjkU^;XdJ?xZ%tjuC=uz9LmHr`_)&%XHffrJ?tb53cJsnAs6Gkq%UHg9g51 ziIF6>fF*<&190$#obqN}MIXFw#|l84JlO~YwYR{Md!m*h2lAF;q8eIGA!ACh5@L0k zRTT>~tGEh+^+v<2!}#rVG$LEntqJpedB4jR@%em_dEv`_Pj2MT{ro7+pe6gsg3hzG zPy2j%i1cbcjMVqa_j$T4pI_(O^&cJ0#PhWKJpNTTv0k?sSMUWKLS!w+TNXW+0@$1r zm53oXDvh-SCm`B^=)~facf}I}nB8$bJr@0x>&r5!Z;0tYzvXQ{?_ppA>u{+5e^`h0 MFVhk>B`N{{0R6e^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& diff --git a/tests/repository_data/repository/metadata/role1.json.gz b/tests/repository_data/repository/metadata/role1.json.gz index 9c2e24c6b4bc138cc178fc95188a2deb61e510bc..e2fd36f00b5d950d4b88ee2ee8286bc891929560 100644 GIT binary patch delta 16 XcmaFO@|uNRzMF&L4fE}d>?uqDG1&!x delta 16 XcmaFO@|uNRzMF%g;?=E<>?uqDG!g~! 
diff --git a/tests/repository_data/repository/metadata/role2.json.gz b/tests/repository_data/repository/metadata/role2.json.gz index a25840d547c493ea8c250d9f76d0a00e6d7859b0..6074f9bdfd6b611f2ac527c62c1ab56896f80073 100644 GIT binary patch delta 16 XcmZ3-w2p~gzMF&L4fE}d?COjFDgXq; delta 16 XcmZ3-w2p~gzMF%g;?=E diff --git a/tests/repository_data/repository/metadata/root.json b/tests/repository_data/repository/metadata/root.json index aebb36c2e8..ab8e8dc859 100644 --- a/tests/repository_data/repository/metadata/root.json +++ b/tests/repository_data/repository/metadata/root.json @@ -3,7 +3,7 @@ { "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", "method": "RSASSA-PSS", - "sig": "02a93c6d09ddc20e13e2b7f3ff29e400ab350346297f181d7f9e55486eeda62aa0ca60786ada23eee937e9b6286f9dc7750a36778a0ee5b1bd4cfecf040f3a425932e6e5f74abedcbd8e4eb29ae73c0aaf0047154565e1cb0d4501d05b23e413901aaa50ff70dafeaacb0fe15729da1e3483d4448fc2afedde986f082eef5f0cac57010a45db5f91e90373fe95db9899037f2056b24d3bac4179f24f02d8ea62e21b4c80414253e56ff26580d18595cb4b6549ce7580148ca8cc10f18420c6c14cf9c7208bede73a862b70d2d88942eab4cd435e5ad52fcd6d80504d9491b2bcd23df679e1d1f7d60e0535cf00943f92ec90e4037f5793c62c7e0f13f2e59c6ebdda0831dd0ef9a71bea4660eb5f080e7318411fddb53edda4be93d52194b36ee2268961666679a3aaade1e97ed1c07943331fb24746b77ee3f3cce6891dc6e35d126f0f2b29be91b5113cecfb4cb13017fcdba512c6dd48669cdde86d44a6f0b51e4633edff91dca220a62b52a2393499d38589021c778ac77136261ab7a95a" + "sig": 
"3a5a1d2c4fba47117b1d297517261da9f13d6bfbdfd322bb68d5631ff456dba6446e35cd0d67cf068d1592cd80333f5566b74225dfbdc2aead60ea4ca2f79a4d4542e9d6039d9715404f07ca05145b02a53241ec30a992161777cc9154e9a8fc37cd292f6dd11af4acfc307b8b4ccb3024ad3d5409d24b91b6ae9f542d8813641f0d8d4c5a16d30f471937c2badcffc591f0e32f81755b44e8139d69042213997d459711c482a7bde2e0177ba0079a3d7cf19f825f0619c114dc88ff9eada298b7e524c727a51fbe5b9e59221f0a515931427aa662022738a03c3b1f44953e6a110e0aacbd55c328f9f0bbe97d3f6bde5fe0b3d390b5da3442ea02cc06b7b5daef31f2356283ab197d11f677c57106897b27ca2c2ada87880d906416d9de90ac1593312af726f4a43f9290b19a81d4d092be6408deb53469dabd27f1d4a16d5f306736d483a5f9b4ab820d4e8ffca2f05ba0e501062da11389da137a0aff7c8111e28269a609fe602eb1786d1732f43d6cddbe6c5847241697a7ed5f395879b4" } ], "signed": { diff --git a/tests/repository_data/repository/metadata/root.json.gz b/tests/repository_data/repository/metadata/root.json.gz index 527b3ca18946d69b3793ca74726f3657f81cde3b..b4c12d146d9d14442e32c708d04875c6783ab84e 100644 GIT binary patch literal 1677 zcmV;826FiyiwFSh1KU>u|HW3@ZX!t%efL)&UfnJ7erahRii1TUg+lVg@E71VMyIstlqI+8{EfISs;Ki~=&oSfQWq`Ip#5n|Y*{_5M#UzvqqNuSL{)cfCfH9XCs;Q+VPz?g5^F~Rfkk<%M z6JGnEk#YMFDJCXDzX1A-MonNZ48_5od8COAf+6LD<&+8SjrBxEuG06z2x;(%n}2x1`~lBEhfU@8qVF-oT}#(^jzv=Ky(P&5X; z>%4~u99K+2BMB{y4EudGR}%KC0vTS8D&Ic6kuvPrvL|Ep`+8f119kFzeIQNM<;wyn zTc&yT-7Kd0DqYW(KNwu8PIr56-`-x$%CuaEZ2i5=Oi|9j$CpYpi!vPSVN;~9?5`3; z(PxC=x7l4KqWyc-4v5M~+GL@bvA|PZvMn37&9{b@dO+lUe-S ztWqr=?ILxDHx<+#c!&__0W}aUHw38aRfqvA9;i@+F-Qx#i&_cF0*z(0P|kvAsMio) z_P;N$B1R)rT`-{x2XTVV21XkK4GF~shJ!05L3+F;3=}Ik(6C4(7R+-;sKb9hCY%6| zh++aMXj3b(G8&}G0RcgmKogvuZ!{60CaPv;M_dD_;`sGeN5PLkxJk=&_dZ%?N^ zxNjw8^Wi#APrBpVV?JH-V%x!sNp|CE=X87Dy}9TtbbFij&~mul-O$eN`f@kBe^A+M zj>1{o-9D4s=T35W(QODntgYL_HtRMwldL-rJAQI<%64aYxvJgnu4JRlcbhJq_d1QU zCVtYZ@#Tt-r^)cLc~Q(ayTOi$RXb~;^3+SnqR5Itf8-Ke^gFxfJ|5C0d9U$cnqSjR_rbKMUstnPZ<5VAHN4!_7iqt@O3ycg-l);OSzOF( zm&G`)kGD9MjFhO^_q={_QqPaaF26my8)j~*M>HXmY_qA|UkP1bQy0=?{E!tgivIC< 
zmtUuG6c#2wlb4;lX{|kPEsjyXxoV>Au$Z;eM(;V(Zju!vo5}NOX73+H(@sCM){9u< zjeNT*i72)wv8LPhvGjg0+H5w@oo-Ebx{oJvfbX{GYcZZ=^M1Hfbfz=X@mcFGJzf>w zo^=P8UY-<>WV7-KJ;~?yDtT%YZn??SdFOWg*gHO6soJJ&O|s8CdtN6C{P zm~QrvTnw-1#lqOHsM*uS%02h8?Jk-2>q*l3$0R#6Q|r9>@BQ^>Cc%(FKqH`!-%lG8 zJq}iIMI@Yk7}W{}vjhnu3NaX9!t6Ou;5dMUGg#ZdUtN&UFau!hQVtT}h2j{rGAO52 z3_)}3L4OovA;1J@Cdk05fDdOA45y&?m>j*~QeEYX@W)tM<%h}kKGFA?eOQhD1%&ks zX67tk?1wadSE+X+_P+)4;qm!PfvnAH8rJ1s#PQ*#`n5RHWhmEXS^PyLA0D3nBa)vD X^AuM5Bh#zl<>=*iHu5=R0Sy2E&5$XY literal 1679 zcmV;A25|WwiwFRC>e^QV|HW2aQzAOU2?#bV zjSAfGzuyF%x$fTGyAxq-+>WM$uBy(;JelWY{kB)BmXlF#mye5BR=-sKxmT(DhQCU6 z8rPG6BUK@RCLR*_K&7LE&;*(>M|{*lB=4LsLJ4jp3CsddOgW>8h}6d*fK@;3?A^!Z zc)l}uKF9`x>~nuGc$-2Gw&OspVO|C>A$SUrut=RsEG23p2e6KzIWDPDiD(j3GEoSw zWsJc}YAtvxL1}4&r7Xs1n2N?ps%0|4DH}vfLLnpORV7MWUIxG(sl2j&=K#~d)A_yiBUoe~{L!&o< zdBiA$+9sNM6>)o(XcWfF=t8hqStJCA$yh~Pv|OSYL<2OSOXw;gDIh!y;j`RfiqRKh zxMNraluC_NDDhIpGE7E@h@y&tcu?pBVYKcj;_#v*3TrzYmrL9RzDPUThpjeKEabp70 zLK~pOZ>ta=gh?uib;=rHt4M~Iy~?*wZ={S{wrt5*{k~imaYvo`e7Pe{)z9w>q-;5v z=ilvYG+#`XwYK)<(oT|D$$e@>9hv$0s?9SUYxmY?vjCk?Dwb$STPZB&|>*ftC?|Z zo77-_7*^{Wdn+vm%x zh_M7FNiMY&lw#!qlN|HTh|mftgr>9-AqA|zlz8-k(L@!IJP1#jCx1UCf}$c(&E-}B zkRe1{gWUJZi=Yt(g9%1Kq7gxnC=3C`F>;4(z&Wp(|1%~ZZXN$8Octg61w1})uX?l9 zKB@FC>&NYbO1F8l8@$bX?RI^Bmz~r{(}(eN(lXCb&(4~MS$2Am-RNvPKN)rLe={o^ z50~@FVP|-~FGlyGShvaSHox+<6SltVT%EROrnR1Q;r@KRxnk|j<=JL@_n`Cf6ykB( zTtCz6=XQ2;+BuNnytZtew|J+qy3IS&xDkhkM|^WUFBi4z&4oH>iOs6Rrrq|zaf3XW z#qew)hNJBKtZ`aQSDXHZ%S9`1LU|MvMp5KNzc=t1DSGYAbB~toq%-LF;OEI<1`* z!?fIAlT>nALZcT%{q(Rt-yiz<_3_Pl?nh?8GJ2b@R<*keY3fUq+DSHi$P1NZZ-2O% zUry2>&g}eHowaX9wbryb+lTq;q5bf0tMZ}Vc{a&JDgANboO*m`1@6^PukbR$L{|ALf2Mh^ES^ej~N^tsW7_A1!Pa5 zZ#(%zc6xrnPG{DAg+|vD3;*2B*PCqAt7lpBAGi6gn3^Yzf3L4UGs%Jlq!H2wPZmkp z6iCE&uBj5(ewfk9T0AQN#rBWK3~9HV#}kbL=`Att?^hR)273UPh6$8_AhjT&^Y3#H3#P+>C%n{}3drbD;aH%flv-n3_TFiHy?R}u{Bm1x#{R;@o zG4{;ye70@UnB2#5Y43}_@Z`hI^M82q ZvtXX$VtZtIop{-M`5jqtZW93w006R%I57YK diff --git a/tests/repository_data/repository/metadata/snapshot.json 
b/tests/repository_data/repository/metadata/snapshot.json index c2b2365777..bf565452c9 100644 --- a/tests/repository_data/repository/metadata/snapshot.json +++ b/tests/repository_data/repository/metadata/snapshot.json @@ -3,7 +3,7 @@ { "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", "method": "ed25519", - "sig": "1a8e64f88506d11089250e426b8508bdf645d7074897e5fec53fccd2dcccfa69868f6b9bd003cfa999623b41747705f8e0b33b20119ff4a1b3b5819aa4d1340d" + "sig": "9419a135b0c41fe350d712f944047661ddfa2c8b4cb141088976bc789c8ea55aba6efff78dcfa46b11790136281ae649e1e421713fbab47e274e1afd838aca03" } ], "signed": { @@ -13,18 +13,12 @@ "role1.json": { "version": 1 }, - "role1.json.gz": { - "version": 1 - }, "role2.json": { "version": 1 }, - "role2.json.gz": { - "version": 1 - }, "root.json": { "hashes": { - "sha256": "54695388090bfe60edb2342694b7dc930985cd66e30e973cde3c8b71dd548d35" + "sha256": "03843cc3b2a50d363894b2aa26e617466147355487d647abd36aba209e69a6e6" }, "length": 3329, "version": 1 diff --git a/tests/repository_data/repository/metadata/snapshot.json.gz b/tests/repository_data/repository/metadata/snapshot.json.gz index 8efedbdbcbbd6e0303c22a82d0fc149a1c8a2ce2..88aa0f12c90f526ce4bca0793f7d405c7e8b082b 100644 GIT binary patch literal 387 zcmV-}0et=+iwFSh1KU>u|BX;xZ`&{oeD|*q^qdu1Cfs*MsT^9$jH?%17 z-%C5`fbC(MKmpY0a6H~?XY72~A9?w4mf7B$FP$;3dX4=iFNa!HD~8qaD6{G zJ%klO`{Mcxp`}1vNt}WlO7NVqR7_B%FLt--eV1jNHbLoq2=I>U_!TS!g#kl#jbISe zNeTQ=h#aLD*IDdauGA_JxSUIhwWVBI77l?cM;V48<-QF}u>z#x0He>q5~&CXc}Rdm zRi{aOA^{%yjF}rdv})$knTOpi5m~LSN!YK;%Sl#?ACG*R$7$J&TX{Ymu9;cy(0y>A z^~t&Wtv+8#x-{Q(j6F|J0(Xz|bbN=5{VC`9umP|Fm)$R>|4-(${6QRf9yedtx2`j? 
z4{FcY?h)wAY{5pyB!*++?xNMojiAy7_zq{LMz8mxCkV$pn~-4w5*jeU~iei>C4 h(U;xdTrPax%QF9CqwdE4{o~SI-T-FEttqAg006FZytM!T literal 397 zcmV;80doEyiwFRC>e^QV|BX^jj}tKrz0a?RaxN<;j+1!*2An_$<-_R|mQK|f5X-9n zoy@d*U=Ps9k&+kd`T4~!gVOV1e@x4dv&{NYeHxT{>09Y<@_eWr(I#4Je2x*I5Y*bB zFNYaTKQq*5s04B;JAn=+VzMbBXACAwWxd<@zRNOB8=+K7WM1)JeyftAc-LYi@YNWI zYzf5KUYH=}+PtG0Ah^gus7WDgrBqu>DJ^-9J~p2@R{-n)Gkc5KnczYIYH`(u*@7|L znoA~Qrf4`NR}DI-x{J9C>htdAh^$gqC-j%)`6R2v_s4Xa$7$JwYx#9LTs_kk5Z;05 z@d4m*i;vf&-gLV0D1Dxu#5{bRr{gQ6^iMg@hYc_*aM``deAxfKC)wUjYwxF>mN%?p zn#Zl<>%AK_CQE*`fShN-J|hgdi3h3KV*fpMIn=@k9H~_A1wlAqse&cu&_{?|tVnue ryp~q_Nsjwv?7WDUcmGnkr1M^u`9JLsxBR~!F2m&y9pUXd00RI3@Uh5i diff --git a/tests/repository_data/repository/metadata/targets.json.gz b/tests/repository_data/repository/metadata/targets.json.gz index e2f9e949af6455e9da30ef1aad29e6702f0a2e9a..0753ea6945da4785543f5d99659a1b8a641a608d 100644 GIT binary patch delta 16 XcmdnMwtu|5cF9Zrm^ogztU|L+5QFlKRbarH0!G=3e%@k77S{E+u^Q(~yw+=B~=Jz^2I6dWTF z_!O}vXKjkU^;XdJ?xZ%tjuC=uz9LmHr`_)&%XHffrJ?tb53cJsnAs6Gkq%UHg9g51 ziIF6>fF*<&190$#obqN}MIXFw#|l84JlO~YwYR{Md!m*h2lAF;q8eIGA!ACh5@L0k zRTT>~tGEh+^+v<2!}#rVG$LEntqJpedB4jR@%em_dEv`_Pj2MT{ro7+pe6gsg3hzG zPy2j%i1cbcjMVqa_j$T4pI_(O^&cJ0#PhWKJpNTTv0k?sSMUWKLS!w+TNXW+0@$1r zm53oXDvh-SCm`B^=)~facf}I}nB8$bJr@0x>&r5!Z;0tYzvXQ{?_ppA>u{+5e^`h0 MFVhk>B`N{{0R6e^QV|5cGoZrv~pMdx0Hq48@WlA=CYM@9*Xg0iUCY2OF26Eq0C z_sV|mpa4106>)e;es#T>r&S0`K6yzX^Nq0Q}xbAVZ^Lr$vZ(J z;25w(XKf5zYRTwobvhKniUGVUJRl8VPCM_c%Xr;^T96sip||#%2*f$1EXiA7gs2cA zH%cZZ*Zw`8#nlPNFVw(PrDh|gta@W|NmIK N^)DlVi0mo?000M=wl)9& From 383a99b6ad74bf56c539c2c5f010bdae438b00f2 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Thu, 15 Sep 2016 16:34:39 -0400 Subject: [PATCH 08/20] Don't list compressed versions of rolenames in snapshot.json --- tuf/repository_lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 1896edb49a..475eaae67b 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ 
-101,6 +101,8 @@ # The full list of supported TUF metadata extensions. METADATA_EXTENSIONS = ['.json.gz', '.json'] +# The supported extensions of roles listed in Snapshot metadata. +SNAPSHOT_ROLE_EXTENSIONS = ['.json'] def _generate_and_write_metadata(rolename, metadata_filename, targets_directory, metadata_directory, @@ -1713,7 +1715,7 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, _strip_version_number(metadata_filename, consistent_snapshot) # All delegated roles are added to the snapshot file. - for metadata_extension in METADATA_EXTENSIONS: + for metadata_extension in SNAPSHOT_ROLE_EXTENSIONS: if metadata_filename.endswith(metadata_extension): rolename = metadata_filename[:-len(metadata_extension)] From a11709000d3d42dd39c457ef8b2fdb6d85922a6a Mon Sep 17 00:00:00 2001 From: Evan Cordell Date: Fri, 9 Sep 2016 15:20:19 -0400 Subject: [PATCH 09/20] Add root versioning for root key rotation --- docs/tuf-spec.txt | 14 ++- tests/test_replay_attack.py | 4 +- tests/test_repository_lib.py | 14 +-- tests/test_repository_tool.py | 2 + tests/test_root_versioning.py | 228 ++++++++++++++++++++++++++++++++++ tests/test_sig.py | 18 +-- tuf/client/updater.py | 2 +- tuf/formats.py | 2 + tuf/repository_lib.py | 82 ++++++++---- tuf/repository_tool.py | 16 ++- tuf/roledb.py | 2 + tuf/sig.py | 190 ++++++++++++++++------------ 12 files changed, 441 insertions(+), 133 deletions(-) create mode 100644 tests/test_root_versioning.py diff --git a/docs/tuf-spec.txt b/docs/tuf-spec.txt index 9efd19284a..3be7e332de 100644 --- a/docs/tuf-spec.txt +++ b/docs/tuf-spec.txt @@ -1008,12 +1008,14 @@ Version 1.0 (Draft) To replace a compromised root key or any other top-level role key, the root role signs a new root.json file that lists the updated trusted keys for the role. 
When replacing root keys, an application will sign the new root.json - file with both the new and old root keys until all clients are known to have - obtained the new root.json file (a safe assumption is that this will be a - very long time or never). There is no risk posed by continuing to sign the - root.json file with revoked keys as once clients have updated they no longer - trust the revoked key. This is only to ensure outdated clients remain able - to update. + file with both the new and old root keys. Any time such a change is + required, the root.json file is versioned and accessible by version number, + e.g. 3.root.json. Clients update the set of trusted root keys by requesting + the current root.json and all previous root.json versions, until one is + found that has been signed by keys the client already trusts. This is to + ensure that outdated clients remain able to update, without requiring all + previous root keys to be kept to sign new root.json metadata. + To replace a delegated developer key, the role that delegated to that key just replaces that key with another in the signed metadata where the diff --git a/tests/test_replay_attack.py b/tests/test_replay_attack.py index 76b2180bb5..866c2e17b6 100755 --- a/tests/test_replay_attack.py +++ b/tests/test_replay_attack.py @@ -218,7 +218,7 @@ def test_without_tuf(self): # Set an arbitrary expiration so that the repository tool generates a new # version. repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12) - repository.write() + repository.write_partial() # Move the staged metadata to the "live" metadata. shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) @@ -289,7 +289,7 @@ def test_with_tuf(self): # Set an arbitrary expiration so that the repository tool generates a new # version. repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12) - repository.write() + repository.write_partial() # Move the staged metadata to the "live" metadata. 
shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 4d798248d0..61bfe86f9e 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -432,14 +432,14 @@ def test_generate_root_metadata(self): tuf.keydb.create_keydb_from_root_metadata(root_signable['signed']) expires = '1985-10-21T01:22:00Z' - root_metadata = repo_lib.generate_root_metadata(1, expires, + root_metadata = repo_lib.generate_root_metadata(1, expires, [], 1, consistent_snapshot=False) self.assertTrue(tuf.formats.ROOT_SCHEMA.matches(root_metadata)) root_keyids = tuf.roledb.get_role_keyids('root') tuf.keydb._keydb_dict['default'][root_keyids[0]]['keytype'] = 'bad_keytype' self.assertRaises(tuf.Error, repo_lib.generate_root_metadata, 1, - expires, consistent_snapshot=False) + expires, [], 1, consistent_snapshot=False) # Reset the root key's keytype, so that we can next verify that a different # tuf.Error exception is raised for duplicate keyids. @@ -448,21 +448,21 @@ def test_generate_root_metadata(self): # Add duplicate keyid to root's roleinfo. tuf.roledb._roledb_dict['default']['root']['keyids'].append(root_keyids[0]) self.assertRaises(tuf.Error, repo_lib.generate_root_metadata, 1, - expires, consistent_snapshot=False) + expires, [], 1, consistent_snapshot=False) # Test improperly formatted arguments. self.assertRaises(tuf.FormatError, repo_lib.generate_root_metadata, - '3', expires, False) + '3', expires, [], 1, False) self.assertRaises(tuf.FormatError, repo_lib.generate_root_metadata, - 1, '3', False) + 1, '3', [], 1, False) self.assertRaises(tuf.FormatError, repo_lib.generate_root_metadata, - 1, expires, 3) + 1, expires, [], 1, 3) # Test for missing required roles and keys. 
tuf.roledb.clear_roledb() tuf.keydb.clear_keydb() self.assertRaises(tuf.Error, repo_lib.generate_root_metadata, - 1, expires, False) + 1, expires, [], 1, False) diff --git a/tests/test_repository_tool.py b/tests/test_repository_tool.py index 1690057b02..131e0d0399 100755 --- a/tests/test_repository_tool.py +++ b/tests/test_repository_tool.py @@ -338,6 +338,8 @@ def test_write_and_write_partial(self): # 'targets' and 'role1' roles must be be marked as dirty, otherwise # write() will not create consistent snapshots for them. repository.mark_dirty(['targets', 'role1']) + repository.targets('role1').load_signing_key(role1_privkey) + repository.targets.load_signing_key(targets_privkey) repository.write(consistent_snapshot=True) # Verify that the newly written consistent snapshot can be loaded diff --git a/tests/test_root_versioning.py b/tests/test_root_versioning.py new file mode 100644 index 0000000000..1f577aecf2 --- /dev/null +++ b/tests/test_root_versioning.py @@ -0,0 +1,228 @@ +#!/usr/bin/env python + +""" + + test_root_versioning.py + + + Evan Cordell. + + + July 21, 2016. + + + See LICENSE for licensing information. + + + Test root versioning for efficient root key rotation. +""" + +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import os +import logging +import tempfile +import shutil +import sys + +# 'unittest2' required for testing under Python < 2.7. 
+if sys.version_info >= (2, 7): + import unittest +else: + import unittest2 as unittest + +import tuf +import tuf.log +import tuf.formats +import tuf.roledb +import tuf.keydb +import tuf.hash +import tuf.repository_tool as repo_tool + +logger = logging.getLogger('tuf.test_root_versioning') + +repo_tool.disable_console_log_messages() + + +class TestRepository(unittest.TestCase): + + @classmethod + def setUpClass(cls): + cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + + @classmethod + def tearDownClass(cls): + shutil.rmtree(cls.temporary_directory) + + def tearDown(self): + tuf.roledb.clear_roledb() + tuf.keydb.clear_keydb() + + def test_init(self): + # Test normal case. + repository = repo_tool.Repository('repository_directory/', + 'metadata_directory/', + 'targets_directory/') + self.assertTrue(isinstance(repository.root, repo_tool.Root)) + self.assertTrue(isinstance(repository.snapshot, repo_tool.Snapshot)) + self.assertTrue(isinstance(repository.timestamp, repo_tool.Timestamp)) + self.assertTrue(isinstance(repository.targets, repo_tool.Targets)) + + # Test improperly formatted arguments. + self.assertRaises(tuf.FormatError, repo_tool.Repository, 3, + 'metadata_directory/', 'targets_directory') + self.assertRaises(tuf.FormatError, repo_tool.Repository, + 'repository_directory', 3, 'targets_directory') + self.assertRaises(tuf.FormatError, repo_tool.Repository, + 'repository_directory', 'metadata_directory', 3) + + + + def test_root_role_versioning(self): + # Test root role versioning + # + # 1. Import public and private keys. + # 2. Add verification keys. + # 3. Load signing keys. + # 4. Add target files. + # 5. Perform delegation. + # 6. write() + # + # Copy the target files from 'tuf/tests/repository_data' so that write() + # has target fileinfo to include in metadata. 
+ temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) + targets_directory = os.path.join(temporary_directory, 'repository', + repo_tool.TARGETS_DIRECTORY_NAME) + original_targets_directory = os.path.join('repository_data', + 'repository', 'targets') + shutil.copytree(original_targets_directory, targets_directory) + + # In this case, create_new_repository() creates the 'repository/' + # sub-directory in 'temporary_directory' if it does not exist. + repository_directory = os.path.join(temporary_directory, 'repository') + metadata_directory = os.path.join(repository_directory, + repo_tool.METADATA_STAGED_DIRECTORY_NAME) + repository = repo_tool.create_new_repository(repository_directory) + + + + + # (1) Load the public and private keys of the top-level roles, and one + # delegated role. + keystore_directory = os.path.join('repository_data', 'keystore') + + # Load the public keys. + root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub') + targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub') + snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub') + timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub') + role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub') + + root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path) + targets_pubkey = repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path) + snapshot_pubkey = \ + repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path) + timestamp_pubkey = \ + repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path) + role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path) + + # Load the private keys. 
+ root_privkey_path = os.path.join(keystore_directory, 'root_key') + targets_privkey_path = os.path.join(keystore_directory, 'targets_key') + snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key') + timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key') + role1_privkey_path = os.path.join(keystore_directory, 'delegation_key') + + root_privkey = \ + repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password') + targets_privkey = \ + repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, 'password') + snapshot_privkey = \ + repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path, + 'password') + timestamp_privkey = \ + repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path, + 'password') + role1_privkey = \ + repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path, + 'password') + + + # (2) Add top-level verification keys. + repository.root.add_verification_key(root_pubkey) + repository.targets.add_verification_key(targets_pubkey) + repository.snapshot.add_verification_key(snapshot_pubkey) + repository.timestamp.add_verification_key(timestamp_pubkey) + + + # (3) Load top-level signing keys. + repository.root.load_signing_key(root_privkey) + repository.targets.load_signing_key(targets_privkey) + repository.snapshot.load_signing_key(snapshot_privkey) + repository.timestamp.load_signing_key(timestamp_privkey) + + # (4) Add target files. + target1 = os.path.join(targets_directory, 'file1.txt') + target2 = os.path.join(targets_directory, 'file2.txt') + target3 = os.path.join(targets_directory, 'file3.txt') + repository.targets.add_target(target1) + repository.targets.add_target(target2) + + + # (5) Perform delegation. + repository.targets.delegate('role1', [role1_pubkey], [target3]) + repository.targets('role1').load_signing_key(role1_privkey) + + # (6) Write repository. 
+ repository.targets.compressions = ['gz'] + repository.write() + + self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json'))) + self.assertTrue(os.path.exists(os.path.join(metadata_directory, '1.root.json'))) + + + # Verify that the expected metadata is written. + root_filepath = os.path.join(metadata_directory, 'root.json') + root_1_filepath = os.path.join(metadata_directory, '1.root.json') + root_2_filepath = os.path.join(metadata_directory, '2.root.json') + old_root_signable = tuf.util.load_json_file(root_filepath) + root_1_signable = tuf.util.load_json_file(root_1_filepath) + + # Make a change to the root keys + repository.root.add_verification_key(targets_pubkey) + repository.root.load_signing_key(targets_privkey) + repository.root.threshold = 2 + repository.write() + + new_root_signable = tuf.util.load_json_file(root_filepath) + root_2_signable = tuf.util.load_json_file(root_2_filepath) + + for role_signable in [old_root_signable, new_root_signable, root_1_signable, root_2_signable]: + # Raise 'tuf.FormatError' if 'role_signable' is an invalid signable. 
+ tuf.formats.check_signable_object_format(role_signable) + + # Verify contents of versioned roots + self.assertEqual(old_root_signable, root_1_signable) + self.assertEqual(new_root_signable, root_2_signable) + + + self.assertEqual(root_1_signable['signed']['version'], 1) + self.assertEqual(root_2_signable['signed']['version'], 2) + + repository.root.remove_verification_key(root_pubkey) + repository.root.unload_signing_key(root_privkey) + repository.root.threshold = 1 + + # Errors, not enough signing keys to satisfy old threshold + self.assertRaises(tuf.UnsignedMetadataError, repository.write) + + # No error, ignores old threshold for witnessing + repository.write_partial() + + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_sig.py b/tests/test_sig.py index 2ac541fd43..e0a700e9e7 100755 --- a/tests/test_sig.py +++ b/tests/test_sig.py @@ -59,8 +59,7 @@ def test_get_signature_status_no_role(self): signable = {'signed' : 'test', 'signatures' : []} # A valid, but empty signature status - sig_status = tuf.sig.get_signature_status(signable) - self.assertTrue(tuf.formats.SIGNATURESTATUS_SCHEMA.matches(sig_status)) + self.assertRaises(tuf.FormatError, tuf.sig.get_signature_status, signable) # A valid signable, but non-existent role argument. self.assertRaises(tuf.UnknownRoleError, tuf.sig.get_signature_status, @@ -74,19 +73,10 @@ def test_get_signature_status_no_role(self): tuf.keydb.add_key(KEYS[0]) - # No specific role we're considering. - sig_status = tuf.sig.get_signature_status(signable, None) - # Non-existent role. 
- self.assertRaises(tuf.UnknownRoleError, tuf.sig.get_signature_status, - signable, 'unknown_role') + self.assertRaises(tuf.FormatError, tuf.sig.get_signature_status, + signable, None) - self.assertEqual(0, sig_status['threshold']) - self.assertEqual([KEYS[0]['keyid']], sig_status['good_sigs']) - self.assertEqual([], sig_status['bad_sigs']) - self.assertEqual([], sig_status['unknown_sigs']) - self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) # Not allowed to call verify() without having specified a role. args = (signable, None) @@ -163,11 +153,11 @@ def test_get_signature_status_single_key(self): signable['signatures'].append(tuf.keys.create_signature( KEYS[0], signable['signed'])) - tuf.keydb.add_key(KEYS[0]) threshold = 1 roleinfo = tuf.formats.make_role_metadata( [KEYS[0]['keyid']], threshold) tuf.roledb.add_role('Root', roleinfo) + tuf.keydb.add_key(KEYS[0]) sig_status = tuf.sig.get_signature_status(signable, 'Root') diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 76e0a94ecf..cc53604b14 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -2498,7 +2498,7 @@ def _targets_of_role(self, rolename, targets=None, skip_refresh=False): return [] # Get the targets specified by the role itself. 
- for filepath, fileinfo in six.iteritems(self.metadata['current'][rolename]['targets']): + for filepath, fileinfo in six.iteritems(self.metadata['current'][rolename].get('targets', [])): new_target = {} new_target['filepath'] = filepath new_target['fileinfo'] = fileinfo diff --git a/tuf/formats.py b/tuf/formats.py index 9c257bd902..dc04af9ba0 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -461,7 +461,9 @@ object_name = 'ROLEDB_SCHEMA', keyids = KEYIDS_SCHEMA, signing_keyids = SCHEMA.Optional(KEYIDS_SCHEMA), + previous_keyids = SCHEMA.Optional(KEYIDS_SCHEMA), threshold = THRESHOLD_SCHEMA, + previous_threshold = SCHEMA.Optional(THRESHOLD_SCHEMA), version = SCHEMA.Optional(METADATAVERSION_SCHEMA), expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA), signatures = SCHEMA.Optional(SIGNATURES_SCHEMA), diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index ae3fa760f0..291b1f8335 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -121,11 +121,18 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, # Retrieve the roleinfo of 'rolename' to extract the needed metadata # attributes, such as version number, expiration, etc. roleinfo = tuf.roledb.get_roleinfo(rolename) - + previous_keyids = roleinfo.get('previous_keyids', []) + previous_threshold = roleinfo.get('previous_threshold', 1) + # Generate the appropriate role metadata for 'rolename'. 
- if rolename == 'root': + if rolename == 'root': + tuf.roledb.update_roleinfo(rolename, roleinfo) + metadata = generate_root_metadata(roleinfo['version'], - roleinfo['expires'], consistent_snapshot, + roleinfo['expires'], + previous_keyids, + previous_threshold, + consistent_snapshot, compression_algorithms) _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], @@ -169,9 +176,11 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, roleinfo['expires'], roleinfo['delegations'], consistent_snapshot) + + + signing_keyids = list(set(roleinfo['signing_keyids'] + previous_keyids)) + signable = sign_metadata(metadata, signing_keyids, metadata_filename) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename) # Check if the version number of 'rolename' may be automatically incremented, # depending on whether if partial metadata is loaded or if the metadata is @@ -186,8 +195,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, roleinfo = tuf.roledb.get_roleinfo(rolename) roleinfo['version'] = roleinfo['version'] + 1 tuf.roledb.update_roleinfo(rolename, roleinfo) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename) + signable = sign_metadata(metadata, signing_keyids, metadata_filename) # non-partial write() else: # If writing a new version of 'rolename,' increment its version number in @@ -198,13 +206,28 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, roleinfo = tuf.roledb.get_roleinfo(rolename) roleinfo['version'] = roleinfo['version'] + 1 tuf.roledb.update_roleinfo(rolename, roleinfo) - signable = sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename) + signable = sign_metadata(metadata, signing_keyids, metadata_filename) # Write the metadata to file if it contains a threshold of signatures. 
signable['signatures'].extend(roleinfo['signatures']) + - if tuf.sig.verify(signable, rolename) or write_partial: + def should_write(): + # Always write if partial writing + if write_partial: + return True + + # In the normal case, we should write metadata if the threshold is meet + write = tuf.sig.verify_threshold(signable, roleinfo['threshold'], roleinfo['signing_keyids'], rolename) + + # The root must also be signed by previous root keys and threshold + if rolename == 'root' and len(previous_keyids) > 0 and previous_threshold is not None: + write = write and tuf.sig.verify_threshold(signable, previous_threshold, previous_keyids, rolename) + + return write + + + if should_write(): _remove_invalid_and_duplicate_signatures(signable) filename = write_metadata_file(signable, metadata_filename, metadata['version'], compression_algorithms, @@ -216,7 +239,13 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, if rolename == 'root' or rolename == 'timestamp': write_metadata_file(signable, metadata_filename, metadata['version'], compression_algorithms, consistent_snapshot=False) - + + # The root role should also be accessible by version number, so that clients + # can walk through root history to update keys + if rolename == 'root': + write_metadata_file(signable, metadata_filename, metadata['version'], + compression_algorithms, consistent_snapshot=True) + # 'signable' contains an invalid threshold of signatures. else: message = 'Not enough signatures for ' + repr(metadata_filename) @@ -434,20 +463,25 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, # 'files' here is a list of target file names. for basename in files: + + # don't delete previous root files + if basename.endswith('root.json'): + return + metadata_path = os.path.join(directory_path, basename) # Strip the metadata dirname and the leading path separator. 
# '{repository_directory}/metadata/django.json' --> # 'django.json' metadata_name = \ metadata_path[len(metadata_directory):].lstrip(os.path.sep) - + # Strip the version number if 'consistent_snapshot' is True. Example: # '10.django.json' --> 'django.json'. Consistent and non-consistent # metadata might co-exist if write() and # write(consistent_snapshot=True) are mixed, so ensure only # '.filename' metadata is stripped. embedded_version_number = None - + # Should we check if 'consistent_snapshot' is True? It might have been # set previously, but 'consistent_snapshot' can potentially be False # now. We'll proceed with the understanding that 'metadata_name' can @@ -459,6 +493,8 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, else: logger.debug(repr(metadata_name) + ' found in the snapshot role.') + + # Strip filename extensions. The role database does not include the # metadata extension. @@ -472,7 +508,7 @@ def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, else: logger.debug(repr(metadata_name) + ' does not match' ' supported extension ' + repr(metadata_extension)) - + if metadata_name in ['root', 'targets', 'snapshot', 'timestamp']: return @@ -532,7 +568,7 @@ def _strip_version_number(metadata_filename, consistent_snapshot): else: return stripped_metadata_filename, version_number - + else: return metadata_filename, '' @@ -1393,7 +1429,7 @@ def get_target_hash(target_filepath): -def generate_root_metadata(version, expiration_date, consistent_snapshot, +def generate_root_metadata(version, expiration_date, previous_keyids, previous_threshold, consistent_snapshot, compression_algorithms=['gz']): """ @@ -1869,8 +1905,11 @@ def sign_metadata(metadata_object, keyids, filename): if key['keytype'] in SUPPORTED_KEY_TYPES: if 'private' in key['keyval']: signed = signable['signed'] - signature = tuf.keys.create_signature(key, signed) - signable['signatures'].append(signature) + try: + signature = tuf.keys.create_signature(key, 
signed) + signable['signatures'].append(signature) + except: + logger.warning('Unable to create signature for keyid: ' + repr(keyid)) else: logger.warning('Private key unset. Skipping: ' + repr(keyid)) @@ -1980,14 +2019,15 @@ def write_metadata_file(metadata, filename, version_number, if consistent_snapshot: dirname, basename = os.path.split(written_filename) - basename = basename.split(METADATA_EXTENSION, 1)[0] version_and_filename = str(version_number) + '.' + basename + METADATA_EXTENSION written_consistent_filename = os.path.join(dirname, version_and_filename) - logger.info('Linking ' + repr(written_consistent_filename)) - os.link(written_filename, written_consistent_filename) + #logger.info('Linking ' + repr(written_consistent_filename)) + #os.link(written_filename, written_consistent_filename) + logger.info('Copying ' + repr(written_consistent_filename)) + shutil.copyfile(written_filename, written_consistent_filename) else: logger.info('Not linking a consistent filename for: ' + repr(written_filename)) diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index a1d1682a1e..4be33d85a7 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -232,8 +232,6 @@ def write(self, write_partial=False, consistent_snapshot=False, # Write the metadata files of all the Targets roles that are dirty (i.e., # have been modified via roledb.update_roleinfo()). - dirty_roles = tuf.roledb.get_dirty_roles() - filenames = {'root': os.path.join(self._metadata_directory, repo_lib.ROOT_FILENAME), 'targets': os.path.join(self._metadata_directory, repo_lib.TARGETS_FILENAME), 'snapshot': os.path.join(self._metadata_directory, repo_lib.SNAPSHOT_FILENAME), @@ -667,10 +665,13 @@ def add_verification_key(self, key, expires=None): keyid = key['keyid'] roleinfo = tuf.roledb.get_roleinfo(self.rolename) + + previous_keyids = roleinfo['keyids'] # Add 'key' to the role's entry in 'tuf.roledb.py' and avoid duplicates. 
- if keyid not in roleinfo['keyids']: + if keyid not in previous_keyids: roleinfo['keyids'].append(keyid) + roleinfo['previous_keyids'] = previous_keyids tuf.roledb.update_roleinfo(self._rolename, roleinfo) @@ -1152,6 +1153,7 @@ def threshold(self, threshold): tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) roleinfo = tuf.roledb.get_roleinfo(self._rolename) + roleinfo['previous_threshold'] = roleinfo['threshold'] roleinfo['threshold'] = threshold tuf.roledb.update_roleinfo(self._rolename, roleinfo) @@ -2864,11 +2866,13 @@ def load_repository(repository_directory): metadata_path = os.path.join(metadata_directory, metadata_role) metadata_name = \ metadata_path[len(metadata_directory):].lstrip(os.path.sep) - - # Strip the version number if 'consistent_snapshot' is True. + + # Strip the version number if 'consistent_snapshot' is True or if root. # Example: '10.django.json' --> 'django.json' + strip_version = metadata_role.endswith('root.json') or consistent_snapshot == True metadata_name, version_number_junk = \ - repo_lib._strip_version_number(metadata_name, consistent_snapshot) + repo_lib._strip_version_number(metadata_name, strip_version) + if metadata_name.endswith(METADATA_EXTENSION): extension_length = len(METADATA_EXTENSION) diff --git a/tuf/roledb.py b/tuf/roledb.py index d0bfb4a194..142ff1ed9d 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -124,6 +124,8 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'): if rolename == 'root': roleinfo['version'] = root_metadata['version'] roleinfo['expires'] = root_metadata['expires'] + roleinfo['previous_keyids'] = root_metadata.get('signing_keyids', []) + roleinfo['previous_threshold'] = root_metadata.get('threshold', 1) roleinfo['signatures'] = [] roleinfo['signing_keyids'] = [] diff --git a/tuf/sig.py b/tuf/sig.py index 42760af050..8307926005 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -49,7 +49,7 @@ import tuf.roledb -def get_signature_status(signable, role=None, 
repository_name='default'): +def get_signature_status(signable, role=None, repository_name=None): """ Return a dictionary representing the status of the signatures listed @@ -87,6 +87,101 @@ def get_signature_status(signable, role=None, repository_name='default'): None. + + A dictionary representing the status of the signatures in 'signable'. + Conformant to tuf.formats.SIGNATURESTATUS_SCHEMA. + """ + if repository_name is None: + repository_name = 'default' + + # Retrieve the threshold value for 'role'. Raise tuf.UnknownRoleError + # if we were given an invalid role. + if role is not None: + try: + threshold = tuf.roledb.get_role_threshold(role, repository_name=repository_name) + + except tuf.UnknownRoleError: + raise + + else: + threshold = 0 + + return get_signature_status_threshold(signable, threshold, tuf.roledb.get_role_keyids(role, repository_name=repository_name), role, repository_name=repository_name) + + + +def verify(signable, role, repository_name='default'): + """ + + Verify whether the authorized signatures of 'signable' meet the minimum + required by 'role'. Authorized signatures are those with valid keys + associated with 'role'. 'signable' must conform to SIGNABLE_SCHEMA + and 'role' must not equal 'None' or be less than zero. + + + signable: + A dictionary containing a list of signatures and a 'signed' identifier. + signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]} + + role: + TUF role (e.g., 'root', 'targets', 'snapshot'). + + + tuf.UnknownRoleError, if 'role' is not recognized. + + tuf.FormatError, if 'signable' is not formatted correctly. + + tuf.Error, if an invalid threshold is encountered. + + + tuf.sig.get_signature_status() called. Any exceptions thrown by + get_signature_status() will be caught here and re-raised. + + + Boolean. True if the number of good signatures >= the role's threshold, + False otherwise. + """ + + # Retrieve the signature status. 
tuf.sig.get_signature_status() raises + # tuf.UnknownRoleError + # tuf.FormatError + status = get_signature_status(signable, role, repository_name=repository_name) + key_ids = tuf.roledb.get_role_keyids(role, repository_name=repository_name) + + return verify_threshold(signable, status['threshold'], key_ids, role, repository_name=repository_name) + + +def get_signature_status_threshold(signable, threshold, keyids, role=None, repository_name=None): + """ + + Return a dictionary representing the status of the signatures listed + in 'signable'. Given an object conformant to SIGNABLE_SCHEMA, a set + of public keys in 'tuf.keydb', a set of roles in 'tuf.roledb', + and a role, the status of these signatures can be determined. This + method will iterate through the signatures in 'signable' and enumerate + all the keys that are valid, invalid, unrecognized, unauthorized, or + generated using an unknown method. + + + signable: + A dictionary containing a list of signatures and a 'signed' identifier. + signable = {'signed': 'signer', + 'signatures': [{'keyid': keyid, + 'method': 'evp', + 'sig': sig}]} + Conformant to tuf.formats.SIGNABLE_SCHEMA. + + role: + TUF role (e.g., 'root', 'targets', 'snapshot'). + + + tuf.FormatError, if 'signable' does not have the correct format. + + tuf.UnknownRoleError, if 'role' is not recognized. + + + None. + A dictionary representing the status of the signatures in 'signable'. Conformant to tuf.formats.SIGNATURESTATUS_SCHEMA. @@ -97,6 +192,10 @@ def get_signature_status(signable, role=None, repository_name='default'): # all dict keys are properly named. Raise 'tuf.FormatError' if the check # fails. tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + + if repository_name is None: + repository_name = 'default' + tuf.formats.NAME_SCHEMA.check_match(repository_name) if role is not None: @@ -151,7 +250,7 @@ def get_signature_status(signable, role=None, repository_name='default'): if role is not None: try: # Identify unauthorized key. 
- if keyid not in tuf.roledb.get_role_keyids(role, repository_name): + if keyid not in keyids: untrusted_sigs.append(keyid) continue @@ -163,22 +262,10 @@ def get_signature_status(signable, role=None, repository_name='default'): else: # Identify bad key. - bad_sigs.append(keyid) - - # Retrieve the threshold value for 'role'. Raise tuf.UnknownRoleError - # if we were given an invalid role. - if role is not None: - try: - threshold = tuf.roledb.get_role_threshold(role, repository_name) - - except tuf.UnknownRoleError: - raise - - else: - threshold = 0 + bad_sigs.append(keyid) # Build the signature_status dict. - signature_status['threshold'] = threshold + signature_status['threshold'] = threshold signature_status['good_sigs'] = good_sigs signature_status['bad_sigs'] = bad_sigs signature_status['unknown_sigs'] = unknown_sigs @@ -189,76 +276,27 @@ def get_signature_status(signable, role=None, repository_name='default'): - - -def verify(signable, role, repository_name='default'): - """ - - Verify whether the authorized signatures of 'signable' meet the minimum - required by 'role'. Authorized signatures are those with valid keys - associated with 'role'. 'signable' must conform to SIGNABLE_SCHEMA - and 'role' must not equal 'None' or be less than zero. - - - signable: - A dictionary containing a list of signatures and a 'signed' identifier. - signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]} - - role: - TUF role (e.g., 'root', 'targets', 'snapshot'). - - repository_name: - The name of the repository to verify 'signable'. The role and key db - modules keep track of separate sets of roles and keys for each - repository. If 'repository_name' is not supplied, the 'default' - repository is queried. - - - tuf.UnknownRoleError, if 'role' is not recognized. - - tuf.FormatError, if 'signable' is not formatted correctly. - - tuf.Error, if an invalid threshold is encountered. 
- - tuf.InvalidNameError, if 'repository_name' does not exist in either the - role or key db. - - - tuf.sig.get_signature_status() called. Any exceptions thrown by - get_signature_status() will be caught here and re-raised. - - - Boolean. True if the number of good signatures >= the role's threshold, - False otherwise. - """ - - # Do the arguments have the correct format? If not, raise 'tuf.FormatError'. - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) +def verify_threshold(signable, threshold, keyids, role, repository_name=None): + if repository_name is None: + repository_name = 'default' + + tuf.formats.SIGNABLE_SCHEMA.check_match(signable) tuf.formats.ROLENAME_SCHEMA.check_match(role) tuf.formats.NAME_SCHEMA.check_match(repository_name) - # Retrieve the signature status. tuf.sig.get_signature_status() raises: - # tuf.UnknownRoleError - # tuf.FormatError - status = get_signature_status(signable, role, repository_name) + if threshold is None: + threshold = 1 + + if threshold <= 0: + raise tuf.Error("Invalid threshold: " + str(threshold)) - # Retrieve the role's threshold and the authorized keys of 'status' - threshold = status['threshold'] + status = get_signature_status_threshold(signable, threshold, keyids, role, repository_name) good_sigs = status['good_sigs'] - # Does 'status' have the required threshold of signatures? - # First check for invalid threshold values before returning result. - # Note: get_signature_status() is expected to verify that 'threshold' is - # not None or <= 0. 
- if threshold is None or threshold <= 0: #pragma: no cover - raise tuf.Error("Invalid threshold: " + str(threshold)) - return len(good_sigs) >= threshold - - def may_need_new_keys(signature_status): """ From 0e93c3a7d201c68f4a1950ad331e3adcacb51b2e Mon Sep 17 00:00:00 2001 From: Evan Cordell Date: Fri, 9 Sep 2016 15:21:02 -0400 Subject: [PATCH 10/20] Enforce root version signature chaining in client updater --- .../repository/metadata.staged/1.root.json | 86 +++++ .../repository/metadata.staged/1.root.json.gz | Bin 0 -> 1680 bytes .../repository/metadata/1.root.json | 86 +++++ .../repository/metadata/1.root.json.gz | Bin 0 -> 1680 bytes tests/test_key_revocation.py | 6 +- tests/test_repository_tool.py | 2 +- tests/test_updater.py | 2 +- tests/test_updater_root_rotation.py | 355 ++++++++++++++++++ tuf/client/updater.py | 131 ++++--- 9 files changed, 597 insertions(+), 71 deletions(-) create mode 100644 tests/repository_data/repository/metadata.staged/1.root.json create mode 100644 tests/repository_data/repository/metadata.staged/1.root.json.gz create mode 100644 tests/repository_data/repository/metadata/1.root.json create mode 100644 tests/repository_data/repository/metadata/1.root.json.gz create mode 100755 tests/test_updater_root_rotation.py diff --git a/tests/repository_data/repository/metadata.staged/1.root.json b/tests/repository_data/repository/metadata.staged/1.root.json new file mode 100644 index 0000000000..94327a115a --- /dev/null +++ b/tests/repository_data/repository/metadata.staged/1.root.json @@ -0,0 +1,86 @@ +{ + "signatures": [ + { + "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", + "method": "RSASSA-PSS", + "sig": 
"3851d11ed11ea69ab5d873cfd015de79dc856d83e0a060e73d535d705da086c26191e6bc1ed6bbdde9305c3816c1c5885b48cf51c41fedc906a5ebe0e33a6b823145d40bd3e588e77c6bc724b62f4b2ca9700da03e0ba603170bfd365ea1d25ee7f9661848a14f5916869f00f3e03aa4cb468a4de647bbf205b96f9aa8dd408e3e0b1f9d53fe74654dfe139441dfe3651b3473b67bd104d754112e594a9c6ed0127e94b9057322d630f70c93c01d0cd0c2b98f6abdfd2ed7ac7dc5d3e201d191e168992574edfa935bb2a2cbaa67532c7aaddd4582b53a015c11e567d7fe7ba38cc743e7a939b9e7f2e334b48f46bdf4b82b66e639189644998d90a27847e63e8ade170f8c8aa15c8076b0af8032d78870ac18278663eddb08a7eed30c199c97c81d30bdf47d6649c7ab297120b983d9b6a1da648026d552be73bb77a9346f98a3b8db1a583b71bb706c397a3142f8194c80e62a1632152cd2ffd340605325ea39baf60fb30cf574701e5ae07efee75fc51df4f1810f3ce14345c466d25e36a3" + } + ], + "signed": { + "_type": "Root", + "compression_algorithms": [ + "gz" + ], + "consistent_snapshot": false, + "expires": "2030-01-01T00:00:00Z", + "keys": { + "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" + } + }, + "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" + } + }, + "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" + } + }, + "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", + "keyval": { + "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" + } + } + }, + "roles": { + "root": { + "keyids": [ + "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + ], + "threshold": 1 + }, + "targets": { + "keyids": [ + "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + ], + "threshold": 1 + } + }, + "version": 1 + } +} \ No newline at end of file diff --git a/tests/repository_data/repository/metadata.staged/1.root.json.gz b/tests/repository_data/repository/metadata.staged/1.root.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..4a13f7efd616dff425bc51ec67ad3e32a6f7402b GIT binary patch literal 1680 zcmV;B25SwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ 
zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCXSwF5qS^rY|=RvLZ8~keZ zdDx~NM(SK35}A+4c_J*qm_(!~wb%vad34rt%_V1AU{4KVkxHu-=7G52IimGXdwa{U zp5=Rk7vp3+PCk#u{VNbQ~C!Ydk(K>`IyJ*V7D z#JxdEI3lpdL0AVCgtguWO%ZogVd1djN^#4SiyS+KWAIKRVR*0s1xk&uicrkBXUKXQ zV0DnvfiX##6%fBA&S;4c>;(>3BM`+B+32a@!C+5#2r_CRuwu$!7P-bk2^|rN@BuZ3 zIm?7H%m=}wwUHohwTRjnqeo0dmMi z;F=ljL;#eC44PSuxTJ)5K~a>*Y3dO6kUZoF&TS`#i= z@X=_>ttEyyYmAVb5+{xE;3-$ca!54h4id$M^b-8FhAQVIqd~%_+G;?90J01~k4(Ux z3?Q~b1VJ@cS}>-y@){W;6_YR;l<@&Djmjxwz`8M8;tS%LOd<-XdjW z2%aK`wRT!M1qFzzoun6nX>iyQEipk5H}%>I$deIFAtF5I#6k&KD=U8)$BcMcem*PEibmaYuejgo$qG%4=S6@ zQ8dF6cB35=t8Ugo<*}C#MUfSw;n*d(81{D0Lwu2s#w=S;(xQjm zLJ#Hf@LVkC%|bQWr-RmmX@5;O{Rh*XeqGIGgS%|jYvARsxk!hDReHJ^4aTkR_2O*a zI4>q~eYnN3WTZsxq36xBqh@|Marw>3?M3FMdQ1~?mu)tU`%9slYmme=nLK2LjAD2= z+2vPh9EXL;Pvm*;cG~F9JBve0JDX?K?uW1Gp-X=d*q#?#&~bk>X5 z;H`YSDv2nzN3o&1_ObMSG~R4B&%J&__WF-Ua)fWU=xZ^#%jUyytLRK;r028FZF;yW zygliU&b>S;9?53q6MB@-?^W{DD%^6Dsng!genstpE>`Y&kZpI#bl6Oi&Oh$5eKB=T+y7o)e`b<_GY`-R=;KX{ z(nODg70`+$9Elj!3YxP72_gzHrl6ZtoF`mYJZ_W4VWtj%f~*5zOL@!_KSH9yj2DA#6L{DmhUZl3?c alb;3i6js%d>2>1e;N^EOHy%3y4FCX + test_updater_root_rotation.py + + + Evan Cordell + + + August 8, 2016 + + + See LICENSE for 
licensing information. + + + 'test_updater_root_rotation.py' provides a collection of methods that test + root key rotation in the example client. + + + Test cases here should follow a specific order (i.e., independent methods are + tested before dependent methods). More accurately, least dependent methods + are tested before most dependent methods. There is no reason to rewrite or + construct other methods that replicate already-tested methods solely for + testing purposes. This is possible because the 'unittest.TestCase' class + guarantees the order of unit tests. The 'test_something_A' method would + be tested before 'test_something_B'. To ensure the expected order of tests, + a number is placed after 'test' and before methods name like so: + 'test_1_check_directory'. The number is a measure of dependence, where 1 is + less dependent than 2. +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import os +import time +import shutil +import copy +import tempfile +import logging +import random +import subprocess +import sys + +# 'unittest2' required for testing under Python < 2.7. 
+if sys.version_info >= (2, 7): + import unittest + +else: + import unittest2 as unittest + +import tuf +import tuf.util +import tuf.conf +import tuf.log +import tuf.formats +import tuf.keydb +import tuf.roledb +import tuf.repository_tool as repo_tool +import tuf.unittest_toolbox as unittest_toolbox +import tuf.client.updater as updater +import six + +logger = logging.getLogger('tuf.test_updater') +repo_tool.disable_console_log_messages() + + +class TestUpdater(unittest_toolbox.Modified_TestCase): + + @classmethod + def setUpClass(cls): + # setUpClass() is called before tests in an individual class are executed. + + # Create a temporary directory to store the repository, metadata, and target + # files. 'temporary_directory' must be deleted in TearDownModule() so that + # temporary files are always removed, even when exceptions occur. + cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + + # Launch a SimpleHTTPServer (serves files in the current directory). + # Test cases will request metadata and target files that have been + # pre-generated in 'tuf/tests/repository_data', which will be served + # by the SimpleHTTPServer launched here. The test cases of 'test_updater.py' + # assume the pre-generated metadata files have a specific structure, such + # as a delegated role 'targets/role1', three target files, five key files, + # etc. + cls.SERVER_PORT = random.randint(30000, 45000) + command = ['python', 'simple_server.py', str(cls.SERVER_PORT)] + cls.server_process = subprocess.Popen(command, stderr=subprocess.PIPE) + logger.info('\n\tServer process started.') + logger.info('\tServer process id: '+str(cls.server_process.pid)) + logger.info('\tServing on port: '+str(cls.SERVER_PORT)) + cls.url = 'http://localhost:'+str(cls.SERVER_PORT) + os.path.sep + + # NOTE: Following error is raised if a delay is not applied: + # + time.sleep(1) + + + + @classmethod + def tearDownClass(cls): + # tearDownModule() is called after all the tests have run. 
+ # http://docs.python.org/2/library/unittest.html#class-and-module-fixtures + + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated for the test cases. + shutil.rmtree(cls.temporary_directory) + + # Kill the SimpleHTTPServer process. + if cls.server_process.returncode is None: + logger.info('\tServer process ' + str(cls.server_process.pid) + ' terminated.') + cls.server_process.kill() + + + + def setUp(self): + # We are inheriting from custom class. + unittest_toolbox.Modified_TestCase.setUp(self) + + # Copy the original repository files provided in the test folder so that + # any modifications made to repository files are restricted to the copies. + # The 'repository_data' directory is expected to exist in 'tuf.tests/'. + original_repository_files = os.path.join(os.getcwd(), 'repository_data') + temporary_repository_root = \ + self.make_temp_directory(directory=self.temporary_directory) + + # The original repository, keystore, and client directories will be copied + # for each test case. + original_repository = os.path.join(original_repository_files, 'repository') + original_keystore = os.path.join(original_repository_files, 'keystore') + original_client = os.path.join(original_repository_files, 'client') + + # Save references to the often-needed client repository directories. + # Test cases need these references to access metadata and target files. 
+ self.repository_directory = \ + os.path.join(temporary_repository_root, 'repository') + self.keystore_directory = \ + os.path.join(temporary_repository_root, 'keystore') + self.client_directory = os.path.join(temporary_repository_root, 'client') + self.client_metadata = os.path.join(self.client_directory, 'metadata') + self.client_metadata_current = os.path.join(self.client_metadata, 'current') + self.client_metadata_previous = \ + os.path.join(self.client_metadata, 'previous') + + # Copy the original 'repository', 'client', and 'keystore' directories + # to the temporary repository the test cases can use. + shutil.copytree(original_repository, self.repository_directory) + shutil.copytree(original_client, self.client_directory) + shutil.copytree(original_keystore, self.keystore_directory) + + # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. + repository_basepath = self.repository_directory[len(os.getcwd()):] + url_prefix = \ + 'http://localhost:' + str(self.SERVER_PORT) + repository_basepath + + # Setting 'tuf.conf.repository_directory' with the temporary client + # directory copied from the original repository files. + tuf.conf.repository_directory = self.client_directory + + self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, + 'metadata_path': 'metadata', + 'targets_path': 'targets', + 'confined_target_dirs': ['']}} + + # Creating a repository instance. The test cases will use this client + # updater to refresh metadata, fetch target files, etc. + self.repository_name = 'test_repository' + self.repository_updater = updater.Updater(self.repository_name, + self.repository_mirrors) + + # Metadata role keys are needed by the test cases to make changes to the + # repository (e.g., adding a new target file to 'targets.json' and then + # requesting a refresh()). + self.role_keys = _load_role_keys(self.keystore_directory) + + + + def tearDown(self): + # We are inheriting from custom class. 
+ unittest_toolbox.Modified_TestCase.tearDown(self) + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) + + + + + # UNIT TESTS. + + def test_root_rotation(self): + repository = repo_tool.load_repository(self.repository_directory) + repository.root.threshold = 2 + + repository.root.add_verification_key(self.role_keys['root']['public']) + repository.targets.add_verification_key(self.role_keys['targets']['public']) + repository.snapshot.add_verification_key(self.role_keys['snapshot']['public']) + repository.timestamp.add_verification_key(self.role_keys['timestamp']['public']) + repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + + # Errors, not enough signing keys to satisfy old threshold + self.assertRaises(tuf.UnsignedMetadataError, repository.write) + + repository.root.add_verification_key(self.role_keys['role1']['public']) + repository.root.load_signing_key(self.role_keys['root']['private']) + repository.root.load_signing_key(self.role_keys['role1']['private']) + repository.write() + + repository.root.add_verification_key(self.role_keys['snapshot']['public']) + repository.root.load_signing_key(self.role_keys['snapshot']['private']) + repository.root.threshold = 3 + repository.write() + + # Move the staged metadata to the "live" metadata. 
+ shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + + self.repository_updater.refresh() + + + def test_root_rotation_missing_keys(self): + repository = repo_tool.load_repository(self.repository_directory) + repository.root.threshold = 2 + + # Partial write forces invalid signature chain + repository.write_partial() + + repository.root.add_verification_key(self.role_keys['root']['public']) + repository.targets.add_verification_key(self.role_keys['targets']['public']) + repository.snapshot.add_verification_key(self.role_keys['snapshot']['public']) + repository.timestamp.add_verification_key(self.role_keys['timestamp']['public']) + repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + + repository.root.add_verification_key(self.role_keys['role1']['public']) + repository.root.load_signing_key(self.role_keys['root']['private']) + repository.root.load_signing_key(self.role_keys['role1']['private']) + repository.write() + + repository.root.add_verification_key(self.role_keys['snapshot']['public']) + repository.root.load_signing_key(self.role_keys['snapshot']['private']) + repository.root.threshold = 3 + repository.write() + + # Move the staged metadata to the "live" metadata. 
+ shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + + self.assertRaises(tuf.NoWorkingMirrorError, self.repository_updater.refresh) + + + def test_root_rotation_unmet_threshold(self): + repository = repo_tool.load_repository(self.repository_directory) + + # Add verification keys + repository.root.add_verification_key(self.role_keys['root']['public']) + repository.root.add_verification_key(self.role_keys['role1']['public']) + repository.targets.add_verification_key(self.role_keys['targets']['public']) + repository.snapshot.add_verification_key(self.role_keys['snapshot']['public']) + repository.timestamp.add_verification_key(self.role_keys['timestamp']['public']) + repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + + # Add signing keys + repository.root.load_signing_key(self.role_keys['root']['private']) + repository.root.load_signing_key(self.role_keys['role1']['private']) + + # Set root threshold + repository.root.threshold = 2 + repository.write() + + # Add new verification key + repository.root.add_verification_key(self.role_keys['snapshot']['public']) + + # Remove one of the original signing keys + repository.root.remove_verification_key(self.role_keys['role1']['public']) + repository.root.unload_signing_key(self.role_keys['role1']['private']) + + # Set threshold + repository.root.threshold = 1 + + repository.write() + + # Move the staged metadata to the "live" metadata. 
+ shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + + self.assertRaises(tuf.NoWorkingMirrorError, self.repository_updater.refresh) + + +def _load_role_keys(keystore_directory): + + # Populating 'self.role_keys' by importing the required public and private + # keys of 'tuf/tests/repository_data/'. The role keys are needed when + # modifying the remote repository used by the test cases in this unit test. + + # The pre-generated key files in 'repository_data/keystore' are all encrypted with + # a 'password' passphrase. + EXPECTED_KEYFILE_PASSWORD = 'password' + + # Store and return the cryptography keys of the top-level roles, including 1 + # delegated role. + role_keys = {} + + root_key_file = os.path.join(keystore_directory, 'root_key') + targets_key_file = os.path.join(keystore_directory, 'targets_key') + snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key') + timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key') + delegation_key_file = os.path.join(keystore_directory, 'delegation_key') + + role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {}, + 'role1': {}} + + # Import the top-level and delegated role public keys. + role_keys['root']['public'] = \ + repo_tool.import_rsa_publickey_from_file(root_key_file+'.pub') + role_keys['targets']['public'] = \ + repo_tool.import_ed25519_publickey_from_file(targets_key_file+'.pub') + role_keys['snapshot']['public'] = \ + repo_tool.import_ed25519_publickey_from_file(snapshot_key_file+'.pub') + role_keys['timestamp']['public'] = \ + repo_tool.import_ed25519_publickey_from_file(timestamp_key_file+'.pub') + role_keys['role1']['public'] = \ + repo_tool.import_ed25519_publickey_from_file(delegation_key_file+'.pub') + + # Import the private keys of the top-level and delegated roles. 
+ role_keys['root']['private'] = \ + repo_tool.import_rsa_privatekey_from_file(root_key_file, + EXPECTED_KEYFILE_PASSWORD) + role_keys['targets']['private'] = \ + repo_tool.import_ed25519_privatekey_from_file(targets_key_file, + EXPECTED_KEYFILE_PASSWORD) + role_keys['snapshot']['private'] = \ + repo_tool.import_ed25519_privatekey_from_file(snapshot_key_file, + EXPECTED_KEYFILE_PASSWORD) + role_keys['timestamp']['private'] = \ + repo_tool.import_ed25519_privatekey_from_file(timestamp_key_file, + EXPECTED_KEYFILE_PASSWORD) + role_keys['role1']['private'] = \ + repo_tool.import_ed25519_privatekey_from_file(delegation_key_file, + EXPECTED_KEYFILE_PASSWORD) + + return role_keys + + +if __name__ == '__main__': + unittest.main() diff --git a/tuf/client/updater.py b/tuf/client/updater.py index cc53604b14..43923aeb97 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -650,18 +650,10 @@ def refresh(self, unsafely_update_root_if_necessary=True): # _update_metadata() calls below do NOT perform an update if there # is insufficient trusted signatures for the specified metadata. # Raise 'tuf.NoWorkingMirrorError' if an update fails. - - # Is the Root role expired? When the top-level roles are initially loaded - # from disk, their expiration is not checked to allow their updating when - # requested (and give the updater the chance to continue, rather than always - # failing with an expired metadata error.) If - # 'unsafely_update_root_if_necessary' is True, update an expired Root role - # now. Updating the other top-level roles, regardless of their validity, - # should only occur if the root of trust is up-to-date. root_metadata = self.metadata['current']['root'] + try: self._ensure_not_expired(root_metadata, 'root') - except tuf.ExpiredMetadataError: # Raise 'tuf.NoWorkingMirrorError' if a valid (not expired, properly # signed, and valid metadata) 'root.json' cannot be installed. 
@@ -669,70 +661,62 @@ def refresh(self, unsafely_update_root_if_necessary=True): message = \ 'Expired Root metadata was loaded from disk. Try to update it now.' logger.info(message) - self._update_metadata('root', DEFAULT_ROOT_UPPERLENGTH) # The caller explicitly requested not to unsafely fetch an expired Root. else: logger.info('An expired Root metadata was loaded and must be updated.') raise - # If an exception is raised during the metadata update attempts, we will - # attempt to update root metadata once by recursing with a special argument - # (unsafely_update_root_if_necessary) to avoid further recursion. + self._update_root_metadata(root_metadata) + # Use default but sane information for timestamp metadata, and do not # require strict checks on its required length. - try: - self._update_metadata('timestamp', DEFAULT_TIMESTAMP_UPPERLENGTH) - self._update_metadata_if_changed('snapshot', - referenced_metadata='timestamp') - self._update_metadata_if_changed('root') - self._update_metadata_if_changed('targets') - - # There are two distinct error scenarios that can rise from the - # _update_metadata_if_changed calls in the try block above: - # - # - tuf.NoWorkingMirrorError: - # - # If a change to a metadata file IS detected in an - # _update_metadata_if_changed call, but we are unable to download a - # valid (not expired, properly signed, valid) version of that metadata - # file, a tuf.NoWorkingMirrorError rises to this point. - # - # - tuf.ExpiredMetadataError: - # - # If, on the other hand, a change to a metadata file IS NOT detected - # in a given _update_metadata_if_changed call, but we observe that the - # version of the metadata file we have on hand is now expired, a - # tuf.ExpiredMetadataError exception rises to this point. - # - except tuf.NoWorkingMirrorError: - if unsafely_update_root_if_necessary: - logger.info('Valid top-level metadata cannot be downloaded. 
Unsafely' - ' update the Root metadata.') - self._update_metadata('root', DEFAULT_ROOT_UPPERLENGTH) - self.refresh(unsafely_update_root_if_necessary=False) - - else: - raise - - except tuf.ExpiredMetadataError: - if unsafely_update_root_if_necessary: - logger.info('No changes were detected from the mirrors for a given role' - ', and that metadata that is available on disk has been found to be' - ' expired. Trying to update root in case of foul play.') - self._update_metadata('root', DEFAULT_ROOT_UPPERLENGTH) - self.refresh(unsafely_update_root_if_necessary=False) - - # The caller explicitly requested not to unsafely fetch an expired Root. - else: - logger.info('No changes were detected from the mirrors for a given role' - ', and that metadata that is available on disk has been found to be ' - 'expired. Your metadata is out of date.') - raise - - + self._update_metadata('timestamp', DEFAULT_TIMESTAMP_UPPERLENGTH) + self._update_metadata_if_changed('snapshot', + referenced_metadata='timestamp') + self._update_metadata_if_changed('targets') + + def _update_root_metadata(self, current_root_metadata, compression_algorithm=None): + """ + + The root file must be signed by the current root threshold and keys as well + as the previous root threshold and keys. The update process for root files + means that each intermediate root file must be downloaded, to build a chain + of trusted root keys from keys already trusted by the client: + + 1.root -> 2.root -> 3.root + + 3.root must be signed by the threshold and keys of 2.root, and 2.root must + be signed by the threshold and keys of 1.root. + + + current_root_metadata: + The currently held version of metadata. + + compresison_algorithm: + The compression algorithm used to compress remote files. + + + Updates the root metadata files with the latest information. + + + None. 
+ """ + next_version = current_root_metadata['version'] + 1 + new_root_metadata_file = self._get_metadata_file('root', 'root.json', tuf.conf.DEFAULT_ROOT_REQUIRED_LENGTH, None, + compression_algorithm=compression_algorithm) + new_root_metadata = \ + tuf.util.load_json_string(new_root_metadata_file.read().decode('utf-8')) + new_version = new_root_metadata['signed']['version'] + 1 + + for version in range(next_version, new_version): + # Temporarily set consistent snapshot. Will be updated to whatever is set in the latests root.json after + # running through the intermediates with `_update_metadata` + self.consistent_snapshot = True + self._update_metadata('root', tuf.conf.DEFAULT_ROOT_REQUIRED_LENGTH, version=version, + compression_algorithm=compression_algorithm) def _check_hashes(self, file_object, trusted_hashes): @@ -1188,7 +1172,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, logger.info(metadata_role + ' not available locally.') self._verify_uncompressed_metadata_file(file_object, metadata_role) - + except Exception as exception: # Remember the error from this mirror, and "reset" the target file. 
logger.exception('Update failed from ' + file_mirror + '.') @@ -1205,6 +1189,19 @@ def _get_metadata_file(self, metadata_role, remote_filename, logger.error('Failed to update {0} from all mirrors: {1}'.format( remote_filename, file_mirror_errors)) raise tuf.NoWorkingMirrorError(file_mirror_errors) + + + def _verify_root_chain(self, role, current, next): + if role != 'root': + return True + + current_role = current['roles'][role] + + # Verify next metadata with current keys/threshold + valid = tuf.sig.verify_threshold(next, current_role['threshold'], current_role['keyids'], role=role) + + if not valid: + raise tuf.BadSignatureError('Root not signed by previous threshold/keys.') @@ -1474,7 +1471,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, remote_filename = metadata_filename filename_version = '' - if self.consistent_snapshot: + if self.consistent_snapshot and version: filename_version = version dirname, basename = os.path.split(remote_filename) remote_filename = os.path.join(dirname, str(filename_version) + '.' + basename) @@ -1523,6 +1520,8 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, # stored for 'metadata_role'. updated_metadata_object = metadata_signable['signed'] current_metadata_object = self.metadata['current'].get(metadata_role) + + self._verify_root_chain(metadata_role, current_metadata_object, metadata_signable) # Finally, update the metadata and fileinfo stores, and rebuild the # key and role info for the top-level roles if 'metadata_role' is root. @@ -1648,7 +1647,7 @@ def _update_metadata_via_fileinfo(self, metadata_role, uncompressed_fileinfo, filename_digest = \ random.choice(list(uncompressed_fileinfo['hashes'].values())) dirname, basename = os.path.split(remote_filename) - remote_filename = os.path.join(dirname, filename_digesti + '.' + basename) + remote_filename = os.path.join(dirname, filename_digest + '.' 
+ basename) metadata_file_object = \ self._safely_get_metadata_file(metadata_role, remote_filename, From d75d25a4e667903758dd43b8f9254cc49f28e381 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Mon, 19 Sep 2016 17:21:33 -0400 Subject: [PATCH 11/20] Implement pyca_crypto_keys.py's create_rsa_encrypted_pem and improve code coverage for it --- tests/repository_tool.py | 2866 -------------------------------- tests/test_pyca_crypto_keys.py | 29 +- tuf/pyca_crypto_keys.py | 56 +- 3 files changed, 52 insertions(+), 2899 deletions(-) delete mode 100755 tests/repository_tool.py diff --git a/tests/repository_tool.py b/tests/repository_tool.py deleted file mode 100755 index 042adea63d..0000000000 --- a/tests/repository_tool.py +++ /dev/null @@ -1,2866 +0,0 @@ -#!/usr/bin/env python - -""" - - repository_tool.py - - - Vladimir Diaz - - - October 19, 2013 - - - See LICENSE for licensing information. - - - Provide a tool that can create a TUF repository. It can be used with the - Python interpreter in interactive mode, or imported directly into a Python - module. See 'tuf/README' for the complete guide to using - 'tuf.repository_tool.py'. -""" - -# Help with Python 3 compatibility, where the print statement is a function, an -# implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. 
-from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -import os -import errno -import time -import datetime -import logging -import tempfile -import shutil -import json -import random - -import tuf -import tuf.formats -import tuf.util -import tuf.keydb -import tuf.roledb -import tuf.keys -import tuf.sig -import tuf.log -import tuf.conf -import tuf.repository_lib as repo_lib -from tuf.repository_lib import generate_and_write_rsa_keypair -from tuf.repository_lib import generate_and_write_ed25519_keypair -from tuf.repository_lib import import_rsa_publickey_from_file -from tuf.repository_lib import import_ed25519_publickey_from_file -from tuf.repository_lib import import_rsa_privatekey_from_file -from tuf.repository_lib import import_ed25519_privatekey_from_file -from tuf.repository_lib import create_tuf_client_directory -from tuf.repository_lib import disable_console_log_messages - -import iso8601 -import six - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger('tuf.repository_tool') - -# Add a console handler so that users are aware of potentially unintended -# states, such as multiple roles that share keys. -tuf.log.add_console_handler() -tuf.log.set_console_log_level(logging.INFO) - -# The algorithm used by the repository to generate the digests of the -# target filepaths, which are included in metadata files and may be prepended -# to the filenames of consistent snapshots. -HASH_FUNCTION = 'sha256' - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -METADATA_STAGED_DIRECTORY_NAME = 'metadata.staged' -METADATA_DIRECTORY_NAME = 'metadata' -TARGETS_DIRECTORY_NAME = 'targets' - -# The extension of TUF metadata. -METADATA_EXTENSION = '.json' - -# Expiration date delta, in seconds, of the top-level roles. 
A metadata -# expiration date is set by taking the current time and adding the expiration -# seconds listed below. - -# Initial 'root.json' expiration time of 1 year. -ROOT_EXPIRATION = 31556900 - -# Initial 'targets.json' expiration time of 3 months. -TARGETS_EXPIRATION = 7889230 - -# Initial 'snapshot.json' expiration time of 1 week. -SNAPSHOT_EXPIRATION = 604800 - -# Initial 'timestamp.json' expiration time of 1 day. -TIMESTAMP_EXPIRATION = 86400 - -try: - tuf.keys.check_crypto_libraries(['rsa', 'ed25519', 'general']) - -except tuf.UnsupportedLibraryError: #pragma: no cover - logger.warn('Warning: The repository and developer tools require' - ' additional libraries, which can be installed as follows:' - '\n $ pip install tuf[tools]') - - -class Repository(object): - """ - - Represent a TUF repository that contains the metadata of the top-level - roles, including all those delegated from the 'targets.json' role. The - repository object returned provides access to the top-level roles, and any - delegated targets that are added as the repository is modified. For - example, a Repository object named 'repository' provides the following - access by default: - - repository.root.version = 2 - repository.timestamp.expiration = datetime.datetime(2015, 8, 8, 12, 0) - repository.snapshot.add_verification_key(...) - repository.targets.delegate('unclaimed', ...) - - Delegating a role from 'targets' updates the attributes of the parent - delegation, which then provides: - - repository.targets('unclaimed').add_verification_key(...) - - - - repository_directory: - The root folder of the repository that contains the metadata and targets - sub-directories. - - metadata_directory: - The metadata sub-directory contains the files of the top-level - roles, including all roles delegated from 'targets.json'. - - targets_directory: - The targets sub-directory contains all the target files that are - downloaded by clients and are referenced in TUF Metadata. 
The hashes and - file lengths are listed in Metadata files so that they are securely - downloaded. Metadata files are similarly referenced in the top-level - metadata. - - - tuf.FormatError, if the arguments are improperly formatted. - - - Creates top-level role objects and assigns them as attributes. - - - A Repository object that contains default Metadata objects for the top-level - roles. - """ - - def __init__(self, repository_directory, metadata_directory, targets_directory): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.PATH_SCHEMA.check_match(repository_directory) - tuf.formats.PATH_SCHEMA.check_match(metadata_directory) - tuf.formats.PATH_SCHEMA.check_match(targets_directory) - - self._repository_directory = repository_directory - self._metadata_directory = metadata_directory - self._targets_directory = targets_directory - - # Set the top-level role objects. - self.root = Root() - self.snapshot = Snapshot() - self.timestamp = Timestamp() - self.targets = Targets(self._targets_directory, 'targets') - - - - def write(self, write_partial=False, consistent_snapshot=False, - compression_algorithms=['gz']): - """ - - Write all the JSON Metadata objects to their corresponding files. - write() raises an exception if any of the role metadata to be written to - disk is invalid, such as an insufficient threshold of signatures, missing - private keys, etc. - - - write_partial: - A boolean indicating whether partial metadata should be written to - disk. Partial metadata may be written to allow multiple maintainters - to independently sign and update role metadata. write() raises an - exception if a metadata role cannot be written due to not having enough - signatures. 
- - consistent_snapshot: - A boolean indicating whether written metadata and target files should - include a version number in the filename (i.e., - .root.json, .targets.json.gz, - .README.json, where is the file's - SHA256 digest. Example: 13.root.json' - - compression_algorithms: - A list of compression algorithms. Each of these algorithms will be - used to compress all of the metadata available on the repository. - By default, all metadata is compressed with gzip. - - - tuf.UnsignedMetadataError, if any of the top-level and delegated roles do - not have the minimum threshold of signatures. - - - Creates metadata files in the repository's metadata directory. - - - None. - """ - - # Does 'write_partial' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.BOOLEAN_SCHEMA.check_match(write_partial) - tuf.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_algorithms) - - # At this point the tuf.keydb and tuf.roledb stores must be fully - # populated, otherwise write() throws a 'tuf.UnsignedMetadataError' - # exception if any of the top-level roles are missing signatures, keys, etc. - - # Write the metadata files of all the delegated roles that are dirty (i.e., - # have been modified via roledb.update_roleinfo()). - for delegated_rolename in tuf.roledb.get_dirty_roles(): - - # Ignore top-level roles, they will be generated later on in this method. 
- if delegated_rolename in ['root', 'targets', 'snapshot', 'timestamp']: - continue - - delegated_filename = os.path.join(self._metadata_directory, - delegated_rolename + METADATA_EXTENSION) - - repo_lib._generate_and_write_metadata(delegated_rolename, - delegated_filename, - write_partial, - self._targets_directory, - self._metadata_directory, - consistent_snapshot) - - # Generate the 'root.json' metadata file. - # _generate_and_write_metadata() raises a 'tuf.Error' exception if the - # metadata cannot be written. - root_filename = repo_lib.ROOT_FILENAME - root_filename = os.path.join(self._metadata_directory, root_filename) - - signable_junk, root_filename = \ - repo_lib._generate_and_write_metadata('root', root_filename, write_partial, - self._targets_directory, - self._metadata_directory, - consistent_snapshot) - - # Generate the 'targets.json' metadata file. - targets_filename = repo_lib.TARGETS_FILENAME - targets_filename = os.path.join(self._metadata_directory, targets_filename) - - signable_junk, targets_filename = \ - repo_lib._generate_and_write_metadata('targets', targets_filename, - write_partial, - self._targets_directory, - self._metadata_directory, - consistent_snapshot) - - # Generate the 'snapshot.json' metadata file. - snapshot_filename = repo_lib.SNAPSHOT_FILENAME - snapshot_filename = os.path.join(self._metadata_directory, snapshot_filename) - filenames = {'root': root_filename, 'targets': targets_filename} - snapshot_signable = None - - snapshot_signable, snapshot_filename = \ - repo_lib._generate_and_write_metadata('snapshot', snapshot_filename, - write_partial, - self._targets_directory, - self._metadata_directory, - consistent_snapshot, filenames) - - # Generate the 'timestamp.json' metadata file. 
- timestamp_filename = repo_lib.TIMESTAMP_FILENAME - timestamp_filename = os.path.join(self._metadata_directory, timestamp_filename) - filenames = {'snapshot': snapshot_filename} - - repo_lib._generate_and_write_metadata('timestamp', timestamp_filename, - write_partial, - self._targets_directory, - self._metadata_directory, - consistent_snapshot, filenames) - - # Delete the metadata of roles no longer in 'tuf.roledb'. Obsolete roles - # may have been revoked and should no longer have their metadata files - # available on disk, otherwise loading a repository may unintentionally load - # them. - repo_lib._delete_obsolete_metadata(self._metadata_directory, - snapshot_signable['signed'], - consistent_snapshot) - - - - def write_partial(self): - """ - - Write all the JSON Metadata objects to their corresponding files, but - allow metadata files to contain an invalid threshold of signatures. - - - None. - - - None. - - - Creates metadata files in the repository's metadata directory. - - - None. - """ - - self.write(write_partial=True) - - - - def status(self): - """ - - Determine the status of the top-level roles, including those delegated by - the Targets role. status() checks if each role provides sufficient public - and private keys, signatures, and that a valid metadata file is generated - if write() were to be called. Metadata files are temporarily written so - that file hashes and lengths may be verified, determine if delegated role - trust is fully obeyed, and target paths valid according to parent roles. - status() does not do a simple check for number of threshold keys and - signatures. - - - None. - - - None. - - - Generates and writes temporary metadata files. - - - None. - """ - - temp_repository_directory = None - - # Generate and write temporary metadata so that full verification of - # metadata is possible, such as verifying signatures, digests, and file - # content. Ensure temporary files generated are removed after verification - # results are completed. 
- try: - temp_repository_directory = tempfile.mkdtemp() - targets_directory = self._targets_directory - metadata_directory = os.path.join(temp_repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - os.mkdir(metadata_directory) - - - # Retrieve the roleinfo of the delegated roles, exluding the top-level - # targets role. - delegated_roles = tuf.roledb.get_delegated_rolenames('targets') - insufficient_keys = [] - insufficient_signatures = [] - - # Iterate the list of delegated roles and determine the list of invalid - # roles. First verify the public and private keys, and then the generated - # metadata file. - for delegated_role in delegated_roles: - filename = delegated_role + METADATA_EXTENSION - filename = os.path.join(metadata_directory, filename) - - # Ensure the parent directories of 'filename' exist, otherwise an - # IO exception is raised if 'filename' is written to a sub-directory. - tuf.util.ensure_parent_dir(filename) - - # Append any invalid roles to the 'insufficient_keys' and - # 'insufficient_signatures' lists - try: - repo_lib._check_role_keys(delegated_role) - - except tuf.InsufficientKeysError: - insufficient_keys.append(delegated_role) - continue - - try: - repo_lib._generate_and_write_metadata(delegated_role, filename, False, - targets_directory, - metadata_directory) - except tuf.UnsignedMetadataError: - insufficient_signatures.append(delegated_role) - - # Log the verification results of the delegated roles and return - # immediately after each invalid case. - if len(insufficient_keys): - logger.info('Delegated roles with insufficient' - ' keys:\n' + repr(insufficient_keys)) - return - - if len(insufficient_signatures): - logger.info('Delegated roles with insufficient' - ' signatures:\n' + repr(insufficient_signatures)) - return - - # Verify the top-level roles and log the results. 
- repo_lib._log_status_of_top_level_roles(targets_directory, - metadata_directory) - - finally: - shutil.rmtree(temp_repository_directory, ignore_errors=True) - - - @staticmethod - def get_filepaths_in_directory(files_directory, recursive_walk=False, - followlinks=True): - """ - - Walk the given 'files_directory' and build a list of target files found. - - - files_directory: - The path to a directory of target files. - - recursive_walk: - To recursively walk the directory, set recursive_walk=True. - - followlinks: - To follow symbolic links, set followlinks=True. - - - tuf.FormatError, if the arguments are improperly formatted. - - tuf.Error, if 'file_directory' is not a valid directory. - - Python IO exceptions. - - - None. - - - A list of absolute paths to target files in the given 'files_directory'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.PATH_SCHEMA.check_match(files_directory) - tuf.formats.BOOLEAN_SCHEMA.check_match(recursive_walk) - tuf.formats.BOOLEAN_SCHEMA.check_match(followlinks) - - # Ensure a valid directory is given. - if not os.path.isdir(files_directory): - raise tuf.Error(repr(files_directory) + ' is not a directory.') - - # A list of the target filepaths found in 'files_directory'. - targets = [] - - # FIXME: We need a way to tell Python 2, but not Python 3, to return - # filenames in Unicode; see #61 and: - # http://docs.python.org/2/howto/unicode.html#unicode-filenames - for dirpath, dirnames, filenames in os.walk(files_directory, - followlinks=followlinks): - for filename in filenames: - full_target_path = os.path.join(dirpath, filename) - targets.append(full_target_path) - - # Prune the subdirectories to walk right now if we do not wish to - # recursively walk files_directory. 
- if recursive_walk is False: - del dirnames[:] - - return targets - - - - - -class Metadata(object): - """ - - Provide a base class to represent a TUF Metadata role. There are four - top-level roles: Root, Targets, Snapshot, and Timestamp. The Metadata class - provides methods that are needed by all top-level roles, such as adding - and removing public keys, private keys, and signatures. Metadata - attributes, such as rolename, version, threshold, expiration, key list, and - compressions, is also provided by the Metadata base class. - - - None. - - - None. - - - None. - - - None. - """ - - def __init__(self): - self._rolename = None - - - - def add_verification_key(self, key): - """ - - Add 'key' to the role. Adding a key, which should contain only the public - portion, signifies the corresponding private key and signatures the role - is expected to provide. A threshold of signatures is required for a role - to be considered properly signed. If a metadata file contains an - insufficient threshold of signatures, it must not be accepted. - - >>> - >>> - >>> - - - key: - The role key to be added, conformant to 'tuf.formats.ANYKEY_SCHEMA'. - Adding a public key to a role means that its corresponding private key - must generate and add its signature to the role. A threshold number of - signatures is required for a role to be fully signed. - - - tuf.FormatError, if the 'key' argument is improperly formatted. - - - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are updated. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ANYKEY_SCHEMA.check_match(key) - - # Ensure 'key', which should contain the public portion, is added to - # 'tuf.keydb.py'. Add 'key' to the list of recognized keys. 
Keys may be - # shared, so do not raise an exception if 'key' has already been loaded. - try: - tuf.keydb.add_key(key) - - except tuf.KeyAlreadyExistsError: - logger.warning('Adding a verification key that has already been used.') - - keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - # Add 'key' to the role's entry in 'tuf.roledb.py' and avoid duplicates. - if keyid not in roleinfo['keyids']: - roleinfo['keyids'].append(keyid) - - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - - - def remove_verification_key(self, key): - """ - - Remove 'key' from the role's currently recognized list of role keys. - The role expects a threshold number of signatures. - - >>> - >>> - >>> - - - key: - The role's key, conformant to 'tuf.formats.ANYKEY_SCHEMA'. 'key' - should contain only the public portion, as only the public key is - needed. The 'add_verification_key()' method should have previously - added 'key'. - - - tuf.FormatError, if the 'key' argument is improperly formatted. - - tuf.Error, if the 'key' argument has not been previously added. - - - Updates the role's 'tuf.roledb.py' entry. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ANYKEY_SCHEMA.check_match(key) - - keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - if keyid in roleinfo['keyids']: - roleinfo['keyids'].remove(keyid) - - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - else: - raise tuf.Error('Verification key not found.') - - - - def load_signing_key(self, key): - """ - - Load the role key, which must contain the private portion, so that role - signatures may be generated when the role's metadata file is eventually - written to disk. - - >>> - >>> - >>> - - - key: - The role's key, conformant to 'tuf.formats.ANYKEY_SCHEMA'. 
It must - contain the private key, so that role signatures may be generated when - write() or write_partial() is eventually called to generate valid - metadata files. - - - tuf.FormatError, if 'key' is improperly formatted. - - tuf.Error, if the private key is not found in 'key'. - - - Updates the role's 'tuf.keydb.py' and 'tuf.roledb.py' entries. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ANYKEY_SCHEMA.check_match(key) - - # Ensure the private portion of the key is available, otherwise signatures - # cannot be generated when the metadata file is written to disk. - if not len(key['keyval']['private']): - raise tuf.Error('This is not a private key.') - - # Has the key, with the private portion included, been added to the keydb? - # The public version of the key may have been previously added. - try: - tuf.keydb.add_key(key) - - except tuf.KeyAlreadyExistsError: - tuf.keydb.remove_key(key['keyid']) - tuf.keydb.add_key(key) - - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - if key['keyid'] not in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].append(key['keyid']) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - def unload_signing_key(self, key): - """ - - Remove a previously loaded role private key (i.e., load_signing_key()). - The keyid of the 'key' is removed from the list of recognized signing - keys. - - >>> - >>> - >>> - - - key: - The role key to be unloaded, conformant to 'tuf.formats.ANYKEY_SCHEMA'. - - - tuf.FormatError, if the 'key' argument is improperly formatted. - - tuf.Error, if the 'key' argument has not been previously loaded. - - - Updates the signing keys of the role in 'tuf.roledb.py'. - - - None. 
- """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ANYKEY_SCHEMA.check_match(key) - - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - if key['keyid'] in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].remove(key['keyid']) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - else: - raise tuf.Error('Signing key not found.') - - - - def add_signature(self, signature): - """ - - Add a signature to the role. A role is considered fully signed if it - contains a threshold of signatures. The 'signature' should have been - generated by the private key corresponding to one of the role's expected - keys. - - >>> - >>> - >>> - - - signature: - The signature to be added to the role, conformant to - 'tuf.formats.SIGNATURE_SCHEMA'. - - - tuf.FormatError, if the 'signature' argument is improperly formatted. - - - Adds 'signature', if not already added, to the role's 'signatures' field - in 'tuf.roledb.py'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.SIGNATURE_SCHEMA.check_match(signature) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - # Ensure the roleinfo contains a 'signatures' field. - if 'signatures' not in roleinfo: - roleinfo['signatures'] = [] - - # Update the role's roleinfo by adding 'signature', if it has not been - # added. 
- if signature not in roleinfo['signatures']: - roleinfo['signatures'].append(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - def remove_signature(self, signature): - """ - - Remove a previously loaded, or added, role 'signature'. A role must - contain a threshold number of signatures to be considered fully signed. - - >>> - >>> - >>> - - - signature: - The role signature to remove, conformant to - 'tuf.formats.SIGNATURE_SCHEMA'. - - - tuf.FormatError, if the 'signature' argument is improperly formatted. - - tuf.Error, if 'signature' has not been previously added to this role. - - - Updates the 'signatures' field of the role in 'tuf.roledb.py'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.SIGNATURE_SCHEMA.check_match(signature) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - if signature in roleinfo['signatures']: - roleinfo['signatures'].remove(signature) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - else: - raise tuf.Error('Signature not found.') - - - - @property - def signatures(self): - """ - - A getter method that returns the role's signatures. A role is considered - fully signed if it contains a threshold number of signatures, where each - signature must be provided by the generated by the private key. Keys - are added to a role with the add_verification_key() method. - - - None. - - - None. - - - None. - - - A list of signatures, conformant to 'tuf.formats.SIGNATURES_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - signatures = roleinfo['signatures'] - - return signatures - - - - @property - def keys(self): - """ - - A getter method that returns the role's keyids of the keys. 
The role - is expected to eventually contain a threshold of signatures generated - by the private keys of each of the role's keys (returned here as a keyid.) - - - None. - - - None. - - - None. - - - A list of the role's keyids (i.e., keyids of the keys). - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - keyids = roleinfo['keyids'] - - return keyids - - - - @property - def rolename(self): - """ - - Return the role's name. - Examples: 'root', 'timestamp', 'targets/unclaimed/django'. - - - None. - - - None. - - - None. - - - The role's name, conformant to 'tuf.formats.ROLENAME_SCHEMA'. - Examples: 'root', 'timestamp', 'targets/unclaimed/django'. - """ - - return self._rolename - - - - @property - def version(self): - """ - - A getter method that returns the role's version number, conformant to - 'tuf.formats.VERSION_SCHEMA'. - - - None. - - - None. - - - None. - - - The role's version number, conformant to 'tuf.formats.VERSION_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - version = roleinfo['version'] - - return version - - - - @version.setter - def version(self, version): - """ - - A setter method that updates the role's version number. TUF clients - download new metadata with version number greater than the version - currently trusted. New metadata start at version 1 when either write() - or write_partial() is called. Version numbers are automatically - incremented, when the write methods are called, as follows: - - 1. write_partial==True and the metadata is the first to be written. - - 2. write_partial=False (i.e., write()), the metadata was not loaded as - partially written, and a write_partial is not needed. - - >>> - >>> - >>> - - - version: - The role's version number, conformant to 'tuf.formats.VERSION_SCHEMA'. - - - tuf.FormatError, if the 'version' argument is improperly formatted. - - - Modifies the 'version' attribute of the Repository object and updates - the role's version in 'tuf.roledb.py'. - - - None. 
- """ - - # Does 'version' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - roleinfo['version'] = version - - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - - - @property - def threshold(self): - """ - - Return the role's threshold value. A role is considered fully signed if - a threshold number of signatures is available. - - - None. - - - None. - - - None. - - - The role's threshold value, conformant to 'tuf.formats.THRESHOLD_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self._rolename) - threshold = roleinfo['threshold'] - - return threshold - - - - @threshold.setter - def threshold(self, threshold): - """ - - A setter method that modified the threshold value of the role. Metadata - is considered fully signed if a 'threshold' number of signatures is - available. - - >>> - >>> - >>> - - - threshold: - An integer value that sets the role's threshold value, or the miminum - number of signatures needed for metadata to be considered fully - signed. Conformant to 'tuf.formats.THRESHOLD_SCHEMA'. - - - tuf.FormatError, if the 'threshold' argument is improperly formatted. - - - Modifies the threshold attribute of the Repository object and updates - the roles threshold in 'tuf.roledb.py'. - - - None. - """ - - # Does 'threshold' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. 
- tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - - roleinfo = tuf.roledb.get_roleinfo(self._rolename) - roleinfo['threshold'] = threshold - - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - - @property - def expiration(self): - """ - - A getter method that returns the role's expiration datetime. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - The role's expiration datetime, a datetime.datetime() object. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - expires = roleinfo['expires'] - - expires_datetime_object = iso8601.parse_date(expires) - - return expires_datetime_object - - - - @expiration.setter - def expiration(self, datetime_object): - """ - - A setter method for the role's expiration datetime. The top-level - roles have a default expiration (e.g., ROOT_EXPIRATION), but may later - be modified by this setter method. - - >>> - >>> - >>> - - - datetime_object: - The datetime expiration of the role, a datetime.datetime() object. - - - tuf.FormatError, if 'datetime_object' is not a datetime.datetime() object. - - tuf.Error, if 'datetime_object' has already expired. - - - Modifies the expiration attribute of the Repository object. - The datetime given will be truncated to microseconds = 0 - - - None. - """ - - # Is 'datetime_object' a datetime.datetime() object? - # Raise 'tuf.FormatError' if not. - if not isinstance(datetime_object, datetime.datetime): - raise tuf.FormatError(repr(datetime_object) + ' is not a' - ' datetime.datetime() object.') - - # truncate the microseconds value to produce a correct schema string - # of the form yyyy-mm-ddThh:mm:ssZ - datetime_object = datetime_object.replace(microsecond = 0) - - # Ensure the expiration has not already passed. - current_datetime_object = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) - - if datetime_object < current_datetime_object: - raise tuf.Error(repr(self.rolename) + ' has already expired.') - - # Update the role's 'expires' entry in 'tuf.roledb.py'. 
- roleinfo = tuf.roledb.get_roleinfo(self.rolename) - expires = datetime_object.isoformat() + 'Z' - roleinfo['expires'] = expires - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - @property - def signing_keys(self): - """ - - A getter method that returns a list of the role's signing keys. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - A list of keyids of the role's signing keys, conformant to - 'tuf.formats.KEYIDS_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - signing_keyids = roleinfo['signing_keyids'] - - return signing_keyids - - - - @property - def compressions(self): - """ - - A getter method that returns a list of the file compression algorithms - used when the metadata is written to disk. If ['gz'] is set for the - 'targets.json' role, the metadata files 'targets.json' and - 'targets.json.gz' are written. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - A list of compression algorithms, conformant to - 'tuf.formats.COMPRESSIONS_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - compressions = roleinfo['compressions'] - - return compressions - - - - @compressions.setter - def compressions(self, compression_list): - """ - - A setter method for the file compression algorithms used when the - metadata is written to disk. If ['gz'] is set for the 'targets.json' role - the metadata files 'targets.json' and 'targets.json.gz' are written. - - >>> - >>> - >>> - - - compression_list: - A list of file compression algorithms, conformant to - 'tuf.formats.COMPRESSIONS_SCHEMA'. - - - tuf.FormatError, if 'compression_list' is improperly formatted. - - - Updates the role's compression algorithms listed in 'tuf.roledb.py'. - - - None. - """ - - # Does 'compression_name' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. 
- tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_list) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - # Add the compression algorithms of 'compression_list' to the role's - # entry in 'tuf.roledb.py'. - for compression in compression_list: - if compression not in roleinfo['compressions']: - roleinfo['compressions'].append(compression) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - - -class Root(Metadata): - """ - - Represent a Root role object. The root role is responsible for - listing the public keys and threshold of all the top-level roles, including - itself. Top-level metadata is rejected if it does not comply with what is - specified by the Root role. - - This Root object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) is supported. - Since Root is a top-level role and must exist, a default Root object - is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - None. - - - None. - - - A 'root' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self): - - super(Root, self).__init__() - - self._rolename = 'root' - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. - expiration = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'consistent_snapshot': False, - 'compressions': [''], 'expires': expiration, - 'partial_loaded': False} - try: - tuf.roledb.add_role(self._rolename, roleinfo) - - except tuf.RoleAlreadyExistsError: - pass - - - - - -class Timestamp(Metadata): - """ - - Represent a Timestamp role object. The timestamp role is responsible for - referencing the latest version of the Snapshot role. 
Under normal - conditions, it is the only role to be downloaded from a remote repository - without a known file length and hash. An upper length limit is set, though. - Also, its signatures are also verified to be valid according to the Root - role. If invalid metadata can only be downloaded by the client, Root - is the only other role that is downloaded without a known length and hash. - This case may occur if a role's signing keys have been revoked and a newer - Root file is needed to list the updated keys. - - This Timestamp object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) is supported. - Since Snapshot is a top-level role and must exist, a default Timestamp object - is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - None. - - - None. - - - A 'timestamp' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self): - - super(Timestamp, self).__init__() - - self._rolename = 'timestamp' - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. - expiration = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'compressions': [''], - 'expires': expiration, 'partial_loaded': False} - - try: - tuf.roledb.add_role(self.rolename, roleinfo) - - except tuf.RoleAlreadyExistsError: - pass - - - - - -class Snapshot(Metadata): - """ - - Represent a Snapshot role object. The snapshot role is responsible for - referencing the other top-level roles (excluding Timestamp) and all - delegated roles. 
- - This Snapshot object sub-classes Metadata, so the expected - Metadata operations like adding/removing public keys, signatures, private - keys, and updating metadata attributes (e.g., version and expiration) is - supported. Since Snapshot is a top-level role and must exist, a default - Snapshot object is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - None. - - - None. - - - A 'snapshot' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self): - - super(Snapshot, self).__init__() - - self._rolename = 'snapshot' - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. - expiration = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'compressions': [''], - 'expires': expiration, 'partial_loaded': False} - - try: - tuf.roledb.add_role(self._rolename, roleinfo) - - except tuf.RoleAlreadyExistsError: - pass - - - - - -class Targets(Metadata): - """ - - Represent a Targets role object. Targets roles include the top-level role - 'targets.json' and all delegated roles (e.g., 'targets/unclaimed/django'). - The expected operations of Targets metadata is included, such as adding - and removing repository target files, making and revoking delegations, and - listing the target files provided by it. - - Adding or removing a delegation causes the attributes of the Targets object - to be updated. That is, if the 'django' Targets object is delegated by - 'targets/unclaimed', a new attribute is added so that the following - code statement is supported: - repository.targets('unclaimed')('django').version = 2 - - Likewise, revoking a delegation causes removal of the delegation attribute. 
- - This Targets object sub-classes Metadata, so the expected - Metadata operations like adding/removing public keys, signatures, private - keys, and updating metadata attributes (e.g., version and expiration) is - supported. Since Targets is a top-level role and must exist, a default - Targets object (for 'targets.json', not delegated roles) is instantiated when - a new Repository object is created. - - >>> - >>> - >>> - - - targets_directory: - The targets directory of the Repository object. - - rolename: - The rolename of this Targets object. - - roleinfo: - An already populated roleinfo object of 'rolename'. Conformant to - 'tuf.formats.ROLEDB_SCHEMA'. - - - tuf.FormatError, if the arguments are improperly formatted. - - - Modifies the roleinfo of the targets role in 'tuf.roledb', or creates - a default one named 'targets'. - - - None. - """ - - def __init__(self, targets_directory, rolename='targets', roleinfo=None, - parent_targets_object=None): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if roleinfo is not None: - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) - - super(Targets, self).__init__() - self._targets_directory = targets_directory - self._rolename = rolename - self._target_files = [] - self._delegated_roles = {} - self._parent_targets_object = self - - # Keep a reference to the top-level 'targets' object. Any delegated roles - # that may be created, can be added to and accessed via the top-level - # 'targets' object. - if parent_targets_object is not None: - self._parent_targets_object = parent_targets_object - - # By default, Targets objects are set to expire 3 months from the current - # time. May be later modified. 
- expiration = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - # If 'roleinfo' is not provided, set an initial default. - if roleinfo is None: - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'version': 0, 'compressions': [''], 'expires': expiration, - 'signatures': [], 'paths': {}, 'path_hash_prefixes': [], - 'partial_loaded': False, 'delegations': {'keys': {}, - 'roles': []}} - - # Add the new role to the 'tuf.roledb'. - try: - tuf.roledb.add_role(self.rolename, roleinfo) - - except tuf.RoleAlreadyExistsError: - pass - - - - def __call__(self, rolename): - """ - - Allow callable Targets object so that delegated roles may be referenced - by their string rolenames. Rolenames may include characters like '-' and - are not restricted to Python identifiers. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - - tuf.FormatError, if the arguments are improperly formatted. - - tuf.UnknownRoleError, if 'rolename' has not been delegated by this - Targets object. - - - Modifies the roleinfo of the targets role in 'tuf.roledb'. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if rolename in self._delegated_roles: - return self._delegated_roles[rolename] - - else: - raise tuf.UnknownRoleError(repr(rolename) + ' has not been delegated' - ' by ' + repr(self.rolename)) - - - - - - def add_delegated_role(self, rolename, targets_object): - """ - - Add 'targets_object' to this Targets object's list of known delegated - roles. 
Specifically, delegated Targets roles should call 'super(Targets, - self).add_delegated_role(...)' so that the top-level 'targets' role - contains a dictionary of all the available roles on the repository. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - targets_object: - A Targets() object. - - - tuf.FormatError, if the arguments are improperly formatted. - - tuf.RoleAlreadyExistsError, if 'rolename' has already been delegated by - this Targets object. - - - Updates the Target object's dictionary of delegated targets. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if not isinstance(targets_object, Targets): - raise tuf.FormatError(repr(targets_object) + ' is not a Targets object.') - - - if rolename in self._delegated_roles: - raise tuf.RoleAlreadyExistsError(repr(rolename) + ' already exists.') - - else: - self._delegated_roles[rolename] = targets_object - - - - - - @property - def target_files(self): - """ - - A getter method that returns the target files added thus far to this - Targets object. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - None. - """ - - target_files = tuf.roledb.get_roleinfo(self._rolename)['paths'] - - return target_files - - - - def add_restricted_paths(self, list_of_directory_paths, child_rolename): - """ - - Add 'list_of_directory_paths' to the restricted paths of 'child_rolename'. - The updater client verifies the target paths specified by child roles, and - searches for targets by visiting these restricted paths. 
A child role may - only provide targets specifically listed in the delegations field of the - parent, or a target that falls under a restricted path. - - >>> - >>> - >>> - - - list_of_directory_paths: - A list of directory paths 'child_rolename' should also be restricted to. - - child_rolename: - The child delegation that requires an update to its restricted paths, - as listed in the parent role's delegations (e.g., 'Django' in - 'unclaimed'). - - - tuf.Error, if a directory path in 'list_of_directory_paths' is not a - directory, or not under the repository's targets directory. If - 'child_rolename' has not been delegated yet. - - - Modifies this Targets' delegations field. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.PATHS_SCHEMA.check_match(list_of_directory_paths) - tuf.formats.ROLENAME_SCHEMA.check_match(child_rolename) - - # A list of verified paths to be added to the child role's entry in the - # parent's delegations. - directory_paths = [] - - # Ensure the 'child_rolename' has been delegated, otherwise it will not - # have an entry in the parent role's delegations field. - if not tuf.roledb.role_exists(child_rolename): - raise tuf.Error(repr(child_rolename) + ' has not been delegated.') - - # Are the paths in 'list_of_directory_paths' valid? - for directory_path in list_of_directory_paths: - directory_path = os.path.abspath(directory_path) - if not os.path.isdir(directory_path): - raise tuf.Error(repr(directory_path) + ' is not a directory.') - - # Are the paths in the repository's targets directory? Append a trailing - # path separator with os.path.join(path, ''). 
- targets_directory = os.path.join(self._targets_directory, '') - directory_path = os.path.join(directory_path, '') - if not directory_path.startswith(targets_directory): - raise tuf.Error(repr(directory_path) + ' is not under the' - ' Repository\'s targets directory: ' + repr(self._targets_directory)) - - directory_paths.append(directory_path[len(self._targets_directory):]) - - # Get the current role's roleinfo, so that its delegations field can be - # updated. - roleinfo = tuf.roledb.get_roleinfo(self._rolename) - - # Update the restricted paths of 'child_rolename'. - for role in roleinfo['delegations']['roles']: - if role['name'] == child_rolename: - restricted_paths = role['paths'] - - for directory_path in directory_paths: - if directory_path not in restricted_paths: - restricted_paths.append(directory_path) - - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - - - def add_target(self, filepath, custom=None): - """ - - Add a filepath (must be under the repository's targets directory) to the - Targets object. - - This method does not actually create 'filepath' on the file system. - 'filepath' must already exist on the file system. - - >>> - >>> - >>> - - - filepath: - The path of the target file. It must be located in the repository's - targets directory. - - custom: - An optional object providing additional information about the file. - - - tuf.FormatError, if 'filepath' is improperly formatted. - - tuf.Error, if 'filepath' is not found under the repository's targets - directory. - - - Adds 'filepath' to this role's list of targets. This role's - 'tuf.roledb.py' is also updated. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. 
- tuf.formats.PATH_SCHEMA.check_match(filepath) - if custom is None: - custom = {} - - else: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) - - filepath = os.path.abspath(filepath) - - # Ensure 'filepath' is found under the repository's targets directory. - if not filepath.startswith(self._targets_directory): - raise tuf.Error(repr(filepath) + ' is not under the Repository\'s' - ' targets directory: ' + repr(self._targets_directory)) - - # Add 'filepath' (i.e., relative to the targets directory) to the role's - # list of targets. 'filepath' will be verified as an allowed path according - # to this Targets parent role when write() is called. Not verifying - # 'filepath' here allows freedom to add targets and parent restrictions - # in any order, and minimize the number of times these checks are performed. - if os.path.isfile(filepath): - - # Update the role's 'tuf.roledb.py' entry and avoid duplicates. - targets_directory_length = len(self._targets_directory) - roleinfo = tuf.roledb.get_roleinfo(self._rolename) - relative_path = filepath[targets_directory_length:] - if relative_path not in roleinfo['paths']: - roleinfo['paths'].update({relative_path: custom}) - tuf.roledb.update_roleinfo(self._rolename, roleinfo) - - else: - raise tuf.Error(repr(filepath) + ' is not a valid file.') - - - - def add_targets(self, list_of_targets): - """ - - Add a list of target filepaths (all relative to 'self.targets_directory'). - This method does not actually create files on the file system. The - list of target must already exist. - - >>> - >>> - >>> - - - list_of_targets: - A list of target filepaths that are added to the paths of this Targets - object. - - - tuf.FormatError, if the arguments are improperly formatted. - - tuf.Error, if any of the paths listed in 'list_of_targets' is not found - under the repository's targets directory or is invalid. - - - This Targets' roleinfo is updated with the paths in 'list_of_targets'. - - - None. 
- """ - - # Does 'list_of_targets' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) - - # Update the tuf.roledb entry. - targets_directory_length = len(self._targets_directory) - relative_list_of_targets = [] - - # Ensure the paths in 'list_of_targets' are valid and fall under the - # repository's targets directory. The paths of 'list_of_targets' will be - # verified as allowed paths according to this Targets parent role when - # write() is called. Not verifying filepaths here allows the freedom to add - # targets and parent restrictions in any order, and minimize the number of - # times these checks are performed. - for target in list_of_targets: - filepath = os.path.abspath(target) - - if not filepath.startswith(self._targets_directory+os.sep): - raise tuf.Error(repr(filepath) + ' is not under the Repository\'s' - ' targets directory: ' + repr(self._targets_directory)) - - if os.path.isfile(filepath): - relative_list_of_targets.append(filepath[targets_directory_length:]) - - else: - raise tuf.Error(repr(filepath) + ' is not a valid file.') - - # Update this Targets 'tuf.roledb.py' entry. - roleinfo = tuf.roledb.get_roleinfo(self._rolename) - for relative_target in relative_list_of_targets: - if relative_target not in roleinfo['paths']: - roleinfo['paths'].update({relative_target: {}}) - - else: - continue - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - def remove_target(self, filepath): - """ - - Remove the target 'filepath' from this Targets' 'paths' field. 'filepath' - is relative to the targets directory. - - >>> - >>> - >>> - - - filepath: - The target to remove from this Targets object, relative to the - repository's targets directory. - - - tuf.FormatError, if 'filepath' is improperly formatted. 
- - tuf.Error, if 'filepath' is not under the repository's targets directory, - or not found. - - - Modifies this Targets 'tuf.roledb.py' entry. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) - - filepath = os.path.abspath(filepath) - targets_directory_length = len(self._targets_directory) - - # Ensure 'filepath' is under the repository targets directory. - if not filepath.startswith(self._targets_directory+os.sep): - raise tuf.Error(repr(filepath) + ' is not under the Repository\'s' - ' targets directory: ' + repr(self._targets_directory)) - - # The relative filepath is listed in 'paths'. - relative_filepath = filepath[targets_directory_length:] - - # Remove 'relative_filepath', if found, and update this Targets roleinfo. - fileinfo = tuf.roledb.get_roleinfo(self.rolename) - if relative_filepath in fileinfo['paths']: - del fileinfo['paths'][relative_filepath] - tuf.roledb.update_roleinfo(self.rolename, fileinfo) - - else: - raise tuf.Error('Target file path not found.') - - - - def clear_targets(self): - """ - - Remove all the target filepaths in the "paths" field of this Targets. - - >>> - >>> - >>> - - - None - - - None. - - - Modifies this Targets' 'tuf.roledb.py' entry. - - - None. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - roleinfo['paths'] = {} - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - - - - - def get_delegated_rolenames(self): - """ - - Return all delegations of a role. If ['a/b/', 'a/b/c/', 'a/b/c/d'] have - been delegated by the delegated role 'django', - repository.targets('django').get_delegated_rolenames() returns: ['a/b', - 'a/b/c', 'a/b/c/d']. - - - None. - - - None. - - - None. - - - A list of rolenames. 
- """ - - return tuf.roledb.get_delegated_rolenames(self.rolename) - - - - - - def delegate(self, rolename, public_keys, list_of_targets, threshold=1, - backtrack=True, restricted_paths=None, path_hash_prefixes=None): - """ - - Create a new delegation, where 'rolename' is a child delegation of this - Targets object. The keys and roles database is updated, including the - delegations field of this Targets. The delegation of 'rolename' is added - and accessible (i.e., repository.targets(rolename)). - - Actual metadata files are not create, only when repository.write() or - repository.write_partial() is called. - - >>> - >>> - >>> - - - rolename: - The name of the delegated role, as in 'django' or 'unclaimed'. - - public_keys: - A list of TUF key objects in 'ANYKEYLIST_SCHEMA' format. The list - may contain any of the supported key types: RSAKEY_SCHEMA, - ED25519KEY_SCHEMA, etc. - - list_of_targets: - A list of target filepaths that are added to the paths of 'rolename'. - 'list_of_targets' is a list of target filepaths, and can be empty. - - threshold: - The threshold number of keys of 'rolename'. - - backtrack: - Boolean that indicates whether this role allows the updater client - to continue searching for targets (target files it is trusted to list - but has not yet specified) in other delegations. If 'backtrack' is - False and 'updater.target()' does not find 'example_target.tar.gz' in - this role, a 'tuf.UnknownTargetError' exception should be raised. If - 'backtrack' is True (default), and 'target/other_role' is also trusted - with 'example_target.tar.gz' and has listed it, updater.target() - should backtrack and return the target file specified by - 'target/other_role'. - - restricted_paths: - A list of restricted directory or file paths of 'rolename'. Any target - files added to 'rolename' must fall under one of the restricted paths. 
- - path_hash_prefixes: - A list of hash prefixes in 'tuf.formats.PATH_HASH_PREFIXES_SCHEMA' - format, used in hashed bin delegations. Targets may be located and - stored in hashed bins by calculating the target path's hash prefix. - - - tuf.FormatError, if any of the arguments are improperly formatted. - - tuf.Error, if the delegated role already exists or if any of the arguments - is an invalid path (i.e., not under the repository's targets directory). - - - A new Target object is created for 'rolename' that is accessible to the - caller (i.e., targets.). The 'tuf.keydb.py' and - 'tuf.roledb.py' stores are updated with 'public_keys'. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - tuf.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - tuf.formats.BOOLEAN_SCHEMA.check_match(backtrack) - - if restricted_paths is not None: - tuf.formats.RELPATHS_SCHEMA.check_match(restricted_paths) - - if path_hash_prefixes is not None: - tuf.formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) - - # Check if 'rolename' is not already a delegation. - if tuf.roledb.role_exists(rolename): - raise tuf.Error(repr(rolename) + ' already delegated.') - - # Keep track of the valid keyids (added to the new Targets object) and - # their keydicts (added to this Targets delegations). - keyids = [] - keydict = {} - - # Add all the keys of 'public_keys' to tuf.keydb. - for key in public_keys: - keyid = key['keyid'] - key_metadata_format = tuf.keys.format_keyval_to_metadata(key['keytype'], - key['keyval']) - # Update 'keyids' and 'keydict'. 
- new_keydict = {keyid: key_metadata_format} - keydict.update(new_keydict) - keyids.append(keyid) - - # Ensure the paths of 'list_of_targets' all fall under the repository's - # targets. - relative_targetpaths = {} - targets_directory_length = len(self._targets_directory) - - for target in list_of_targets: - target = os.path.abspath(target) - if not target.startswith(self._targets_directory+os.sep): - raise tuf.Error(repr(target) + ' is not under the Repository\'s' - ' targets directory: ' + repr(self._targets_directory)) - - relative_targetpaths.update({target[targets_directory_length:]: {}}) - - # Ensure the paths of 'restricted_paths' all fall under the repository's - # targets. - relative_restricted_paths = [] - - if restricted_paths is not None: - for path in restricted_paths: - path = os.path.abspath(path) + os.sep - if not path.startswith(self._targets_directory + os.sep): - raise tuf.Error(repr(path) + ' is not under the Repository\'s' - ' targets directory: ' +repr(self._targets_directory)) - - # Append a trailing path separator with os.path.join(path, ''). - path = os.path.join(path, '') - relative_restricted_paths.append(path[targets_directory_length:]) - - # Create a new Targets object for the 'rolename' delegation. An initial - # expiration is set (3 months from the current time). - expiration = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'name': rolename, 'keyids': keyids, 'signing_keyids': [], - 'threshold': threshold, 'version': 0, 'compressions': [''], - 'expires': expiration, 'signatures': [], 'partial_loaded': False, - 'paths': relative_targetpaths, 'delegations': {'keys': {}, - 'roles': []}} - - # The new targets object is added as an attribute to this Targets object. - new_targets_object = Targets(self._targets_directory, rolename, - roleinfo, parent_targets_object=self) - - # Update the 'delegations' field of the current role. 
- current_roleinfo = tuf.roledb.get_roleinfo(self.rolename) - current_roleinfo['delegations']['keys'].update(keydict) - - # Update the roleinfo of this role. A ROLE_SCHEMA object requires only - # 'keyids', 'threshold', and 'paths'. - roleinfo = {'name': rolename, - 'keyids': roleinfo['keyids'], - 'threshold': roleinfo['threshold'], - 'backtrack': backtrack, - 'paths': list(roleinfo['paths'].keys())} - - if restricted_paths is not None: - roleinfo['paths'] = relative_restricted_paths - - if path_hash_prefixes is not None: - roleinfo['path_hash_prefixes'] = path_hash_prefixes - # A role in a delegations must list either 'path_hash_prefixes' - # or 'paths'. - del roleinfo['paths'] - - current_roleinfo['delegations']['roles'].append(roleinfo) - tuf.roledb.update_roleinfo(self.rolename, current_roleinfo) - - # Update the public keys of 'new_targets_object'. - for key in public_keys: - new_targets_object.add_verification_key(key) - - # Add the new delegation to the top-level 'targets' role object (i.e., - # 'repository.targets()'). For example, 'django', which was delegated by - # repository.target('claimed'), is added to 'repository.targets('django')). - - # Add 'new_targets_object' to the 'targets' role object (this object). - if self.rolename == 'targets': - self.add_delegated_role(rolename, new_targets_object) - - else: - self._parent_targets_object.add_delegated_role(rolename, new_targets_object) - - - - - - def revoke(self, rolename): - """ - - Revoke this Targets' 'rolename' delegation. Its 'rolename' attribute is - deleted, including the entries in its 'delegations' field and in - 'tuf.roledb'. - - Actual metadata files are not updated, only when repository.write() or - repository.write_partial() is called. - - >>> - >>> - >>> - - - rolename: - The rolename (e.g., 'Django' in 'django') of the child delegation the - parent role (this role) wants to revoke. - - - tuf.FormatError, if 'rolename' is improperly formatted. 
- - - The delegations dictionary of 'rolename' is modified, and its 'tuf.roledb' - entry is updated. This Targets' 'rolename' delegation attribute is also - deleted. - - - None. - """ - - # Does 'rolename' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - # Remove 'rolename' from this Target's delegations dict. - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - - for role in roleinfo['delegations']['roles']: - if role['name'] == rolename: - roleinfo['delegations']['roles'].remove(role) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo) - - # Remove 'rolename' from 'tuf.roledb.py'. - tuf.roledb.remove_role(rolename) - - # Remove the rolename delegation from the current role. For example, the - # 'django' role is removed from repository.targets('django'). - del self._delegated_roles[rolename] - - - - def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins, - number_of_bins=1024): - """ - - Distribute a large number of target files over multiple delegated roles - (hashed bins). The metadata files of delegated roles will be nearly - equal in size (i.e., 'list_of_targets' is uniformly distributed by - calculating the target filepath's hash and determing which bin it should - reside in. The updater client will use "lazy bin walk" to find a target - file's hashed bin destination. The parent role lists a range of path - hash prefixes each hashed bin contains. This method is intended for - repositories with a large number of target files, a way of easily - distributing and managing the metadata that lists the targets, and - minimizing the number of metadata files (and their size) downloaded by - the client. 
See tuf-spec.txt and the following link for more - information: - http://www.python.org/dev/peps/pep-0458/#metadata-scalability - - >>> - >>> - >>> - - - list_of_targets: - The target filepaths of the targets that should be stored in hashed - bins created (i.e., delegated roles). A repository object's - get_filepaths_in_directory() can generate a list of valid target - paths. - - keys_of_hashed_bins: - The initial public keys of the delegated roles. Public keys may be - later added or removed by calling the usual methods of the delegated - Targets object. For example: - repository.targets('000-003').add_verification_key() - - number_of_bins: - The number of delegated roles, or hashed bins, that should be generated - and contain the target file attributes listed in 'list_of_targets'. - 'number_of_bins' must be a power of 2. Each bin may contain a - range of path hash prefixes (e.g., target filepath digests that range - from [000]... - [003]..., where the series of digits in brackets is - considered the hash prefix). - - - tuf.FormatError, if the arguments are improperly formatted. - - tuf.Error, if 'number_of_bins' is not a power of 2, or one of the targets - in 'list_of_targets' is not located under the repository's targets - directory. - - - Delegates multiple target roles from the current parent role. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.PATHS_SCHEMA.check_match(list_of_targets) - tuf.formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - # Convert 'number_of_bins' to hexadecimal and determine the number of - # hexadecimal digits needed by each hash prefix. 
Calculate the total - # number of hash prefixes (e.g., 000 - FFF total values) to be spread over - # 'number_of_bins' and strip the first two characters ('0x') from Python's - # representation of hexadecimal values (so that they are not used in the - # calculation of the prefix length.) Example: number_of_bins = 32, - # total_hash_prefixes = 256, and each hashed bin is responsible for 8 hash - # prefixes. Hashed bin roles created = 00-07.json, 08-0f.json, ..., - # f8-ff.json. - prefix_length = len(hex(number_of_bins - 1)[2:]) - total_hash_prefixes = 16 ** prefix_length - - # For simplicity, ensure that 'total_hash_prefixes' (16 ^ n) can be evenly - # distributed over 'number_of_bins' (must be 2 ^ n). Each bin will contain - # (total_hash_prefixes / number_of_bins) hash prefixes. - if total_hash_prefixes % number_of_bins != 0: - raise tuf.Error('The "number_of_bins" argument must be a power of 2.') - - logger.info('Creating hashed bin delegations.') - logger.info(repr(len(list_of_targets)) + ' total targets.') - logger.info(repr(number_of_bins) + ' hashed bins.') - logger.info(repr(total_hash_prefixes) + ' total hash prefixes.') - - # Store the target paths that fall into each bin. The digest of the - # target path, reduced to the first 'prefix_length' hex digits, is - # calculated to determine which 'bin_index' it should go. - target_paths_in_bin = {} - for bin_index in six.moves.xrange(total_hash_prefixes): - target_paths_in_bin[bin_index] = [] - - # Assign every path to its bin. Ensure every target is located under the - # repository's targets directory. - for target_path in list_of_targets: - target_path = os.path.abspath(target_path) - if not target_path.startswith(self._targets_directory+os.sep): - raise tuf.Error('A path in the list of targets argument is not' - ' under the repository\'s targets directory: ' + repr(target_path)) - - # Determine the hash prefix of 'target_path' by computing the digest of - # its path relative to the targets directory. 
Example: - # '{repository_root}/targets/file1.txt' -> 'file1.txt'. - relative_path = target_path[len(self._targets_directory):] - digest_object = tuf.hash.digest(algorithm=HASH_FUNCTION) - digest_object.update(relative_path.encode('utf-8')) - relative_path_hash = digest_object.hexdigest() - relative_path_hash_prefix = relative_path_hash[:prefix_length] - - # 'target_paths_in_bin' store bin indices in base-10, so convert the - # 'relative_path_hash_prefix' base-16 (hex) number to a base-10 (dec) - # number. - bin_index = int(relative_path_hash_prefix, 16) - - # Add the 'target_path' (absolute) to the bin. These target paths are - # later added to the targets of the 'bin_index' role. - target_paths_in_bin[bin_index].append(target_path) - - # Calculate the path hash prefixes of each 'bin_offset' stored in the parent - # role. For example: 'targets/unclaimed/000-003' may list the path hash - # prefixes "000", "001", "002", "003" in the delegations dict of - # 'targets/unclaimed'. - bin_offset = total_hash_prefixes // number_of_bins - - logger.info('Each bin ranges over ' + repr(bin_offset) + ' hash prefixes.') - - # The parent roles will list bin roles starting from "0" to - # 'total_hash_prefixes' in 'bin_offset' increments. The skipped bin roles - # are listed in 'path_hash_prefixes' of 'outer_bin_index'. - for outer_bin_index in six.moves.xrange(0, total_hash_prefixes, bin_offset): - # The bin index is hex padded from the left with zeroes for up to the - # 'prefix_length' (e.g., '000-003'). Ensure the correct hash bin name is - # generated if a prefix range is unneeded. - start_bin = hex(outer_bin_index)[2:].zfill(prefix_length) - end_bin = hex(outer_bin_index+bin_offset-1)[2:].zfill(prefix_length) - if start_bin == end_bin: - bin_rolename = start_bin - - else: - bin_rolename = start_bin + '-' + end_bin - - # 'bin_rolename' may contain a range of target paths, from 'start_bin' to - # 'end_bin'. Determine the total target paths that should be included. 
- path_hash_prefixes = [] - bin_rolename_targets = [] - - for inner_bin_index in six.moves.xrange(outer_bin_index, outer_bin_index+bin_offset): - # 'inner_bin_rolename' needed in padded hex. For example, "00b". - inner_bin_rolename = hex(inner_bin_index)[2:].zfill(prefix_length) - path_hash_prefixes.append(inner_bin_rolename) - bin_rolename_targets.extend(target_paths_in_bin[inner_bin_index]) - - # Delegate from the "unclaimed" targets role to each 'bin_rolename' - # (i.e., outer_bin_index). - self.delegate(bin_rolename, keys_of_hashed_bins, - list_of_targets=bin_rolename_targets, - path_hash_prefixes=path_hash_prefixes) - logger.debug('Delegated from ' + repr(self.rolename) + ' to ' + repr(bin_rolename)) - - - - def add_target_to_bin(self, target_filepath): - """ - - Add the fileinfo of 'target_filepath' to the expected hashed bin, if - the bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath - falls under the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then add the fileinfo - to the expected bin. Example: 'targets/foo.tar.gz' may be added to - the 'targets/unclaimed/58-5f.json' role's list of targets by calling this - method. - - - target_filepath: - The filepath of the target to be added to a hashed bin. The filepath - must fall under repository's targets directory. - - - tuf.FormatError, if 'target_filepath' is improperly formatted. - - tuf.Error, if 'target_filepath' cannot be added to a hashed bin - (e.g., an invalid target filepath, or the expected hashed bin does not - exist.) - - - The fileinfo of 'target_filepath' is added to a hashed bin of this Targets - object. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. 
- tuf.formats.PATH_SCHEMA.check_match(target_filepath) - - return self._locate_and_update_target_in_bin(target_filepath, 'add_target') - - - - def remove_target_from_bin(self, target_filepath): - """ - - Remove the fileinfo of 'target_filepath' from the expected hashed bin, if - the bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath - falls under the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then remove the - fileinfo from the expected bin. Example: 'targets/foo.tar.gz' may be - removed from the '58-5f.json' role's list of targets by - calling this method. - - - target_filepath: - The filepath of the target to be added to a hashed bin. The filepath - must fall under repository's targets directory. - - - tuf.FormatError, if 'target_filepath' is improperly formatted. - - tuf.Error, if 'target_filepath' cannot be removed from a hashed bin - (e.g., an invalid target filepath, or the expected hashed bin does not - exist.) - - - The fileinfo of 'target_filepath' is removed from a hashed bin of this - Targets object. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.PATH_SCHEMA.check_match(target_filepath) - - return self._locate_and_update_target_in_bin(target_filepath, 'remove_target') - - - - def _locate_and_update_target_in_bin(self, target_filepath, method_name): - """ - - Assuming the target filepath falls under the repository's targets - directory, determine the filepath's hash prefix, locate the expected bin - (if any), and then call the 'method_name' method of the expected hashed - bin role. - - - target_filepath: - The filepath of the target that may be specified in one of the hashed - bins. 
'target_filepath' must fall under repository's targets directory. - - method_name: - A supported method, in string format, of the Targets() class. For - example, 'add_target' and 'remove_target'. If 'target_filepath' were - to be manually added or removed from a bin: - - repository.targets('58-f7').add_target(target_filepath) - repository.targets('000-007').remove_target(target_filepath) - - - tuf.Error, if 'target_filepath' cannot be updated (e.g., an invalid target - filepath, or the expected hashed bin does not exist.) - - - The fileinfo of 'target_filepath' is added to a hashed bin of this Targets - object. - - - None. - """ - - # Determine the prefix length of any one of the hashed bins. The prefix - # length is not stored in the roledb, so it must be determined here by - # inspecting one of path hash prefixes listed. - roleinfo = tuf.roledb.get_roleinfo(self.rolename) - prefix_length = 0 - delegation = None - - # Set 'delegation' if this Targets role has performed any delegations. - if len(roleinfo['delegations']['roles']): - delegation = roleinfo['delegations']['roles'][0] - - else: - raise tuf.Error(self.rolename + ' has not delegated to any roles.') - - # Set 'prefix_length' if this Targets object has delegated to hashed bins, - # otherwise raise an exception. - if 'path_hash_prefixes' in delegation and len(delegation['path_hash_prefixes']): - prefix_length = len(delegation['path_hash_prefixes'][0]) - - else: - raise tuf.Error(self.rolename + ' has not delegated to hashed bins.') - - # Ensure the filepath falls under the repository's targets directory. - filepath = os.path.abspath(target_filepath) - if not filepath.startswith(self._targets_directory + os.sep): - raise tuf.Error(repr(filepath) + ' is not under the Repository\'s' - ' targets directory: ' + repr(self._targets_directory)) - - # Determine the hash prefix of 'target_path' by computing the digest of - # its path relative to the targets directory. 
Example: - # '{repository_root}/targets/file1.txt' -> '/file1.txt'. - relative_path = filepath[len(self._targets_directory):] - digest_object = tuf.hash.digest(algorithm=HASH_FUNCTION) - digest_object.update(relative_path.encode('utf-8')) - path_hash = digest_object.hexdigest() - path_hash_prefix = path_hash[:prefix_length] - - # Search for 'path_hash_prefix', and if found, extract the hashed bin's - # rolename. The hashed bin name is needed so that 'target_filepath' can be - # added to the Targets object of the hashed bin. - hashed_bin_name = None - for delegation in roleinfo['delegations']['roles']: - if path_hash_prefix in delegation['path_hash_prefixes']: - hashed_bin_name = delegation['name'] - break - - else: - continue - - # 'self._delegated_roles' is keyed by relative rolenames, so update - # 'hashed_bin_name'. - if hashed_bin_name is not None: - hashed_bin_name = hashed_bin_name[len(self.rolename) + 1:] - - # 'method_name' should be one of the supported methods of the Targets() - # class. - getattr(self._delegated_roles[hashed_bin_name], method_name)(target_filepath) - - else: - raise tuf.Error(target_filepath + ' not found in any of the bins.') - - - - @property - def delegations(self): - """ - - A getter method that returns the delegations made by this Targets role. - - >>> - >>> - >>> - - - None. - - - tuf.UnknownRoleError, if this Targets' rolename does not exist in - 'tuf.roledb'. - - - None. - - - A list containing the Targets objects of this Targets' delegations. - """ - - return list(self._delegated_roles.values()) - - - - - -def create_new_repository(repository_directory): - """ - - Create a new repository, instantiate barebones metadata for the top-level - roles, and return a Repository object. On disk, create_new_repository() - only creates the directories needed to hold the metadata and targets files. - The repository object returned may be modified to update the newly created - repository. 
The methods of the returned object may be called to create - actual repository files (e.g., repository.write()). - - - repository_directory: - The directory that will eventually hold the metadata and target files of - the TUF repository. - - - tuf.FormatError, if the arguments are improperly formatted. - - - The 'repository_directory' is created if it does not exist, including its - metadata and targets sub-directories. - - - A 'tuf.repository_tool.Repository' object. - """ - - # Does 'repository_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.PATH_SCHEMA.check_match(repository_directory) - - # Set the repository, metadata, and targets directories. These directories - # are created if they do not exist. - repository_directory = os.path.abspath(repository_directory) - metadata_directory = None - targets_directory = None - - # Try to create 'repository_directory' if it does not exist. - try: - logger.info('Creating ' + repr(repository_directory)) - os.makedirs(repository_directory) - - # 'OSError' raised if the leaf directory already exists or cannot be created. - # Check for case where 'repository_directory' has already been created. - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # Set the metadata and targets directories. The metadata directory is a - # staged one so that the "live" repository is not affected. The - # staged metadata changes may be moved over to "live" after all updated - # have been completed. - metadata_directory = \ - os.path.join(repository_directory, METADATA_STAGED_DIRECTORY_NAME) - targets_directory = \ - os.path.join(repository_directory, TARGETS_DIRECTORY_NAME) - - # Try to create the metadata directory that will hold all of the metadata - # files, such as 'root.json' and 'snapshot.json'. 
- try: - logger.info('Creating ' + repr(metadata_directory)) - os.mkdir(metadata_directory) - - # 'OSError' raised if the leaf directory already exists or cannot be created. - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # Try to create the targets directory that will hold all of the target files. - try: - logger.info('Creating ' + repr(targets_directory)) - os.mkdir(targets_directory) - - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # Create the bare bones repository object, where only the top-level roles - # have been set and contain default values (e.g., Root roles has a threshold - # of 1, expires 1 year into the future, etc.) - repository = Repository(repository_directory, metadata_directory, - targets_directory) - - return repository - - - - - -def load_repository(repository_directory): - """ - - Return a repository object containing the contents of metadata files loaded - from the repository. - - - repository_directory: - - - tuf.FormatError, if 'repository_directory' or any of the metadata files - are improperly formatted. - - tuf.RepositoryError, if the Root role cannot be found. At a minimum, - a repository must contain 'root.json' - - - All the metadata files found in the repository are loaded and their contents - stored in a repository_tool.Repository object. - - - repository_tool.Repository object. - """ - - # Does 'repository_directory' have the correct format? - # Raise 'tuf.FormatError' if there is a mismatch. - tuf.formats.PATH_SCHEMA.check_match(repository_directory) - - # Load top-level metadata. - repository_directory = os.path.abspath(repository_directory) - metadata_directory = os.path.join(repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, - TARGETS_DIRECTORY_NAME) - - # The Repository() object loaded (i.e., containing all the metadata roles - # found) and returned. 
- repository = Repository(repository_directory, metadata_directory, - targets_directory) - - filenames = repo_lib.get_metadata_filenames(metadata_directory) - - # The Root file is always available without a version number (a consistent - # snapshot) attached to the filename. Store the 'consistent_snapshot' value - # and read the loaded Root file so that other metadata files may be located. - consistent_snapshot = False - - # Load the metadata of the top-level roles (i.e., Root, Timestamp, Targets, - # and Snapshot). - repository, consistent_snapshot = repo_lib._load_top_level_metadata(repository, - filenames) - - # Load delegated targets metadata. - # Extract the fileinfo of all the role files found in the metadata directory. - # This information is stored in the 'meta' field of the snapshot - # metadata object. - targets_objects = {} - loaded_metadata = [] - targets_objects['targets'] = repository.targets - targets_metadata_directory = os.path.join(metadata_directory, - TARGETS_DIRECTORY_NAME) - if os.path.exists(targets_metadata_directory) and \ - os.path.isdir(targets_metadata_directory): - for root, directories, files in os.walk(targets_metadata_directory): - - # 'files' here is a list of target file names. - for basename in files: - metadata_path = os.path.join(root, basename) - metadata_name = \ - metadata_path[len(metadata_directory):].lstrip(os.path.sep) - - # Strip the version number if 'consistent_snapshot' is True. - # Example: '10.django.json' --> 'django.json' - metadata_name, version_number_junk = \ - repo_lib._strip_version_number(metadata_name, - consistent_snapshot) - - if metadata_name.endswith(METADATA_EXTENSION): - extension_length = len(METADATA_EXTENSION) - metadata_name = metadata_name[:-extension_length] - - else: - continue - - # Keep a store metadata previously loaded metadata to prevent - # re-loading duplicate versions. 
Duplicate versions may occur with - # 'consistent_snapshot', where the same metadata may be available in - # multiples files (the different hash is included in each filename. - if metadata_name in loaded_metadata: - continue - - signable = None - try: - signable = tuf.util.load_json_file(metadata_path) - - except (ValueError, IOError): - continue - - metadata_object = signable['signed'] - - # Extract the metadata attributes of 'metadata_name' and update its - # corresponding roleinfo. - roleinfo = tuf.roledb.get_roleinfo(metadata_name) - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = metadata_object['version'] - roleinfo['expires'] = metadata_object['expires'] - for filepath, fileinfo in six.iteritems(metadata_object['targets']): - roleinfo['paths'].update({filepath: fileinfo.get('custom', {})}) - roleinfo['delegations'] = metadata_object['delegations'] - - if os.path.exists(metadata_path + '.gz'): - roleinfo['compressions'].append('gz') - - # The roleinfo of 'metadata_name' should have been initialized with - # defaults when it was loaded from its parent role. - if repo_lib._metadata_is_partially_loaded(metadata_name, signable, roleinfo): - roleinfo['partial_loaded'] = True - - tuf.roledb.update_roleinfo(metadata_name, roleinfo) - loaded_metadata.append(metadata_name) - - # Generate the Targets objects of the delegated roles of - # 'metadata_name' and update the parent role Targets object. - new_targets_object = Targets(targets_directory, metadata_name, roleinfo) - targets_object = \ - targets_objects[tuf.roledb.get_parent_rolename(metadata_name)] - targets_objects[metadata_name] = new_targets_object - - targets_object._delegated_roles[(os.path.basename(metadata_name))] = \ - new_targets_object - - # Extract the keys specified in the delegations field of the Targets - # role. Add 'key_object' to the list of recognized keys. Keys may be - # shared, so do not raise an exception if 'key_object' has already been - # added. 
In contrast to the methods that may add duplicate keys, do not - # log a warning here as there may be many such duplicate key warnings. - # The repository maintainer should have also been made aware of the - # duplicate key when it was added. - for key_metadata in six.itervalues(metadata_object['delegations']['keys']): - key_object = tuf.keys.format_metadata_to_key(key_metadata) - try: - tuf.keydb.add_key(key_object) - - except tuf.KeyAlreadyExistsError: - pass - - # Add the delegated role's initial roleinfo, to be fully populated - # when its metadata file is next loaded in the os.walk() iteration. - for role in metadata_object['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'compressions': [''], 'signing_keyids': [], - 'signatures': [], - 'paths': {}, - 'partial_loaded': False, - 'delegations': {'keys': {}, - 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo) - - return repository - - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running repository_tool.py as a standalone module: - # $ python repository_tool.py. - import doctest - doctest.testmod() diff --git a/tests/test_pyca_crypto_keys.py b/tests/test_pyca_crypto_keys.py index 4e33f07f8b..998a7d9ecb 100755 --- a/tests/test_pyca_crypto_keys.py +++ b/tests/test_pyca_crypto_keys.py @@ -105,14 +105,15 @@ def test_verify_rsa_signature(self): self.assertEqual(True, valid_signature) # Check for improperly formatted arguments. 
+ self.assertRaises(tuf.FormatError, crypto_keys.verify_rsa_signature, 123, method, + public_rsa, data) + self.assertRaises(tuf.FormatError, crypto_keys.verify_rsa_signature, signature, 123, public_rsa, data) self.assertRaises(tuf.FormatError, crypto_keys.verify_rsa_signature, signature, method, 123, data) - self.assertRaises(tuf.FormatError, crypto_keys.verify_rsa_signature, 123, method, - public_rsa, data) self.assertRaises(tuf.UnknownMethodError, crypto_keys.verify_rsa_signature, signature, @@ -186,6 +187,30 @@ def test_create_rsa_public_and_private_from_encrypted_pem(self): + def test_create_rsa_encrypted_pem(self): + global private_rsa + passphrase = 'password' + + # Verify normal case. + encrypted_pem = crypto_keys.create_rsa_encrypted_pem(private_rsa, passphrase) + + self.assertTrue(tuf.formats.PEMRSA_SCHEMA.matches(encrypted_pem)) + + # Test for invalid arguments. + self.assertRaises(tuf.FormatError, crypto_keys.create_rsa_encrypted_pem, + 1, passphrase) + self.assertRaises(tuf.FormatError, crypto_keys.create_rsa_encrypted_pem, + private_rsa, 2) + + self.assertRaises(TypeError, crypto_keys.create_rsa_encrypted_pem, + '', passphrase) + + self.assertRaises(tuf.CryptoError, crypto_keys.create_rsa_encrypted_pem, + 'bad_private_pem', passphrase) + + + + # Run the unit tests. if __name__ == '__main__': unittest.main() diff --git a/tuf/pyca_crypto_keys.py b/tuf/pyca_crypto_keys.py index 724d1b276a..adfd8e4e54 100755 --- a/tuf/pyca_crypto_keys.py +++ b/tuf/pyca_crypto_keys.py @@ -468,17 +468,10 @@ def verify_rsa_signature(signature, signature_method, public_key, data): def create_rsa_encrypted_pem(private_key, passphrase): """ - Return a string in PEM format, where the private part of the RSA key is - encrypted. The private part of the RSA key is encrypted by the Triple - Data Encryption Algorithm (3DES) and Cipher-block chaining (CBC) for the - mode of operation. Password-Based Key Derivation Function 1 (PBKF1) + MD5 - is used to strengthen 'passphrase'. 
- - TODO: Generate encrypted PEM (that matches PyCrypto's) once support is - added to pyca/cryptography. - - https://en.wikipedia.org/wiki/Triple_DES - https://en.wikipedia.org/wiki/PBKDF2 + Return a string in PEM format, where the private portion of the RSA key is + encrypted. The format of the encrypted PEM is PKCS8, while the encryption + algorithm used is not fixed. pyca/cryptography will try to use the best + available encryption algorithm in this case. >>> public, private = generate_rsa_public_and_private(2048) >>> passphrase = 'secret' @@ -498,13 +491,14 @@ def create_rsa_encrypted_pem(private_key, passphrase): tuf.FormatError, if the arguments are improperly formatted. - tuf.CryptoError, if an RSA key in encrypted PEM format cannot be created. + tuf.CryptoError, if 'private_key' (private PEM format) cannot be + deserialized. TypeError, if 'private_key' is unset. - PyCrypto's Crypto.PublicKey.RSA.exportKey() called to perform the actual - generation of the PEM-formatted output. + pyca/cryptography's key serialization functions are called to the + PEM-formatted output. A string in PEM format, where the private RSA key is encrypted. @@ -521,35 +515,35 @@ def create_rsa_encrypted_pem(private_key, passphrase): tuf.formats.PASSWORD_SCHEMA.check_match(passphrase) # 'private_key' is in PEM format and unencrypted. The extracted key will be - # imported and converted to PyCrypto's RSA key object - # (i.e., Crypto.PublicKey.RSA). Use PyCrypto's exportKey method, with a - # passphrase specified, to create the string. PyCrypto uses PBKDF1+MD5 to - # strengthen 'passphrase', and 3DES with CBC mode for encryption. + # imported and converted to PyCA's RSA key object (. Use PyCA's + # private_bytes() method, with a passphrase specified, to create the expected + # format of the private key. In contrast, pycrypto_keys.py uses PBKDF1+MD5 + # to strengthen 'passphrase', and 3DES with CBC mode for encryption. 
# 'private_key' may still be a NULL string after the # 'tuf.formats.PEMRSA_SCHEMA' (i.e., 'private_key' has variable size and can # be an empty string. - # TODO: Use PyCrypto to generate the encrypted PEM string. Generating - # encrypted PEMs appears currently unsupported by pyca/cryptography. + if len(private_key): - try: - rsa_key_object = Crypto.PublicKey.RSA.importKey(private_key) - encrypted_pem = rsa_key_object.exportKey(format='PEM', - passphrase=passphrase) - - except (ValueError, IndexError, TypeError) as e: - raise tuf.CryptoError('An encrypted RSA key in PEM format cannot be' - ' generated: ' + str(e)) + try: + private_key = load_pem_private_key(private_key.encode('utf-8'), + password=None, + backend=default_backend()) + except ValueError: + raise tuf.CryptoError('The private key (in PEM format) could not be' + ' deserialized.') else: raise TypeError('The required private key is unset.') - + + encrypted_pem = \ + private_key.private_bytes(encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.BestAvailableEncryption(passphrase.encode('utf-8'))) return encrypted_pem.decode() - - def create_rsa_public_and_private_from_encrypted_pem(encrypted_pem, passphrase): """ From 35d478f1c23ac7f73ea1f17086ea28d4d318b7f6 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Tue, 20 Sep 2016 11:02:53 -0400 Subject: [PATCH 12/20] Cover remaining code coverage for repository_lib.py --- tests/test_repository_lib.py | 9 +++++++-- tuf/repository_lib.py | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 351778dc56..2c9496e474 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -1031,10 +1031,15 @@ def test__remove_invalid_and_duplicate_signatures(self): tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable) self.assertEqual(len(root_signable), expected_number_of_signatures) - # Test that 
invalid keyid are ignored. - root_signable['signatures'][0]['keyid'] = '404' + # Test that an exception is not raised if an invalid sig is present, + # and that the duplicate key is removed 'root_signable'. + root_signable['signatures'][0]['sig'] = '4040' + invalid_keyid = root_signable['signatures'][0]['keyid'] tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable) + for signature in root_signable['signatures']: + self.assertFalse(invalid_keyid == signature['keyid']) + # Run the test cases. diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 475eaae67b..d96a2d1bef 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -390,6 +390,7 @@ def _remove_invalid_and_duplicate_signatures(signable): # Remove 'signature' from 'signable' if it is an invalid signature. if not tuf.keys.verify_signature(key, signature, signed): + logger.debug('Removing invalid signature for ' + repr(keyid)) signable['signatures'].remove(signature) # Although valid, it may still need removal if it is a duplicate. Check From 070c9e0149b6cb908e6996eb88b02fef829ff5f1 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Tue, 20 Sep 2016 11:40:38 -0400 Subject: [PATCH 13/20] Test for invalid keyid in remove_invalid_and_duplicate_signatures() --- tests/test_repository_lib.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 2c9496e474..f110a516d1 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -1031,6 +1031,13 @@ def test__remove_invalid_and_duplicate_signatures(self): tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable) self.assertEqual(len(root_signable), expected_number_of_signatures) + # Test for an invalid keyid. + root_signable['signatures'][0]['keyid'] = '404' + tuf.repository_lib._remove_invalid_and_duplicate_signatures(root_signable) + + # Re-add a valid signature for the following test condition. 
+ root_signable['signatures'].append(new_pss_signature) + # Test that an exception is not raised if an invalid sig is present, # and that the duplicate key is removed 'root_signable'. root_signable['signatures'][0]['sig'] = '4040' From 402057e716e4eb6e56b2e7d6e47fd048b2b50700 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Tue, 20 Sep 2016 11:42:01 -0400 Subject: [PATCH 14/20] Group load_pem_public_key() exceptions in verify_rsa_signature() --- tuf/pyca_crypto_keys.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tuf/pyca_crypto_keys.py b/tuf/pyca_crypto_keys.py index adfd8e4e54..09dddcfc47 100755 --- a/tuf/pyca_crypto_keys.py +++ b/tuf/pyca_crypto_keys.py @@ -429,7 +429,7 @@ def verify_rsa_signature(signature, signature_method, public_key, data): # Verify the expected 'signature_method' value. if signature_method != 'RSASSA-PSS': raise tuf.UnknownMethodError(signature_method) - + # Verify the RSASSA-PSS signature with pyca/cryptography. try: public_key_object = serialization.load_pem_public_key(public_key.encode('utf-8'), @@ -454,12 +454,9 @@ def verify_rsa_signature(signature, signature_method, public_key, data): return False # Raised by load_pem_public_key(). - except ValueError: - raise tuf.CryptoError('The PEM could not be decoded successfully.') - - # Raised by load_pem_public_key(). 
- except cryptography.exceptions.UnsupportedAlgorithm: - raise tuf.CryptoError('The private key type is not supported.') + except (ValueError, cryptography.exceptions.UnsupportedAlgorithm) as e: + raise tuf.CryptoError('The PEM could not be decoded successfully,' + ' or contained an unsupported key type: ' + str(e)) From e1debf8ea9a90141224f0dbc6ceea86f22b66479 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Wed, 21 Sep 2016 09:17:37 -0400 Subject: [PATCH 15/20] Improve code coverage for repository_tool.py --- tests/test_repository_tool.py | 54 +++++++++++++++++++++++++++++++++-- tuf/repository_tool.py | 3 ++ 2 files changed, 55 insertions(+), 2 deletions(-) diff --git a/tests/test_repository_tool.py b/tests/test_repository_tool.py index 7cce836c16..98ab050090 100755 --- a/tests/test_repository_tool.py +++ b/tests/test_repository_tool.py @@ -554,7 +554,7 @@ def test_compressions(self): def test_add_verification_key(self): - # Add verification key and verify with keys() that it was added. + # Add verification key and verify that it was added via (role).keys. key_path = os.path.join('repository_data', 'keystore', 'snapshot_key.pub') key_object = repo_tool.import_ed25519_publickey_from_file(key_path) @@ -562,10 +562,42 @@ def test_add_verification_key(self): keyid = key_object['keyid'] self.assertEqual([keyid], self.metadata.keys) + + expiration = \ + tuf.formats.unix_timestamp_to_datetime(int(time.time() + 86400)) + expiration = expiration.isoformat() + 'Z' + roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, + 'signatures': [], 'version': 0, + 'consistent_snapshot': False, + 'compressions': [''], 'expires': expiration, + 'partial_loaded': False} + + tuf.roledb.add_role('Root', roleinfo) + tuf.roledb.add_role('Targets', roleinfo) + tuf.roledb.add_role('Snapshot', roleinfo) + tuf.roledb.add_role('Timestamp', roleinfo) + + # Test for different top-level role names. 
+ self.metadata._rolename = 'Targets' + self.metadata.add_verification_key(key_object) + self.metadata._rolename = 'Snapshot' + self.metadata.add_verification_key(key_object) + self.metadata._rolename = 'Timestamp' + self.metadata.add_verification_key(key_object) + # Test for a given 'expires' argument. + expires = datetime.datetime(2030, 1, 1, 12, 0) + self.metadata.add_verification_key(key_object, expires) + + # Test for an expired 'expires'. + expired = datetime.datetime(1984, 1, 1, 12, 0) + self.assertRaises(tuf.Error, + self.metadata.add_verification_key, key_object, expired) + # Test improperly formatted key argument. self.assertRaises(tuf.FormatError, self.metadata.add_verification_key, 3) + self.assertRaises(tuf.FormatError, self.metadata.add_verification_key, key_object, 3) @@ -662,13 +694,22 @@ def test_add_signature(self): root_signable = tuf.util.load_json_file(root_filepath) signatures = root_signable['signatures'] - # Add the first signature from the list, as only need one is needed. + # Add the first signature from the list, as only one is needed. self.metadata.add_signature(signatures[0]) self.assertEqual(signatures, self.metadata.signatures) + # Verify that a signature is added if a 'signatures' entry is not present. + tuf.roledb.create_roledb_from_root_metadata(root_signable['signed']) + del tuf.roledb._roledb_dict['default']['root']['signatures'] + self.metadata._rolename = 'root' + self.metadata.add_signature(signatures[0]) + + # Add a duplicate signature. + self.metadata.add_signature(signatures[0]) # Test improperly formatted signature argument. 
self.assertRaises(tuf.FormatError, self.metadata.add_signature, 3) + self.assertRaises(tuf.FormatError, self.metadata.add_signature, signatures[0], 3) @@ -1536,6 +1577,15 @@ def test_load_repository(self): + def test_dirty_roles(self): + original_repository_directory = os.path.join('repository_data', + 'repository') + repository = repo_tool.load_repository(original_repository_directory) + + # dirty_roles() only logs the list of dirty roles. + repository.dirty_roles() + + # Run the test cases. if __name__ == '__main__': unittest.main() diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index 2f7289d932..61cb8a57d2 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -897,6 +897,9 @@ def add_signature(self, signature, mark_role_as_dirty=True): roleinfo['signatures'].append(signature) tuf.roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty) + else: + logger.debug('Signature already exists for role: ' + repr(self.rolename)) + def remove_signature(self, signature): From 9e2fe125ee2bd8894d1756410220548a6fe36bf3 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Mon, 17 Oct 2016 16:08:46 -0400 Subject: [PATCH 16/20] Remove code no longer needed that fetched root and snapshot by digest (they are now fetched by version number). --- tuf/client/updater.py | 52 +++++++++---------------------------------- 1 file changed, 11 insertions(+), 41 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index dbcd7a24c2..bf68311186 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -1803,39 +1803,11 @@ def _update_metadata_if_changed(self, metadata_role, logger.debug(repr(metadata_role) + ' referenced in ' + repr(referenced_metadata)+ '. ' + repr(metadata_role) + ' may be updated.') - - # TODO: All metadata should now be referenced by version number. - # Temporarily commenting the soon-to-be-removed lines. 
- """ - if metadata_role in ['root', 'snapshot']: - # Extract the fileinfo of the uncompressed version of 'metadata_role'. - expected_fileinfo = self.metadata['current'][referenced_metadata] \ - ['meta'] \ - [uncompressed_metadata_filename] - - # Simply return if the metadata for 'metadata_role' has not been updated, - # according to the uncompressed metadata provided by the referenced - # metadata. The metadata is considered updated if its fileinfo has - # changed. - if not self._fileinfo_has_changed(uncompressed_metadata_filename, - expected_fileinfo): - logger.info(repr(uncompressed_metadata_filename) + ' up-to-date.') - - # Since we have not downloaded a new version of this metadata, we - # should check to see if our local version is stale and notify the user - # if so. This raises tuf.ExpiredMetadataError if the metadata we - # have is expired. Resolves issue #322. - self._ensure_not_expired(self.metadata['current'][metadata_role], - metadata_role) - - return - - # The version number is inspected instead for all other roles. The - # metadata is considered updated if its version number is strictly greater - # than its currently trusted version number. - else: - """ + # Simply return if the metadata for 'metadata_role' has not been updated, + # according to the uncompressed metadata provided by the referenced + # metadata. The metadata is considered updated if its version number is + # strictly greater than its currently trusted version number. expected_versioninfo = self.metadata['current'][referenced_metadata] \ ['meta'] \ [uncompressed_metadata_filename] @@ -1844,8 +1816,15 @@ def _update_metadata_if_changed(self, metadata_role, expected_versioninfo): logger.info(repr(uncompressed_metadata_filename) + ' up-to-date.') + # Since we have not downloaded a new version of this metadata, we + # should check to see if our local version is stale and notify the user + # if so. This raises tuf.ExpiredMetadataError if the metadata we + # have is expired. 
Resolves issue #322. self._ensure_not_expired(self.metadata['current'][metadata_role], metadata_role) + # TODO: If 'metadata_role' is root or snapshot, we should verify that + # root's hash matches what's in snapshot, and that snapshot hash matches + # what's listed in timestamp.json. return @@ -1898,15 +1877,6 @@ def _update_metadata_if_changed(self, metadata_role, upperbound_filelength = tuf.conf.DEFAULT_TARGETS_REQUIRED_LENGTH try: - # TODO: All metadata should now be referenced by version number. - # Temporarily commenting the soon-to-be-removed lines. - """ - if metadata_role in ['root', 'snapshot']: - self._update_metadata_via_fileinfo(metadata_role, expected_fileinfo, compression) - - # Update all other metadata by way of version number. - else: - """ self._update_metadata(metadata_role, upperbound_filelength, expected_versioninfo['version'], compression) From 1cbf89e4da54dd69e8c5363b32c35bddf976ef24 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Mon, 17 Oct 2016 16:22:12 -0400 Subject: [PATCH 17/20] Add some comments/concerns in updater.refresh() --- tuf/client/updater.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index bf68311186..0ba876e3a2 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -667,11 +667,18 @@ def refresh(self, unsafely_update_root_if_necessary=True): logger.info('An expired Root metadata was loaded and must be updated.') raise + # TODO: How should the latest root metadata be verified? According to the + # currently trusted root keys? What if all of the currently trusted + # root keys have since been revoked by the latest metadata? Alternatively, + # do we blindly trust the downloaded root metadata here? self._update_root_metadata(root_metadata) # Use default but sane information for timestamp metadata, and do not # require strict checks on its required length. 
self._update_metadata('timestamp', DEFAULT_TIMESTAMP_UPPERLENGTH) + # TODO: After fetching snapshot.json, we should either verify the root + # fileinfo referenced there matches what was fetched earlier in + # _update_root_metadata() or make another attempt to download root.json. self._update_metadata_if_changed('snapshot', referenced_metadata='timestamp') self._update_metadata_if_changed('targets') From 18619f2da78e216c0b5efddf0fcab8e51e13a609 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Mon, 17 Oct 2016 16:26:24 -0400 Subject: [PATCH 18/20] Rename function: _verify_next_version_of_root() --> _verify_root_chain_link() --- tuf/client/updater.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 0ba876e3a2..7c11333418 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -1213,7 +1213,7 @@ def _get_metadata_file(self, metadata_role, remote_filename, - def _verify_next_version_of_root(self, role, current, next): + def _verify_root_chain_link(self, role, current, next): if role != 'root': return True @@ -1544,7 +1544,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, updated_metadata_object = metadata_signable['signed'] current_metadata_object = self.metadata['current'].get(metadata_role) - self._verify_next_version_of_root(metadata_role, current_metadata_object, + self._verify_root_chain_link(metadata_role, current_metadata_object, metadata_signable) # Finally, update the metadata and fileinfo stores, and rebuild the From 58370a5a2f401c3c3ea181a2c45382c562a13df2 Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Mon, 17 Oct 2016 16:56:08 -0400 Subject: [PATCH 19/20] Append '_integration.py' to the module name of the integration tests. 
--- ...y_revocation.py => test_key_revocation_integration.py} | 2 +- ..._versioning.py => test_root_versioning_integration.py} | 2 +- ...ation.py => test_updater_root_rotation_integration.py} | 8 ++++---- tuf/client/updater.py | 3 ++- 4 files changed, 8 insertions(+), 7 deletions(-) rename tests/{test_key_revocation.py => test_key_revocation_integration.py} (99%) rename tests/{test_root_versioning.py => test_root_versioning_integration.py} (99%) rename tests/{test_updater_root_rotation.py => test_updater_root_rotation_integration.py} (99%) diff --git a/tests/test_key_revocation.py b/tests/test_key_revocation_integration.py similarity index 99% rename from tests/test_key_revocation.py rename to tests/test_key_revocation_integration.py index 8f59c4250c..529fe4f4e4 100755 --- a/tests/test_key_revocation.py +++ b/tests/test_key_revocation_integration.py @@ -2,7 +2,7 @@ """ - test_key_revocation.py + test_key_revocation_integration.py Vladimir Diaz. diff --git a/tests/test_root_versioning.py b/tests/test_root_versioning_integration.py similarity index 99% rename from tests/test_root_versioning.py rename to tests/test_root_versioning_integration.py index 052f090408..28f2c64d6d 100755 --- a/tests/test_root_versioning.py +++ b/tests/test_root_versioning_integration.py @@ -2,7 +2,7 @@ """ - test_root_versioning.py + test_root_versioning_integration.py Evan Cordell. diff --git a/tests/test_updater_root_rotation.py b/tests/test_updater_root_rotation_integration.py similarity index 99% rename from tests/test_updater_root_rotation.py rename to tests/test_updater_root_rotation_integration.py index 13eb1e0c00..9ae2d6b1f1 100755 --- a/tests/test_updater_root_rotation.py +++ b/tests/test_updater_root_rotation_integration.py @@ -2,13 +2,13 @@ """ - test_updater_root_rotation.py + test_updater_root_rotation_integration.py - Evan Cordell + Evan Cordell. - August 8, 2016 + August 8, 2016. See LICENSE for licensing information. 
@@ -67,7 +67,7 @@ import tuf.client.updater as updater import six -logger = logging.getLogger('tuf.test_updater') +logger = logging.getLogger('tuf.test_updater_root_rotation_integration') repo_tool.disable_console_log_messages() diff --git a/tuf/client/updater.py b/tuf/client/updater.py index 7c11333418..4be5a891ec 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -1224,7 +1224,8 @@ def _verify_root_chain_link(self, role, current, next): current_role['threshold'], current_role['keyids']) if not valid: - raise tuf.BadSignatureError('Root not signed by previous threshold/keys.') + raise tuf.BadSignatureError('Root is not signed by previous threshold' + ' of keys.') From 8eac568c921585bd110e91de302b20bbbe0c07cc Mon Sep 17 00:00:00 2001 From: Vladimir Diaz Date: Tue, 18 Oct 2016 13:31:37 -0400 Subject: [PATCH 20/20] Partial implementation for issue #374 If writing a consistent snapshot, the snapshot should be written first and then a copy or link to it should be created. --- tuf/repository_lib.py | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 669014e487..3e63eb2ba9 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -2017,28 +2017,35 @@ def write_metadata_file(metadata, filename, version_number, # and indentation is used. The 'tuf.util.TempFile' file-like object is # automically closed after the final move. file_object.write(file_content) - logger.debug('Saving ' + repr(written_filename)) - file_object.move(written_filename) - if consistent_snapshot: dirname, basename = os.path.split(written_filename) basename = basename.split(METADATA_EXTENSION, 1)[0] version_and_filename = str(version_number) + '.' 
+ basename + METADATA_EXTENSION written_consistent_filename = os.path.join(dirname, version_and_filename) - # TODO: If we were to create a hard link to 'written_filename', all - # consistent snapshots will always point to the current version. - # Example: 1.root.json and 2.root.json -> root.json - #logger.info('Linking ' + repr(written_consistent_filename)) - #os.link(written_filename, written_consistent_filename) - - logger.info('Copying ' + repr(written_consistent_filename)) - shutil.copyfile(written_filename, written_consistent_filename) + # If we were to point consistent snapshots to 'written_filename', they + # would always point to the current version. Example: 1.root.json and + # 2.root.json -> root.json. If consistent snapshot is True, we should save + # the consistent snapshot and point 'written_filename' to it. + logger.info('Creating a consistent snapshot for ' + repr(written_filename)) + logger.debug('Saving ' + repr(written_consistent_filename)) + file_object.move(written_consistent_filename) + + # TODO: We should provide the option of either (1) creating a link via + # os.link() to the consistent snapshot or (2) creating a copy of the + # consistent snapshot and saving to its expected filename (e.g., + # root.json). The option should be configurable in tuf.conf.py. + # For now, we create a copy of the consistent snapshot and save it to + # 'written_filename'. + logger.info('Pointing ' + repr(filename) + ' to the consistent snapshot.') + shutil.copyfile(written_consistent_filename, written_filename) else: - logger.info('Not linking a consistent filename for: ' + repr(written_filename)) + logger.info('Not creating a consistent snapshot for ' + repr(written_filename)) + logger.debug('Saving ' + repr(written_filename)) + file_object.move(written_filename) + # Generate the compressed versions of 'metadata', if necessary. 
A compressed # file may be written (without needing to write the uncompressed version) if # the repository maintainer adds compression after writing the uncompressed