diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000000..867a3f0899 --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,13 @@ +Please fill in the fields below to submit an issue or feature request. The +more information that is provided, the better. + + +**Description of issue or feature request**: + + +**Current behavior**: + + +**Expected behavior**: + + diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..85e1ed9acc --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,15 @@ +Please fill in the fields below to submit a pull request. The more information +that is provided, the better. + +**Fixes issue #**: + +**Description of the changes being introduced by the pull request**: + +**Please verify and check that the pull request fulfills the following +requirements**: + +- [ ] The code follows the [Code Style Guidelines](https://github.com/secure-systems-lab/code-style-guidelines#code-style-guidelines) +- [ ] Tests have been added for the bug fix or new feature +- [ ] Docs have been added for the bug fix or new feature + + diff --git a/.gitignore b/.gitignore index d16c81a975..9e100761bc 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # root level directories dist/* build/* +env/* # global file patterns *.log @@ -12,4 +13,5 @@ build/* .coverage .tox/* tests/htmlcov/* -.DS_Store \ No newline at end of file +.DS_Store +.python-version diff --git a/.pyup.yml b/.pyup.yml new file mode 100644 index 0000000000..a222ea0a71 --- /dev/null +++ b/.pyup.yml @@ -0,0 +1,40 @@ +# configure updates globally +# default: all +# allowed: all, insecure, False +update: all + +# set the default branch +# default: empty, the default branch on GitHub +branch: develop + +# update schedule +# default: empty +# allowed: "every day", "every week", .. +schedule: "every day" + +# configure dependency pinning globally +# default: True +# allowed: True, False +pin: True + +# search for requirement files +# default: True +# allowed: True, False +search: False + +# Specify requirement files by hand, default is empty +# default: empty +# allowed: list +requirements: + - dev-requirements.txt: + # update all dependencies and pin them + update: all + pin: True + +# configure the branch prefix the bot is using +# default: pyup- +branch_prefix: pyup- + +# allow to close stale PRs +# default: True +close_prs: True diff --git a/.travis.yml b/.travis.yml index 5e250e04e4..ef10292a82 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,13 @@ language: python - +dist: trusty sudo: false - cache: pip +python: 3.6 -python: - - "3.5" +env: + - TOXENV=py27 + - TOXENV=py34 + - TOXENV=py36 before_script: - pip install -U pip wheel tox @@ -13,7 +15,7 @@ before_script: script: tox after_success: - - cd tests + - cd tests - coveralls - cd - diff --git a/AUTHORS.txt b/AUTHORS.txt index 74b06279c8..9f003e037e 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -1,3 +1,7 @@ +The TUF project is managed by Justin Cappos at NYU (jcappos@nyu.edu). + +Contributors: + Arturo Filastò Benno Fünfstück David Halls diff --git a/LICENSE-APACHE.txt b/LICENSE-APACHE.txt new file mode 100644 index 0000000000..91e18a62b6 --- /dev/null +++ b/LICENSE-APACHE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/LICENSE.txt b/LICENSE-MIT.txt similarity index 62% rename from LICENSE.txt rename to LICENSE-MIT.txt index 544f53dc45..f100e0638a 100644 --- a/LICENSE.txt +++ b/LICENSE-MIT.txt @@ -1,33 +1,25 @@ - This file contains the license for TUF: The Update Framework. +The MIT License (MIT) - It also lists license information for components and source - code used by TUF: The Update Framework. +Copyright (c) 2010 New York University - If you got this file as a part of a larger bundle, - there may be other license terms that you should be aware of. - -=============================================================================== -TUF: The Update Framework is distributed under this license: - -Copyright (c) 2010, Justin Samuel and Justin Cappos. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and/or hardware specification (the “Work”) to deal in the Work -without restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies of the Work, -and to permit persons to whom the Work is furnished to do so, subject to the -following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Work. +copies or substantial portions of the Software. -THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER -DEALINGS IN THE WORK. +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + =============================================================================== Many files are modified from Thandy and are licensed under the following license: diff --git a/METADATA.md b/METADATA.md index 5417387ec2..8bd6c17cd3 100644 --- a/METADATA.md +++ b/METADATA.md @@ -13,7 +13,7 @@ Required: * Root * Targets * Snapshot -* Timestamp +* Timestamp Optional: @@ -27,8 +27,7 @@ Signed by: Root role. Specifies the other top-level roles. When specifying these roles, the trusted keys for each role are listed along with the minimum number of those keys which are required to sign the role's metadata. We call this number the signature threshold. -Note: Metadata content and name out-of-date. -See [example](http://mirror1.poly.edu/test-pypi/metadata/root.txt). +See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/root.json) of Root metadata. ## Targets Metadata (targets.json) @@ -38,21 +37,28 @@ The targets.json metadata file lists hashes and sizes of target files. Target fi This file can optionally define other roles to which it delegates trust. Delegating trust means that the delegated role is trusted for some or all of the target files available from the repository. When delegated roles are specified, they are specified in a similar way to how the Root role specifies the top-level roles: the trusted keys and signature threshold for each role is given. Additionally, one or more patterns are specified which indicate the target file paths for which clients should trust each delegated role. -Note: Metadata content and name out-of-date. 
-See [example](http://mirror1.poly.edu/test-pypi/metadata/targets.txt). +See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/targets.json) of Targets metadata. -## Delegated Targets Metadata (targets/foo.json) +## Delegated Targets Metadata (role1.json) Signed by: A delegated targets role. The metadata files provided by delegated targets roles follow exactly the same format as the metadata file provided by the top-level Targets role. -The location of the metadata file for each delegated target role is based on the delegation ancestry of the role. If the top-level Targets role defines a role named foo, then the delegated target role's full name would be targets/foo and its metadata file will be available on the repository at the path targets/foo.json (this is relative to the base directory from which all metadata is available). This path is just the full name of the role followed by a file extension. +When the targets role delegates trust to other roles, each delegated role provides one signed metadata file. As is the +case with the directory structure of top-level metadata, the delegated files are relative to the base URL of metadata available from a given repository mirror. -If this delegated role foo further delegates to a role bar, then the result is a role whose full name is targets/foo/bar and whose signed metadata file is made available on the repository at targets/foo/bar.json. +A delegated role file is located at: -Note: Metadata content and name out-of-date. -See [example](http://mirror1.poly.edu/test-pypi/metadata/targets/unclaimed.txt). +/DELEGATED_ROLE.json + +where DELEGATED_ROLE is the name of the delegated role that has been specified in targets.json. If this role further delegates trust to a role named ANOTHER_ROLE, that role's signed metadata file is made available at: + +/ANOTHER_ROLE.json + +See +[example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/role1.json) +of delegated Targets metadata and [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/role2.json) of a nested delegation. ## snapshot Metadata (snapshot.json) @@ -60,8 +66,7 @@ Signed by: Snapshot role. The snapshot.json metadata file lists hashes and sizes of all metadata files other than timestamp.json. This file ensures that clients will see a consistent view of the files on the repository. That is, metadata files (and thus target file) that existed on the repository at different times cannot be combined and presented to clients by an attacker. -Note: Metadata content and name out-of-date. -​See [example](http://mirror1.poly.edu/test-pypi/metadata/release.txt). +​See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/snapshot.json) of Snapshot metadata. ## Timestamp Metadata (timestamp.json) @@ -74,8 +79,7 @@ There are two primary reasons why the timestamp.json file doesn't contain all of * The timestamp.json file is downloaded very frequently and so should be kept as small as possible, especially considering that the snapshot.json file grows in size in proportion to the number of delegated target roles. * As the Timestamp role's key is an online key and thus at high risk, separate keys should be used for signing the snapshot.json metadata file so that the Snapshot role's keys can be kept offline and thus more secure. 
-Note: Metadata content and name out-of-date. -See [example](http://mirror1.poly.edu/test-pypi/metadata/timestamp.txt). +See [example](https://raw.githubusercontent.com/theupdateframework/tuf/develop/tests/repository_data/repository/metadata/timestamp.json) of Timestamp metadata. ## Mirrors Metadata (mirrors.json) @@ -83,4 +87,5 @@ Optionally signed by: Mirrors role. The mirrors.json file provides an optional way to provide mirror list updates to TUF clients. Mirror lists can alternatively be provided directly by the software update system and obtained in any way the system sees fit, including being hard coded if that is what an applications wants to do. -No example available. At the time of writing, this hasn't been implemented in TUF. Currently mirrors are specified by the client code. +No example available. At the time of writing, this hasn't been implemented in +TUF. Currently mirrors are specified by the client code. diff --git a/README.rst b/README.rst index 71206cfd8a..185c25180e 100644 --- a/README.rst +++ b/README.rst @@ -4,9 +4,17 @@ A Framework for Securing Software Update Systems .. image:: https://travis-ci.org/theupdateframework/tuf.svg?branch=develop :target: https://travis-ci.org/theupdateframework/tuf -.. image:: https://coveralls.io/repos/theupdateframework/tuf/badge.png?branch=develop +.. image:: https://coveralls.io/repos/theupdateframework/tuf/badge.svg?branch=develop :target: https://coveralls.io/r/theupdateframework/tuf?branch=develop +.. image:: https://pyup.io/repos/github/theupdateframework/tuf/shield.svg + :target: https://pyup.io/repos/github/theupdateframework/tuf/ + :alt: Updates + +.. image:: https://pyup.io/repos/github/theupdateframework/tuf/python-3-shield.svg + :target: https://pyup.io/repos/github/theupdateframework/tuf/ + :alt: Python 3 + .. image:: /docs/images/banner_readme.JPG The Update Framework (TUF) helps developers to secure new or existing @@ -104,6 +112,9 @@ The following papers provide detailed information on securing software updater systems, TUF's design and implementation details, attacks on package managers, and package management security: +- `Mercury: Bandwidth-Effective Prevention of Rollback Attacks Against Community Repositories + `_ + - `Diplomat: Using Delegations to Protect Community Repositories `_ @@ -138,7 +149,7 @@ trustworthy, TUF hands them over to your software update system. See TUF specification document is also available: -- `The Update Framework Specification `_ +- `The Update Framework Specification `_ TUF Home Page ------------- @@ -152,6 +163,18 @@ Please visit `https://groups.google.com/forum/?fromgroups#!forum/theupdateframew A group feed is available at: https://groups.google.com/forum/feed/theupdateframework/msgs/atom.xml?num=50 +What is a TAP? +-------------- + +A TAP (TUF Augmentation Proposal) is a design document providing information to the +TUF community, or describing a new feature for TUF or its processes or environment. +We intend TAPs to be the primary mechanisms for proposing major new features, for +collecting community input on an issue, and for documenting the design decisions +that have gone into TUF. + +Please visit the `TAPs GitHub repo `_ +to review design changes that have been proposed to date, or to submit +your own new feature. Installation ------------ @@ -252,13 +275,25 @@ we provide for this purpose. - `Creating a TUF Repository `_ +License +------- + +This work is `dual-licensed `_ +and distributed under the (1) MIT License and (2) Apache License, Version 2.0. 
+Please see `LICENSE-MIT.txt +`_ +and `LICENSE-APACHE.txt +`_. Acknowledgements ---------------- +This project is managed by Prof. Justin Cappos and other members of the +`Secure Systems Lab `_ at NYU. + This material is based upon work supported by the National Science -Foundation under Grant No. CNS-1345049 and CNS-0959138. Any opinions, +Foundation under Grant Nos. CNS-1345049 and CNS-0959138. Any opinions, findings, and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation. diff --git a/SECURITY.md b/SECURITY.md index 1aa59d243a..52fba62f73 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,4 +1,4 @@ -#Security +# Security Generally, a software update system is secure if it can be sure that it knows about the latest available updates in a timely manner, any files it downloads are the correct files, and no harm results from checking or downloading files. The details of making this happen are complicated by various attacks that can be carried out against software update systems. @@ -32,7 +32,7 @@ snapshot metadata, and thus new updates could never be downloaded. * **Vulnerability to key compromises**. An attacker who is able to compromise a single key or less than a given threshold of keys can compromise clients. This includes relying on a single online key (such as only being protected by SSL) or a single offline key (such as most software update systems use to sign files). -##Design Concepts +## Design Concepts The design and implementation of TUF aims to be secure against all of the above attacks. A few general ideas drive much of the security of TUF. diff --git a/dev-requirements.txt b/dev-requirements.txt index 4754221d5c..bad053856e 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -6,21 +6,20 @@ # "develop mode"). The current working directory must contain 'setup.py'. --editable . -# Install PyNaCl for generation and verification of ed25519 keys and signatures. -# It also includes protection against side-channel attacks. -# NOTE: TUF only uses the pure Python implementation of ed25519 for signature +# Install PyNaCl for generation and verification of ed25519 keys and +# signatures. It also includes protection against side-channel attacks. NOTE: +# TUF only uses the pure Python implementation of ed25519 for signature # verification. PyNaCl is required for ed25519 key and signature generation -# with the TUF repository tools. Also install PyCrypto for RSA key & signature -# support, and general-purpose cryptography needed by the repository tools. -# Client must also install PyCrypto to verify RSASSA-PSS signatures. The -# minimal install can only verify ed25519 signatures. Pinned packages: -# http://nvie.com/posts/pin-your-packages/ -cffi==1.7.0 -pycrypto==2.6.1 -pynacl==1.0.1 -cryptography==1.4.0 -securesystemslib==0.10.1 +# with the TUF repository tools. Also install cryptography for RSA key & +# signature support, and general-purpose cryptography needed by the repository +# tools. Client must also install cryptography to verify RSASSA-PSS +# signatures. The minimal install can only verify ed25519 signatures. Pinned +# packages: http://nvie.com/posts/pin-your-packages/ +cffi==1.11.2 +pynacl==1.1.2 +cryptography==2.1.1 +securesystemslib==0.10.7 # Testing requirements. 
The rest of the testing dependencies available in # 'tox.ini' -tox +tox==2.9.1 diff --git a/docs/papers/prevention-rollback-attacks-atc2017.pdf b/docs/papers/prevention-rollback-attacks-atc2017.pdf new file mode 100644 index 0000000000..8e0c844eb5 Binary files /dev/null and b/docs/papers/prevention-rollback-attacks-atc2017.pdf differ diff --git a/docs/proposals/README.md b/docs/proposals/README.md deleted file mode 100644 index 062f23ff6d..0000000000 --- a/docs/proposals/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Proposals # - -## Accepted ## - -## Rejected ## - -## In Progress ## -* Trust pinning -* Multirole delegations diff --git a/docs/tuf-spec.0.9.txt b/docs/tuf-spec.0.9.txt index 0c2281ac20..7d8df7b60f 100644 --- a/docs/tuf-spec.0.9.txt +++ b/docs/tuf-spec.0.9.txt @@ -1,1098 +1 @@ - The Update Framework Specification - -14 May 2015 -Version 0.9 - -1. Introduction - -1.1. Scope - - This document describes a framework for securing software update systems. - -1.2. Motivation - - Software is commonly updated through software update systems. These systems - can be package managers that are responsible for all of the software that is - installed on a system, application updaters that are only responsible for - individual installed applications, or software library managers that install - software that adds functionality such as plugins or programming language - libraries. - - Software update systems all have the common behavior of downloading files - that identify whether updates exist and, when updates do exist, downloading - the files that are required for the update. For the implementations - concerned with security, various integrity and authenticity checks are - performed on downloaded files. - - Software update systems are vulnerable to a variety of known attacks. This - is generally true even for implementations that have tried to be secure. - -1.3. History and credit - - Work on TUF began in late 2009. The core ideas are based off of previous - work done by Justin Cappos and Justin Samuel that identified security flaws - in all popular Linux package managers. More information and current - versions of this document can be found at https://www.updateframework.com/ - - The Global Environment for Network Innovations (GENI) and the National - Science Foundation (NSF) have provided support for the development of TUF. - (http://www.geni.net/) - (http://www.nsf.gov/) - - TUF's reference implementation is based heavily on Thandy, the application - updater for Tor (http://www.torproject.org/). Its design and this spec are - also largely based on Thandy's, with many parts being directly borrowed - from Thandy. The Thandy spec can be found here: - https://gitweb.torproject.org/thandy.git?a=blob_plain;f=specs/thandy-spec.txt;hb=HEAD - - Whereas Thandy is an application updater for an individual software project, - TUF aims to provide a way to secure any software update system. We're very - grateful to the Tor Project and the Thandy developers as it is doubtful our - design and implementation would have been anywhere near as good without - being able to use their great work as a starting point. Thandy is the hard - work of Nick Mathewson, Sebastian Hahn, Roger Dingledine, Martin Peck, and - others. - -1.4. Non-goals - - We aren't creating a universal update system, but rather a simple and - flexible way that applications can have high levels of security with their - software update systems. 
Creating a universal software update system would - not be a reasonable goal due to the diversity of application-specific - functionality in software update systems and the limited usefulness that - such a system would have for securing legacy software update systems. - - We won't be defining package formats or even performing the actual update - of application files. We will provide the simplest mechanism possible that - remains easy to use and provides a secure way for applications to obtain and - verify files being distributed by trusted parties. - - We are not providing a means to bootstrap security so that arbitrary - installation of new software is secure. In practice this means that people - still need to use other means to verify the integrity and authenticity of - files they download manually. - - The framework will not have the responsibility of deciding on the correct - course of action in all error situations, such as those that can occur when - certain attacks are being performed. Instead, the framework will provide - the software update system the relevant information about any errors that - require security decisions which are situation-specific. How those errors - are handled is up to the software update system. - -1.5. Goals - - We need to provide a framework (a set of libraries, file formats, and - utilities) that can be used to secure new and existing software update - systems. - - The framework should enable applications to be secure from all known attacks - on the software update process. It is not concerned with exposing - information about what software is being updating (and thus what software - the client may be running) or the contents of updates. - - The framework should provide means to minimize the impact of key compromise. - To do so, it must support roles with multiple keys and threshold/quorum - trust (with the exception of minimally trusted roles designed to use a - single key). The compromise of roles using highly vulnerable keys should - have minimal impact. Therefore, online keys (keys which are used in an - automated fashion) must not be used for any role that clients ultimately - trust for files they may install. - - The framework must be flexible enough to meet the needs of a wide variety of - software update systems. - - The framework must be easy to integrate with software update systems. - -1.5.1 Goals for implementation - - The client side of the framework must be straightforward to implement in any - programming language and for any platform with the requisite networking and - crypto support. - - The framework should be easily customizable for use with any crypto - libraries. - - The process by which developers push updates to the repository must be - simple. - - The repository must serve only static files and be easy to mirror. - - The framework must be secure to use in environments that lack support for - SSL (TLS). This does not exclude the optional use of SSL when available, - but the framework will be designed without it. - -1.5.2. Goals for specific attacks to protect against - - Note: When saying the framework protects against an attack, this means that - the attack will not be successful. It does not mean that a client will - always be able to successfully update during an attack. Fundamentally, an - attacker positioned to intercept and modify a client's communication will - always be able to perform a denial of service. The part we have control - over is not allowing an inability to update to go unnoticed. - - Rollback attacks. 
Attackers should not be able to trick clients into - installing software that is older than that which the client previously knew - to be available. - - Indefinite freeze attacks. Attackers should not be able to respond to client - requests with the same, outdated metadata without the client being aware of - the problem. - - Endless data attacks. Attackers should not be able to respond to client - requests with huge amounts of data (extremely large files) that interfere - with the client's system. - - Slow retrieval attacks. Attackers should not be able to prevent clients - from being aware of interference with receiving updates by responding to - client requests so slowly that automated updates never complete. - - Extraneous dependencies attacks. Attackers should not be able to cause - clients to download or install software dependencies that are not the - intended dependencies. - - Mix-and-match attacks. Attackers should not be able to trick clients into - using a combination of metadata that never existed together on the - repository at the same time. - - Malicious repository mirrors should not be able to prevent updates from good - mirrors. - -1.5.3. Goals for PKIs - - Software update systems using the framework's client code interface should - never have to directly manage keys. - - All keys must be easily and safely revocable. Trusting new keys for a role - must be easy. - - For roles where trust delegation is meaningful, a role should be able to - delegate full or limited trust to another role. - - The root of trust will not rely on external PKI. That is, no authority will - be derived from keys outside of the framework. - -2. System overview - - The framework ultimately provides a secure method of obtaining trusted - files. To avoid ambiguity, we will refer to the files the framework is used - to distribute as "target files". Target files are opaque to the framework. - Whether target files are packages containing multiple files, single text - files, or executable binaries is irrelevant to the framework. - - The metadata describing target files is the information necessary to - securely identify the file and indicate which roles are trusted to provide - the file. As providing additional information about - target files may be important to some software update systems using the - framework, additional arbitrary information can be provided with any target - file. This information will be included in signed metadata that describes - the target files. - - The following are the high-level steps of using the framework from the - viewpoint of a software update system using the framework. This is an - error-free case. - - Polling: - - Periodically, the software update system using the framework - instructs the framework to check each repository for updates. - If the framework reports to the application code that there are - updates, the application code determines whether it wants to - download the updated target files. Only target files that are - trusted (referenced by properly signed and timely metadata) are made - available by the framework. - - Fetching: - - For each file that the application wants, it asks the framework to - download the file. The framework downloads the file and performs - security checks to ensure that the downloaded file is exactly what is - expected according to the signed metadata. The application code is - not given access to the file until the security checks have been - completed. 
The application asks the framework to copy the downloaded - file to a location specified by the application. At this point, the - application has securely obtained the target file and can do with it - whatever it wishes. - -2.1. Roles and PKI - - In the discussion of roles that follows, it is important to remember that - the framework has been designed to allow a large amount of flexibility for - many different use cases. For example, it is possible to use the framework - with a single key that is the only key used in the entire system. This is - considered to be insecure but the flexibility is provided in order to meet - the needs of diverse use cases. - - There are four fundamental top-level roles in the framework: - - Root role - - Targets role - - Snapshot role - - Timestamp role - - There is also one optional top-level role: - - Mirrors role - - All roles can use one or more keys and require a threshold of signatures of - the role's keys in order to trust a given metadata file. - -2.1.1 Root role - - The root role delegates trust to specific keys trusted for all other - top-level roles used in the system. - - The client-side of the framework must ship with trusted root keys for each - configured repository. - - The root role's private keys must be kept very secure and thus should be - kept offline. - -2.1.2 Targets role - - The targets role's signature indicates which target files are trusted by - clients. The targets role signs metadata that describes these files, not - the actual target files themselves. - - In addition, the targets role can delegate full or partial trust to other - roles. Delegating trust means that the targets role indicates another role - (that is, another set of keys and the threshold required for trust) is - trusted to sign target file metadata. Partial trust delegation is when the - delegated role is only trusted for some of the target files that the - delegating role is trusted for. - - Delegated developer roles can further delegate trust to other delegated - roles. This provides for multiple levels of trust delegation where each - role can delegate full or partial trust for the target files they are - trusted for. The delegating role in these cases is still trusted. That is, - a role does not become untrusted when it has delegated trust. - - Delegated trust can be revoked at any time by the delegating role signing - new metadata that indicates the delegated role is no longer trusted. - -2.1.3 Snapshot role - - The snapshot role signs a metadata file that provides information about the - latest version of all of the other metadata on the repository (excluding the - timestamp file, discussed below). This information allows clients to know - which metadata files have been updated and also prevents mix-and-match - attacks. - -2.1.4 Timestamp role - - To prevent an adversary from replaying an out-of-date signed metadata file - whose signature has not yet expired, an automated process periodically signs - a timestamped statement containing the hash of the snapshot file. Even - though this timestamp key must be kept online, the risk posed to clients by - compromise of this key is minimal. - -2.1.5 Mirrors role - - Every repository has one or more mirrors from which files can be downloaded - by clients. A software update system using the framework may choose to - hard-code the mirror information in their software or they may choose to use - mirror metadata files that can optionally be signed by a mirrors role. 
- - The importance of using signed mirror lists depends on the application and - the users of that application. There is minimal risk to the application's - security from being tricked into contacting the wrong mirrors. This is - because the framework has very little trust in repositories. - -2.2. Threat Model And Analysis - - We assume an adversary who can respond to client requests, whether by acting - as a man-in-the-middle or through compromising repository mirrors. At - worst, such an adversary can deny updates to users if no good mirrors are - accessible. An inability to obtain updates is noticed by the framework. - - If an adversary compromises enough keys to sign metadata, the best that can - be done is to limit the number of users who are affected. The level to - which this threat is mitigated is dependent on how the application is using - the framework. This includes whether different keys have been used for - different signing roles. - - A detailed threat analysis is outside the scope of this document. This is - partly because the specific threat posted to clients in many situations is - largely determined by how the framework is being used. - -3. The repository - - An application uses the framework to interact with one or more repositories. - A repository is a conceptual source of target files of interest to the - application. Each repository has one or more mirrors which are the actual - providers of files to be downloaded. For example, each mirror may specify a - different host where files can be downloaded from over HTTP. - - The mirrors can be full or partial mirrors as long as the application-side - of the framework can ultimately obtain all of the files it needs. A mirror - is a partial mirror if it is missing files that a full mirror should have. - If a mirror is intended to only act as a partial mirror, the metadata and - target paths available from that mirror can be specified. - - Roles, trusted keys, and target files are completely separate between - repositories. A multi-repository setup is a multi-root system. When an - application uses the framework with multiple repositories, the framework - does not perform any "mixing" of the trusted content from each repository. - It is up to the application to determine the significance of the same or - different target files provided from separate repositories. - -3.1 Repository layout - - The filesystem layout in the repository is used for two purposes: - - To give mirrors an easy way to mirror only some of the repository. - - To specify which parts of the repository a given role has authority - to sign/provide. - -3.1.1 Target files - - The filenames and the directory structure of target files available from - a repository are not specified by the framework. The names of these files - and directories are completely at the discretion of the application using - the framework. - -3.1.2 Metadata files - - The filenames and directory structure of repository metadata are strictly - defined. The following are the metadata files of top-level roles relative - to the base URL of metadata available from a given repository mirror. - - /root.json - - Signed by the root keys; specifies trusted keys for the other - top-level roles. - - /snapshot.json - - Signed by the snapshot role's keys. Lists hashes and sizes of all - metadata files other than timestamp.json. - - /targets.json - - Signed by the target role's keys. Lists hashes and sizes of target - files. - - /timestamp.json - - Signed by the timestamp role's keys. 
Lists hashes and size of the - snapshot file. This is the first and potentially only file that needs - to be downloaded when clients poll for the existence of updates. - - /mirrors.json (optional) - - Signed by the mirrors role's keys. Lists information about available - mirrors and the content available from each mirror. - - An implementation of the framework may optionally choose to make available - any metadata files in compressed (e.g. gzip'd) format. In doing so, the - filename of the compressed file should be the same as the original with the - addition of the file name extension for the compression type (e.g. - snapshot.json.gz). The original (uncompressed) file should always be made - available, as well. - -3.1.2.1 Metadata files for targets delegation - - When the targets role delegates trust to other roles, each delegated role - provides one signed metadata file. This file is located at: - - /targets/DELEGATED_ROLE.json - - where DELEGATED_ROLE is the name of the delegated role that has been - specified in targets.json. If this role further delegates trust to a role - named ANOTHER_ROLE, that role's signed metadata file is made available at: - - /targets/DELEGATED_ROLE/ANOTHER_ROLE.json - -4. Document formats - - All of the formats described below include the ability to add more - attribute-value fields for backwards-compatible format changes. If - a backwards incompatible format change is needed, a new filename can - be used. - -4.1. Metaformat - - All documents use a subset of the JSON object format, with - floating-point numbers omitted. When calculating the digest of an - object, we use the "canonical JSON" subdialect as described at - http://wiki.laptop.org/go/Canonical_JSON - -4.2. File formats: general principles - - All signed metadata files have the format: - - { "signed" : ROLE, - "signatures" : [ - { "keyid" : KEYID, - "method" : METHOD, - "sig" : SIGNATURE } - , ... ] - } - - where: ROLE is a dictionary whose "_type" field describes the role type. - KEYID is the identifier of the key signing the ROLE dictionary. - METHOD is the key signing method used to generate the signature. - SIGNATURE is a signature of the canonical JSON form of ROLE. - - The current reference implementation of TUF defines two signing methods, - although TUF is not restricted to any particular key signing method, - key type, or cryptographic library: - - "RSASSA-PSS" : RSA Probabilistic signature scheme with appendix. - The underlying hash function is SHA256. - - "ed25519" : Elliptic curve digital signature algorithm based on Twisted - Edwards curves. - - RSASSA-PSS: http://tools.ietf.org/html/rfc3447#page-29 - ed25519: http://ed25519.cr.yp.to/ - - All keys have the format: - - { "keytype" : KEYTYPE, - "keyval" : KEYVAL } - - where KEYTYPE is a string describing the type of the key and how it's - used to sign documents. The type determines the interpretation of - KEYVAL. - - We define two keytypes at present: 'rsa' and 'ed25519'. - - The 'rsa' format is: - - { "keytype" : "rsa", - "keyval" : { "public" : PUBLIC, - "private" : PRIVATE } - } - - where PUBLIC and PRIVATE are in PEM format and are strings. All RSA keys - must be at least 2048 bits. - - The 'ed25519' format is: - - { "keytype" : "ed25519", - "keyval" : { "public" : PUBLIC, - "private" : PRIVATE } - } - - where PUBLIC and PRIVATE are both 32-byte strings. 
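  The envelope and key formats above can be illustrated with a short sketch.
  The following is not the reference implementation (which verifies ed25519
  signatures with a pure Python module); it simply shows, using PyNaCl and an
  approximated canonical-JSON step (json.dumps instead of the canonical JSON
  subdialect from section 4.1), how one entry from a metadata file's
  "signatures" list could be checked against the "signed" object. The function
  name is hypothetical.

    # Sketch only: check one ed25519 signature over the "signed" object.
    import binascii
    import json

    import nacl.exceptions
    import nacl.signing

    def ed25519_signature_ok(signed_obj, signature_obj, public_key_hex):
        # SIGNATURE covers the canonical JSON form of the ROLE ("signed")
        # object; json.dumps is used here as a stand-in for canonical JSON.
        message = json.dumps(
            signed_obj, sort_keys=True, separators=(",", ":")).encode("utf-8")
        verify_key = nacl.signing.VerifyKey(binascii.unhexlify(public_key_hex))
        try:
            verify_key.verify(message, binascii.unhexlify(signature_obj["sig"]))
            return True
        except nacl.exceptions.BadSignatureError:
            return False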
- - Metadata does not include the private portion of the key object: - - { "keytype" : "rsa", - "keyval" : { "public" : PUBLIC} - } - - The KEYID of a key is the hexdigest of the SHA-256 hash of the - canonical JSON form of the key, where the "private" object key is excluded. - - Metadata date-time data follows the ISO 8601 standard. The expected format - of the combined date and time string is "YYYY-MM-DDTHH:MM:SSZ". Time is - always in UTC, and the "Z" time zone designator is attached to indicate a - zero UTC offset. An example date-time string is "1985-10-21T01:21:00Z". - - -4.3. File formats: root.json - - The root.json file is signed by the root role's keys. It indicates - which keys are authorized for all top-level roles, including the root - role itself. Revocation and replacement of top-level role keys, including - for the root role, is done by changing the keys listed for the roles in - this file. - - The format of root.json is as follows: - - { "_type" : "Root", - "version" : VERSION, - "expires" : EXPIRES, - "keys" : { - KEYID : KEY - , ... }, - "roles" : { - ROLE : { - "keyids" : [ KEYID, ... ] , - "threshold" : THRESHOLD } - , ... } - } - - VERSION is an integer that is greater than 0. Clients MUST NOT replace a - metadata file with a version number less than the one currently trusted. - - EXPIRES determines when metadata should be considered expired and no longer - trusted by clients. Clients MUST NOT trust an expired file. - - A ROLE is one of "root", "snapshot", "targets", "timestamp", or "mirrors". - A role for each of "root", "snapshot", "timestamp", and "targets" MUST be - specified in the key list. The role of "mirror" is optional. If not - specified, the mirror list will not need to be signed if mirror lists are - being used. - - The KEYID must be correct for the specified KEY. Clients MUST calculate - each KEYID to verify this is correct for the associated key. Clients MUST - ensure that for any KEYID represented in this key list and in other files, - only one unique key has that KEYID. - - The THRESHOLD for a role is an integer of the number of keys of that role - whose signatures are required in order to consider a file as being properly - signed by that role. 
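  Before the signed example below, here is a minimal sketch, under the same
  simplified canonical-JSON assumption as the earlier signature example, of how
  a client might derive a KEYID and test a role entry against its threshold.
  The helper names are illustrative and not part of the reference
  implementation.

    import hashlib
    import json

    def compute_keyid(key_obj):
        # The KEYID is the SHA-256 hexdigest of the canonical JSON form of
        # the key, with the "private" portion excluded.
        public_only = {
            "keytype": key_obj["keytype"],
            "keyval": {"public": key_obj["keyval"]["public"]},
        }
        canonical = json.dumps(
            public_only, sort_keys=True, separators=(",", ":"))
        return hashlib.sha256(canonical.encode("utf-8")).hexdigest()

    def threshold_met(role_entry, keyids_with_valid_signatures):
        # A file is properly signed by a role once at least THRESHOLD of the
        # role's listed KEYIDs have produced a valid signature.
        good = set(role_entry["keyids"]) & set(keyids_with_valid_signatures)
        return len(good) >= role_entry["threshold"]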
- - A signed root.json example file: - - { - "signatures": [ - { - "keyid": "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809 - f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } - } - -4.4. File formats: snapshot.json - - The snapshot.json file is signed by the snapshot role. It lists hashes and - sizes of all metadata on the repository, excluding timestamp.json and - mirrors.json. - - The format of snapshot.json is as follows: - - { "_type" : "Snapshot", - "version" : VERSION, - "expires" : EXPIRES, - "meta" : METAFILES - } - - METAFILES is an object whose format is the following: - - { METAPATH : { - "length" : LENGTH, - "hashes" : HASHES, - ("custom" : { ... }) } - , ... - } - - METAPATH is the the metadata file's path on the repository relative to the - metadata base URL. - - The HASHES and LENGTH are the hashes and length of the file. LENGTH is an - integer. HASHES is a dictionary that specifies one or more hashes, including - the cryptographic hash function. For example: { "sha256": HASH, ... 
} - - A signed snapshot.json example file: - - { - "signatures": [ - { - "keyid": "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309", - "method": "ed25519", - "sig": "f7f03b13e3f4a78a23561419fc0dd741a637e49ee671251be9f8f3fceedfc112e4 - 4ee3aaff2278fad9164ab039118d4dc53f22f94900dae9a147aa4d35dcfc0f" - } - ], - "signed": { - "_type": "Snapshot", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "root.json": { - "hashes": { - "sha256": "52bbb30f683d166fae5c366e4582cfe8212aacbe1b21ae2026dae58ec55d3701" - }, - "length": 1831 - }, - "targets.json": { - "hashes": { - "sha256": "f592d072e1193688a686267e8e10d7257b4ebfcf28133350dae88362d82a0c8a" - }, - "length": 1184 - }, - "targets.json.gz": { - "hashes": { - "sha256": "9f8aff5b55ee4b3140360d99b39fa755a3ea640462072b4fd74bdd72e6fe245a" - }, - "length": 599 - }, - "targets/project.json": { - "hashes": { - "sha256": "1f812e378264c3085bb69ec5f6663ed21e5882bbece3c3f8a0e8479f205ffb91" - }, - "length": 604 - } - }, - "version": 1 - } - } - -4.5. File formats: targets.json and delegated target roles - - The format of targets.json is as follows: - - { "_type" : "Targets", - "version" : VERSION, - "expires" : EXPIRES, - "targets" : TARGETS, - ("delegations" : DELEGATIONS) - } - - TARGETS is an object whose format is the following: - - { TARGETPATH : { - "length" : LENGTH, - "hashes" : HASHES, - ("custom" : { ... }) } - , ... - } - - Each key of the TARGETS object is a TARGETPATH. A TARGETPATH is a path to - a file that is relative to a mirror's base URL of targets. - - It is allowed to have a TARGETS object with no TARGETPATH elements. This - can be used to indicate that no target files are available. - - If defined, the elements and values of "custom" will be made available to the - client application. The information in "custom" is opaque to the framework - and can include version numbers, dependencies, requirements, and any other - data that the application wants to include to describe the file at - TARGETPATH. The application may use this information to guide download - decisions. - - DELEGATIONS is an object whose format is the following: - - { "keys" : { - KEYID : KEY, - ... }, - "roles" : [{ - "name": ROLENAME, - "keyids" : [ KEYID, ... ] , - "threshold" : THRESHOLD, - ("path_hash_prefixes" : [ HEX_DIGEST, ... ] | - "paths" : [ PATHPATTERN, ... ]) - }, ... ] - } - - ROLENAME is the full name of the delegated role. For example, - "targets/projects" - - In order to discuss target paths, a role MUST specify only one of the - "path_hash_prefixes" or "paths" attributes, each of which we discuss next. - - The "path_hash_prefixes" list is used to succinctly describe a set of target - paths. Specifically, each HEX_DIGEST in "path_hash_prefixes" describes a set - of target paths; therefore, "path_hash_prefixes" is the union over each - prefix of its set of target paths. The target paths must meet this - condition: each target path, when hashed with the SHA-256 hash function to - produce a 64-byte hexadecimal digest (HEX_DIGEST), must share the same - prefix as one of the prefixes in "path_hash_prefixes". This is useful to - split a large number of targets into separate bins identified by consistent - hashing. - - The "paths" list describes paths that the role is trusted to provide. - Clients MUST check that a target is in one of the trusted paths of all roles - in a delegation chain, not just in a trusted path of the role that describes - the target file. 
The format of a PATHPATTERN may be either a path to a - single file, or a path to a directory to indicate all files and/or - subdirectories under that directory. - - A path to a directory is used to indicate all possible targets sharing that - directory as a prefix; e.g. if the directory is "targets/A", then targets - which match that directory include "targets/A/B.json" and - "targets/A/B/C.json". - - We are currently investigating a few "priority tag" schemes to resolve - conflicts between delegated roles that share responsibility for overlapping - target paths. One of the simplest of such schemes is for the client to - consider metadata in order of appearance of delegations; we treat the order - of delegations such that the first delegation is trusted more than the - second one, the second delegation is trusted more than the third one, and so - on. The metadata of the first delegation will override that of the second - delegation, the metadata of the second delegation will override that of the - third delegation, and so on. In order to accommodate this scheme, the - "roles" key in the DELEGATIONS object above points to an array, instead of a - hash table, of delegated roles. - - Another priority tag scheme would have the clients prefer the delegated role - with the latest metadata for a conflicting target path. Similar ideas were - explored in the Stork package manager (University of Arizona Tech Report - 08-04)[https://isis.poly.edu/~jcappos/papers/cappos_stork_dissertation_08.pdf]. - - The metadata files for delegated target roles has the same format as the - top-level targets.json metadata file. - - A signed targets.json example file: - - { - "signatures": [ - { - "keyid": "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b", - "method": "ed25519", - "sig": "e9fd40008fba263758a3ff1dc59f93e42a4910a282749af915fbbea1401178e5a0 - 12090c228f06db1deb75ad8ddd7e40635ac51d4b04301fce0fd720074e0209" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba": { - "keytype": "ed25519", - "keyval": { - "public": "b6e40fb71a6041212a3d84331336ecaa1f48a0c523f80ccc762a034c727606fa" - } - } - }, - "roles": [ - { - "keyids": [ - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba" - ], - "name": "targets/project", - "paths": [ - "/project/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } - } - -4.6. File formats: timestamp.json - - The timestamp file is signed by a timestamp key. It indicates the - latest versions of other files and is frequently resigned to limit the - amount of time a client can be kept unaware of interference with obtaining - updates. - - Timestamp files will potentially be downloaded very frequently. Unnecessary - information in them will be avoided. - - The format of the timestamp file is as follows: - - { "_type" : "Timestamp", - "version" : VERSION, - "expires" : EXPIRES, - "meta" : METAFILES - } - - METAFILES is the same is described for the snapshot.json file. In the case - of the timestamp.json file, this will commonly only include a description of - the snapshot.json file. 
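  As a brief illustration of how the "meta" information is used, the sketch
  below checks downloaded snapshot.json bytes against the length and hashes
  published for it in timestamp.json. The function and variable names are
  hypothetical, not taken from the reference implementation.

    import hashlib

    def matches_meta(file_bytes, meta_entry):
        # meta_entry is e.g. timestamp["signed"]["meta"]["snapshot.json"],
        # containing "length" and one or more "hashes".
        if len(file_bytes) != meta_entry["length"]:
            return False
        for algorithm, expected_digest in meta_entry["hashes"].items():
            if hashlib.new(algorithm, file_bytes).hexdigest() != expected_digest:
                return False
        return True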
- - A signed timestamp.json example file: - - { - "signatures": [ - { - "keyid": "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4", - "method": "ed25519", - "sig": "90d2a06c7a6c2a6a93a9f5771eb2e5ce0c93dd580bebc2080d10894623cfd6eaed - f4df84891d5aa37ace3ae3736a698e082e12c300dfe5aee92ea33a8f461f02" - } - ], - "signed": { - "_type": "Timestamp", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "snapshot.json": { - "hashes": { - "sha256": "c14aeb4ac9f4a8fc0d83d12482b9197452f6adf3eb710e3b1e2b79e8d14cb681" - }, - "length": 1007 - } - }, - "version": 1 - } - } - -4.7. File formats: mirrors.json - - The mirrors.json file is signed by the mirrors role. It indicates which - mirrors are active and believed to be mirroring specific parts of the - repository. - - The format of mirrors.json is as follows: - - { "_type" : "Mirrorlist", - "version" : VERSION, - "expires" : EXPIRES, - "mirrors" : [ - { "urlbase" : URLBASE, - "metapath" : METAPATH, - "targetspath" : TARGETSPATH, - "metacontent" : [ PATHPATTERN ... ] , - "targetscontent" : [ PATHPATTERN ... ] , - ("custom" : { ... }) } - , ... ] - } - - URLBASE is the URL of the mirror which METAPATH and TARGETSPATH are relative - to. All metadata files will be retrieved from METAPATH and all target files - will be retrieved from TARGETSPATH. - - The lists of PATHPATTERN for "metacontent" and "targetscontent" describe the - metadata files and target files available from the mirror. - - The order of the list of mirrors is important. For any file to be - downloaded, whether it is a metadata file or a target file, the framework on - the client will give priority to the mirrors that are listed first. That is, - the first mirror in the list whose "metacontent" or "targetscontent" include - a path that indicate the desired file can be found there will the first - mirror that will be used to download that file. Successive mirrors with - matching paths will only be tried if downloading from earlier mirrors fails. - This behavior can be modified by the client code that uses the framework to, - for example, randomly select from the listed mirrors. - -5. Detailed Workflows - -5.1. The client application - - 1. The client application first instructs TUF to check for updates. - - 2. TUF downloads and verifies timestamp.json. - - 3. If timestamp.json indicates that snapshot.json has changed, TUF downloads - and verifies snapshot.json. - - 4. TUF determines which metadata files listed in snapshot.json differ from - those described in the last snapshot.json that TUF has seen. If root.json - has changed, the update process starts over using the new root.json. - - 5. TUF provides the software update system with a list of available files - according to targets.json. - - 6. The software update system instructs TUF to download a specific target file. - - 7. TUF downloads and verifies the file and then makes the file available to the - software update system. - - Note: If at any point in the above procedure there is a problem (i.e., if - unexpired, signed, valid metadata cannot be retrieved from the repository), - the Root file is downloaded and the process is retried once more (and only - once to avoid an infinite loop). Optionally, the software update system - using the framework can decide how to proceed rather than automatically - downloading a new Root file. - - - The client code instructs the framework to check for updates. 
The framework - downloads the timestamp.json file from a mirror and checks that the file is - properly signed by the timestamp role, is not expired, and is not older than - the last timestamp.json file retrieved. If the timestamp file lists the same - snapshot.json file as was previously seen, the client code is informed that no - updates are available and the update checking process stops. - - If the snapshot.json file has changed, the framework downloads the file and - verifies that it is properly signed by the snapshot role, is not expired, has - a newer timestamp than the last snapshot.json file seen, and matches the - description (hashes and size) in the timestamp.json file. The framework then - checks which metadata files listed in snapshot.json differ from those - described in the last snapshot.json file the framework had seen. If the - root.json file has changed, the framework updates this (following the same - security measures as with the other files) and starts the process over. If - any other metadata files have changed, the framework downloads and checks - those. - - By comparing the trusted targets from the old trusted metadata with the new - metadata, the framework is able to determine which target files have - changed. The framework ensures that any targets described in delegated - targets files are allowed to be provided by the delegated role. - - When the client code asks the framework to download a target file, the - framework downloads the file from (potentially trying multiple mirrors), - checks the downloaded file to ensure that it matches the information - described in the targets files, and then makes the file available to the - client code. - -6. Usage - - See http://www.theupdateframework.com/ for discussion of recommended usage - in various situations. - -6.1. Key management and migration - - All keys, except those for the timestamp and mirrors roles, should be - stored securely offline (e.g. encrypted and on a separate machine, in - special-purpose hardware, etc.). This document does not prescribe how keys - should be encrypted and stored, and so it is left to implementers of - this document to decide how best to secure them. - - To replace a compromised root key or any other top-level role key, the root - role signs a new root.json file that lists the updated trusted keys for the - role. When replacing root keys, an application will sign the new root.json - file with both the new and old root keys until all clients are known to have - obtained the new root.json file (a safe assumption is that this will be a - very long time or never). There is no risk posed by continuing to sign the - root.json file with revoked keys as once clients have updated they no longer - trust the revoked key. This is only to ensure outdated clients remain able - to update. - - To replace a delegated developer key, the role that delegated to that key - just replaces that key with another in the signed metadata where the - delegation is done. - -7. Consistent Snapshots - - So far, we have considered a TUF repository that is relatively static (in - terms of how often metadata and target files are updated). The problem is - that if the repository (which may be a community repository such as PyPI, - RubyGems, CPAN, or SourceForge) is volatile, in the sense that the - repository is continually producing new TUF metadata as well as its - targets, then should clients read metadata while the same metadata is being - written to, they would effectively see denial-of-service attacks. 
- Therefore, the repository needs to be careful about how it writes metadata - and targets. The high-level idea of the solution is that each snapshot will - be contained in a so-called consistent snapshot. If a client is reading - from one consistent snapshot, then the repository is free to write another - consistent snapshot without interrupting that client. For more reasons on - why we need consistent snapshots, please see - https://github.com/theupdateframework/pep-on-pypi-with-tuf#why-do-we-need-consistent-snapshots - -7.1. Writing consistent snapshots - - We now explain how a repository should write metadata and targets to - produce self-contained consistent snapshots. - - Simply put, TUF should write every metadata and target file as such: if the - file had the original name of filename.ext, then it should be written to - disk as digest.filename.ext, where digest is the hex digest of a - cryptographic hash of the file. This means that if the referrer metadata - lists N cryptographic hashes of the referred file, then there must be N - identical copies of the referred file, where each file will be - distinguished only by the value of the digest in its filename. The modified - filename need not include the name of the cryptographic hash function used - to produce the digest because, on a read, the choice of function follows - from the selection of a digest (which includes the name of the - cryptographic function) from all digests in the referred file. - - Additionally, the timestamp metadata (timestamp.json) should also be written - to disk whenever it is updated. It is optional for an implementation to - write identical copies at digest.timestamp.json for record-keeping purposes, - because a cryptographic hash of the timestamp metadata is usually not - known in advance. The same step applies to the root metadata (root.json), - although an implementation must write both root.json and digest.root.json - because it is possible to download root metadata both with and without - known hashes. These steps are required because these are the only metadata - files that may be requested without known hashes. - - Most importantly, no metadata file format must be updated to refer to the - names of metadata or target files with their hashes included. In other - words, if a metadata file A refers to another metadata or target file B as - filename.ext, then the filename must remain as filename.ext and not - digest.filename.ext. This rule is in place so that metadata signed by roles - with offline keys will not be forced to sign for the metadata file whenever - it is updated. In the next subsection, we will see how clients will - reproduce the name of the intended file. - - Finally, the root metadata should write the Boolean "consistent_snapshot" - attribute at the root level of its keys of attributes. If consistent - snapshots are not written by the repository, then the attribute may either - be left unspecified or be set to the False value. Otherwise, it must be - set to the True value. - - For more details on how this would apply on a community repository, please - see https://github.com/theupdateframework/pep-on-pypi-with-tuf#producing-consistent-snapshots - -7.2. Reading consistent snapshots - - We now explain how a client should read a self-contained consistent - snapshot. - - If the root metadata (root.json) is either missing the Boolean - "consistent_snapshot" attribute or the attribute is set to False, then the - client should do nothing different from the workflow in Section 5.1. 
- - Otherwise, the client must perform as follows: - 1. It must first retrieve the timestamp metadata (timestamp.json) from the - repository. - 2. If a threshold number of signatures of the timestamp or snapshot - metadata are not valid, then the client must download the root metadata - (root.json) from the repository and return to step 1. - 3. Otherwise, the client must download every subsequent metadata or - target file as follows: if the metadata or target file has the name - filename.ext, then the client must actually retrieve the file with the - name digest.filename.ext, where digest is the hex digest of a - cryptographic hash of the referred file as listed by its referrer file. - Even though the modified filename does not include the name of the - cryptographic hash function used to produce the chosen digest value, the - choice of function follows from the selection of the digest (which - includes the name of the cryptographic function) from all digests in the - referred file. - 4. Finally, the client must be careful to rename every metadata or target - file retrieved with the name digest.filename.ext to the name - filename.ext. - -F. Future directions and open questions - -F.1. Support for bogus clocks. - - The framework may need to offer an application-enablable "no, my clock is - _supposed_ to be wrong" mode, since others have noticed that many users seem - to have incorrect clocks. - +The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/tuf-spec.md diff --git a/docs/tuf-spec.md b/docs/tuf-spec.md new file mode 100644 index 0000000000..7d8df7b60f --- /dev/null +++ b/docs/tuf-spec.md @@ -0,0 +1 @@ +The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/tuf-spec.md diff --git a/docs/tuf-spec.txt b/docs/tuf-spec.txt index bba03f3e9c..7d8df7b60f 100644 --- a/docs/tuf-spec.txt +++ b/docs/tuf-spec.txt @@ -1,1116 +1 @@ - The Update Framework Specification - -7 October 2016 -Version 1.0 (Draft) - -1. Introduction - -1.1. Scope - - This document describes a framework for securing software update systems. - -1.2. Motivation - - Software is commonly updated through software update systems. These systems - can be package managers that are responsible for all of the software that is - installed on a system, application updaters that are only responsible for - individual installed applications, or software library managers that install - software that adds functionality such as plugins or programming language - libraries. - - Software update systems all have the common behavior of downloading files - that identify whether updates exist and, when updates do exist, downloading - the files that are required for the update. For the implementations - concerned with security, various integrity and authenticity checks are - performed on downloaded files. - - Software update systems are vulnerable to a variety of known attacks. This - is generally true even for implementations that have tried to be secure. - -1.3. History and credit - - Work on TUF began in late 2009. The core ideas are based off of previous - work done by Justin Cappos and Justin Samuel that identified security flaws - in all popular Linux package managers. More information and current - versions of this document can be found at https://www.updateframework.com/ - - The Global Environment for Network Innovations (GENI) and the National - Science Foundation (NSF) have provided support for the development of TUF. 
- (https://www.geni.net/) - (https://www.nsf.gov/) - - TUF's reference implementation is based heavily on Thandy, the application - updater for Tor (https://www.torproject.org/). Its design and this spec are - also largely based on Thandy's, with many parts being directly borrowed - from Thandy. The Thandy spec can be found here: - https://gitweb.torproject.org/thandy.git/tree/specs/thandy-spec.txt - - Whereas Thandy is an application updater for an individual software project, - TUF aims to provide a way to secure any software update system. We're very - grateful to the Tor Project and the Thandy developers as it is doubtful our - design and implementation would have been anywhere near as good without - being able to use their great work as a starting point. Thandy is the hard - work of Nick Mathewson, Sebastian Hahn, Roger Dingledine, Martin Peck, and - others. - -1.4. Non-goals - - We aren't creating a universal update system, but rather a simple and - flexible way that applications can have high levels of security with their - software update systems. Creating a universal software update system would - not be a reasonable goal due to the diversity of application-specific - functionality in software update systems and the limited usefulness that - such a system would have for securing legacy software update systems. - - We won't be defining package formats or even performing the actual update - of application files. We will provide the simplest mechanism possible that - remains easy to use and provides a secure way for applications to obtain and - verify files being distributed by trusted parties. - - We are not providing a means to bootstrap security so that arbitrary - installation of new software is secure. In practice this means that people - still need to use other means to verify the integrity and authenticity of - files they download manually. - - The framework will not have the responsibility of deciding on the correct - course of action in all error situations, such as those that can occur when - certain attacks are being performed. Instead, the framework will provide - the software update system the relevant information about any errors that - require security decisions which are situation-specific. How those errors - are handled is up to the software update system. - -1.5. Goals - - We need to provide a framework (a set of libraries, file formats, and - utilities) that can be used to secure new and existing software update - systems. - - The framework should enable applications to be secure from all known attacks - on the software update process. It is not concerned with exposing - information about what software is being updating (and thus what software - the client may be running) or the contents of updates. - - The framework should provide means to minimize the impact of key compromise. - To do so, it must support roles with multiple keys and threshold/quorum - trust (with the exception of minimally trusted roles designed to use a - single key). The compromise of roles using highly vulnerable keys should - have minimal impact. Therefore, online keys (keys which are used in an - automated fashion) must not be used for any role that clients ultimately - trust for files they may install. - - The framework must be flexible enough to meet the needs of a wide variety of - software update systems. - - The framework must be easy to integrate with software update systems. 
- -1.5.1 Goals for implementation - - The client side of the framework must be straightforward to implement in any - programming language and for any platform with the requisite networking and - crypto support. - - The framework should be easily customizable for use with any crypto - libraries. - - The process by which developers push updates to the repository must be - simple. - - The repository must serve only static files and be easy to mirror. - - The framework must be secure to use in environments that lack support for - SSL (TLS). This does not exclude the optional use of SSL when available, - but the framework will be designed without it. - -1.5.2. Goals for specific attacks to protect against - - Note: When saying the framework protects against an attack, this means that - the attack will not be successful. It does not mean that a client will - always be able to successfully update during an attack. Fundamentally, an - attacker positioned to intercept and modify a client's communication will - always be able to perform a denial of service. The part we have control - over is not allowing an inability to update to go unnoticed. - - Arbitrary installation attacks. An attacker installs anything they want on - the client system. That is, an attacker can provide arbitrary files in - response to download requests and the files will not be detected as - illegitimate. - - Endless data attacks. Attackers should not be able to respond to client - requests with huge amounts of data (extremely large files) that interfere - with the client's system. - - Extraneous dependencies attacks. Attackers should not be able to cause - clients to download or install software dependencies that are not the - intended dependencies. - - Fast-forward attacks. An attacker arbitrarily increases the version numbers - of project metadata files in the snapshot metadata well beyond the current - value, thus tricking a software update system into thinking any subsequent - updates are trying to rollback the package to a previous, out-of-date version. - In some situations, such as those where there is a maximum possible version - number, the perpetrator could use a number so high that the system would - never be able to match it with the one in the snapshot metadata, and thus - new updates could never be downloaded. - - Indefinite freeze attacks. Attackers should not be able to respond to - client requests with the same, outdated metadata without the client being - aware of the problem. - - Malicious mirrors preventing updates. Repository mirrors should be unable - to prevent updates from good mirrors. - - Mix-and-match attacks. Attackers should not be able to trick clients into - using a combination of metadata that never existed together on the - repository at the same time. - - Rollback attacks. Attackers should not be able to trick clients into - installing software that is older than that which the client previously knew - to be available. - - Slow retrieval attacks. Attackers should not be able to prevent clients - from being aware of interference with receiving updates by responding to - client requests so slowly that automated updates never complete. - - Vulnerability to key compromises. An attacker who is able to compromise a - single key or less than a given threshold of keys can compromise clients. - This includes relying on a single online key (such as only being protected - by SSL) or a single offline key (such as most software update systems use to - sign files). - - Wrong software installation. 
An attacker provides a client with a trusted - file that is not the one the client wanted. - -1.5.3. Goals for PKIs - - Software update systems using the framework's client code interface should - never have to directly manage keys. - - All keys must be easily and safely revocable. Trusting new keys for a role - must be easy. - - For roles where trust delegation is meaningful, a role should be able to - delegate full or limited trust to another role. - - The root of trust will not rely on external PKI. That is, no authority will - be derived from keys outside of the framework. - -2. System overview - - The framework ultimately provides a secure method of obtaining trusted - files. To avoid ambiguity, we will refer to the files the framework is used - to distribute as "target files". Target files are opaque to the framework. - Whether target files are packages containing multiple files, single text - files, or executable binaries is irrelevant to the framework. - - The metadata describing target files is the information necessary to - securely identify the file and indicate which roles are trusted to provide - the file. As providing additional information about - target files may be important to some software update systems using the - framework, additional arbitrary information can be provided with any target - file. This information will be included in signed metadata that describes - the target files. - - The following are the high-level steps of using the framework from the - viewpoint of a software update system using the framework. This is an - error-free case. - - Polling: - - Periodically, the software update system using the framework - instructs the framework to check each repository for updates. - If the framework reports to the application code that there are - updates, the application code determines whether it wants to - download the updated target files. Only target files that are - trusted (referenced by properly signed and timely metadata) are made - available by the framework. - - Fetching: - - For each file that the application wants, it asks the framework to - download the file. The framework downloads the file and performs - security checks to ensure that the downloaded file is exactly what is - expected according to the signed metadata. The application code is - not given access to the file until the security checks have been - completed. The application asks the framework to copy the downloaded - file to a location specified by the application. At this point, the - application has securely obtained the target file and can do with it - whatever it wishes. - -2.1. Roles and PKI - - In the discussion of roles that follows, it is important to remember that - the framework has been designed to allow a large amount of flexibility for - many different use cases. For example, it is possible to use the framework - with a single key that is the only key used in the entire system. This is - considered to be insecure but the flexibility is provided in order to meet - the needs of diverse use cases. - - There are four fundamental top-level roles in the framework: - - Root role - - Targets role - - Snapshot role - - Timestamp role - - There is also one optional top-level role: - - Mirrors role - - All roles can use one or more keys and require a threshold of signatures of - the role's keys in order to trust a given metadata file. - -2.1.1 Root role - - The root role delegates trust to specific keys trusted for all other - top-level roles used in the system. 
- - The client-side of the framework must ship with trusted root keys for each - configured repository. - - The root role's private keys must be kept very secure and thus should be - kept offline. - -2.1.2 Targets role - - The targets role's signature indicates which target files are trusted by - clients. The targets role signs metadata that describes these files, not - the actual target files themselves. - - In addition, the targets role can delegate full or partial trust to other - roles. Delegating trust means that the targets role indicates another role - (that is, another set of keys and the threshold required for trust) is - trusted to sign target file metadata. Partial trust delegation is when the - delegated role is only trusted for some of the target files that the - delegating role is trusted for. - - Delegated developer roles can further delegate trust to other delegated - roles. This provides for multiple levels of trust delegation where each - role can delegate full or partial trust for the target files they are - trusted for. The delegating role in these cases is still trusted. That is, - a role does not become untrusted when it has delegated trust. - - Delegated trust can be revoked at any time by the delegating role signing - new metadata that indicates the delegated role is no longer trusted. - -2.1.3 Snapshot role - - The snapshot role signs a metadata file that provides information about the - latest version of all of the other metadata on the repository (excluding the - timestamp file, discussed below). This information allows clients to know - which metadata files have been updated and also prevents mix-and-match - attacks. - -2.1.4 Timestamp role - - To prevent an adversary from replaying an out-of-date signed metadata file - whose signature has not yet expired, an automated process periodically signs - a timestamped statement containing the hash of the snapshot file. Even - though this timestamp key must be kept online, the risk posed to clients by - compromise of this key is minimal. - -2.1.5 Mirrors role - - Every repository has one or more mirrors from which files can be downloaded - by clients. A software update system using the framework may choose to - hard-code the mirror information in their software or they may choose to use - mirror metadata files that can optionally be signed by a mirrors role. - - The importance of using signed mirror lists depends on the application and - the users of that application. There is minimal risk to the application's - security from being tricked into contacting the wrong mirrors. This is - because the framework has very little trust in repositories. - -2.2. Threat Model And Analysis - - We assume an adversary who can respond to client requests, whether by acting - as a man-in-the-middle or through compromising repository mirrors. At - worst, such an adversary can deny updates to users if no good mirrors are - accessible. An inability to obtain updates is noticed by the framework. - - If an adversary compromises enough keys to sign metadata, the best that can - be done is to limit the number of users who are affected. The level to - which this threat is mitigated is dependent on how the application is using - the framework. This includes whether different keys have been used for - different signing roles. - - A detailed threat analysis is outside the scope of this document. This is - partly because the specific threat posted to clients in many situations is - largely determined by how the framework is being used. - -3. 
The repository - - An application uses the framework to interact with one or more repositories. - A repository is a conceptual source of target files of interest to the - application. Each repository has one or more mirrors which are the actual - providers of files to be downloaded. For example, each mirror may specify a - different host where files can be downloaded from over HTTP. - - The mirrors can be full or partial mirrors as long as the application-side - of the framework can ultimately obtain all of the files it needs. A mirror - is a partial mirror if it is missing files that a full mirror should have. - If a mirror is intended to only act as a partial mirror, the metadata and - target paths available from that mirror can be specified. - - Roles, trusted keys, and target files are completely separate between - repositories. A multi-repository setup is a multi-root system. When an - application uses the framework with multiple repositories, the framework - does not perform any "mixing" of the trusted content from each repository. - It is up to the application to determine the significance of the same or - different target files provided from separate repositories. - -3.1 Repository layout - - The filesystem layout in the repository is used for two purposes: - - To give mirrors an easy way to mirror only some of the repository. - - To specify which parts of the repository a given role has authority - to sign/provide. - -3.1.1 Target files - - The filenames and the directory structure of target files available from - a repository are not specified by the framework. The names of these files - and directories are completely at the discretion of the application using - the framework. - -3.1.2 Metadata files - - The filenames and directory structure of repository metadata are strictly - defined. The following are the metadata files of top-level roles relative - to the base URL of metadata available from a given repository mirror. - - /root.json - - Signed by the root keys; specifies trusted keys for the other - top-level roles. - - /snapshot.json - - Signed by the snapshot role's keys. Lists the version numbers of all - metadata files other than timestamp.json. For the root role, the - hash(es), size, and version number are listed. - - /targets.json - - Signed by the target role's keys. Lists hashes and sizes of target - files. - - /timestamp.json - - Signed by the timestamp role's keys. Lists hash(es), size, and version - number of the snapshot file. This is the first and potentially only - file that needs to be downloaded when clients poll for the existence - of updates. - - /mirrors.json (optional) - - Signed by the mirrors role's keys. Lists information about available - mirrors and the content available from each mirror. - - An implementation of the framework may optionally choose to make available - any metadata files in compressed (e.g. gzip'd) format. In doing so, the - filename of the compressed file should be the same as the original with the - addition of the file name extension for the compression type (e.g. - snapshot.json.gz). The original (uncompressed) file should always be made - available, as well. - -3.1.2.1 Metadata files for targets delegation - - When the targets role delegates trust to other roles, each delegated role - provides one signed metadata file. As is the case with the directory - structure of top-level metadata, the delegated files are relative to the - base URL of metadata available from a given repository mirror. 
- - A delegated role file is located at: - - /DELEGATED_ROLE.json - - where DELEGATED_ROLE is the name of the delegated role that has been - specified in targets.json. If this role further delegates trust to a role - named ANOTHER_ROLE, that role's signed metadata file is made available at: - - /ANOTHER_ROLE.json - -4. Document formats - - All of the formats described below include the ability to add more - attribute-value fields for backwards-compatible format changes. If - a backwards incompatible format change is needed, a new filename can - be used. - -4.1. Metaformat - - All documents use a subset of the JSON object format, with - floating-point numbers omitted. When calculating the digest of an - object, we use the "canonical JSON" subdialect as described at - http://wiki.laptop.org/go/Canonical_JSON - -4.2. File formats: general principles - - All signed metadata objects have the format: - - { "signed" : ROLE, - "signatures" : [ - { "keyid" : KEYID, - "method" : METHOD, - "sig" : SIGNATURE } - , ... ] - } - - where: ROLE is a dictionary whose "_type" field describes the role type. - KEYID is the identifier of the key signing the ROLE dictionary. - METHOD is the key signing method used to generate the signature. - SIGNATURE is a signature of the canonical JSON form of ROLE. - - The current reference implementation of TUF defines two signing methods, - although TUF is not restricted to any particular key signing method, - key type, or cryptographic library: - - "RSASSA-PSS" : RSA Probabilistic signature scheme with appendix. - The underlying hash function is SHA256. - - "ed25519" : Elliptic curve digital signature algorithm based on Twisted - Edwards curves. - - RSASSA-PSS: https://tools.ietf.org/html/rfc3447#page-29 - ed25519: https://ed25519.cr.yp.to/ - - All keys have the format: - - { "keytype" : KEYTYPE, - "keyval" : KEYVAL } - - where KEYTYPE is a string describing the type of the key and how it's - used to sign documents. The type determines the interpretation of - KEYVAL. - - We define two keytypes at present: 'rsa' and 'ed25519'. - - The 'rsa' format is: - - { "keytype" : "rsa", - "keyval" : { "public" : PUBLIC} - } - - where PUBLIC is in PEM format and a string. All RSA keys - must be at least 2048 bits. - - The 'ed25519' format is: - - { "keytype" : "ed25519", - "keyval" : { "public" : PUBLIC} - } - - where PUBLIC is a 32-byte string. - - The KEYID of a key is the hexdigest of the SHA-256 hash of the - canonical JSON form of the key. - - Metadata date-time data follows the ISO 8601 standard. The expected format - of the combined date and time string is "YYYY-MM-DDTHH:MM:SSZ". Time is - always in UTC, and the "Z" time zone designator is attached to indicate a - zero UTC offset. An example date-time string is "1985-10-21T01:21:00Z". - - -4.3. File formats: root.json - - The root.json file is signed by the root role's keys. It indicates - which keys are authorized for all top-level roles, including the root - role itself. Revocation and replacement of top-level role keys, including - for the root role, is done by changing the keys listed for the roles in - this file. - - The "signed" portion of root.json is as follows: - - { "_type" : "Root", - "version" : VERSION, - "expires" : EXPIRES, - "keys" : { - KEYID : KEY - , ... }, - "roles" : { - ROLE : { - "keyids" : [ KEYID, ... ] , - "threshold" : THRESHOLD } - , ... } - } - - VERSION is an integer that is greater than 0. Clients MUST NOT replace a - metadata file with a version number less than the one currently trusted. 
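  Returning briefly to the KEYID computation defined in section 4.2, the
  sketch below shows one way a KEYID could be derived in Python. It is not the
  reference implementation: json.dumps with sorted keys and no insignificant
  whitespace only approximates canonical JSON for simple, ASCII-only objects,
  so the resulting digest may differ from the KEYIDs shown in the examples if
  an implementation canonicalizes the key object differently.

    import hashlib
    import json

    def compute_keyid(key):
      # Approximate canonical JSON: sorted keys, no insignificant whitespace.
      canonical = json.dumps(key, sort_keys=True, separators=(',', ':'))
      # The KEYID is the hex digest of the SHA-256 hash of the canonical form.
      return hashlib.sha256(canonical.encode('utf-8')).hexdigest()

    ed25519_key = {
      'keytype': 'ed25519',
      'keyval': {
        'public': '72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c'
      }
    }
    print(compute_keyid(ed25519_key))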
- - EXPIRES determines when metadata should be considered expired and no longer - trusted by clients. Clients MUST NOT trust an expired file. - - A ROLE is one of "root", "snapshot", "targets", "timestamp", or "mirrors". - A role for each of "root", "snapshot", "timestamp", and "targets" MUST be - specified in the key list. The role of "mirror" is optional. If not - specified, the mirror list will not need to be signed if mirror lists are - being used. - - The KEYID must be correct for the specified KEY. Clients MUST calculate - each KEYID to verify this is correct for the associated key. Clients MUST - ensure that for any KEYID represented in this key list and in other files, - only one unique key has that KEYID. - - The THRESHOLD for a role is an integer of the number of keys of that role - whose signatures are required in order to consider a file as being properly - signed by that role. - - A root.json example file: - - { - "signatures": [ - { - "keyid": "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809 - f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } - } - -4.4. File formats: snapshot.json - - The snapshot.json file is signed by the snapshot role. It lists the version - numbers of all metadata on the repository, excluding timestamp.json and - mirrors.json. For the root role, the hash(es), size, and version number - are listed. - - The "signed" portion of snapshot.json is as follows: - - { "_type" : "Snapshot", - "version" : VERSION, - "expires" : EXPIRES, - "meta" : METAFILES - } - - METAFILES is an object whose format is the following: - - { METAPATH : { - "length" : LENGTH, - "hashes" : HASHES, - "version" : VERSION } - , ... - } - - METAPATH is the the metadata file's path on the repository relative to the - metadata base URL. - - The HASHES and LENGTH are the hashes and length of the file, both of which - are only specified for the root file. 
VERSION is listed for the root file - and all other roles available on the repository. LENGTH is an integer. - HASHES is a dictionary that specifies one or more hashes, including the - cryptographic hash function. For example: { "sha256": HASH, ... }. - - A snapshot.json example file: - - { - "signatures": [ - { - "keyid": "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309", - "method": "ed25519", - "sig": "f7f03b13e3f4a78a23561419fc0dd741a637e49ee671251be9f8f3fceedfc112e4 - 4ee3aaff2278fad9164ab039118d4dc53f22f94900dae9a147aa4d35dcfc0f" - } - ], - "signed": { - "_type": "Snapshot", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "root.json": { - "hashes": { - "sha256": "52bbb30f683d166fae5c366e4582cfe8212aacbe1b21ae2026dae58ec55d3701" - }, - "length": 1831, - "version": 1 - }, - "targets.json": { - "version": 1 - }, - "project.json": { - "version": 1 - }, - } - }, - "version": 1 - } - -4.5. File formats: targets.json and delegated target roles - - The "signed" portion of targets.json is as follows: - - { "_type" : "Targets", - "version" : VERSION, - "expires" : EXPIRES, - "targets" : TARGETS, - ("delegations" : DELEGATIONS) - } - - TARGETS is an object whose format is the following: - - { TARGETPATH : { - "length" : LENGTH, - "hashes" : HASHES, - ("custom" : { ... }) } - , ... - } - - Each key of the TARGETS object is a TARGETPATH. A TARGETPATH is a path to - a file that is relative to a mirror's base URL of targets. - - It is allowed to have a TARGETS object with no TARGETPATH elements. This - can be used to indicate that no target files are available. - - If defined, the elements and values of "custom" will be made available to the - client application. The information in "custom" is opaque to the framework - and can include version numbers, dependencies, requirements, and any other - data that the application wants to include to describe the file at - TARGETPATH. The application may use this information to guide download - decisions. - - DELEGATIONS is an object whose format is the following: - - { "keys" : { - KEYID : KEY, - ... }, - "roles" : [{ - "name": ROLENAME, - "keyids" : [ KEYID, ... ] , - "threshold" : THRESHOLD, - ("path_hash_prefixes" : [ HEX_DIGEST, ... ] | - "paths" : [ PATHPATTERN, ... ]) - }, ... ] - } - - ROLENAME is the full name of the delegated role. For example, - "targets/projects" - - In order to discuss target paths, a role MUST specify only one of the - "path_hash_prefixes" or "paths" attributes, each of which we discuss next. - - The "path_hash_prefixes" list is used to succinctly describe a set of target - paths. Specifically, each HEX_DIGEST in "path_hash_prefixes" describes a set - of target paths; therefore, "path_hash_prefixes" is the union over each - prefix of its set of target paths. The target paths must meet this - condition: each target path, when hashed with the SHA-256 hash function to - produce a 64-byte hexadecimal digest (HEX_DIGEST), must share the same - prefix as one of the prefixes in "path_hash_prefixes". This is useful to - split a large number of targets into separate bins identified by consistent - hashing. - - The "paths" list describes paths that the role is trusted to provide. - Clients MUST check that a target is in one of the trusted paths of all roles - in a delegation chain, not just in a trusted path of the role that describes - the target file. The format of a PATHPATTERN may be either a path to a - single file, or a path to a directory to indicate all files and/or - subdirectories under that directory. 
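  Either attribute constrains which target paths the delegated role may sign
  for. A minimal sketch of the corresponding client-side checks is shown below
  (the helper names are illustrative and not part of the reference
  implementation; the exact string form that is hashed or matched, such as
  whether a leading "/" is included, is implementation-defined):

    import hashlib

    def allowed_by_path_hash_prefixes(target_path, prefixes):
      # Hash the target path with SHA-256; the hex digest must start with
      # one of the listed HEX_DIGEST prefixes.
      digest = hashlib.sha256(target_path.encode('utf-8')).hexdigest()
      return any(digest.startswith(prefix) for prefix in prefixes)

    def allowed_by_paths(target_path, path_patterns):
      # A PATHPATTERN is either an exact file path or a directory whose
      # files and subdirectories are all delegated.
      return any(target_path == pattern or
                 target_path.startswith(pattern.rstrip('/') + '/')
                 for pattern in path_patterns)

    print(allowed_by_paths('/project/file3.txt', ['/project/']))  # True
    print(allowed_by_path_hash_prefixes('/project/file3.txt', ['1f', '8e']))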
- - A path to a directory is used to indicate all possible targets sharing that - directory as a prefix; e.g. if the directory is "targets/A", then targets - which match that directory include "targets/A/B.json" and - "targets/A/B/C.json". - - We are currently investigating a few "priority tag" schemes to resolve - conflicts between delegated roles that share responsibility for overlapping - target paths. One of the simplest of such schemes is for the client to - consider metadata in order of appearance of delegations; we treat the order - of delegations such that the first delegation is trusted more than the - second one, the second delegation is trusted more than the third one, and so - on. The metadata of the first delegation will override that of the second - delegation, the metadata of the second delegation will override that of the - third delegation, and so on. In order to accommodate this scheme, the - "roles" key in the DELEGATIONS object above points to an array, instead of a - hash table, of delegated roles. - - Another priority tag scheme would have the clients prefer the delegated role - with the latest metadata for a conflicting target path. Similar ideas were - explored in the Stork package manager (University of Arizona Tech Report - 08-04)[https://isis.poly.edu/~jcappos/papers/cappos_stork_dissertation_08.pdf]. - - The metadata files for delegated target roles has the same format as the - top-level targets.json metadata file. - - A targets.json example file: - - { - "signatures": [ - { - "keyid": "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b", - "method": "ed25519", - "sig": "e9fd40008fba263758a3ff1dc59f93e42a4910a282749af915fbbea1401178e5a0 - 12090c228f06db1deb75ad8ddd7e40635ac51d4b04301fce0fd720074e0209" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba": { - "keytype": "ed25519", - "keyval": { - "public": "b6e40fb71a6041212a3d84331336ecaa1f48a0c523f80ccc762a034c727606fa" - } - } - }, - "roles": [ - { - "keyids": [ - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba" - ], - "name": "targets/project", - "paths": [ - "/project/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } - } - -4.6. File formats: timestamp.json - - The timestamp file is signed by a timestamp key. It indicates the - latest versions of other files and is frequently resigned to limit the - amount of time a client can be kept unaware of interference with obtaining - updates. - - Timestamp files will potentially be downloaded very frequently. Unnecessary - information in them will be avoided. - - The "signed" portion of timestamp.json is as follows: - - { "_type" : "Timestamp", - "version" : VERSION, - "expires" : EXPIRES, - "meta" : METAFILES - } - - METAFILES is the same is described for the snapshot.json file. In the case - of the timestamp.json file, this will commonly only include a description of - the snapshot.json file. 
- - A signed timestamp.json example file: - - { - "signatures": [ - { - "keyid": "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4", - "method": "ed25519", - "sig": "90d2a06c7a6c2a6a93a9f5771eb2e5ce0c93dd580bebc2080d10894623cfd6eaed - f4df84891d5aa37ace3ae3736a698e082e12c300dfe5aee92ea33a8f461f02" - } - ], - "signed": { - "_type": "Timestamp", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "snapshot.json": { - "hashes": { - "sha256": "c14aeb4ac9f4a8fc0d83d12482b9197452f6adf3eb710e3b1e2b79e8d14cb681" - }, - "length": 1007, - "version": 1 - } - }, - "version": 1 - } - } - -4.7. File formats: mirrors.json - - The mirrors.json file is signed by the mirrors role. It indicates which - mirrors are active and believed to be mirroring specific parts of the - repository. - - The "signed" portion of mirrors.json is as follows: - - { "_type" : "Mirrorlist", - "version" : VERSION, - "expires" : EXPIRES, - "mirrors" : [ - { "urlbase" : URLBASE, - "metapath" : METAPATH, - "targetspath" : TARGETSPATH, - "metacontent" : [ PATHPATTERN ... ] , - "targetscontent" : [ PATHPATTERN ... ] , - ("custom" : { ... }) } - , ... ] - } - - URLBASE is the URL of the mirror which METAPATH and TARGETSPATH are relative - to. All metadata files will be retrieved from METAPATH and all target files - will be retrieved from TARGETSPATH. - - The lists of PATHPATTERN for "metacontent" and "targetscontent" describe the - metadata files and target files available from the mirror. - - The order of the list of mirrors is important. For any file to be - downloaded, whether it is a metadata file or a target file, the framework on - the client will give priority to the mirrors that are listed first. That is, - the first mirror in the list whose "metacontent" or "targetscontent" include - a path that indicate the desired file can be found there will the first - mirror that will be used to download that file. Successive mirrors with - matching paths will only be tried if downloading from earlier mirrors fails. - This behavior can be modified by the client code that uses the framework to, - for example, randomly select from the listed mirrors. - -5. Detailed Workflows - -5.1. The client application - - 1. The client application first instructs TUF to check for updates. - - 2. TUF downloads and verifies timestamp.json. - - 3. If timestamp.json indicates that snapshot.json has changed, TUF downloads - and verifies snapshot.json. - - 4. TUF determines which metadata files listed in snapshot.json differ from - those described in the last snapshot.json that TUF has seen. If root.json - has changed, the update process starts over using the new root.json. - - 5. TUF provides the software update system with a list of available files - according to targets.json. - - 6. The software update system instructs TUF to download a specific target file. - - 7. TUF downloads and verifies the file and then makes the file available to the - software update system. - - Note: If at any point in the above procedure there is a problem (i.e., if - unexpired, signed, valid metadata cannot be retrieved from the repository), - the Root file is downloaded and the process is retried once more (and only - once to avoid an infinite loop). Optionally, the software update system - using the framework can decide how to proceed rather than automatically - downloading a new Root file. - - - The client code instructs the framework to check for updates. 
The framework - downloads the timestamp.json file from a mirror and checks that the file is - properly signed by the timestamp role, is not expired, and is not older than - the last timestamp.json file retrieved. If the timestamp file lists the same - snapshot.json file as was previously seen, the client code is informed that no - updates are available and the update checking process stops. - - If the snapshot.json file has changed, the framework downloads the file and - verifies that it is properly signed by the snapshot role, is not expired, has - a newer timestamp than the last snapshot.json file seen, and matches the - description (hashes and size) in the timestamp.json file. The framework then - checks which metadata files listed in snapshot.json differ from those - described in the last snapshot.json file the framework had seen. If the - root.json file has changed, the framework updates this (following the same - security measures as with the other files) and starts the process over. If - any other metadata files have changed, the framework downloads and checks - those. - - By comparing the trusted targets from the old trusted metadata with the new - metadata, the framework is able to determine which target files have - changed. The framework ensures that any targets described in delegated - targets files are allowed to be provided by the delegated role. - - When the client code asks the framework to download a target file, the - framework downloads the file from (potentially trying multiple mirrors), - checks the downloaded file to ensure that it matches the information - described in the targets files, and then makes the file available to the - client code. - -6. Usage - - See https://www.theupdateframework.com/ for discussion of recommended usage - in various situations. - -6.1. Key management and migration - - All keys, except those for the timestamp and mirrors roles, should be - stored securely offline (e.g. encrypted and on a separate machine, in - special-purpose hardware, etc.). This document does not prescribe how keys - should be encrypted and stored, and so it is left to implementers of - this document to decide how best to secure them. - - To replace a compromised root key or any other top-level role key, the root - role signs a new root.json file that lists the updated trusted keys for the - role. When replacing root keys, an application will sign the new root.json - file with both the new and old root keys. Any time such a change is - required, the root.json file is versioned and accessible by version number, - e.g. 3.root.json. Clients update the set of trusted root keys by requesting - the current root.json and all previous root.json versions, until one is - found that has been signed by keys the client already trusts. This is to - ensure that outdated clients remain able to update, without requiring all - previous root keys to be kept to sign new root.json metadata. - - - To replace a delegated developer key, the role that delegated to that key - just replaces that key with another in the signed metadata where the - delegation is done. - -7. Consistent Snapshots - - So far, we have considered a TUF repository that is relatively static (in - terms of how often metadata and target files are updated). 
The problem is - that if the repository (which may be a community repository such as PyPI, - RubyGems, CPAN, or SourceForge) is volatile, in the sense that the - repository is continually producing new TUF metadata as well as its - targets, then should clients read metadata while the same metadata is being - written to, they would effectively see denial-of-service attacks. - Therefore, the repository needs to be careful about how it writes metadata - and targets. The high-level idea of the solution is that each snapshot will - be contained in a so-called consistent snapshot. If a client is reading - from one consistent snapshot, then the repository is free to write another - consistent snapshot without interrupting that client. For more reasons on - why we need consistent snapshots, please see - https://github.com/theupdateframework/pep-on-pypi-with-tuf#why-do-we-need-consistent-snapshots - -7.1. Writing consistent snapshots - - We now explain how a repository should write metadata and targets to - produce self-contained consistent snapshots. - - Simply put, TUF should write every metadata file as such: if the - file had the original name of filename.ext, then it should be written to - disk as version_number.filename.ext, where version_number is an integer. - - On the other hand, consistent target files should be written to disk as - digest.filename.ext. This means that if the referrer metadata lists N - cryptographic hashes of the referred file, then there must be N identical - copies of the referred file, where each file will be distinguished only by - the value of the digest in its filename. The modified filename need not - include the name of the cryptographic hash function used to produce the - digest because, on a read, the choice of function follows from the - selection of a digest (which includes the name of the cryptographic - function) from all digests in the referred file. - - Additionally, the timestamp metadata (timestamp.json) should also be - written to disk whenever it is updated. It is optional for an - implementation to write identical copies at digest.timestamp.json for - record-keeping purposes, because a cryptographic hash of the timestamp - metadata is usually not known in advance. The same step applies to the root - metadata (root.json), although an implementation must write both root.json - and digest.root.json because it is possible to download root metadata both - with and without known hashes. These steps are required because these are - the only metadata files that may be requested without known hashes. - - Most importantly, no metadata file format must be updated to refer to the - names of metadata or target files with their hashes included. In other - words, if a metadata file A refers to another metadata or target file B as - filename.ext, then the filename must remain as filename.ext and not - digest.filename.ext. This rule is in place so that metadata signed by roles - with offline keys will not be forced to sign for the metadata file whenever - it is updated. In the next subsection, we will see how clients will - reproduce the name of the intended file. - - Finally, the root metadata should write the Boolean "consistent_snapshot" - attribute at the root level of its keys of attributes. If consistent - snapshots are not written by the repository, then the attribute may either - be left unspecified or be set to the False value. Otherwise, it must be - set to the True value. 
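  A minimal sketch of the file naming scheme described in this section,
  assuming a repository laid out on a local filesystem (the helper names are
  illustrative, not part of the reference implementation), is:

    import hashlib
    import os
    import shutil

    def write_consistent_metadata(metadata_path, version_number):
      # e.g. 'snapshot.json' at version 3 is also written as '3.snapshot.json'.
      directory, filename = os.path.split(metadata_path)
      shutil.copy(metadata_path,
                  os.path.join(directory, str(version_number) + '.' + filename))

    def write_consistent_target(target_path, hash_algorithms=('sha256',)):
      # One copy per digest listed by the referrer metadata,
      # e.g. '<sha256 hex digest>.file1.txt'.
      with open(target_path, 'rb') as f:
        data = f.read()
      directory, filename = os.path.split(target_path)
      for algorithm in hash_algorithms:
        digest = hashlib.new(algorithm, data).hexdigest()
        shutil.copy(target_path,
                    os.path.join(directory, digest + '.' + filename))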
- - For more details on how this would apply on a community repository, please - see https://github.com/theupdateframework/pep-on-pypi-with-tuf#producing-consistent-snapshots - -7.2. Reading consistent snapshots - - We now explain how a client should read a self-contained consistent - snapshot. - - If the root metadata (root.json) is either missing the Boolean - "consistent_snapshot" attribute or the attribute is set to False, then the - client should do nothing different from the workflow in Section 5.1. - - Otherwise, the client must perform as follows: - 1. It must first retrieve the timestamp metadata (timestamp.json) from the - repository. - 2. If a threshold number of signatures of the timestamp or snapshot - metadata are not valid, then the client must download the root metadata - (root.json) from the repository and return to step 1. - 3. Otherwise, the client must download every subsequent metadata or - target file as follows: if the metadata or target file has the name - filename.ext, then the client must actually retrieve the file with the - name digest.filename.ext, where digest is the hex digest of a - cryptographic hash of the referred file as listed by its referrer file. - Even though the modified filename does not include the name of the - cryptographic hash function used to produce the chosen digest value, the - choice of function follows from the selection of the digest (which - includes the name of the cryptographic function) from all digests in the - referred file. - 4. Finally, the client must be careful to rename every metadata or target - file retrieved with the name digest.filename.ext to the name - filename.ext. - -F. Future directions and open questions - -F.1. Support for bogus clocks. - - The framework may need to offer an application-enablable "no, my clock is - _supposed_ to be wrong" mode, since others have noticed that many users seem - to have incorrect clocks. - +The TUF specification file has been moved to https://github.com/theupdateframework/specification/blob/master/tuf-spec.md diff --git a/examples/README.md b/examples/README.md deleted file mode 100644 index 220f4725c8..0000000000 --- a/examples/README.md +++ /dev/null @@ -1,12 +0,0 @@ -This directory contains an example of a TUF repository, metadata, and key and -client files. - -## WARNING ## -These examples were last updated 2 years ago. We have since made changes to the -format of our metadata and key files, and will need to regenerate them so the -new tools can properly load them. We are currently working on a 1.0 release -that will make further tweaks to the format of metadata and key files, so these -examples will be modified once again. - -Note: The examples that are up-to-date and normally tested are located here: -https://github.com/theupdateframework/tuf/tree/develop/tests/repository_data/ diff --git a/examples/client/example_client.py b/examples/client/example_client.py deleted file mode 100755 index 01224e9b28..0000000000 --- a/examples/client/example_client.py +++ /dev/null @@ -1,94 +0,0 @@ -""" - - example_client.py - - - Vladimir Diaz - - - September 2012. - - - See LICENSE for licensing information. - - - Example script demonstrating custom python code a software updater - utilizing The Update Framework may write to securely update files. - The 'basic_client.py' script can be used on the command-line to perform - an update that will download and update all available targets; writing - custom code is not required with 'basic_client.py'. 
- - The custom examples below demonstrate: - (1) updating all targets - (2) updating all the targets of a specified role - (3) updating a specific target explicitly named. - - It assumes a server is listening on 'http://localhost:8001'. One can be - started by navigating to the 'examples/repository/' and starting: - $ python -m SimpleHTTPServer 8001 -""" - -import logging - -import tuf.client.updater - -# Uncomment the line below to enable printing of debugging information. -tuf.log.set_log_level(logging.INFO) - -# Set the local repository directory containing the metadata files. -settings.repository_directory = '.' - -# Set the repository mirrors. This dictionary is needed by the Updater -# class of updater.py. The client will download metadata and target -# files from any one of these mirrors. -repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001', - 'metadata_path': 'metadata', - 'targets_path': 'targets', - 'confined_target_dirs': ['']}} - -# Create the Upater object using the updater name 'tuf-example' -# and the repository mirrors defined above. -updater = tuf.client.updater.Updater('tuf-example', repository_mirrors) - -# Set the local destination directory to save the target files. -destination_directory = './targets' - -# Refresh the repository's top-level roles, store the target information for -# all the targets tracked, and determine which of these targets have been -# updated. -updater.refresh() -all_targets = updater.all_targets() -updated_targets = updater.updated_targets(all_targets, destination_directory) - -# Download each of these updated targets and save them locally. -for target in updated_targets: - try: - updater.download_target(target, destination_directory) - - except tuf.ssl_commons.exceptions.DownloadError as e: - pass - -# Remove any files from the destination directory that are no longer being -# tracked. -updater.remove_obsolete_targets(destination_directory) - - - -# Example demonstrating an update that only downloads the targets of -# a specific role (i.e., 'targets/project') -updater.refresh() -targets_of_role1 = updater.targets_of_role('targets/project') -updated_targets = updater.updated_targets(targets_of_role1, destination_directory) - -for target in updated_targets: - updater.download_target(target, destination_directory) - - - -# Example demonstrating an update that downloads a specific target. 
-updater.refresh() -target = updater.target('/file2.txt') -updated_target = updater.updated_targets([target], destination_directory) - -for target in updated_target: - updater.download_target(target, destination_directory) diff --git a/examples/client/metadata/current/root.json b/examples/client/metadata/current/root.json deleted file mode 100644 index 8128041ef4..0000000000 --- a/examples/client/metadata/current/root.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "signatures": [ - { - "keyid": "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/client/metadata/previous/root.json b/examples/client/metadata/previous/root.json deleted file mode 100644 index 8128041ef4..0000000000 --- a/examples/client/metadata/previous/root.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "signatures": [ - { - "keyid": "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": 
"01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/keystore/project_key b/examples/keystore/project_key deleted file mode 100644 index ad2c31d858..0000000000 --- a/examples/keystore/project_key +++ /dev/null @@ -1 +0,0 @@ -5bc5cdc1e18ff3ccbfd7c33a88bec596@@@@313030303030@@@@39656230336534656264303863303733353832386465383466346232633863356362616234306430333863353239393966643333373730353231386262623339@@@@31caf11a40e9eb48f499dcc6f80c88e8@@@@0400a24989069098ce702d6a8f81826ba482ffd9a17fff5a10c6f986b9179bae1be4d246db2fceda93ce12bd465e896e4e4e847c934eb120c794e12fbcc8569e4aa6964995f1fda9af50057644236c4194ebb78a588804363dbce492db7e4ef2ce39391013a7c2d68260a0e785b6cec39fc02cdff11e9c7a168ae133292fdbe5f2f968d2a0ae098ca9453ec33d175de181f771b016c760ceba51d9c20c431702656e8b09ebda6cf884096b89d80dd6c2c1752c04312e934cd79d49b91e8c7ef4448bf3b969fa6ed9727cac48f91e3921f41d5bf8ec4130ff45fda51e07cd16a7fa8daca1feb6f2dced6881c736565a1b72dc705a97606a0480c0dd681e65bea82fc261762cdad203fd49d45f \ No newline at end of file diff --git a/examples/keystore/project_key.pub b/examples/keystore/project_key.pub deleted file mode 100644 index 0f1c3193d9..0000000000 --- a/examples/keystore/project_key.pub +++ /dev/null @@ -1 +0,0 @@ -{"keytype": "ed25519", "keyval": {"public": "b6e40fb71a6041212a3d84331336ecaa1f48a0c523f80ccc762a034c727606fa"}} \ No newline at end of file diff --git a/examples/keystore/root_key b/examples/keystore/root_key deleted file mode 100644 index 0f87c3742b..0000000000 --- a/examples/keystore/root_key +++ /dev/null @@ -1 +0,0 @@ -70b05482c00cad0fccfcab4ac2b86b50@@@@313030303030@@@@62626137313336626662363862333962636161306561326437336237643436306162323966303038326636306363613965396161353566646330346666356635@@@@5f6612c00c8ed8627dd33754812bb7e1@@@@f651c3db5ac02aa8337264f0badd1f40fbe174ec3f8de8fd95188d31cf41b4863d7a28db297b8a5abd25e49d2af3c6cb2e25789088dce2b5113b7d7db16deea1eadf109d5ec004a2b5bcbdc29e13a9e4c803659def851800969918fb5930c56b816b119be490667f2d629309bc7578dad43c6b0ca6ef0c6f48ad68390fb1fc711b8a40bb5b1c197ae6f72d2b1e83bbc9050f46a3c69efe3b11c55a52d3f68f6e9fe58ad7e67dd6b136681b6d800fed22f15d31ee71ad1ed78f36b6d19b0771c22123f1dcb54a6b2e9742d2661014931cbab8fbbb001e3ac836bc5c64b19fa4cb881485acdafa6a0fd87044534608f50ae13920517c6f2ab2669f8edd4bc6cbe2fc1fc272264a819e2e34bc83 \ No newline at end of file diff --git a/examples/keystore/root_key.pub b/examples/keystore/root_key.pub deleted file mode 100644 index 7cf0139098..0000000000 --- a/examples/keystore/root_key.pub +++ /dev/null @@ -1 +0,0 @@ -{"keytype": "ed25519", "keyval": {"public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275"}} \ No newline at end of file diff --git a/examples/keystore/snapshot_key b/examples/keystore/snapshot_key deleted file mode 100644 index 9f999f9479..0000000000 --- a/examples/keystore/snapshot_key +++ /dev/null @@ -1 +0,0 @@ 
-cff2ed748f0281d65c8eb535e81dbfd0@@@@313030303030@@@@30353732326564313863306632626336656631636635313666636330383537376466393833313937363261303638336632373530303938623665346239646230@@@@1eda960b65beefb9a2a4afcc9be973f6@@@@ea7c468f9e5ab91e874b2a0fa692a18056293261886400e5976a6c884b13cdd3905b0188f41b0c2b2846d36dabbeecf4d9d1c87123b5b236ccbce24f4b30c9ba25d00ccce270d3da1d202d2fd27a0a007381bf2284c58bb01a812050086264617f94a47cd3931b9129c6b105eec4ff56d1176e6fbde6bf5ade513a0f1c7551cecb04c678582e7fd0656936a1f232fa3a739df3dc4af07ad69580f54b1dd366c78e96c07249732621929aac056f4892af07772011246101cfb9886558fe2cfd1d09e9c280cbecdf2fb8dbeded4a628ef2fa04d5360dae327ccf15a12a00f2f32964ca03abaeaeb60bbece207e09c897413b8a630331df568822880be86359181664aa0806770ab11e30663594 \ No newline at end of file diff --git a/examples/keystore/snapshot_key.pub b/examples/keystore/snapshot_key.pub deleted file mode 100644 index cebf0249c6..0000000000 --- a/examples/keystore/snapshot_key.pub +++ /dev/null @@ -1 +0,0 @@ -{"keytype": "ed25519", "keyval": {"public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e"}} \ No newline at end of file diff --git a/examples/keystore/targets_key b/examples/keystore/targets_key deleted file mode 100644 index f56772a55f..0000000000 --- a/examples/keystore/targets_key +++ /dev/null @@ -1 +0,0 @@ -a0fdf15a675c849cc7bd4cfb46273164@@@@313030303030@@@@62303735356165326132633766333932633137643234323961636166356135666433333234376163356665656330306362366363383332386430356133663831@@@@0d7ce5bc8687edc1a292ebbe203284b6@@@@155dee106e192009ed4dace179e514f518d403d7a83d136047683683090c8dd1e80d6d5f6bd480e1e9e76bf63a265dfdc7e5bce07418039c6457c6d454fd0114fd7311c8508ead020e59e0b5b2ee55101d49535980e598c02c70ecb7bca77a54d04040080ca9403cd23bd0ae4cc5e33131321d2a4c8a4544553f5c54b7af0986a437f37696b4e7d76efed24be9a4c6928542fb93cfd88018537703a1ff0f6ac8fcb625714a161aaa7a548b39963ded1481b47020b1346544a8a9d96f1eb03a6cad326af0f5be43c6756dc1d4e6c7dee550077d7689949a671acd370680d6b29e594675ade9d7525a4ad8af7a1b13a3e4692192d70c189115d52b814af5a53bc9d0528aec6c6417f6f7d0bcb3 \ No newline at end of file diff --git a/examples/keystore/targets_key.pub b/examples/keystore/targets_key.pub deleted file mode 100644 index 5e1d1e1ac9..0000000000 --- a/examples/keystore/targets_key.pub +++ /dev/null @@ -1 +0,0 @@ -{"keytype": "ed25519", "keyval": {"public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb"}} \ No newline at end of file diff --git a/examples/keystore/timestamp_key b/examples/keystore/timestamp_key deleted file mode 100644 index 42dc9bda51..0000000000 --- a/examples/keystore/timestamp_key +++ /dev/null @@ -1 +0,0 @@ -dd0f6b3bff19df2c4ab89e34a5b190e6@@@@313030303030@@@@37613435636666623966633230613439376661303631616635323365326636643039383236333638633162326262616231653531353066656262643265623566@@@@e6d72cc0a99144c4dbf509fa56092454@@@@10bb33112d4083f81d700740e78b315574a0d7b8c042921b1dcb7b8593caff1dbb10b8886ea2f4b7edcb49d3aabc9ec25db59dc0b121890555dc5d69291856f739b280de6fa6216e0ac92b9b95689b9f6ba1a414cb78ee6547968ea5ebd84e34972cf6d0e56cf0443b653f51d8e2742a7454e70039e548e4a69e97e73475940964307b5d5da440767531479b0c940dc8ffebafdf562e3a68d456f9438cb3c2253117180efd868b8b9fd4ea3e717501db8c0a9afe0bcdb34068eef4858103b2126b47d4c22a3d0f16cec0e5cd452201487eb6695139d8235a17a3c1a42fa7552d7ca45625b0000650f22851679ac00c7b71368d4dfc862a1823437b5cc244c282be0c01138fa39dc13511bd59 \ No newline at end of file diff --git a/examples/keystore/timestamp_key.pub b/examples/keystore/timestamp_key.pub deleted file mode 
100644 index 3224de6272..0000000000 --- a/examples/keystore/timestamp_key.pub +++ /dev/null @@ -1 +0,0 @@ -{"keytype": "ed25519", "keyval": {"public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c"}} \ No newline at end of file diff --git a/examples/repository/metadata.staged/root.json b/examples/repository/metadata.staged/root.json deleted file mode 100644 index 8128041ef4..0000000000 --- a/examples/repository/metadata.staged/root.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "signatures": [ - { - "keyid": "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata.staged/snapshot.json b/examples/repository/metadata.staged/snapshot.json deleted file mode 100644 index 3294c89ede..0000000000 --- a/examples/repository/metadata.staged/snapshot.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "signatures": [ - { - "keyid": "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309", - "method": "ed25519", - "sig": "f7f03b13e3f4a78a23561419fc0dd741a637e49ee671251be9f8f3fceedfc112e44ee3aaff2278fad9164ab039118d4dc53f22f94900dae9a147aa4d35dcfc0f" - } - ], - "signed": { - "_type": "Snapshot", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "root.json": { - "hashes": { - "sha256": "52bbb30f683d166fae5c366e4582cfe8212aacbe1b21ae2026dae58ec55d3701" - }, - "length": 1831 - }, - "targets.json": { - "hashes": { - "sha256": "f592d072e1193688a686267e8e10d7257b4ebfcf28133350dae88362d82a0c8a" - }, - "length": 1184 - }, - "targets.json.gz": { - "hashes": { - "sha256": "9f8aff5b55ee4b3140360d99b39fa755a3ea640462072b4fd74bdd72e6fe245a" - }, - "length": 599 - }, - "targets/project.json": { - "hashes": { - "sha256": "1f812e378264c3085bb69ec5f6663ed21e5882bbece3c3f8a0e8479f205ffb91" - }, - "length": 604 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata.staged/targets.json 
b/examples/repository/metadata.staged/targets.json deleted file mode 100644 index 6387d30ea4..0000000000 --- a/examples/repository/metadata.staged/targets.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "signatures": [ - { - "keyid": "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b", - "method": "ed25519", - "sig": "e9fd40008fba263758a3ff1dc59f93e42a4910a282749af915fbbea1401178e5a012090c228f06db1deb75ad8ddd7e40635ac51d4b04301fce0fd720074e0209" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba": { - "keytype": "ed25519", - "keyval": { - "public": "b6e40fb71a6041212a3d84331336ecaa1f48a0c523f80ccc762a034c727606fa" - } - } - }, - "roles": [ - { - "keyids": [ - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba" - ], - "name": "targets/project", - "paths": [ - "/project/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata.staged/targets.json.gz b/examples/repository/metadata.staged/targets.json.gz deleted file mode 100644 index 85fa089054..0000000000 Binary files a/examples/repository/metadata.staged/targets.json.gz and /dev/null differ diff --git a/examples/repository/metadata.staged/targets/project.json b/examples/repository/metadata.staged/targets/project.json deleted file mode 100644 index 57f4195ab3..0000000000 --- a/examples/repository/metadata.staged/targets/project.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "signatures": [ - { - "keyid": "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba", - "method": "ed25519", - "sig": "9095bf34b0cbf9790465c0956810cb3729bc96beed8ee7e42d98997b1e8ec0a6780e57556570687df4a559d563a569258eac15fd9832b2e8e6d048cc32b5f603" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": {}, - "roles": [] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/project/file3.txt": { - "hashes": { - "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b" - }, - "length": 28 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata.staged/timestamp.json b/examples/repository/metadata.staged/timestamp.json deleted file mode 100644 index 93e124e167..0000000000 --- a/examples/repository/metadata.staged/timestamp.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "signatures": [ - { - "keyid": "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4", - "method": "ed25519", - "sig": "90d2a06c7a6c2a6a93a9f5771eb2e5ce0c93dd580bebc2080d10894623cfd6eaedf4df84891d5aa37ace3ae3736a698e082e12c300dfe5aee92ea33a8f461f02" - } - ], - "signed": { - "_type": "Timestamp", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "snapshot.json": { - "hashes": { - "sha256": "c14aeb4ac9f4a8fc0d83d12482b9197452f6adf3eb710e3b1e2b79e8d14cb681" - }, - "length": 1007 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata/root.json b/examples/repository/metadata/root.json deleted file mode 100644 index 8128041ef4..0000000000 --- a/examples/repository/metadata/root.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "signatures": [ - { - "keyid": 
"f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6", - "method": "ed25519", - "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809f32c890c6b801e30158ac4efe0d4d87317223077784c7a378834249d048306" - } - ], - "signed": { - "_type": "Root", - "consistent_snapshot": false, - "expires": "2030-01-01T00:00:00Z", - "keys": { - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4": { - "keytype": "ed25519", - "keyval": { - "public": "72378e5bc588793e58f81c8533da64a2e8f1565c1fcc7f253496394ffc52542c" - } - }, - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b": { - "keytype": "ed25519", - "keyval": { - "public": "68ead6e54a43f8f36f9717b10669d1ef0ebb38cee6b05317669341309f1069cb" - } - }, - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6": { - "keytype": "ed25519", - "keyval": { - "public": "66dd78c5c2a78abc6fc6b267ff1a8017ba0e8bfc853dd97af351949bba021275" - } - }, - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309": { - "keytype": "ed25519", - "keyval": { - "public": "01c61f8dc7d77fcef973f4267927541e355e8ceda757e2c402818dad850f856e" - } - } - }, - "roles": { - "root": { - "keyids": [ - "f2d5020d08aea06a0a9192eb6a4f549e17032ebefa1aa9ac167c1e3e727930d6" - ], - "threshold": 1 - }, - "snapshot": { - "keyids": [ - "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309" - ], - "threshold": 1 - }, - "targets": { - "keyids": [ - "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b" - ], - "threshold": 1 - }, - "timestamp": { - "keyids": [ - "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4" - ], - "threshold": 1 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata/snapshot.json b/examples/repository/metadata/snapshot.json deleted file mode 100644 index 3294c89ede..0000000000 --- a/examples/repository/metadata/snapshot.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "signatures": [ - { - "keyid": "fce9cf1cc86b0945d6a042f334026f31ed8e4ee1510218f198e8d3f191d15309", - "method": "ed25519", - "sig": "f7f03b13e3f4a78a23561419fc0dd741a637e49ee671251be9f8f3fceedfc112e44ee3aaff2278fad9164ab039118d4dc53f22f94900dae9a147aa4d35dcfc0f" - } - ], - "signed": { - "_type": "Snapshot", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "root.json": { - "hashes": { - "sha256": "52bbb30f683d166fae5c366e4582cfe8212aacbe1b21ae2026dae58ec55d3701" - }, - "length": 1831 - }, - "targets.json": { - "hashes": { - "sha256": "f592d072e1193688a686267e8e10d7257b4ebfcf28133350dae88362d82a0c8a" - }, - "length": 1184 - }, - "targets.json.gz": { - "hashes": { - "sha256": "9f8aff5b55ee4b3140360d99b39fa755a3ea640462072b4fd74bdd72e6fe245a" - }, - "length": 599 - }, - "targets/project.json": { - "hashes": { - "sha256": "1f812e378264c3085bb69ec5f6663ed21e5882bbece3c3f8a0e8479f205ffb91" - }, - "length": 604 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata/targets.json b/examples/repository/metadata/targets.json deleted file mode 100644 index 6387d30ea4..0000000000 --- a/examples/repository/metadata/targets.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "signatures": [ - { - "keyid": "93ec2c3dec7cc08922179320ccd8c346234bf7f21705268b93e990d5273a2a3b", - "method": "ed25519", - "sig": "e9fd40008fba263758a3ff1dc59f93e42a4910a282749af915fbbea1401178e5a012090c228f06db1deb75ad8ddd7e40635ac51d4b04301fce0fd720074e0209" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - 
"ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba": { - "keytype": "ed25519", - "keyval": { - "public": "b6e40fb71a6041212a3d84331336ecaa1f48a0c523f80ccc762a034c727606fa" - } - } - }, - "roles": [ - { - "keyids": [ - "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba" - ], - "name": "targets/project", - "paths": [ - "/project/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata/targets.json.gz b/examples/repository/metadata/targets.json.gz deleted file mode 100644 index 85fa089054..0000000000 Binary files a/examples/repository/metadata/targets.json.gz and /dev/null differ diff --git a/examples/repository/metadata/targets/project.json b/examples/repository/metadata/targets/project.json deleted file mode 100644 index 57f4195ab3..0000000000 --- a/examples/repository/metadata/targets/project.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "signatures": [ - { - "keyid": "ce3e02e72980b09ca6f5efa68197130b381921e5d0675e2e0c8f3c47e0626bba", - "method": "ed25519", - "sig": "9095bf34b0cbf9790465c0956810cb3729bc96beed8ee7e42d98997b1e8ec0a6780e57556570687df4a559d563a569258eac15fd9832b2e8e6d048cc32b5f603" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": {}, - "roles": [] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "/project/file3.txt": { - "hashes": { - "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b" - }, - "length": 28 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/metadata/timestamp.json b/examples/repository/metadata/timestamp.json deleted file mode 100644 index 93e124e167..0000000000 --- a/examples/repository/metadata/timestamp.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "signatures": [ - { - "keyid": "1a2b4110927d4cba257262f614896179ff85ca1f1353a41b5224ac474ca71cb4", - "method": "ed25519", - "sig": "90d2a06c7a6c2a6a93a9f5771eb2e5ce0c93dd580bebc2080d10894623cfd6eaedf4df84891d5aa37ace3ae3736a698e082e12c300dfe5aee92ea33a8f461f02" - } - ], - "signed": { - "_type": "Timestamp", - "expires": "2030-01-01T00:00:00Z", - "meta": { - "snapshot.json": { - "hashes": { - "sha256": "c14aeb4ac9f4a8fc0d83d12482b9197452f6adf3eb710e3b1e2b79e8d14cb681" - }, - "length": 1007 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/examples/repository/targets/file1.txt b/examples/repository/targets/file1.txt deleted file mode 100644 index 7bf3499f13..0000000000 --- a/examples/repository/targets/file1.txt +++ /dev/null @@ -1 +0,0 @@ -This is an example target file. \ No newline at end of file diff --git a/examples/repository/targets/file2.txt b/examples/repository/targets/file2.txt deleted file mode 100644 index 606f18efc8..0000000000 --- a/examples/repository/targets/file2.txt +++ /dev/null @@ -1 +0,0 @@ -This is an another example target file. \ No newline at end of file diff --git a/examples/repository/targets/project/file3.txt b/examples/repository/targets/project/file3.txt deleted file mode 100644 index 60464604aa..0000000000 --- a/examples/repository/targets/project/file3.txt +++ /dev/null @@ -1 +0,0 @@ -This is role1's target file. 
\ No newline at end of file diff --git a/setup.py b/setup.py index 868dfed0f5..55ef61a5fc 100755 --- a/setup.py +++ b/setup.py @@ -95,20 +95,21 @@ 'Operating System :: Microsoft :: Windows', 'Programming Language :: Python', 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Security', 'Topic :: Software Development' ], - install_requires = ['iso8601', 'six', 'securesystemslib>=0.10.2'], + install_requires = ['iso8601', 'six', 'securesystemslib>=0.10.7'], packages = find_packages(exclude=['tests']), scripts = [ 'tuf/scripts/basic_client.py', - 'tuf/scripts/tufcli.py' + 'tuf/scripts/tufcli.py', + 'tuf/scripts/simple_server.py' ] ) diff --git a/tests/.coveragerc b/tests/.coveragerc index ece205ca39..24b2cd4651 100644 --- a/tests/.coveragerc +++ b/tests/.coveragerc @@ -6,8 +6,6 @@ branch = True exclude_lines = pragma: no cover def check_crypto_libraries - def _get_password - def _prompt def __str__ if __name__ == .__main__.: diff --git a/tests/aggregate_tests.py b/tests/aggregate_tests.py index 4b9f6dd41a..b65077a53d 100755 --- a/tests/aggregate_tests.py +++ b/tests/aggregate_tests.py @@ -36,13 +36,6 @@ import glob import random -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest - # Generate a list of pathnames that match a pattern (i.e., that begin with # 'test_' and end with '.py'. A shell-style wildcard is used with glob() to # match desired filenames. 
All the tests matching the pattern will be loaded diff --git a/tests/repository_data/client/map.json b/tests/repository_data/client/map.json new file mode 100644 index 0000000000..ae5aa1ba9c --- /dev/null +++ b/tests/repository_data/client/map.json @@ -0,0 +1 @@ +{"repositories": {"repository1": ["http://localhost:30001"], "repository2": ["http://localhost:30002"]}, "mapping": [{"paths": ["*3.txt"], "terminating": false, "repositories": ["repository1", "repository2"]}, {"paths": ["*"], "repositories": ["repository2"]}]} diff --git a/tests/repository_data/client/test_repository/metadata/current/1.root.json b/tests/repository_data/client/test_repository/metadata/current/1.root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/client/test_repository/metadata/current/1.root.json +++ b/tests/repository_data/client/test_repository/metadata/current/1.root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/current/1.root.json.gz b/tests/repository_data/client/test_repository/metadata/current/1.root.json.gz deleted file mode 100644 index 741b2d2949..0000000000 
Binary files a/tests/repository_data/client/test_repository/metadata/current/1.root.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/current/role1.json b/tests/repository_data/client/test_repository/metadata/current/role1.json index 96d921b5eb..332cd8050f 100644 --- a/tests/repository_data/client/test_repository/metadata/current/role1.json +++ b/tests/repository_data/client/test_repository/metadata/current/role1.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], @@ -34,6 +34,7 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file3.txt": { "hashes": { diff --git a/tests/repository_data/client/test_repository/metadata/current/role1.json.gz b/tests/repository_data/client/test_repository/metadata/current/role1.json.gz deleted file mode 100644 index 33bdf2ea23..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/current/role1.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/current/role2.json b/tests/repository_data/client/test_repository/metadata/current/role2.json index 20b1206a70..46e415a789 100644 --- a/tests/repository_data/client/test_repository/metadata/current/role2.json +++ b/tests/repository_data/client/test_repository/metadata/current/role2.json @@ -1,18 +1,18 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "8fdca8154157e983d86efb16917ad973941dfa75a47d99a88b393d0955f1508aff55b66d0592ff2ad2f431d6826d6544009a921b5aae503f3f795b09ed549f0a" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": {}, "version": 1 } diff --git a/tests/repository_data/client/test_repository/metadata/current/role2.json.gz b/tests/repository_data/client/test_repository/metadata/current/role2.json.gz deleted file mode 100644 index 3d23a94cff..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/current/role2.json.gz and /dev/null differ diff --git 
a/tests/repository_data/client/test_repository/metadata/current/root.json b/tests/repository_data/client/test_repository/metadata/current/root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/client/test_repository/metadata/current/root.json +++ b/tests/repository_data/client/test_repository/metadata/current/root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": 
"52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/current/snapshot.json b/tests/repository_data/client/test_repository/metadata/current/snapshot.json index 3ee5468cf9..a713f807b6 100644 --- a/tests/repository_data/client/test_repository/metadata/current/snapshot.json +++ b/tests/repository_data/client/test_repository/metadata/current/snapshot.json @@ -1,13 +1,12 @@ { "signatures": [ { - "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", - "method": "ed25519", - "sig": "01bc8667e2afbceef3df6ec6782088c77e59bdb5a29b75634200528bdedc8af9694e38a672fa2cd00051fb25ac12d3079bbc48f424783d23fb532b7508adf40f" + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" } ], "signed": { - "_type": "Snapshot", + "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { @@ -17,16 +16,13 @@ "version": 1 }, "root.json": { - "hashes": { - "sha256": 
"294a5eea95c8aaed509c3a559c79044a336b6912395f28d5502aa93f0bfd2774" - }, - "length": 3329, "version": 1 }, "targets.json": { "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/current/snapshot.json.gz b/tests/repository_data/client/test_repository/metadata/current/snapshot.json.gz deleted file mode 100644 index a57d005144..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/current/snapshot.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/current/targets.json b/tests/repository_data/client/test_repository/metadata/current/targets.json index 0620bfe42a..972034d03f 100644 --- a/tests/repository_data/client/test_repository/metadata/current/targets.json +++ b/tests/repository_data/client/test_repository/metadata/current/targets.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", - "method": "ed25519", - "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ @@ -36,10 +36,11 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file1.txt": { "custom": { - "file_permissions": "664" + "file_permissions": "644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", diff --git a/tests/repository_data/client/test_repository/metadata/current/targets.json.gz b/tests/repository_data/client/test_repository/metadata/current/targets.json.gz deleted file mode 100644 index 5a5190660b..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/current/targets.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/current/timestamp.json b/tests/repository_data/client/test_repository/metadata/current/timestamp.json index 43adf289a1..2cc752b30c 100644 --- a/tests/repository_data/client/test_repository/metadata/current/timestamp.json +++ b/tests/repository_data/client/test_repository/metadata/current/timestamp.json @@ -1,23 +1,23 @@ { "signatures": [ { - "keyid": "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1", - "method": "ed25519", - "sig": "0425f403669dc28aeda67015b56b62b724f7f36899e5bfba1edd9bc059a7921695de4fadcffe526e3e18ddef9b5b7bdff098dc7d058848ef9cddcad29000b70f" + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": 
"7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" } ], "signed": { - "_type": "Timestamp", + "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { - "sha256": "5429cb36aa60a0a4e4d4ebe06cb8b1e4e2ec22e0535c17096bfaf8fd187204e4" + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" }, - "length": 678, + "length": 554, "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/current/timestamp.json.gz b/tests/repository_data/client/test_repository/metadata/current/timestamp.json.gz deleted file mode 100644 index ac8247df17..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/current/timestamp.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/previous/1.root.json b/tests/repository_data/client/test_repository/metadata/previous/1.root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/1.root.json +++ b/tests/repository_data/client/test_repository/metadata/previous/1.root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + 
"public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/previous/1.root.json.gz b/tests/repository_data/client/test_repository/metadata/previous/1.root.json.gz deleted file mode 100644 
index 741b2d2949..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/1.root.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/previous/role1.json b/tests/repository_data/client/test_repository/metadata/previous/role1.json index 96d921b5eb..332cd8050f 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/role1.json +++ b/tests/repository_data/client/test_repository/metadata/previous/role1.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], @@ -34,6 +34,7 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file3.txt": { "hashes": { diff --git a/tests/repository_data/client/test_repository/metadata/previous/role1.json.gz b/tests/repository_data/client/test_repository/metadata/previous/role1.json.gz deleted file mode 100644 index 33bdf2ea23..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/role1.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/previous/role2.json b/tests/repository_data/client/test_repository/metadata/previous/role2.json index 20b1206a70..46e415a789 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/role2.json +++ b/tests/repository_data/client/test_repository/metadata/previous/role2.json @@ -1,18 +1,18 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "8fdca8154157e983d86efb16917ad973941dfa75a47d99a88b393d0955f1508aff55b66d0592ff2ad2f431d6826d6544009a921b5aae503f3f795b09ed549f0a" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": {}, "version": 1 } diff --git a/tests/repository_data/client/test_repository/metadata/previous/role2.json.gz b/tests/repository_data/client/test_repository/metadata/previous/role2.json.gz deleted file mode 100644 index 3d23a94cff..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/role2.json.gz and /dev/null differ 
diff --git a/tests/repository_data/client/test_repository/metadata/previous/root.json b/tests/repository_data/client/test_repository/metadata/previous/root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/root.json +++ b/tests/repository_data/client/test_repository/metadata/previous/root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": 
{ - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/previous/snapshot.json b/tests/repository_data/client/test_repository/metadata/previous/snapshot.json index 3ee5468cf9..a713f807b6 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/snapshot.json +++ b/tests/repository_data/client/test_repository/metadata/previous/snapshot.json @@ -1,13 +1,12 @@ { "signatures": [ { - "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", - "method": "ed25519", - "sig": "01bc8667e2afbceef3df6ec6782088c77e59bdb5a29b75634200528bdedc8af9694e38a672fa2cd00051fb25ac12d3079bbc48f424783d23fb532b7508adf40f" + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" } ], "signed": { - "_type": "Snapshot", + "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { @@ -17,16 +16,13 @@ "version": 1 }, "root.json": { - "hashes": { - "sha256": 
"294a5eea95c8aaed509c3a559c79044a336b6912395f28d5502aa93f0bfd2774" - }, - "length": 3329, "version": 1 }, "targets.json": { "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/previous/snapshot.json.gz b/tests/repository_data/client/test_repository/metadata/previous/snapshot.json.gz deleted file mode 100644 index a57d005144..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/snapshot.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/previous/targets.json b/tests/repository_data/client/test_repository/metadata/previous/targets.json index 0620bfe42a..972034d03f 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/targets.json +++ b/tests/repository_data/client/test_repository/metadata/previous/targets.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", - "method": "ed25519", - "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ @@ -36,10 +36,11 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file1.txt": { "custom": { - "file_permissions": "664" + "file_permissions": "644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", diff --git a/tests/repository_data/client/test_repository/metadata/previous/targets.json.gz b/tests/repository_data/client/test_repository/metadata/previous/targets.json.gz deleted file mode 100644 index 5a5190660b..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/targets.json.gz and /dev/null differ diff --git a/tests/repository_data/client/test_repository/metadata/previous/timestamp.json b/tests/repository_data/client/test_repository/metadata/previous/timestamp.json index 43adf289a1..2cc752b30c 100644 --- a/tests/repository_data/client/test_repository/metadata/previous/timestamp.json +++ b/tests/repository_data/client/test_repository/metadata/previous/timestamp.json @@ -1,23 +1,23 @@ { "signatures": [ { - "keyid": "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1", - "method": "ed25519", - "sig": "0425f403669dc28aeda67015b56b62b724f7f36899e5bfba1edd9bc059a7921695de4fadcffe526e3e18ddef9b5b7bdff098dc7d058848ef9cddcad29000b70f" + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": 
"7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" } ], "signed": { - "_type": "Timestamp", + "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { - "sha256": "5429cb36aa60a0a4e4d4ebe06cb8b1e4e2ec22e0535c17096bfaf8fd187204e4" + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" }, - "length": 678, + "length": 554, "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/client/test_repository/metadata/previous/timestamp.json.gz b/tests/repository_data/client/test_repository/metadata/previous/timestamp.json.gz deleted file mode 100644 index ac8247df17..0000000000 Binary files a/tests/repository_data/client/test_repository/metadata/previous/timestamp.json.gz and /dev/null differ diff --git a/tests/repository_data/generate.py b/tests/repository_data/generate.py index 27975e5c71..18064de7ee 100755 --- a/tests/repository_data/generate.py +++ b/tests/repository_data/generate.py @@ -135,13 +135,6 @@ repository.targets('role1').expiration = datetime.datetime(2030, 1, 1, 0, 0) repository.targets('role2').expiration = datetime.datetime(2030, 1, 1, 0, 0) -# Compress the top-level role metadata so that the unit tests have a -# pre-generated example of compressed metadata. -repository.root.compressions = ['gz'] -repository.targets.compressions = ['gz'] -repository.snapshot.compressions = ['gz'] -repository.timestamp.compressions = ['gz'] - # Create the actual metadata files, which are saved to 'metadata.staged'. if not options.dry_run: repository.writeall() diff --git a/tests/repository_data/generate_project_data.py b/tests/repository_data/generate_project_data.py index dabd6dc4b6..1d25681c6d 100755 --- a/tests/repository_data/generate_project_data.py +++ b/tests/repository_data/generate_project_data.py @@ -45,14 +45,14 @@ # assigned verification keys, which clients use to verify the signatures created # by the corresponding private keys. project_public = import_rsa_publickey_from_file(project_key_file + '.pub') -targets_public = import_rsa_publickey_from_file(targets_key_file + '.pub') -delegation_public = import_rsa_publickey_from_file(delegation_key_file + '.pub') +targets_public = import_ed25519_publickey_from_file(targets_key_file + '.pub') +delegation_public = import_ed25519_publickey_from_file(delegation_key_file + '.pub') # Import the private keys. These private keys are needed to generate the # signatures included in metadata. 
project_private = import_rsa_privatekey_from_file(project_key_file, 'password') -targets_private = import_rsa_privatekey_from_file(targets_key_file, 'password') -delegation_private = import_rsa_privatekey_from_file(delegation_key_file, 'password') +targets_private = import_ed25519_privatekey_from_file(targets_key_file, 'password') +delegation_private = import_ed25519_privatekey_from_file(delegation_key_file, 'password') os.mkdir("project") os.mkdir("project/targets") diff --git a/tests/repository_data/keystore/delegation_key b/tests/repository_data/keystore/delegation_key index 9656a6fb3b..461169d63c 100644 --- a/tests/repository_data/keystore/delegation_key +++ b/tests/repository_data/keystore/delegation_key @@ -1 +1 @@ -f027d1bf6ee9918c9d7af5f5957c5755@@@@100000@@@@7b8556aa892f5ca3fe5473355c1f8f419ae86017d65176e8b818d70d06beda58@@@@512b9499b34212d1c49dc612734858d9@@@@613469c7b1ab4f0643074560d955cb33209981bbb2e26e4dc4da7727a9963e76e623182f9e6f189e7f02d5feb52910b797c1c32fedce5396012870c402ce2dfaed05ab373ca693472672f28ecd39644ccfdb9c44857385a5e67ab447c91ad8efc037f290d92921dc41acf5dd350326f5c58108fc8c64e290c8244dbbd69a93ef32f6dd109f065a9a080b913d6cd2333262a8866509dd2b93181f305be4a633747a4c9a73d19dd83eb198463694a1560f773ebb586e2147790b4ecf6b60db8132537ee206ba7286d2a385f82ea0a620e744e6a24d807c93a0a133bf70c573d6ac1a54ef37db24b243c775e45cf61d6ed1268777b316939ea91379ea370d1ad2ee14cee5188f1f2e121dbac6fd \ No newline at end of file +68593a508472ad3007915379e6b1f3c0@@@@100000@@@@615986af4d1ba89aeadc2f489f89b0e8d46da133a6f75c7b162b8f99f63f86ed@@@@8319255f9856c4f40f9d71bc10e79e5d@@@@1dc7b20f1c668a1f544dc39c7a9fcb3c4a4dd34d1cc8c9d8f779bab026cf0b8e0f46e53bc5ed20bf0e5048b94a5d2ea176e79c12bcc7daa65cd55bf810deebeec5bc903ce9e5316d7dbba88f1a2b51d3f9bc782f8fa9b21dff91609ad0260e21a2039223f816d0fe97ace2e204d0025d327b38d27aa6cd87e85aa8883bfcb6d12f93155d72ffd3c7717a0570cf9811eb6d6a340baa0f27433315d83322c685fec02053ff8c173c4ebf91a258e83402f39546821e3352baa7b246e33b2a573a8ff7b289682407abbcb9184249d4304db68d3bf8e124e94377fd62dde5c4f3b7617d483776345154d047d139b1e559351577da315f54e16153c510159e1908231574bcf49c4f96cafe6530e86a09e9eee47bcff78f2fed2984754c895733938999ff085f9e3532d7174fd76dc09921506dd2137e16ec4926998f5d9df8a8ffb3e6649c71bc32571b2e24357739fa1a56be \ No newline at end of file diff --git a/tests/repository_data/keystore/delegation_key.pub b/tests/repository_data/keystore/delegation_key.pub index 1536e82011..d600bffbfa 100644 --- a/tests/repository_data/keystore/delegation_key.pub +++ b/tests/repository_data/keystore/delegation_key.pub @@ -1 +1 @@ -{"keytype": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"], "keyval": {"public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a"}} \ No newline at end of file +{"keyval": {"public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9"}, "keytype": "ed25519", "scheme": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"]} \ No newline at end of file diff --git a/tests/repository_data/keystore/root_key b/tests/repository_data/keystore/root_key index c54376920b..1b8fb14529 100644 --- a/tests/repository_data/keystore/root_key +++ b/tests/repository_data/keystore/root_key @@ -1,42 +1,42 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-EDE3-CBC,6015A3049DBF1CD5 - -KYD7wtT4LYqb8GjyRru/4w7iX9AXzUEjMMCUNCtRsOppaqtk6W1JPEeVmoAPwVTv -P19RUS6oF0Kp6R1SJGJOIBpNUzmvGdXzsWtjQJF9g5oNftrS8gpM25+lAViMy5W/ -yxZ1nNsifG+fqRQ291gFrwYuLhHb5WYyOtVAf5FakEdNjMiD1F2Jgq7HdkA9Exxg 
-dm/6yDmB7b1oJtP7qmJuQLn3UuzbZ/qBMA8WkFpgveyxbPCA+8AmoodlArXokByr -iSXUAFTznPzUCNq/nPOuq8JjTGMEyfamD2czi40cRfG+Ix+Dq8HHRINeT51xY3uF -+aY+2eTQaSb2WiJxPYw5leXQuIuIU0DwCan2yVUiMZhXv/VymqBj3126jJFJgCYH -AZYpV6sHzO415NMceeIEfyJFyum0I59urAiL7xqv9w6aXSjtL/sb9vkwH0VpFgQt -d6za1ol+teZJuHOvGADvcfkX0EdFsltb9/R0ALNVP4+NEOZxYV3Ilc0w4TioZsBo -Dbim/5oZBH/ZcfSE2BURHpeoPCBKuEVIu7MjINbh7TNOWNybPZORpNiUrwwbEwxM -aNLyfwJR7zx2sWuSknJIyDGTssUCpDV7IwZWMOFs3LgAM8/r45Im0IDU0zxidSwm -UZFhSrIgsPSgnx2HbF6hmIQk5DnSqlRejhBVPd+oHMZwvcBm3NxfwEQ5xJ8UovZn -QiF4ztgwXjI/RltVZR8fiqNdxrOS0KITZ8UC3DuM8yI45XhLH2aRyuXGxRmmMDZa -2J8Y/Rn1r0FTlWreV16hC9B8546Uxd131ifsJQJHozsJVbzjwRNIWgEuNwLK8t11 -2C/Dpz/uS7qaC6jWiBxVCuux/t3pFhuz1sovfYbMdQDpSKXZnlEeulzgSV94rBSS -yJvcv6XEliM+/V+A5o0FFIDcpUdAWe48ExexI7SoIKDaHYq/roVA5O513FL5HRqf -Oy/i3DAtuadkp34A5f1vjFfDr5fw6FOB0BtwPNZRVgIxJYqMv2IDxVSq5MSSfTLf -7Ju1sUeXPLTd/r7HM8CURo0zYeaf+NNs9zqTm17IbRHiwPKBhsZExFdYvJx8Wa9I -6Z5qZsoeMSoKK2EjRCKVTbft4g8AgEaRsRv5e0v3+ns008vbx12os5RMDTncVO8A -/zc+AX8Td8m2Vniz4PKR6lUEVnE972rcu5RVZb7nr1tufh6lxshCQ+guqBzaEg/i -+ts+cGEQf7ZensqWnSffFMp1tKyxmYNuflU7BHQdZfiVx4ezIP1rVka5VNuRIZsv -zPV6uu3OALgH4LUj3zJBD6nVX+E5qx2A/Be5nXRyTT6KDLWIFq0EaQN/kJUUT+v4 -Qm+bntwLcCKsPLitH94e3e6IFHCrXobLEbfE5Y8yXdIB8mRI2IfI1IYOv1BtvF1T -ukND/fN4gZY6fgZ4adUm+17VV0kjNOO3dkEJPgGgH2XsB4fwhj3NQRg/8RN4Xy0z -BtNSGFiM3Z+Sp98fKmm8A0tjGmax1vFt5QygPjyFvor/I2SXMwgBqyTpvoQ8tTZp -ZWw6fGprHx5qkpd2XHksG2o8lG70X8TnKQte9BUUfRE3c51vhILlPT0HWflmxMuT -qivGLKSjrsinoCXd005UQPyLb22r31owpG70EuG4YM4XIgR3AeW3IlWeBL47o8pk -JnSA6wxPb6Vfq3zbnCCk/1RGRZ/2Qch3xE2E5UNOsikbFAhNfRjtAUmiDdlhGEm3 -BJMI10ntVNkNbX3bVkT084OIhgP7HDc9TnbTvSNYkZ0lF2fEaVRQebP0NKrWQa5b -N+xRBbPQpqzb+hD15AVZt3D/jPCYsLHnpOMLIV/wnNnpzpgwEjqoYazld95J4xBk -w3D4MyM/7GIKUNYH7VVr8l40PqNECxAV/gI0P1DuwRagGRsqjf2DknQADDVma1V5 -lHxPgbfcGUPoxKLHxme6r/2qhhuLxF1GBKy0tfNmBupYYtRjKRXex76j0y1yvg/3 -21i5Y63NtFpw/1rvgEeQ5WcProT4Xe9fr4v/mvxP9oYaATPs6VQzPFXzqkrMEel3 -/EF2j2KsLF5HpiSlJmEfXU6zqYYAuk5p8fX7FZ2PCLaaxMZBR/2Qe12Ncjx15D0X -yCa9x1vSZX0jZq0MciEpxs2vEKxqKwJpYHLM0+AmjMuhQQtTI40MOG8TVlBwSf2R -RObEFSvAJqUz9x/Wxog48vNkOz4vB/ezjlOcWTh3upGlHowIwvKTGUCzjWZUNYOG -cKfMoqmA5C6BFyt5EfPrTgx1JhHA7qYVN1/aVcXec4nkO5xIFBglJrFDxLfwXWu0 -fvRsmA5XCPcnvqH7HfZotg2qUfteSHzaNOhT7xQHMEvDXB6rZDNuFSXC3n53JM9l ------END RSA PRIVATE KEY----- \ No newline at end of file +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIsnvMDGLfuE8CAggA +MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBBN6jE1eBpMFrFfAs0FkzHQBIIH +EAacUao28rDMs/rL1H4hrWN6OqkcAzjMG/dNDDiLtljpHsYHYWg417cz4eVsWVw7 +SzP005D5qu78oBa35Yg22zW3vlHS1RDlPKFpYrFliwgaWaxVx7CrKhGXq8CeoCnS +aymN43o493TExHUOGgjTU7eLPXk8eVZ5aO+ml+i/YyPldrEwcghsBDD27zwXOgZk +qwFoCxCWVUCRcywaTGGvRQ13GVcLYlj+CjTzp2ctXzcWhGK77kPhtVFXpGO00vVn +7i2kyZm8tLXXFJ+fAMm3OCyyIUnFlf2KuYRECksUvGbscgIH/W2O6qvq7klgappB +xiyI8dlBeOboxtdbnqoSkodac0pfY8a7b0SIw5H6U/2hiNEQx2o/gFMFq8OklwiW +gO3PCjtG/bXFYqBjzBtBdAQ77UEv3pbeZNReLx7gCn7YIyLQ5ltqG2Kmbp8pb08w +hFJm6CcHkBP4GkfzNGtagJCbqX0ys5yG2DxqGZAGPynydwr3EbrvF8UToAaVpgR4 +7RqVk/uZf48UM6M/I8Q0aHz1fja9pwY7H/syyBs2R3Pn98O2HxZ8futqxefCImbs +DL6cd+VCFjmgsIQBYku2eqYEm98MLWHsiLbNPnyjgmrMElBVWNBlYsYXxqgL+lR1 +fvNBZlYCr7ZthfD+DtxmRU3rApl2Hi22x5IwI7N/4B3/+nRKJLRoc1gW+kekE91j +PRB30iLR+a5FkFA0u6ymRw7TvYY2u8Y8zbWwhC1rtCTCDcFAOGMGiDxSwbJX7e9y +cjGPZH+9daNEH03B51MlGwPee511ehtMa1RhWWCGsMsWzeOpIqy1yzPxGkAO0+Wo +ReNgtlOcjKanW6gdOpiGAeZRKBBYKZhAj8ogs958ZWYRVpNUzNs8ihMRuH4PSJzE +BrJFqgvk+YXwZFLw2ugZmjPRdjbCJOVdh25xAMy+hrlL4ZwWT50WHYsfGDUeM/kq +uwidpU94Xi4C5MJww0Z7grztbmUqRqNGiPyqGakgB7LtEwPICOaxeHSYOu+PTklF 
+0Sl2aEH7VuptfVknndd8AX0ozMrSFe0jh5I5CA+Bu315EJfHgHiYB31VpKKpY6Bn +Naeb2rH+CpajLNC7ULcDRpHRZNkolX6nHLf63PGPhD6x1HdJWlfQAXk7+mNFtVZ5 +ugXD/6Hei9w0JYAbPr0Up2tw2KPIRW75CFJdpIwqTdV20ZfP4kbUZOfOK9ltWyB1 +2q6OXliEfvzRYXI8TbUfZ6RpgH6j8VWia/ER/q4O0cKoQ5UfP3RgKil2Jz3QJTYe +E6DVJkv5NtSRK7ZkdtI8SZCkOQ0Rhz0NKmQhDlftoQOYWmLkPJenQVNxra6hOO2l +6cZ2e1AVv+8csR/22Qipve8IRfqLsH48dKP3cXZSM/7CaF/q1Wgkc+nZBOLVpK5P +Q6+bCljxtdlbR5bzTrbz2ELorGCH3bNg+O73MD27wtNbkb2ZmleVXc5WU733CKr1 +8edMWaAtWMkLNUlCJ8bnBOGb2sIy9PXzEWn1kECDhQSgcSaBnIglU03z/5/9HLpc +8lpC0yUTIhwX0zr8G0ZpirIcfvjNhq4qksR8bahc8eNkf6Rn3sB4E8uSv0UbxG/V +OibWXabyb5t5J261+WWmalz02Q4iQso0YIUOZBiKAlY4mIf2sWQX4rFSWconYBb5 +me5+BBVfJN7WO0RGG8aliqj8op/BkwhS2P1cWKntIm7DWKr5QyU/oj044ZpxkwZd +TL5n+puYkijgUkcvab+ew9x+f3speWdv2a9Zuk3mKEO4TcKnchE/4M/mIzoX/bmI +KLsZ2c7WUySfGzFBEZUY6NUR3bkehIDOY7fCnS0Dz7rSbImNVsMp8QbgANvK6YL8 +M6MJfZKWh6VEBm2athFV8Rc+q1Bf0VMO5+/8ay+GSFN+EIbPZZOwmNpzlIg6m0LS +ix+7/k1H3vjHwhxRa3g/2vqoY/mwdvjb1+bMsejygGV0vF57R5Zlm842ZWPaVQYz +T5gElaP+BXDIo7pkXMOrvr9oKkDFWPhhKpfzm94i5QUpYGJIbr811e4tQzh9WfrX +nnaARPhUrE+Yhy5ghWMDwA8So2FoUlCzS9zAW5cgMPdwvn/zraY0HCp8wGW/yNl6 +jhwSvmUa2SnQkPuR977lkWodLOU9mwOnvZqplmhprh4w+znoPcuTNM5XQ7Rxulfx +ZOJZ7NjLr3t2gY2Ni4Su961GcG9/1qgb/gbh+epzpIWaMSfJhXwBv/TmDppg1IB/ +q1Y2ICtZX0V6/szszPsPpBcqRpMAa6T12pL/J6OVYcnSrX6lzY8uVzM4Va1M/Fwn +C45VwvBK0StZY2T+CWdAoG20wA9IJhSr8xajCxR1UNsNgrQ84dJN6KduURbNmMTM +m5fryjMFAoykt+cz1TOq7G3sFLslYkWH8DP1mdknC1uC +-----END ENCRYPTED PRIVATE KEY----- diff --git a/tests/repository_data/keystore/root_key.pub b/tests/repository_data/keystore/root_key.pub index a04eb42080..11cc245f38 100644 --- a/tests/repository_data/keystore/root_key.pub +++ b/tests/repository_data/keystore/root_key.pub @@ -1,11 +1,11 @@ -----BEGIN PUBLIC KEY----- -MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l -Vc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx -Jkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7 -MpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK -hI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2 -xx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c -pSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ -kMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE -AUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE= +MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe +PkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i +xmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity +fQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa +ndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc +MdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV +z94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y +R47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA +a82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE= -----END PUBLIC KEY----- diff --git a/tests/repository_data/keystore/snapshot_key b/tests/repository_data/keystore/snapshot_key index 9c0f3b3506..08c954fdd1 100644 --- a/tests/repository_data/keystore/snapshot_key +++ b/tests/repository_data/keystore/snapshot_key @@ -1 +1 @@ 
-481eac26df5b6688c775c03de82dffa1@@@@100000@@@@65c88dd37151f6d869cf4ecdd46ccc8269da3eb2d491a8b28622fdc71d324ffb@@@@cb3d36a32d0cd80485b045dc665b1089@@@@c3b3df6223832cb2879dcbab8904c369cfbd525aa27d9e2d60fd2da023c8b63a7a503baaaeb3511c5dcd941c872029df235e950bae85ab987d3e521ab4b2aaa3f12abbfacb81045f0e4584a98e474f9170690c26921ebd72df98cf5a9f656f3ace8e4f9be19019b0c9efc1077fd6e29d2d9b259257b5a2940fb85f90a7638090d3c4bafc98dd8db217137957a8a72a83c9fe751288decd537ad583bcd30154632c98c743988fe4bb47bfcda229445343d20ea8698679b78a1625f9b77cc86d4a2c47272d7ffb20b60ab403dac47fed3a863a74cec128231a70e3c71527e4f0a3761a878f90bbfd40eb31072be9f974284125a5e38ab5728d1dd29b9ee6c6f3133c18043f6ae3d5dfa2d0ec57 \ No newline at end of file +a87b80b8a0d39b919b9638181e7b274e@@@@100000@@@@132edd670981aaf1980673966266174d944d735eb5b0b7ec83ed97da5c212249@@@@bd08ae9898ac5f81fc14e418e9790f9b@@@@399250c9aad40035e0acff48db59697bc3cf33d55b52aa272246addeaaf318d931d3a72964f0c84eccf5b89279b8233685330ad884f7b39bf369553133b985f9396bd5e24cb8e343643923022565a645e188a1165e427aedc389cca821d6a93cb2d8d16cea8ffeb56469bcb9f2f66e03d581a2ea37da271980dd02b84717fe475e13a305b4ae714c11c94f6711c744bb291a146d7419474584bad4be152d0299273c1fad6cd95232a4bf07f39c16da7f4d13201a88fad822cb328008e8a2762baf974b5d5080451751fb8ef53a01ca734157be78b3eb13c6270e4e98b138c78388360e7f558389871b7a32b4d5572626b3112264a0b56dbbb1138c9765872a71dd4e7d31006c2e690f5ede608ce633ad94ebb7d1ddec1a7eac2168fc5d36efe590c4c2059c6f3bcf75ab63474eede3ce4fdc93c6564058b14a0fa9bf3cb6d58c53315b406409ee4aeb18abe072734df0 \ No newline at end of file diff --git a/tests/repository_data/keystore/snapshot_key.pub b/tests/repository_data/keystore/snapshot_key.pub index 9222aacf48..d08bb848c1 100644 --- a/tests/repository_data/keystore/snapshot_key.pub +++ b/tests/repository_data/keystore/snapshot_key.pub @@ -1 +1 @@ -{"keytype": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"], "keyval": {"public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8"}} \ No newline at end of file +{"keyval": {"public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd"}, "keytype": "ed25519", "scheme": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"]} \ No newline at end of file diff --git a/tests/repository_data/keystore/targets_key b/tests/repository_data/keystore/targets_key index 3f872f4ecf..c3883ec3c5 100644 --- a/tests/repository_data/keystore/targets_key +++ b/tests/repository_data/keystore/targets_key @@ -1 +1 @@ -1d4fbaef4715e89f94655857f0bac179@@@@100000@@@@5eab500ba01eab1dc380999f03bf6ef1d6ce8d5534e56278dc3c97cb48f6db0f@@@@2e5a0b1a8e05b61f0d78cb5ed2d2077f@@@@665ef8e0272749664c19df418c937d86c4ee9be94851762e622f6eb80c6694738baa397e597fc6761d2d5c24a1db98014c44d489a7348adea62d0e961234b32aa2ee4d69f400c909dbadf5b76d1e579ae7dd595856757e6da7fa310a218261fdb9efb867962a1ed19109015e7755c23f42ea819773df2779dccab2c1bad2cbfdb05ed4b38da1f58d780f937a47d66058f97ca5d8720a8312b4370ce4e75c8aba5ae864a239cc27645da216df331cee9f9a103d788c64de2e42c8630a9f54ef0ddc87327abc121de9d928147e3f8e12a94a2423cef569affc977677b263eb4c42506002d65edaa0b58cddc9075bb60dd0971bfa604d1e952b67a9fc78455693c134b50c34acb8d139a0fb49d0 \ No newline at end of file 
+a5a903322888df0bf8275b215f2044fe@@@@100000@@@@5f6b803652cb6d5bce4e07b1482597adb96d06c2efa3393abdcc0425f70be692@@@@0664811967e2f413927ce51a7f43a80e@@@@cf1dccd034400195c667c064198ef25555f3f94bf9cf77fbe300246618e557ad0efa775ef90bd46c842696c45d14033199860b2214c3641e87889a41171f8a2c763d004681b66b462ff34599e8d9da87f5642d2a015b75d3f601d198e0467fa4bc28f65c76260585e0cce71281f67a8053116f0f06883155f602811071b56bf75bf54daae5968b0a31cf829510f3c52c0eeb8f1c6bb8b8cb0c3edb4c6c2dd9d13bee00c5d63c3f98e0904eebb609864f4ab4fcc2c17bba8fd36aa06bc96bc1922eb10557051a674acf2cb01ff3efb7d55411df6915bbc49a095ff4472dc441e2765244f801d0df07b754c952d039f39b4530930a14be42cb2041f22eeb306b12f12158fcd2beb033db1be21f5a6ab72335cf16dfbd19cbf39c00b0a571d2b0e25df032be53a49a7a70ecebebb441d327c638cf31804381afaf809cd1c75f9070e83240fbaaa87bea0799404ece788862 \ No newline at end of file diff --git a/tests/repository_data/keystore/targets_key.pub b/tests/repository_data/keystore/targets_key.pub index 7ac5726afd..e859eb228e 100644 --- a/tests/repository_data/keystore/targets_key.pub +++ b/tests/repository_data/keystore/targets_key.pub @@ -1 +1 @@ -{"keytype": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"], "keyval": {"public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2"}} \ No newline at end of file +{"keyval": {"public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815"}, "keytype": "ed25519", "scheme": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"]} \ No newline at end of file diff --git a/tests/repository_data/keystore/timestamp_key b/tests/repository_data/keystore/timestamp_key index 0e6786137e..ca82579003 100644 --- a/tests/repository_data/keystore/timestamp_key +++ b/tests/repository_data/keystore/timestamp_key @@ -1 +1 @@ -371c9b3b39a3c35ca0968d045ce58d1d@@@@100000@@@@975c02c936e651682f3b80f8d782411192ed17eb3e32de148cf13ea679c705d8@@@@ee4467b6250c9f88cdd4952d891e4681@@@@83336db56e4e30f481194e319c6b89de25bf795a57b27f8b060405881c79cdc4cfaa8acf886a4ba017e4bf78e78285b7ba67bf974e38d1e733961c49bc6bad1076c85bed527b468039f7980783caf0dcf32093ad96fe2fd56cf149dd7608bc582b0732a408e63085cdca59e2044ed688e7ae22883eefff48578c42c413d9ad4eed0f22758ce540c87b24365e906ddb1ffb628de5c97717fab73710c0f9425054bc31563ea217ef07e6d01b7e67a8d22ba4f30a0c1969317cca46bccc3c80ad9c085d52260e62d31f13c5a3192a4065529056211a634d5b21ad1386d5ddd9756c24c1c55ed7f63f84367b6072a919312c3e2ccca595aea64746b0f9dbe0ef6548090d17a2ad128bbf7bf1e756 \ No newline at end of file +677a42cd6c1df08d0c6156ae356c2875@@@@100000@@@@3850dbcf2973b80044912d630f05039df64775b63d1cf43e750d3cd8a457c64f@@@@bf01961c386d9fefb4b29db7f6ef0c7f@@@@96d37abafb902f821134d2034855d23b78c82e5b768b092fcf0d3b6b28a74734877a5014b26e5fed289d24f7cf6b393445c3231554c5b6d9711192cf9bd2fb7490497d7d76c619a0cfc70abae026b5068fb66db0138b04f890917daad66ca1f7baabdcbb5282e46a2f1c6ff2e8c241ff16ef31e918ca1387a15bc2ceadb2f75ce68fcff08186b5b901a499efe1f674319b503ff8b6fc004b71d0ecb94253f38c58349ab749e72f492e541e7504d25a0bfe791f53eb95c4524431b0f952fc3d7c7204a2a4aab44d33fe09cb36b337339e2a004bf15dfd925b63930905972749441a0c6e50ec9b1748a4cfbacf10b402ebd9c0074fcb38d236fd3146f60232862b0501e8e6caa9f81c223de03ba7b25a1d4bc2d031901dc445f25ce302d2189b8b8de443bc6f562f941b55595655193ab6b84c1ec2302ca056c70e8efb1cad909c50e82e0b7da9ad64202d149e4e837409 \ No newline at end of file diff --git a/tests/repository_data/keystore/timestamp_key.pub b/tests/repository_data/keystore/timestamp_key.pub index c111a80650..69ba7ded1d 100644 --- a/tests/repository_data/keystore/timestamp_key.pub +++ 
b/tests/repository_data/keystore/timestamp_key.pub @@ -1 +1 @@ -{"keytype": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"], "keyval": {"public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c"}} \ No newline at end of file +{"keyval": {"public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4"}, "keytype": "ed25519", "scheme": "ed25519", "keyid_hash_algorithms": ["sha256", "sha512"]} \ No newline at end of file diff --git a/tests/repository_data/map.json b/tests/repository_data/map.json new file mode 100644 index 0000000000..ae5aa1ba9c --- /dev/null +++ b/tests/repository_data/map.json @@ -0,0 +1 @@ +{"repositories": {"repository1": ["http://localhost:30001"], "repository2": ["http://localhost:30002"]}, "mapping": [{"paths": ["*3.txt"], "terminating": false, "repositories": ["repository1", "repository2"]}, {"paths": ["*"], "repositories": ["repository2"]}]} diff --git a/tests/repository_data/project.backup/targets/file1.txt b/tests/repository_data/project.backup/targets/file1.txt deleted file mode 100644 index 7bf3499f13..0000000000 --- a/tests/repository_data/project.backup/targets/file1.txt +++ /dev/null @@ -1 +0,0 @@ -This is an example target file. \ No newline at end of file diff --git a/tests/repository_data/project.backup/targets/file2.txt b/tests/repository_data/project.backup/targets/file2.txt deleted file mode 100644 index 606f18efc8..0000000000 --- a/tests/repository_data/project.backup/targets/file2.txt +++ /dev/null @@ -1 +0,0 @@ -This is an another example target file. \ No newline at end of file diff --git a/tests/repository_data/project.backup/targets/file3.txt b/tests/repository_data/project.backup/targets/file3.txt deleted file mode 100644 index 60464604aa..0000000000 --- a/tests/repository_data/project.backup/targets/file3.txt +++ /dev/null @@ -1 +0,0 @@ -This is role1's target file. 
\ No newline at end of file diff --git a/tests/repository_data/project.backup/test-flat/project.cfg b/tests/repository_data/project.backup/test-flat/project.cfg deleted file mode 100644 index 135cbaed69..0000000000 --- a/tests/repository_data/project.backup/test-flat/project.cfg +++ /dev/null @@ -1 +0,0 @@ -{"project_name": "test-flat", "targets_location": "/home/santiago/Documents/v2014/TUF/tuf/tests/repository_data/project/targets", "prefix": "prefix", "metadata_location": "test-flat", "threshold": 1, "public_keys": {"6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754": {"keytype": "rsa", "keyval": {"public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7J15ZaeDQPrhQsRj29wB\nPhibH+Do59xsT2396L+uCg793gZlar5wZN2eHSh725cNQWyTAa9LwG+lXaKMukQ+\n8176CKR2J5sv3DezrGVu3x8V1qhyJyy79FlNZRVYTVqNaYzvJzxsVnFPpg7f8B7C\nffiqWJr9XkpqwRlCpxooXm4hplZ7uek5Ku21CzQ4OWg7hbuc+ZjCGzpXfm8NuosU\n7TipnKGpEt0Agiph5g6TB2/scoeFar1CKMONIl80maxzAQk+xkWgiJ00+Z2qFCsx\nESfis/YkILS6RMFyZz7oa1WwMtUjYmrsRuz+jlFcbNuxZpIkaISiG9a2YdGcJ1Aj\n3QIDAQAB\n-----END PUBLIC KEY-----"}}}, "layout_type": "flat"} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-flat/test-flat.json b/tests/repository_data/project.backup/test-flat/test-flat.json deleted file mode 100644 index 98c5665e27..0000000000 --- a/tests/repository_data/project.backup/test-flat/test-flat.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "signatures": [ - { - "keyid": "6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754", - "method": "RSASSA-PSS", - "sig": "4c979969f2e1850be960d5e4dd4b344fbaa56f7a5cbc00de3e9f7f36e3e4be3787e55ceb790831955d9c6136376f383a7cb8f288d4d8f114abbdbf368d860ece3640178a1f9b75221a9a31e27cfc153f3dcf7417cbd45a503f6b025e679a69b75f30087faa855d2812023dc05d8eea477930ecd81ea4a19ff7985af8e18d20233bffcb6151fbf749ede1acecd6c9e7fe60c11cf89c49a0ec3b08770e2ef6d7669828f850c5631f6b09dbb977b059d88f32bd0d0bfa7e5bc9bf9197febe730a068de1426f6ead68a8199d2f8e62455d24a9f902679e489624df90c9c4d35e31fe605bd6bba07a7e01547bec85a3216826aaee942d907617583440cbaf5f7889e4" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f": { - "keytype": "rsa", - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqKdTRVn3mLQFUf02Rpug\nwVEU4yJtechILLb6nM7+urfwLe6f7EsNCDFhkiTP7vKuQywdLYrhwZKYZMDmaVnI\nq4d/tBLvb/jGY/IPFVvWbAOWtwWG7apiAFrcp3Idq6EKGaVVLn7tyv74+nisssYJ\ncVKodlkzpgX1Ibrdq73BUlAxhEQNDAUM5bzyJUW0BU4OSjUoFKCgc8BSkNcSLwXO\nRpyqAwDpPWiL68N1Dch7R9uD6GE9aREY9SKoYsNCvUOraIcme4fJZ3NmxpN3SVnX\ntepoiJo2iAtORtEI1yTCv/dOPap/iebveeCjn667HkMezJodSR8X3pMgMKMVyxhJ\ngwIDAQAB\n-----END PUBLIC KEY-----" - } - } - }, - "roles": [ - { - "backtrack": true, - "keyids": [ - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f" - ], - "name": "test-flat/role1", - "paths": [ - "/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "prefix/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "prefix/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-flat/test-flat/role1.json b/tests/repository_data/project.backup/test-flat/test-flat/role1.json deleted file mode 100644 index 
20e9f7cf80..0000000000 --- a/tests/repository_data/project.backup/test-flat/test-flat/role1.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "signatures": [ - { - "keyid": "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f", - "method": "RSASSA-PSS", - "sig": "37b67f5fba59954c1e678bf21cc9ac84173656d3c5dc5b6b45830d1f00d33c2c9d251e14cea3f0f72eabeea8e050179c6eeb7e679fba754e2f7ae41503aed3527690a1d70a9a67aa15408b642062dff05f1e9d65cfa917c99042ae45e9e366aaf9caed4bad81435014ea4aaba8ecec8c2019574f7d1e57a8eef02dcb4511ceff6c2c877c21741a9ed12b519cff8007163bcaa3c57a9364bf0fff5d5aa50d8522ddaeaa70da617072a340b8ab7667c77ae34981eaa964a43349070cc00a1d380eb1ec9c5bf0202738d12578af50017cae2b0859e54359e86f23e2e2065a282903a6f25e3853ba916b9f79aded0bca71180c2357a593e220a7a82be8a7fb5edcb1" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": {}, - "roles": [] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "prefix/file3.txt": { - "hashes": { - "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b" - }, - "length": 28 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/metadata/test-repo-like.json b/tests/repository_data/project.backup/test-repo/metadata/test-repo-like.json deleted file mode 100644 index 10caafc608..0000000000 --- a/tests/repository_data/project.backup/test-repo/metadata/test-repo-like.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "signatures": [ - { - "keyid": "6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754", - "method": "RSASSA-PSS", - "sig": "28bba2c262540c899d7d49896fac834744ff935891475ca793515bbff832c85778e624f4287bad19c962448f7f109d29b22d1ec74873e1a98e86f440302e8b0897490ce5bf23c7e4311f891073809efdb6b02e641fa4c0e29ba784a66b492e4c7e875298694b88655aec10e4fb43163690de10d32adec47984e0b765a85d49d8708c44f11731be24ef97b9d5e94bfd95ca7242a43dd761a858f51ddfb67f6318c3f1515f303a9ec4529a61a464cc0ed0f5087df17a2a9edabc26ae1335bdd45b1e9bed0b8713dbe244ec90d061f1f4ec8019d63dc563c27df2bed3ba8c9919264d7b9f66c648eca98fd3c47f9093ae8ac6d9abb5e93e5702c3d8f950b81f3e74" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": { - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f": { - "keytype": "rsa", - "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqKdTRVn3mLQFUf02Rpug\nwVEU4yJtechILLb6nM7+urfwLe6f7EsNCDFhkiTP7vKuQywdLYrhwZKYZMDmaVnI\nq4d/tBLvb/jGY/IPFVvWbAOWtwWG7apiAFrcp3Idq6EKGaVVLn7tyv74+nisssYJ\ncVKodlkzpgX1Ibrdq73BUlAxhEQNDAUM5bzyJUW0BU4OSjUoFKCgc8BSkNcSLwXO\nRpyqAwDpPWiL68N1Dch7R9uD6GE9aREY9SKoYsNCvUOraIcme4fJZ3NmxpN3SVnX\ntepoiJo2iAtORtEI1yTCv/dOPap/iebveeCjn667HkMezJodSR8X3pMgMKMVyxhJ\ngwIDAQAB\n-----END PUBLIC KEY-----" - } - } - }, - "roles": [ - { - "backtrack": true, - "keyids": [ - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f" - ], - "name": "test-repo-like/role1", - "paths": [ - "/file3.txt" - ], - "threshold": 1 - } - ] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "prefix/file1.txt": { - "hashes": { - "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" - }, - "length": 31 - }, - "prefix/file2.txt": { - "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" - }, - "length": 39 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/metadata/test-repo-like/role1.json 
b/tests/repository_data/project.backup/test-repo/metadata/test-repo-like/role1.json deleted file mode 100644 index 588d8f6e5d..0000000000 --- a/tests/repository_data/project.backup/test-repo/metadata/test-repo-like/role1.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "signatures": [ - { - "keyid": "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f", - "method": "RSASSA-PSS", - "sig": "36281e10b1643ab18d5a46ec4a87a103d451e329c53508ff54958bbf5f0806e18a1de7a9a77cc08b553f5337a49a6129fa47c70fb9312e82d66c6d3711c377f86af26143b469bdc32475a4cb1eb7cdeee6d1c7662df3181941a0aaa3b6d3320d0a48e6707a4dc845c6c5c9460d3329891617be3467d3e0bf23ec683c06fa1ca4fde91dee065d12b52b964cee10b3bf588184376bbae3316150de5830734c53889966bd2b1d6a72c834ea1706efe28e6b5d5f2479f2f8c76460067247212577b2e9da9954ee88940b814a83808c0fc6be513535ff616bf07e60992fe8831b41b09669058c1a75e82df3a11e63e5f86cd84af7014ef5e4db7b7ece0d62fee34c17" - } - ], - "signed": { - "_type": "Targets", - "delegations": { - "keys": {}, - "roles": [] - }, - "expires": "2030-01-01T00:00:00Z", - "targets": { - "prefix/file3.txt": { - "hashes": { - "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b" - }, - "length": 28 - } - }, - "version": 1 - } -} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/project.cfg b/tests/repository_data/project.backup/test-repo/project.cfg deleted file mode 100644 index 7cb3041025..0000000000 --- a/tests/repository_data/project.backup/test-repo/project.cfg +++ /dev/null @@ -1 +0,0 @@ -{"project_name": "test-repo-like", "targets_location": "targets", "prefix": "prefix", "metadata_location": "metadata", "threshold": 1, "public_keys": {"6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754": {"keytype": "rsa", "keyval": {"public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7J15ZaeDQPrhQsRj29wB\nPhibH+Do59xsT2396L+uCg793gZlar5wZN2eHSh725cNQWyTAa9LwG+lXaKMukQ+\n8176CKR2J5sv3DezrGVu3x8V1qhyJyy79FlNZRVYTVqNaYzvJzxsVnFPpg7f8B7C\nffiqWJr9XkpqwRlCpxooXm4hplZ7uek5Ku21CzQ4OWg7hbuc+ZjCGzpXfm8NuosU\n7TipnKGpEt0Agiph5g6TB2/scoeFar1CKMONIl80maxzAQk+xkWgiJ00+Z2qFCsx\nESfis/YkILS6RMFyZz7oa1WwMtUjYmrsRuz+jlFcbNuxZpIkaISiG9a2YdGcJ1Aj\n3QIDAQAB\n-----END PUBLIC KEY-----"}}}, "layout_type": "repo-like"} \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/targets/file1.txt b/tests/repository_data/project.backup/test-repo/targets/file1.txt deleted file mode 100644 index 7bf3499f13..0000000000 --- a/tests/repository_data/project.backup/test-repo/targets/file1.txt +++ /dev/null @@ -1 +0,0 @@ -This is an example target file. \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/targets/file2.txt b/tests/repository_data/project.backup/test-repo/targets/file2.txt deleted file mode 100644 index 606f18efc8..0000000000 --- a/tests/repository_data/project.backup/test-repo/targets/file2.txt +++ /dev/null @@ -1 +0,0 @@ -This is an another example target file. \ No newline at end of file diff --git a/tests/repository_data/project.backup/test-repo/targets/file3.txt b/tests/repository_data/project.backup/test-repo/targets/file3.txt deleted file mode 100644 index 60464604aa..0000000000 --- a/tests/repository_data/project.backup/test-repo/targets/file3.txt +++ /dev/null @@ -1 +0,0 @@ -This is role1's target file. 
\ No newline at end of file diff --git a/tests/repository_data/project/test-flat/project.cfg b/tests/repository_data/project/test-flat/project.cfg index 2d909b51cb..1564431d80 100644 --- a/tests/repository_data/project/test-flat/project.cfg +++ b/tests/repository_data/project/test-flat/project.cfg @@ -1 +1 @@ -{"project_name": "test-flat", "targets_location": "/home/vlad/projects/tuf/tests/repository_data/project/targets", "prefix": "prefix", "metadata_location": "test-flat", "threshold": 1, "public_keys": {"6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754": {"keytype": "rsa", "keyval": {"public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7J15ZaeDQPrhQsRj29wB\nPhibH+Do59xsT2396L+uCg793gZlar5wZN2eHSh725cNQWyTAa9LwG+lXaKMukQ+\n8176CKR2J5sv3DezrGVu3x8V1qhyJyy79FlNZRVYTVqNaYzvJzxsVnFPpg7f8B7C\nffiqWJr9XkpqwRlCpxooXm4hplZ7uek5Ku21CzQ4OWg7hbuc+ZjCGzpXfm8NuosU\n7TipnKGpEt0Agiph5g6TB2/scoeFar1CKMONIl80maxzAQk+xkWgiJ00+Z2qFCsx\nESfis/YkILS6RMFyZz7oa1WwMtUjYmrsRuz+jlFcbNuxZpIkaISiG9a2YdGcJ1Aj\n3QIDAQAB\n-----END PUBLIC KEY-----"}}}, "layout_type": "flat"} \ No newline at end of file +{"project_name": "test-flat", "targets_location": "/Users/vlad/projects/vladforks/tuf/tests/repository_data/project/targets", "prefix": "prefix", "metadata_location": "test-flat", "threshold": 1, "public_keys": {"4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": {"keyval": {"public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----"}, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyid_hash_algorithms": ["sha256", "sha512"]}}, "layout_type": "flat"} \ No newline at end of file diff --git a/tests/repository_data/project/test-flat/role1.json b/tests/repository_data/project/test-flat/role1.json index bbeefbfc01..db887f1d83 100644 --- a/tests/repository_data/project/test-flat/role1.json +++ b/tests/repository_data/project/test-flat/role1.json @@ -1,22 +1,23 @@ { "signatures": [ { - "keyid": "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f", - "method": "RSASSA-PSS", - "sig": "27fe1879eb797bedc2036da8fdfd1336d8d23ca881ee03167d5a2f7c4c97cc1602d6899aaf776a5304861b497d76f9b150d72c52b768eaa424109794fab9140735d007d6c3bd3768ea54ac9b77594844a160218b0a477687e64ae817dbf31fa84dc3b0f8e30fd4ece1d89ef4657303d89ecc966c0b6a30befde027116e8634fa601a8922c0a77094b991ebe8d9fa8acf3ac43e86e89280e8803ef55b16600f4548910542fa94e87355281bb7a7f862bea182334972f7e3c7b6ae9e23d83afceb52a05a2e25cf0caadb1a0fdc9c4dcfd126a97ae51eb1cc353355394daedbb4608a0b8ae294a2ece4b7e52d2953329fc4571c43cbdfd5c7ce4fec4e6824ef4f1c" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "07e58cf4b7c66a92f60eef22a01b0f6607a58168dad03b8d1dbe50aa1e36f001b8b7fe4f1d300145c568d2a422d806ec399e0417a309e013a982b1c1b09ced0a" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", 
"targets": { "prefix/file3.txt": { "hashes": { - "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b" + "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", + "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } diff --git a/tests/repository_data/project/test-flat/test-flat.json b/tests/repository_data/project/test-flat/test-flat.json index 77cc967e51..2457b94f30 100644 --- a/tests/repository_data/project/test-flat/test-flat.json +++ b/tests/repository_data/project/test-flat/test-flat.json @@ -1,47 +1,54 @@ { "signatures": [ { - "keyid": "6986b667c736a3b37471e030cf4ce7aa6c7e0d530325e64c2660276b77be3754", - "method": "RSASSA-PSS", - "sig": "8eec37d8b76bb5cc45f9984e2c48eea2894165874a0d027c89a7829c7339166a8ee977c3e4b42bab5869cd105e151dac058f8953d1f423a9e21fc6bc4f879e72988bb69b8b673255a6c0cfe55ed799f3d11a413ffe85722b943c07c74b8f773f91e06f8b0fc085aefb23715bca7225422e26c132ea3f931334a12874735ac22bcc1f2051f87c51ea6849a6267d2fd1a03236dbc3a57929def3316723ff0e8939f615ca9e4e009bcd6ff689ef83e7a9761c1c6d2e4447caa87e95581c46771b7eb3bec82d7aad19de1cc7f1930bb7d40778d41056efb60631773a39fba2d1ca24a55de7f945729952d4cf704b169d98788d5ad35ec3086b228f5c34d3546f6b78" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "76a51bd99272eea87442d1814f4b5b766283cd9df0be16fcf6d65f6cfafce09e5f37a44fe3946c297ab738de9e0d678960f3502dba1e6518b64595e7ae9db8f583956971d7f5495c587569ffe0d174f770a8e19de723a26cca222b711cb0d9f37b95b7fbd3ddc3e0a785dfc6e79d5c72790d757583457305e152fdd9b505522efcedc0a334280b1aa86df4d615c70089eeada16ba94e9a83c75b19c6bfcc6c6d585f07095495193c4422ec67d7438397d444e608ef5b259928a4a86258f54e4ce609e5780c8bb369d69f373fc1b74b6856c6d962177ee351a10315b8afd3f5fa3cb676d67b1b11b226cc7857a418538219df0441c2019caa93eeeff2ba04b34104a091661b1871d9d84db9bc7a3f050129bfad3edde23d69f9684a28d7cb088e03941df41952ca53584f47ae232982029a218a432f8aa1fd6e22f187cde96004f8fccf8760161fb01c58d80c8debd4d41bdc7879dcbc90d434353cf394a6e86a4c1a6fd6c72d9cd61e3234479a63a9b2f2f2f402a5dce730a753f10e28d8a783" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f": { - "keytype": "rsa", + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { + "keyid_hash_algorithms": [ + "sha256", + "sha512" + ], + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqKdTRVn3mLQFUf02Rpug\nwVEU4yJtechILLb6nM7+urfwLe6f7EsNCDFhkiTP7vKuQywdLYrhwZKYZMDmaVnI\nq4d/tBLvb/jGY/IPFVvWbAOWtwWG7apiAFrcp3Idq6EKGaVVLn7tyv74+nisssYJ\ncVKodlkzpgX1Ibrdq73BUlAxhEQNDAUM5bzyJUW0BU4OSjUoFKCgc8BSkNcSLwXO\nRpyqAwDpPWiL68N1Dch7R9uD6GE9aREY9SKoYsNCvUOraIcme4fJZ3NmxpN3SVnX\ntepoiJo2iAtORtEI1yTCv/dOPap/iebveeCjn667HkMezJodSR8X3pMgMKMVyxhJ\ngwIDAQAB\n-----END PUBLIC KEY-----" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { - "backtrack": true, "keyids": [ - "a404d46b042a2eb92f0cc4b28849f8e9107c9e185c45c8ece64a302626af805f" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "/file3.txt" ], + "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "prefix/file1.txt": { "hashes": { - "sha256": 
"65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da" + "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", + "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "prefix/file2.txt": { "hashes": { - "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99" + "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", + "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } diff --git a/tests/repository_data/project/test-flat/test-flat.json.gz b/tests/repository_data/project/test-flat/test-flat.json.gz deleted file mode 100644 index d962f1e01c..0000000000 Binary files a/tests/repository_data/project/test-flat/test-flat.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/1.root.json b/tests/repository_data/repository/metadata.staged/1.root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/repository/metadata.staged/1.root.json +++ b/tests/repository_data/repository/metadata.staged/1.root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": 
"-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata.staged/1.root.json.gz b/tests/repository_data/repository/metadata.staged/1.root.json.gz deleted file mode 100644 index 741b2d2949..0000000000 Binary 
files a/tests/repository_data/repository/metadata.staged/1.root.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/role1.json b/tests/repository_data/repository/metadata.staged/role1.json index 96d921b5eb..332cd8050f 100644 --- a/tests/repository_data/repository/metadata.staged/role1.json +++ b/tests/repository_data/repository/metadata.staged/role1.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], @@ -34,6 +34,7 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file3.txt": { "hashes": { diff --git a/tests/repository_data/repository/metadata.staged/role1.json.gz b/tests/repository_data/repository/metadata.staged/role1.json.gz deleted file mode 100644 index 33bdf2ea23..0000000000 Binary files a/tests/repository_data/repository/metadata.staged/role1.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/role2.json b/tests/repository_data/repository/metadata.staged/role2.json index 20b1206a70..46e415a789 100644 --- a/tests/repository_data/repository/metadata.staged/role2.json +++ b/tests/repository_data/repository/metadata.staged/role2.json @@ -1,18 +1,18 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "8fdca8154157e983d86efb16917ad973941dfa75a47d99a88b393d0955f1508aff55b66d0592ff2ad2f431d6826d6544009a921b5aae503f3f795b09ed549f0a" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": {}, "version": 1 } diff --git a/tests/repository_data/repository/metadata.staged/role2.json.gz b/tests/repository_data/repository/metadata.staged/role2.json.gz deleted file mode 100644 index 3d23a94cff..0000000000 Binary files a/tests/repository_data/repository/metadata.staged/role2.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/root.json b/tests/repository_data/repository/metadata.staged/root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/repository/metadata.staged/root.json +++ 
b/tests/repository_data/repository/metadata.staged/root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + 
"65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata.staged/snapshot.json b/tests/repository_data/repository/metadata.staged/snapshot.json index 3ee5468cf9..a713f807b6 100644 --- a/tests/repository_data/repository/metadata.staged/snapshot.json +++ b/tests/repository_data/repository/metadata.staged/snapshot.json @@ -1,13 +1,12 @@ { "signatures": [ { - "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", - "method": "ed25519", - "sig": "01bc8667e2afbceef3df6ec6782088c77e59bdb5a29b75634200528bdedc8af9694e38a672fa2cd00051fb25ac12d3079bbc48f424783d23fb532b7508adf40f" + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" } ], "signed": { - "_type": "Snapshot", + "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { @@ -17,16 +16,13 @@ "version": 1 }, "root.json": { - "hashes": { - "sha256": "294a5eea95c8aaed509c3a559c79044a336b6912395f28d5502aa93f0bfd2774" - }, - "length": 3329, "version": 1 }, "targets.json": { "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata.staged/snapshot.json.gz b/tests/repository_data/repository/metadata.staged/snapshot.json.gz 
deleted file mode 100644 index a57d005144..0000000000 Binary files a/tests/repository_data/repository/metadata.staged/snapshot.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/targets.json b/tests/repository_data/repository/metadata.staged/targets.json index 0620bfe42a..972034d03f 100644 --- a/tests/repository_data/repository/metadata.staged/targets.json +++ b/tests/repository_data/repository/metadata.staged/targets.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", - "method": "ed25519", - "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ @@ -36,10 +36,11 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file1.txt": { "custom": { - "file_permissions": "664" + "file_permissions": "644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", diff --git a/tests/repository_data/repository/metadata.staged/targets.json.gz b/tests/repository_data/repository/metadata.staged/targets.json.gz deleted file mode 100644 index 5a5190660b..0000000000 Binary files a/tests/repository_data/repository/metadata.staged/targets.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata.staged/timestamp.json b/tests/repository_data/repository/metadata.staged/timestamp.json index 43adf289a1..2cc752b30c 100644 --- a/tests/repository_data/repository/metadata.staged/timestamp.json +++ b/tests/repository_data/repository/metadata.staged/timestamp.json @@ -1,23 +1,23 @@ { "signatures": [ { - "keyid": "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1", - "method": "ed25519", - "sig": "0425f403669dc28aeda67015b56b62b724f7f36899e5bfba1edd9bc059a7921695de4fadcffe526e3e18ddef9b5b7bdff098dc7d058848ef9cddcad29000b70f" + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": "7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" } ], "signed": { - "_type": "Timestamp", + "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { - "sha256": "5429cb36aa60a0a4e4d4ebe06cb8b1e4e2ec22e0535c17096bfaf8fd187204e4" + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" }, - "length": 678, + "length": 554, "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git 
a/tests/repository_data/repository/metadata.staged/timestamp.json.gz b/tests/repository_data/repository/metadata.staged/timestamp.json.gz deleted file mode 100644 index ac8247df17..0000000000 Binary files a/tests/repository_data/repository/metadata.staged/timestamp.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/1.root.json b/tests/repository_data/repository/metadata/1.root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/repository/metadata/1.root.json +++ b/tests/repository_data/repository/metadata/1.root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": "7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - 
"3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata/1.root.json.gz b/tests/repository_data/repository/metadata/1.root.json.gz deleted file mode 100644 index 741b2d2949..0000000000 Binary files a/tests/repository_data/repository/metadata/1.root.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/role1.json b/tests/repository_data/repository/metadata/role1.json index 96d921b5eb..332cd8050f 100644 --- a/tests/repository_data/repository/metadata/role1.json +++ b/tests/repository_data/repository/metadata/role1.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "e8f6db97fcad5eb2ca1cf5fc6b6d4579d026811581b0d2061af90c7cb26d966e15a06e7c596f663b05aa061308929f96136167359fc9d44919a36383403abd09" + "keyid": 
"c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "749d6373835e9e89a269168d9af22bf7692ee7059a1db5ff1162e07b495ba47ae223e9ece6c27b2981d5d8bc046788d3fad9c2ba83d4be9b6547ed1f909c6204" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], @@ -34,6 +34,7 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file3.txt": { "hashes": { diff --git a/tests/repository_data/repository/metadata/role1.json.gz b/tests/repository_data/repository/metadata/role1.json.gz deleted file mode 100644 index 33bdf2ea23..0000000000 Binary files a/tests/repository_data/repository/metadata/role1.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/role2.json b/tests/repository_data/repository/metadata/role2.json index 20b1206a70..46e415a789 100644 --- a/tests/repository_data/repository/metadata/role2.json +++ b/tests/repository_data/repository/metadata/role2.json @@ -1,18 +1,18 @@ { "signatures": [ { - "keyid": "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9", - "method": "ed25519", - "sig": "8fdca8154157e983d86efb16917ad973941dfa75a47d99a88b393d0955f1508aff55b66d0592ff2ad2f431d6826d6544009a921b5aae503f3f795b09ed549f0a" + "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", + "sig": "34ae1e3c897062419722c1747970a632e12060f3aef57314e6e6aa96c3a510a25ec5a8b12022058c768724607dd58106293089c87a7ee4b2ce5b3a8d44deeb06" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": {}, "version": 1 } diff --git a/tests/repository_data/repository/metadata/role2.json.gz b/tests/repository_data/repository/metadata/role2.json.gz deleted file mode 100644 index 3d23a94cff..0000000000 Binary files a/tests/repository_data/repository/metadata/role2.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/root.json b/tests/repository_data/repository/metadata/root.json index 7835711ffa..ccce5381b6 100644 --- a/tests/repository_data/repository/metadata/root.json +++ b/tests/repository_data/repository/metadata/root.json @@ -1,86 +1,87 @@ { "signatures": [ { - "keyid": "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503", - "method": "RSASSA-PSS", - "sig": 
"7965740b0a73d11b0b96c959f3c509107c065fe210487d92718a870f177b387c33737dc27a984689aeccff2184e967a8574ce6f065138687c8c106838fd07a0297d19beb553cf91fbdff6fab35928c2cbdb3cbbc8f8334ac21ee588e86a08cbee406cb0da49fd1908ca5418279f2ed474c54f88dc632a4812064ec708c9cc84712b4e1e688e71b31cc51cff4780dadb3d4350dda96a64c6839dc9be6e18c0a9468df42afea94ef0e9bf4fdb447e0aae24a7c7e60b0f0e53da6f2f9cc21ce2fc633dfadb4a280f97b1ed942c03779c59bef5a28661dada107f36c43ae8004befce217faeb8ca9fe8160a9318ee10c8cab8de84ed4e72a7e36707d85bd969a11fc11baf1475300d902caf8515e3abacd0bcdccdd48126cb118bee785b286ee45d28f4cbf9469403f0121f5774357b6542efdd95be7c5c93cef0f1fef7cc5adac65a8915386f3189e4577e12dbecbb7bcc5e5558748a70f99b85e7afce5ec08eeef4e886f4209cb511bd0a953d2fea72241f6f38ddcf436567cdb395b4c7beace0e" + "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", + "sig": "344e2399b982470a06bbaeb7cd52eb58dfa7fba65d179a737b3655ce79fe722480f9515163010b714debd46a3f9bd60cd04c3c3093c050a9bb6b6fdd3dd1e902b3871198bf13c7c4ace9813527ac0b6f2d7652c64d7993ef88328bc82bea69ffec13dfd545e3ff0d327613b4eea6ad2c50baffd4d7233104427d0c6066fbec5f0091a8004c7f40c0ee59abf4955ab8682d8401f09adb42b0e8897c96ac7eb37843287c60b631ee67187d01e5b3936d84a36881c8e9726dc30bc62442f3849d94c96c62eb0aa66c9d3869e403940f8f7df527b2d3aa109cbf37d1349a8043395c6d2ad3550a64db0eaa39d45d6e30ea7fd06457bf0f033afba6796fbb7160e094e67da21c2f8da2220d10cef5df691f1f919ff14fb00e2b4057d5c099ce8b2d5d2a9e233dc7eb1dc77b24f4f22b219e67e45b3592a1ac873e96456078d82a12c085186f6b50131f2bad32f6bdf3359b141fb898819c98866f55eff686ad5cda01d2661dff6c39b66e5d83dfb65bb276ea4164b1f86c3371dbe635b31c118067ee" } ], "signed": { - "_type": "Root", - "compression_algorithms": [ - "gz" - ], + "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9": { + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "ed25519", + "keytype": "rsa", "keyval": { - "public": "e8bd29d20025d3ac755a27b8d8efe185e368244000d9d3b76e32afb968cb0ea8" - } + "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" + }, + "scheme": "rsassa-pss-sha256" }, - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1": { + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "52de9284608be6b718a9d88c7c5d8d9d93b33732e00d670dd4ebe4bce8bbc83c" - } + "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" + }, + "scheme": "ed25519" }, - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503": { + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], - "keytype": "rsa", + "keytype": "ed25519", "keyval": { - "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAsDqUoiFJZX+5gm5pyI1l\nVc/N3yjJVOIl9GyiK0mRyzV3IzUQzhjq8nhk0eLfzXw2XwIAYOJC6dR/tGRG4JDx\nJkez5FFH4zLosr/XzT7CG5zxJ3kKICLD1v9rZQr5ZgARQDOpkxzPz46rGnE0sHd7\nMpnpPMScA1pMIzwM1RoPS4ntZipI1cl9M7HMQ6mkBp8/DNKCqaDWixJqaGgWrhhK\nhI/1mzBliMKriNxPKSCGVlOk/QpZft+y1fs42s0DMd5BOFBo+ZcoXLYRncg9S3A2\nxx/jT69Bt3ceiAZqnp7f6M+ZzoUifSelaoL7QIYg/GkEl+0oxTD0yRphGiCKwn9c\npSbn7NgnbjqSgIMeEtlf/5Coyrs26pyFf/9GbusddPSxxxwIJ/7IJuF7P1Yy0WpZ\nkMeY83h9n2IdnEYi+rpdbLJPQd7Fpu2xrdA3Fokj8AvCpcmxn8NIXZuK++r8/xsE\nAUL30HH7dgVn50AvdPaJnqAORT3OlabW0DK9prcwKnyzAgMBAAE=\n-----END PUBLIC KEY-----" - } + "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" + }, + "scheme": "ed25519" }, - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b": { + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "0692a846935833d685168ae8c98fee951d52d8aa76685b8ba55b8e1eada217c2" - } + "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" + }, + "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ - "5602f4df0cd26b2112f0833b1ce8d5fcbb595754961d3a04f37b9815e2ced503" + "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ - "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9" + "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ - "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b" + "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ - "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1" + "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata/snapshot.json b/tests/repository_data/repository/metadata/snapshot.json index 3ee5468cf9..a713f807b6 100644 --- a/tests/repository_data/repository/metadata/snapshot.json +++ b/tests/repository_data/repository/metadata/snapshot.json @@ -1,13 +1,12 @@ { "signatures": [ { - "keyid": "182216b8800c50ddf000043b31ddf90d815c754ab4e0b31a5952a839b371bed9", - "method": "ed25519", - "sig": "01bc8667e2afbceef3df6ec6782088c77e59bdb5a29b75634200528bdedc8af9694e38a672fa2cd00051fb25ac12d3079bbc48f424783d23fb532b7508adf40f" + "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", + "sig": "d78e9013bab1da2a8425caa48143cd79a21632dce021ad7e1b883d83775035df333a8b26c9c952d832edaf9dc7be2ef612bdb21326fcc9849346d7e3a162050e" } ], "signed": { - "_type": "Snapshot", + "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { @@ -17,16 +16,13 @@ "version": 1 }, "root.json": { - "hashes": { - "sha256": "294a5eea95c8aaed509c3a559c79044a336b6912395f28d5502aa93f0bfd2774" - }, - "length": 3329, "version": 1 }, "targets.json": { "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata/snapshot.json.gz b/tests/repository_data/repository/metadata/snapshot.json.gz deleted file mode 100644 index a57d005144..0000000000 Binary files a/tests/repository_data/repository/metadata/snapshot.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/targets.json 
b/tests/repository_data/repository/metadata/targets.json index 0620bfe42a..972034d03f 100644 --- a/tests/repository_data/repository/metadata/targets.json +++ b/tests/repository_data/repository/metadata/targets.json @@ -1,30 +1,30 @@ { "signatures": [ { - "keyid": "a0a0f0cf08daff7afd1eb6582756d43987aa73f028044836a5519259706ca19b", - "method": "ed25519", - "sig": "74ee9970ed709ab65586ef99c0005102676a92f11e2a448bb685875b641d2efe3fd2bdefaa90e1a050bfbb34163834aadb43d13ac0c7452aa7df27c454c34507" + "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", + "sig": "3b1a1fcb912ea8e03b6f9ad0da29166149d4a6f038b552c204ccee1d396d2dd4095a3ce3c565581f08fa37dddc418b0aee40743a121b1f47c89d51da11f1dc02" } ], "signed": { - "_type": "Targets", + "_type": "targets", "delegations": { "keys": { - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9": { + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { - "public": "3b11296fe2dba14a2ef204e542e9e4195293bcf3042655e3d7e4ef5afe3cf36a" - } + "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" + }, + "scheme": "ed25519" } }, "roles": [ { "keyids": [ - "a0650f29dde63403cc4eec28a1c66f2262d6339434a01c63a881a48bedd9bca9" + "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ @@ -36,10 +36,11 @@ ] }, "expires": "2030-01-01T00:00:00Z", + "spec_version": "1.0", "targets": { "/file1.txt": { "custom": { - "file_permissions": "664" + "file_permissions": "644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", diff --git a/tests/repository_data/repository/metadata/targets.json.gz b/tests/repository_data/repository/metadata/targets.json.gz deleted file mode 100644 index 5a5190660b..0000000000 Binary files a/tests/repository_data/repository/metadata/targets.json.gz and /dev/null differ diff --git a/tests/repository_data/repository/metadata/timestamp.json b/tests/repository_data/repository/metadata/timestamp.json index 43adf289a1..2cc752b30c 100644 --- a/tests/repository_data/repository/metadata/timestamp.json +++ b/tests/repository_data/repository/metadata/timestamp.json @@ -1,23 +1,23 @@ { "signatures": [ { - "keyid": "3f09f6468a522bea0364a23315872d8400875dfdb24ff4ecd32f27164d5c23c1", - "method": "ed25519", - "sig": "0425f403669dc28aeda67015b56b62b724f7f36899e5bfba1edd9bc059a7921695de4fadcffe526e3e18ddef9b5b7bdff098dc7d058848ef9cddcad29000b70f" + "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", + "sig": "7dddbfe94d6d80253433551700ea6dfe4171a33f1227a07830e951900b8325d67c3dce6410b9cf55abefa3dfca0b57814a4965c2d6ee60bb0336755cd0557e03" } ], "signed": { - "_type": "Timestamp", + "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { - "sha256": "5429cb36aa60a0a4e4d4ebe06cb8b1e4e2ec22e0535c17096bfaf8fd187204e4" + "sha256": "6990b6586ed545387c6a51db62173b903a5dff46b17b1bc3fe1e6ca0d0844f2f" }, - "length": 678, + "length": 554, "version": 1 } }, + "spec_version": "1.0", "version": 1 } } \ No newline at end of file diff --git a/tests/repository_data/repository/metadata/timestamp.json.gz b/tests/repository_data/repository/metadata/timestamp.json.gz deleted file mode 100644 index ac8247df17..0000000000 Binary files a/tests/repository_data/repository/metadata/timestamp.json.gz and /dev/null differ diff --git a/tests/test_arbitrary_package_attack.py 
b/tests/test_arbitrary_package_attack.py index 89ab017c5e..e6f7810a0a 100755 --- a/tests/test_arbitrary_package_attack.py +++ b/tests/test_arbitrary_package_attack.py @@ -42,13 +42,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.formats @@ -272,7 +266,8 @@ def test_with_tuf_and_metadata_tampering(self): tuf.formats.check_signable_object_format(metadata) with open(metadata_path, 'wb') as file_object: - json.dumps(metadata, file_object, indent=1, sort_keys=True).encode('utf-8') + file_object.write(json.dumps(metadata, indent=1, + separators=(',', ': '), sort_keys=True).encode('utf-8')) # Verify that the malicious 'targets.json' is not downloaded. Perform # a refresh of top-level metadata to demonstrate that the malicious diff --git a/tests/test_developer_tool.py b/tests/test_developer_tool.py index b4dded4d09..61585eb206 100755 --- a/tests/test_developer_tool.py +++ b/tests/test_developer_tool.py @@ -22,6 +22,7 @@ import logging import tempfile import shutil +import unittest import tuf import tuf.log @@ -347,10 +348,6 @@ def test_write(self): # + backup the name. name_backup = project._project_name - # Set the compressions. We will be checking this part here too. - project.compressions = ['gz'] - project('delegation').compressions = project.compressions - # Write and reload. self.assertRaises(securesystemslib.exceptions.Error, project.write) project.write(write_partial=True) diff --git a/tests/test_endless_data_attack.py b/tests/test_endless_data_attack.py index fc66501267..be0d896375 100755 --- a/tests/test_endless_data_attack.py +++ b/tests/test_endless_data_attack.py @@ -45,13 +45,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.formats diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py new file mode 100755 index 0000000000..232071af01 --- /dev/null +++ b/tests/test_exceptions.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +""" + + test_exceptions.py + + + Vladimir Diaz + + + July 13, 2017. + + + See LICENSE for licensing information. + + + Test cases for exceptions.py (mainly the exceptions defined there). +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import unittest +import logging + +import tuf.exceptions + +logger = logging.getLogger('test_exceptions') + +class TestExceptions(unittest.TestCase): + def setUp(self): + pass + + + def tearDown(self): + pass + + + def test_bad_signature_error(self): + bad_signature_error = tuf.exceptions.BadSignatureError('bad sig') + logger.error(bad_signature_error) + + + def test_bad_hash_error(self): + bad_hash_error = tuf.exceptions.BadHashError('1234', '5678') + logger.error(bad_hash_error) + + +# Run the unit tests. 
+if __name__ == '__main__': + unittest.main() diff --git a/tests/test_extraneous_dependencies_attack.py b/tests/test_extraneous_dependencies_attack.py index 8f8a7d8c75..0e78578fba 100755 --- a/tests/test_extraneous_dependencies_attack.py +++ b/tests/test_extraneous_dependencies_attack.py @@ -47,13 +47,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf.formats import tuf.log diff --git a/tests/test_formats.py b/tests/test_formats.py index d35da8bf36..7885e30338 100755 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -66,7 +66,7 @@ def test_schemas(self): 'KEYIDS_SCHEMA': (securesystemslib.formats.KEYIDS_SCHEMA, ['123456789abcdef', '123456789abcdef']), - 'SIG_METHOD_SCHEMA': (securesystemslib.formats.SIG_METHOD_SCHEMA, 'ed25519'), + 'SIG_SCHEME_SCHEMA': (securesystemslib.formats.SIG_SCHEME_SCHEMA, 'rsassa-pss-sha256'), 'RELPATH_SCHEMA': (securesystemslib.formats.RELPATH_SCHEMA, 'metadata/root/'), @@ -105,11 +105,13 @@ def test_schemas(self): 'KEY_SCHEMA': (securesystemslib.formats.KEY_SCHEMA, {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}), 'RSAKEY_SCHEMA': (securesystemslib.formats.RSAKEY_SCHEMA, {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyid': '123456789abcdef', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}), @@ -138,7 +140,6 @@ def test_schemas(self): 'SIGNATURE_SCHEMA': (securesystemslib.formats.SIGNATURE_SCHEMA, {'keyid': '123abc', - 'method': 'evp', 'sig': 'A4582BCF323BCEF'}), 'SIGNATURESTATUS_SCHEMA': (securesystemslib.formats.SIGNATURESTATUS_SCHEMA, @@ -147,21 +148,22 @@ def test_schemas(self): 'bad_sigs': ['123abc'], 'unknown_sigs': ['123abc'], 'untrusted_sigs': ['123abc'], - 'unknown_method_sigs': ['123abc']}), + 'unknown_signing_schemes': ['123abc']}), 'SIGNABLE_SCHEMA': (tuf.formats.SIGNABLE_SCHEMA, {'signed': 'signer', 'signatures': [{'keyid': '123abc', - 'method': 'evp', 'sig': 'A4582BCF323BCEF'}]}), 'KEYDICT_SCHEMA': (securesystemslib.formats.KEYDICT_SCHEMA, {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}), 'KEYDB_SCHEMA': (securesystemslib.formats.KEYDB_SCHEMA, {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyid': '123456789abcdef', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}), @@ -194,12 +196,13 @@ def test_schemas(self): 'paths': ['path1/', 'path2']}}), 'ROOT_SCHEMA': (tuf.formats.ROOT_SCHEMA, - {'_type': 'Root', + {'_type': 'root', + 'spec_version': '1.0', 'version': 8, 'consistent_snapshot': False, - 'compression_algorithms': ['gz'], 'expires': '1985-10-21T13:20:00Z', 'keys': {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}, 'roles': {'root': {'keyids': ['123abc'], @@ -207,13 +210,15 @@ def test_schemas(self): 'paths': ['path1/', 'path2']}}}), 'TARGETS_SCHEMA': (tuf.formats.TARGETS_SCHEMA, - {'_type': 'Targets', + {'_type': 'targets', + 'spec_version': '1.0', 'version': 8, 'expires': '1985-10-21T13:20:00Z', 'targets': {'metadata/targets.json': {'length': 1024, 'hashes': {'sha256': 'ABCD123'}, 'custom': {'type': 'metadata'}}}, 'delegations': {'keys': {'123abc': {'keytype':'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}, 'roles': [{'name': 'root', 'keyids': ['123abc'], @@ -221,13 +226,15 @@ def 
test_schemas(self): 'paths': ['path1/', 'path2']}]}}), 'SNAPSHOT_SCHEMA': (tuf.formats.SNAPSHOT_SCHEMA, - {'_type': 'Snapshot', + {'_type': 'snapshot', + 'spec_version': '1.0', 'version': 8, 'expires': '1985-10-21T13:20:00Z', 'meta': {'snapshot.json': {'version': 1024}}}), 'TIMESTAMP_SCHEMA': (tuf.formats.TIMESTAMP_SCHEMA, - {'_type': 'Timestamp', + {'_type': 'timestamp', + 'spec_version': '1.0', 'version': 8, 'expires': '1985-10-21T13:20:00Z', 'meta': {'metadattimestamp.json': {'length': 1024, @@ -248,8 +255,9 @@ def test_schemas(self): 'custom': {'type': 'mirror'}}}), 'MIRRORLIST_SCHEMA': (tuf.formats.MIRRORLIST_SCHEMA, - {'_type': 'Mirrors', + {'_type': 'mirrors', 'version': 8, + 'spec_version': '1.0', 'expires': '1985-10-21T13:20:00Z', 'mirrors': [{'url_prefix': 'http://localhost:8001', 'metadata_path': 'metadata/', @@ -260,6 +268,8 @@ def test_schemas(self): # Iterate 'valid_schemas', ensuring each 'valid_schema' correctly matches # its respective 'schema_type'. for schema_name, (schema_type, valid_schema) in six.iteritems(valid_schemas): + if not schema_type.matches(valid_schema): + print('bad schema: ' + repr(valid_schema)) self.assertEqual(True, schema_type.matches(valid_schema)) # Test conditions for invalid schemas. @@ -330,6 +340,7 @@ def test_TimestampFile(self): + def test_RootFile(self): # Test conditions for valid instances of 'tuf.formats.RootFile'. version = 8 @@ -337,6 +348,7 @@ def test_RootFile(self): expires = '1985-10-21T13:20:00Z' keydict = {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}} @@ -344,18 +356,14 @@ def test_RootFile(self): 'threshold': 1, 'paths': ['path1/', 'path2']}} - compression_algorithms = ['gz'] - make_metadata = tuf.formats.RootFile.make_metadata from_metadata = tuf.formats.RootFile.from_metadata ROOT_SCHEMA = tuf.formats.ROOT_SCHEMA self.assertTrue(ROOT_SCHEMA.matches(make_metadata(version, expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms))) + keydict, roledict, consistent_snapshot))) metadata = make_metadata(version, expires, keydict, roledict, - consistent_snapshot, compression_algorithms) + consistent_snapshot) self.assertTrue(isinstance(from_metadata(metadata), tuf.formats.RootFile)) # Test conditions for invalid arguments. 
@@ -363,28 +371,15 @@ def test_RootFile(self): bad_expires = 'eight' bad_keydict = 123 bad_roledict = 123 - bad_compression_algorithms = ['nozip'] - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, bad_version, - expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, version, - bad_expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, version, - expires, - bad_keydict, roledict, - consistent_snapshot, - compression_algorithms) - self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, version, - expires, - keydict, bad_roledict, - consistent_snapshot, - compression_algorithms) + self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, + bad_version, expires, keydict, roledict, consistent_snapshot) + self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, + version, bad_expires, keydict, roledict, consistent_snapshot) + self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, + version, expires, bad_keydict, roledict, consistent_snapshot) + self.assertRaises(securesystemslib.exceptions.FormatError, make_metadata, + version, expires, keydict, bad_roledict, consistent_snapshot) self.assertRaises(securesystemslib.exceptions.FormatError, from_metadata, 'bad') @@ -430,6 +425,7 @@ def test_TargetsFile(self): 'custom': {'type': 'metadata'}}} delegations = {'keys': {'123abc': {'keytype':'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}, 'roles': [{'name': 'root', 'keyids': ['123abc'], @@ -548,12 +544,13 @@ def test_parse_base64(self): def test_make_signable(self): # Test conditions for expected make_signable() behavior. - root = {'_type': 'Root', + root = {'_type': 'root', + 'spec_version': '1.0', 'version': 8, 'consistent_snapshot': False, - 'compression_algorithms': ['gz'], 'expires': '1985-10-21T13:20:00Z', 'keys': {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}, 'roles': {'root': {'keyids': ['123abc'], @@ -573,6 +570,8 @@ def test_make_signable(self): + + def test_make_fileinfo(self): # Test conditions for valid arguments. length = 1024 @@ -680,13 +679,13 @@ def test_expected_meta_rolename(self): # Test conditions for valid arguments. expected_rolename = tuf.formats.expected_meta_rolename - self.assertEqual('Root', expected_rolename('root')) - self.assertEqual('Targets', expected_rolename('targets')) - self.assertEqual('Snapshot', expected_rolename('snapshot')) - self.assertEqual('Timestamp', expected_rolename('timestamp')) - self.assertEqual('Mirrors', expected_rolename('mirrors')) - self.assertEqual('Targets Role', expected_rolename('targets role')) - self.assertEqual('Root', expected_rolename('Root')) + self.assertEqual('root', expected_rolename('Root')) + self.assertEqual('targets', expected_rolename('Targets')) + self.assertEqual('snapshot', expected_rolename('Snapshot')) + self.assertEqual('timestamp', expected_rolename('Timestamp')) + self.assertEqual('mirrors', expected_rolename('Mirrors')) + self.assertEqual('targets role', expected_rolename('Targets Role')) + self.assertEqual('root', expected_rolename('Root')) # Test conditions for invalid arguments. 
self.assertRaises(securesystemslib.exceptions.FormatError, expected_rolename, 123) @@ -697,12 +696,13 @@ def test_expected_meta_rolename(self): def test_check_signable_object_format(self): # Test condition for a valid argument. - root = {'_type': 'Root', + root = {'_type': 'root', + 'spec_version': '1.0', 'version': 8, 'consistent_snapshot': False, - 'compression_algorithms': ['gz'], 'expires': '1985-10-21T13:20:00Z', 'keys': {'123abc': {'keytype': 'rsa', + 'scheme': 'rsassa-pss-sha256', 'keyval': {'public': 'pubkey', 'private': 'privkey'}}}, 'roles': {'root': {'keyids': ['123abc'], @@ -714,7 +714,7 @@ def test_check_signable_object_format(self): # Test conditions for invalid arguments. check_signable = tuf.formats.check_signable_object_format - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 'Root') + self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 'root') self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, 123) self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, tuf.formats.RootFile) self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, True) @@ -724,9 +724,9 @@ def test_check_signable_object_format(self): self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) root['signed']['_type'] = saved_type - root['signed']['_type'] = 'root' - self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) root['signed']['_type'] = 'Root' + self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) + root['signed']['_type'] = 'root' del root['signed']['expires'] self.assertRaises(securesystemslib.exceptions.FormatError, check_signable, root) diff --git a/tests/test_indefinite_freeze_attack.py b/tests/test_indefinite_freeze_attack.py index fd75141bb0..4f8854925f 100755 --- a/tests/test_indefinite_freeze_attack.py +++ b/tests/test_indefinite_freeze_attack.py @@ -50,13 +50,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf.formats import tuf.log diff --git a/tests/test_interpose_updater.py b/tests/test_interpose_updater.py index 93583d5d63..9e4b1d7f94 100755 --- a/tests/test_interpose_updater.py +++ b/tests/test_interpose_updater.py @@ -32,6 +32,7 @@ import time import copy import json +import unittest import tuf import tuf.roledb @@ -43,13 +44,6 @@ import securesystemslib -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest - - logger = logging.getLogger('tuf.test_interpose_updater') diff --git a/tests/test_key_revocation_integration.py b/tests/test_key_revocation_integration.py index b673944dd4..26577b456d 100755 --- a/tests/test_key_revocation_integration.py +++ b/tests/test_key_revocation_integration.py @@ -43,13 +43,7 @@ import random import subprocess import sys - -# 'unittest2' required for testing under Python < 2.7. 
-if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.log diff --git a/tests/test_keydb.py b/tests/test_keydb.py index d8b3971fa1..a061feb244 100755 --- a/tests/test_keydb.py +++ b/tests/test_keydb.py @@ -311,13 +311,9 @@ def test_create_keydb_from_root_metadata(self): version = 8 consistent_snapshot = False expires = '1985-10-21T01:21:00Z' - compression_algorithms = ['gz'] - root_metadata = tuf.formats.RootFile.make_metadata(version, - expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) + root_metadata = tuf.formats.RootFile.make_metadata(version, expires, + keydict, roledict, consistent_snapshot) self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) tuf.keydb.create_keydb_from_root_metadata(root_metadata) @@ -331,17 +327,17 @@ def test_create_keydb_from_root_metadata(self): # Test conditions for arguments with invalid formats. self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, None) + tuf.keydb.create_keydb_from_root_metadata, None) self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, '') + tuf.keydb.create_keydb_from_root_metadata, '') self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, 123) + tuf.keydb.create_keydb_from_root_metadata, 123) self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, ['123']) + tuf.keydb.create_keydb_from_root_metadata, ['123']) self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, {'bad': '123'}) + tuf.keydb.create_keydb_from_root_metadata, {'bad': '123'}) self.assertRaises(securesystemslib.exceptions.FormatError, - tuf.keydb.create_keydb_from_root_metadata, root_metadata, 123) + tuf.keydb.create_keydb_from_root_metadata, root_metadata, 123) # Verify that a keydb cannot be created for a non-existent repository name. tuf.keydb.create_keydb_from_root_metadata(root_metadata, 'non-existent') @@ -367,13 +363,9 @@ def test_create_keydb_from_root_metadata(self): keydict[keyid3] = rsakey3 version = 8 expires = '1985-10-21T01:21:00Z' - compression_algorithms = ['gz'] - root_metadata = tuf.formats.RootFile.make_metadata(version, - expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) + root_metadata = tuf.formats.RootFile.make_metadata(version, expires, + keydict, roledict, consistent_snapshot) self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata)) # Ensure only 'keyid2' was added to the keydb database. 'keyid' and diff --git a/tests/test_mix_and_match_attack.py b/tests/test_mix_and_match_attack.py index 61c1bdbc3e..845f473e36 100755 --- a/tests/test_mix_and_match_attack.py +++ b/tests/test_mix_and_match_attack.py @@ -44,13 +44,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf.exceptions import tuf.log diff --git a/tests/test_multiple_repositories_integration.py b/tests/test_multiple_repositories_integration.py index 16a2f3880c..7cf9348eb3 100755 --- a/tests/test_multiple_repositories_integration.py +++ b/tests/test_multiple_repositories_integration.py @@ -14,10 +14,8 @@ See LICENSE for licensing information. 
- Verify that clients are able to keep track of multiple repositories and - separate sets of metadata for each. - - TODO: Verify that multiple repositories can be set for the repository tool. + Verify that clients and the repository tools are able to keep track of + multiple repositories and separate sets of metadata for each. """ # Help with Python 3 compatibility, where the print statement is a function, an @@ -36,115 +34,64 @@ import logging import time import shutil +import unittest +import json import tuf import tuf.log import tuf.roledb import tuf.client.updater as updater import tuf.settings +import securesystemslib import tuf.unittest_toolbox as unittest_toolbox import tuf.repository_tool as repo_tool -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest - logger = logging.getLogger('test_multiple_repositories_integration') repo_tool.disable_console_log_messages() class TestMultipleRepositoriesIntegration(unittest_toolbox.Modified_TestCase): - @classmethod - def setUpClass(cls): - # setUpClass() is called before any of the test cases are executed. - - # Create a temporary directory to store the repository, metadata, and - # target files. 'temporary_directory' must be deleted in TearDownModule() - # so that temporary files are always removed, even when exceptions occur. - cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - - # Launch a SimpleHTTPServer (serves files in the current directory). - # Test cases will request metadata and target files that have been - # pre-generated in 'tuf/tests/repository_data', which will be served by the - # SimpleHTTPServer launched here. The test cases of this unit test assume - # the pre-generated metadata files have a specific structure, such - # as a delegated role 'targets/role1', three target files, five key files, - # etc. - cls.SERVER_PORT = random.randint(30000, 45000) - cls.SERVER_PORT2 = random.randint(30000, 45000) - command = ['python', 'simple_server.py', str(cls.SERVER_PORT)] - command2 = ['python', 'simple_server.py', str(cls.SERVER_PORT2)] - cls.server_process = subprocess.Popen(command, stderr=subprocess.PIPE) - cls.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE) - logger.info('Server processes started.') - logger.info('Server process id: ' + str(cls.server_process.pid)) - logger.info('Serving on port: ' + str(cls.SERVER_PORT)) - logger.info('Server 2 process id: ' + str(cls.server_process2.pid)) - logger.info('Serving 2 on port: ' + str(cls.SERVER_PORT2)) - cls.url = 'http://localhost:' + str(cls.SERVER_PORT) + os.path.sep - cls.url2 = 'http://localhost:' + str(cls.SERVER_PORT2) + os.path.sep - - # NOTE: Following error is raised if a delay is not applied: - # - time.sleep(1) - - - - @classmethod - def tearDownClass(cls): - # tearDownModule() is called after all the test cases have run. - # http://docs.python.org/2/library/unittest.html#class-and-module-fixtures - - # Remove the temporary repository directory, which should contain all the - # metadata, targets, and key files generated of all the test cases. - shutil.rmtree(cls.temporary_directory) - - # Kill the SimpleHTTPServer process. 
- if cls.server_process.returncode is None: - logger.info('Server process ' + str(cls.server_process.pid) + ' terminated.') - cls.server_process.kill() - - if cls.server_process2.returncode is None: - logger.info('Server 2 process ' + str(cls.server_process2.pid) + ' terminated.') - cls.server_process2.kill() - - - def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) + self.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + # Copy the original repository files provided in the test folder so that # any modifications made to repository files are restricted to the copies. # The 'repository_data' directory is expected to exist in 'tuf/tests/'. original_repository_files = os.path.join(os.getcwd(), 'repository_data') - temporary_repository_root = self.make_temp_directory(directory= + + self.temporary_repository_root = self.make_temp_directory(directory= self.temporary_directory) # The original repository, keystore, and client directories will be copied # for each test case. original_repository = os.path.join(original_repository_files, 'repository') original_client = os.path.join(original_repository_files, 'client', 'test_repository') + original_keystore = os.path.join(original_repository_files, 'keystore') + original_map_file = os.path.join(original_repository_files, 'map.json') # Save references to the often-needed client repository directories. # Test cases need these references to access metadata and target files. - self.repository_directory = os.path.join(temporary_repository_root, + self.repository_directory = os.path.join(self.temporary_repository_root, 'repository_server1') - self.repository_directory2 = os.path.join(temporary_repository_root, + self.repository_directory2 = os.path.join(self.temporary_repository_root, 'repository_server2') # Setting 'tuf.settings.repositories_directory' with the temporary client # directory copied from the original repository files. - tuf.settings.repositories_directory = temporary_repository_root + tuf.settings.repositories_directory = self.temporary_repository_root repository_name = 'repository1' repository_name2 = 'repository2' - self.client_directory = os.path.join(temporary_repository_root, repository_name) - self.client_directory2 = os.path.join(temporary_repository_root, repository_name2) + self.client_directory = os.path.join(self.temporary_repository_root, repository_name) + self.client_directory2 = os.path.join(self.temporary_repository_root, repository_name2) + + self.keystore_directory = os.path.join(self.temporary_repository_root, 'keystore') + self.map_file = os.path.join(self.client_directory, 'map.json') + self.map_file2 = os.path.join(self.client_directory2, 'map.json') # Copy the original 'repository', 'client', and 'keystore' directories # to the temporary repository the test cases can use. @@ -152,14 +99,46 @@ def setUp(self): shutil.copytree(original_repository, self.repository_directory2) shutil.copytree(original_client, self.client_directory) shutil.copytree(original_client, self.client_directory2) + shutil.copyfile(original_map_file, self.map_file) + shutil.copyfile(original_map_file, self.map_file2) + shutil.copytree(original_keystore, self.keystore_directory) - # Set the url prefix required by the 'tuf/client/updater.py' updater. - # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. 
- repository_basepath = self.repository_directory[len(os.getcwd()):] - url_prefix = \ - 'http://localhost:' + str(self.SERVER_PORT) + repository_basepath - url_prefix2 = \ - 'http://localhost:' + str(self.SERVER_PORT2) + repository_basepath + # Launch a SimpleHTTPServer (serves files in the current directory). + # Test cases will request metadata and target files that have been + # pre-generated in 'tuf/tests/repository_data', which will be served by the + # SimpleHTTPServer launched here. The test cases of this unit test assume + # the pre-generated metadata files have a specific structure, such + # as a delegated role 'targets/role1', three target files, five key files, + # etc. + self.SERVER_PORT = random.randint(30000, 45000) + self.SERVER_PORT2 = random.randint(30000, 45000) + + command = ['simple_server.py', str(self.SERVER_PORT)] + command2 = ['simple_server.py', str(self.SERVER_PORT2)] + + self.server_process = subprocess.Popen(command, stderr=subprocess.PIPE, + cwd=self.repository_directory) + + logger.debug('Server process started.') + logger.debug('Server process id: ' + str(self.server_process.pid)) + logger.debug('Serving on port: ' + str(self.SERVER_PORT)) + + self.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE, + cwd=self.repository_directory2) + + + logger.debug('Server process 2 started.') + logger.debug('Server 2 process id: ' + str(self.server_process2.pid)) + logger.debug('Serving 2 on port: ' + str(self.SERVER_PORT2)) + self.url = 'http://localhost:' + str(self.SERVER_PORT) + os.path.sep + self.url2 = 'http://localhost:' + str(self.SERVER_PORT2) + os.path.sep + + # NOTE: Following error is raised if a delay is not applied: + # + time.sleep(.8) + + url_prefix = 'http://localhost:' + str(self.SERVER_PORT) + url_prefix2 = 'http://localhost:' + str(self.SERVER_PORT2) self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, 'metadata_path': 'metadata', @@ -184,11 +163,25 @@ def tearDown(self): # directories that may have been created during each test case. unittest_toolbox.Modified_TestCase.tearDown(self) + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated of all the test cases. + shutil.rmtree(self.temporary_directory) + + # Kill the SimpleHTTPServer process. + if self.server_process.returncode is None: + logger.info('Server process ' + str(self.server_process.pid) + ' terminated.') + self.server_process.kill() + + if self.server_process2.returncode is None: + logger.info('Server 2 process ' + str(self.server_process2.pid) + ' terminated.') + self.server_process2.kill() + # updater.Updater() populates the roledb with the name "test_repository" tuf.roledb.clear_roledb(clear_all=True) tuf.keydb.clear_keydb(clear_all=True) + def test_update(self): self.assertEqual('repository1', str(self.repository_updater)) self.assertEqual('repository2', str(self.repository_updater2)) @@ -208,26 +201,73 @@ def test_update(self): # 'role1.json' should be downloaded, because it provides info for the # requested 'file3.txt'. 
- valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('file3.txt') + valid_targetinfo = self.repository_updater.get_one_valid_targetinfo('/file3.txt') self.assertEqual(sorted(['role2', 'role1', 'root', 'snapshot', 'targets', 'timestamp']), sorted(tuf.roledb.get_rolenames('repository1'))) + def test_repository_tool(self): - repository_name1 = 'repository1' + repository_name = 'repository1' repository_name2 = 'repository2' - self.assertEqual(repository_name1, str(self.repository_updater)) + self.assertEqual(repository_name, str(self.repository_updater)) self.assertEqual(repository_name2, str(self.repository_updater2)) - repository1 = repo_tool.load_repository(self.repository_directory, repository_name1) + repository = repo_tool.load_repository(self.repository_directory, repository_name) repository2 = repo_tool.load_repository(self.repository_directory2, repository_name2) - repository2.timestamp.version = 2 - self.assertEqual([], tuf.roledb.get_dirty_roles(repository_name1)) + repository.timestamp.version = 88 + self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name)) + self.assertEqual([], tuf.roledb.get_dirty_roles(repository_name2)) + + repository2.timestamp.version = 100 self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name2)) + key_file = os.path.join(self.keystore_directory, 'timestamp_key') + timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file, "password") + + repository.timestamp.load_signing_key(timestamp_private) + repository2.timestamp.load_signing_key(timestamp_private) + + repository.write('timestamp', increment_version_number=False) + repository2.write('timestamp', increment_version_number=False) + + # And move the staged metadata to the "live" metadata. + shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.rmtree(os.path.join(self.repository_directory2, 'metadata')) + + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory2, 'metadata.staged'), + os.path.join(self.repository_directory2, 'metadata')) + + # Verify that the client retrieves the expected updates. + logger.info('Downloading timestamp from server 1.') + self.repository_updater.refresh() + + self.assertEqual(88, self.repository_updater.metadata['current']['timestamp']['version']) + logger.info('Downloading timestamp from server 2.') + self.repository_updater2.refresh() + + self.assertEqual(100, self.repository_updater2.metadata['current']['timestamp']['version']) + + # Test the behavior of the multi-repository updater. 
+ map_file = securesystemslib.util.load_json_file(self.map_file) + map_file['repositories'][repository_name] = ['http://localhost:' + str(self.SERVER_PORT)] + map_file['repositories'][repository_name2] = ['http://localhost:' + str(self.SERVER_PORT2)] + with open(self.map_file, 'w') as file_object: + file_object.write(json.dumps(map_file)) + + multi_repo_updater = updater.MultiRepoUpdater(self.map_file) + targetinfo, my_updater = multi_repo_updater.get_one_valid_targetinfo('file3.txt') + + + my_updater.download_target(targetinfo, self.temporary_directory) + self.assertTrue(os.path.exists(os.path.join(self.temporary_directory, 'file3.txt'))) + + if __name__ == '__main__': unittest.main() diff --git a/tests/test_replay_attack.py b/tests/test_replay_attack.py index 9fb7c7a88e..30866ade90 100755 --- a/tests/test_replay_attack.py +++ b/tests/test_replay_attack.py @@ -45,13 +45,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf.formats import tuf.log @@ -62,8 +56,8 @@ import securesystemslib import six -# The repository tool is imported and logs console messages by default. Disable -# console log messages generated by this unit test. +# The repository tool is imported and logs console messages by default. +# Disable console log messages generated by this unit test. repo_tool.disable_console_log_messages() logger = logging.getLogger('tuf.test_replay_attack') diff --git a/tests/test_repository_lib.py b/tests/test_repository_lib.py index 815ade6f2f..584e0ca91e 100755 --- a/tests/test_repository_lib.py +++ b/tests/test_repository_lib.py @@ -34,13 +34,7 @@ import shutil import stat import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.formats @@ -71,6 +65,8 @@ def setUpClass(cls): # Create a temporary directory to store the repository, metadata, and target # files. 'temporary_directory' must be deleted in TearDownClass() so that # temporary files are always removed, even when exceptions occur. + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) @@ -83,6 +79,8 @@ def tearDownClass(cls): # Remove the temporary repository directory, which should contain all the # metadata, targets, and key files generated for the test cases. + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) shutil.rmtree(cls.temporary_directory) @@ -274,8 +272,10 @@ def test_import_ed25519_publickey_from_file(self): # Invalid public key imported (contains unexpected keytype.) keytype = imported_ed25519_key['keytype'] keyval = imported_ed25519_key['keyval'] + scheme = imported_ed25519_key['scheme'] ed25519key_metadata_format = \ - securesystemslib.keys.format_keyval_to_metadata(keytype, keyval, private=False) + securesystemslib.keys.format_keyval_to_metadata(keytype, scheme, + keyval, private=False) ed25519key_metadata_format['keytype'] = 'invalid_keytype' with open(ed25519_keypath + '.pub', 'wb') as file_object: @@ -596,8 +596,16 @@ def test_generate_snapshot_metadata(self): repository_junk = repo_tool.load_repository(repository_directory) + # For testing purposes, store an invalid metadata file in the metadata directory + # to verify that it isn't loaded by generate_snapshot_metadata(). 
Unknown + # metadata file extensions should be ignored. + invalid_metadata_file = os.path.join(metadata_directory, 'role_file.xml') + with open(invalid_metadata_file, 'w') as file_object: + file_object.write('bad extension on metadata file') + root_filename = 'root' targets_filename = 'targets' + snapshot_metadata = \ repo_lib.generate_snapshot_metadata(metadata_directory, version, expiration_date, root_filename, @@ -743,19 +751,17 @@ def test_write_metadata_file(self): root_signable = securesystemslib.util.load_json_file(root_filename) output_filename = os.path.join(temporary_directory, 'root.json') - compression_algorithms = ['gz'] version_number = root_signable['signed']['version'] + 1 self.assertFalse(os.path.exists(output_filename)) repo_lib.write_metadata_file(root_signable, output_filename, version_number, - compression_algorithms, consistent_snapshot=False) + consistent_snapshot=False) self.assertTrue(os.path.exists(output_filename)) - self.assertTrue(os.path.exists(output_filename + '.gz')) # Attempt to over-write the previously written metadata file. An exception # is not raised in this case, only a debug message is logged. repo_lib.write_metadata_file(root_signable, output_filename, version_number, - compression_algorithms, consistent_snapshot=False) + consistent_snapshot=False) # Try to write a consistent metadate file. An exception is not raised in # this case. For testing purposes, root.json should be a hard link to the @@ -763,7 +769,7 @@ def test_write_metadata_file(self): # the latest consistent files. tuf.settings.CONSISTENT_METHOD = 'hard_link' repo_lib.write_metadata_file(root_signable, output_filename, version_number, - compression_algorithms, consistent_snapshot=True) + consistent_snapshot=True) # Test if the consistent files are properly named # Filename format of a consistent file: .rolename.json @@ -774,9 +780,7 @@ def test_write_metadata_file(self): # Try to add more consistent metadata files. version_number += 1 repo_lib.write_metadata_file(root_signable, output_filename, - version_number, - compression_algorithms, - consistent_snapshot=True) + version_number, consistent_snapshot=True) # Test if the the latest root.json points to the expected consistent file # and consistent metadata do not all point to the same root.json @@ -790,76 +794,34 @@ def test_write_metadata_file(self): tuf.settings.CONSISTENT_METHOD = 'somebadidea' self.assertRaises(securesystemslib.exceptions.InvalidConfigurationError, repo_lib.write_metadata_file, root_signable, output_filename, - version_number, compression_algorithms, consistent_snapshot=True) + version_number, consistent_snapshot=True) # Try to create a link to root.json when root.json doesn't exist locally. # repository_lib should log a message if this is the case. tuf.settings.CONSISTENT_METHOD = 'hard_link' os.remove(output_filename) repo_lib.write_metadata_file(root_signable, output_filename, version_number, - compression_algorithms, consistent_snapshot=True) + consistent_snapshot=True) # Reset CONSISTENT_METHOD so that subsequent tests work as expected. tuf.settings.CONSISTENT_METHOD = 'copy' - # Test for unknown compression algorithm. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, output_filename, version_number, compression_algorithms=['bad_algo'], - consistent_snapshot=False) - # Test improperly formatted arguments. 
self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - 3, output_filename, version_number, - compression_algorithms, False) + 3, output_filename, version_number, False) self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, 3, version_number, compression_algorithms, - False) + root_signable, 3, version_number, False) self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, output_filename, '3', - compression_algorithms, False) + root_signable, output_filename, '3', False) self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file, - root_signable, output_filename, version_number, - compression_algorithms, 3) - - - - def test__write_compressed_metadata(self): - # Test for invalid 'compressed_filename' argument and set - # 'write_new_metadata' to False. - file_object = securesystemslib.util.TempFile() - existing_filename = os.path.join('repository_data', 'repository', - 'metadata', 'root.json') - - write_new_metadata = False - repo_lib._write_compressed_metadata(file_object, - compressed_filename=existing_filename, - write_new_metadata=write_new_metadata, - consistent_snapshot=False, - version_number=8) - - # Test writing of compressed metadata when consistent snapshots is enabled. - file_object = securesystemslib.util.TempFile() - shutil.copy(existing_filename, os.path.join(self.temporary_directory, '8.root.json.gz')) - shutil.copy(existing_filename, os.path.join(self.temporary_directory, '8.root.json.zip')) - shutil.copy(existing_filename, os.path.join(self.temporary_directory, 'root.json.zip')) - compressed_filename = os.path.join(self.temporary_directory, 'root.json.gz') - - # For testing purposes, add additional compression algorithms to - # repo_lib.SUPPORTED_COMPRESSION_EXTENSIONS. - repo_lib.SUPPORTED_COMPRESSION_EXTENSIONS = ['gz', 'zip', 'bz2'] - repo_lib._write_compressed_metadata(file_object, - compressed_filename=compressed_filename, - write_new_metadata=True, - consistent_snapshot=True, - version_number=8) - repo_lib.SUPPORTED_COMPRESSION_EXTENSIONS = ['gz'] + root_signable, output_filename, version_number, 3) + def test_create_tuf_client_directory(self): # Test normal case. temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) - repository_directory = os.path.join('repository_data', - 'repository') + repository_directory = os.path.join('repository_data', 'repository') client_directory = os.path.join(temporary_directory, 'client') repo_lib.create_tuf_client_directory(repository_directory, client_directory) @@ -875,15 +837,16 @@ def test_create_tuf_client_directory(self): # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.create_tuf_client_directory, - 3, client_directory) - self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.create_tuf_client_directory, - repository_directory, 3) + self.assertRaises(securesystemslib.exceptions.FormatError, + repo_lib.create_tuf_client_directory, 3, client_directory) + self.assertRaises(securesystemslib.exceptions.FormatError, + repo_lib.create_tuf_client_directory, repository_directory, 3) # Test invalid argument (i.e., client directory already exists.) 
- self.assertRaises(securesystemslib.exceptions.RepositoryError, repo_lib.create_tuf_client_directory, - repository_directory, client_directory) + self.assertRaises(securesystemslib.exceptions.RepositoryError, + repo_lib.create_tuf_client_directory, repository_directory, + client_directory) # Test invalid client metadata directory (i.e., non-errno.EEXIST exceptions # should be re-raised.) @@ -897,7 +860,7 @@ def test_create_tuf_client_directory(self): os.chmod(client_directory, current_client_directory_mode & ~stat.S_IWUSR) self.assertRaises(OSError, repo_lib.create_tuf_client_directory, - repository_directory, client_directory) + repository_directory, client_directory) # Reset the client directory's mode. os.chmod(client_directory, current_client_directory_mode) @@ -906,7 +869,8 @@ def test_create_tuf_client_directory(self): def test__check_directory(self): # Test for non-existent directory. - self.assertRaises(securesystemslib.exceptions.Error, repo_lib._check_directory, 'non-existent') + self.assertRaises(securesystemslib.exceptions.Error, + repo_lib._check_directory, 'non-existent') @@ -948,8 +912,7 @@ def test__generate_and_write_metadata(self): repo_lib._generate_and_write_metadata('obsolete_role', obsolete_metadata, targets_directory, metadata_directory, consistent_snapshot=False, - filenames=None, compression_algorithms=['gz'], - repository_name=repository_name) + filenames=None, repository_name=repository_name) snapshot_filepath = os.path.join('repository_data', 'repository', 'metadata', 'snapshot.json') @@ -1016,14 +979,12 @@ def test__load_top_level_metadata(self): signable = securesystemslib.util.load_json_file(os.path.join(metadata_directory, 'root.json')) signable['signatures'].append(signable['signatures'][0]) - repo_lib.write_metadata_file(signable, root_file, 8, ['gz'], False) + repo_lib.write_metadata_file(signable, root_file, 8, False) - # Remove compressed metadata so that we can test for loading of a - # repository with no compression enabled. - for role_file in os.listdir(metadata_directory): - if role_file.endswith('.json.gz'): - role_filename = os.path.join(metadata_directory, role_file) - os.remove(role_filename) + # Attempt to load a repository that contains a compressed Root file. + repository = repo_tool.create_new_repository(repository_directory, repository_name) + filenames = repo_lib.get_metadata_filenames(metadata_directory) + repo_lib._load_top_level_metadata(repository, filenames, repository_name) filenames = repo_lib.get_metadata_filenames(metadata_directory) repository = repo_tool.create_new_repository(repository_directory, repository_name) diff --git a/tests/test_repository_tool.py b/tests/test_repository_tool.py index 4acc486988..9d00777103 100755 --- a/tests/test_repository_tool.py +++ b/tests/test_repository_tool.py @@ -35,13 +35,6 @@ import sys import errno -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest - import tuf import tuf.log import tuf.formats @@ -84,6 +77,9 @@ def tearDownClass(cls): def setUp(self): + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) + tuf.roledb.create_roledb('test_repository') tuf.keydb.create_keydb('test_repository') @@ -224,7 +220,6 @@ def test_writeall(self): repository.targets('role1').load_signing_key(role1_privkey) # (6) Write repository. - repository.targets.compressions = ['gz'] repository.writeall() # Verify that the expected metadata is written. 
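Editor's note: the test_writeall() hunks around this point drop the compression options and then exercise repository.writeall() and repository.write(). As a minimal sketch of the load/sign/write cycle these tests rely on, assuming the pre-generated 'repository_data' layout and its keystore password "password" used by the fixtures; the paths below are illustrative only, not part of this change:

```python
import tuf.repository_tool as repo_tool

# Load an existing repository from its root directory.
repository = repo_tool.load_repository('repository_data/repository')

# Bumping a role's version marks it dirty, so it will be written out.
repository.timestamp.version = repository.timestamp.version + 1

# Load the role's private key so the new metadata can be signed.
timestamp_key = repo_tool.import_ed25519_privatekey_from_file(
    'repository_data/keystore/timestamp_key', 'password')
repository.timestamp.load_signing_key(timestamp_key)

# Write just the dirty role without auto-incrementing its version;
# writeall() would write every dirty role. The output lands in
# 'metadata.staged', which the tests then copy over the live 'metadata'.
repository.write('timestamp', increment_version_number=False)
```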
@@ -237,10 +232,6 @@ def test_writeall(self): self.assertTrue(os.path.exists(role_filepath)) - if role == 'targets.json': - compressed_filepath = role_filepath + '.gz' - self.assertTrue(os.path.exists(compressed_filepath)) - # Verify the 'role1.json' delegation is also written. role1_filepath = os.path.join(metadata_directory, 'role1.json') role1_signable = securesystemslib.util.load_json_file(role1_filepath) @@ -370,8 +361,7 @@ def test_writeall(self): self.assertEqual(['timestamp'], tuf.roledb.get_dirty_roles(repository_name)) # Test improperly formatted arguments. - self.assertRaises(securesystemslib.exceptions.FormatError, repository.writeall, 3, False) - self.assertRaises(securesystemslib.exceptions.FormatError, repository.writeall, False, 3) + self.assertRaises(securesystemslib.exceptions.FormatError, repository.writeall, 3) @@ -385,11 +375,9 @@ def test_get_filepaths_in_directory(self): # Verify the expected filenames. get_filepaths_in_directory() returns # a list of absolute paths. metadata_files = repo.get_filepaths_in_directory(metadata_directory) - expected_files = ['1.root.json', '1.root.json.gz', 'root.json', - 'targets.json', 'targets.json.gz', 'snapshot.json', - 'snapshot.json.gz', 'timestamp.json', - 'timestamp.json.gz', 'role1.json', 'role1.json.gz', - 'role2.json', 'role2.json.gz'] + expected_files = ['1.root.json', 'root.json', + 'targets.json', 'snapshot.json', + 'timestamp.json', 'role1.json', 'role2.json'] basenames = [] for filepath in metadata_files: @@ -445,7 +433,7 @@ def __init__(self): roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, 'signatures': [], 'version': 0, 'consistent_snapshot': False, - 'compressions': [''], 'expires': expiration, + 'expires': expiration, 'partial_loaded': False} tuf.roledb.add_role(self._rolename, roleinfo, @@ -570,24 +558,6 @@ def test_signing_keys(self): - def test_compressions(self): - # Test default case, where only uncompressed metadata is supported. - self.assertEqual(self.metadata.compressions, ['']) - - # Test compressions getter after a compressions algorithm is added. - self.metadata.compressions = ['gz'] - - self.assertEqual(self.metadata.compressions, ['', 'gz']) - - - # Test improperly formatted argument. - try: - self.metadata.compressions = 3 - except securesystemslib.exceptions.FormatError: - pass - else: - self.fail('Setter failed to detect improperly formatted compressions') - def test_add_verification_key(self): @@ -605,8 +575,7 @@ def test_add_verification_key(self): expiration = expiration.isoformat() + 'Z' roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, 'signatures': [], 'version': 0, - 'consistent_snapshot': False, - 'compressions': [''], 'expires': expiration, + 'consistent_snapshot': False, 'expires': expiration, 'partial_loaded': False} tuf.roledb.add_role('Root', roleinfo, 'test_repository') @@ -1705,11 +1674,6 @@ def test_load_repository(self): with open(bad_root_content, 'wb') as file_object: file_object.write(b'bad') - # Remove the compressed version of role1 to test whether the - # load_repository() complains or not (it logs a message). 
- role1_path = os.path.join(metadata_directory, 'role1.json.gz') - os.remove(role1_path) - repository = repo_tool.load_repository(repository_directory) self.assertTrue(isinstance(repository, repo_tool.Repository)) @@ -1758,6 +1722,47 @@ def test_dirty_roles(self): repository.dirty_roles() + + def test_dump_signable_metadata(self): + metadata_directory = os.path.join('repository_data', + 'repository', 'metadata') + targets_metadata_file = os.path.join(metadata_directory, 'targets.json') + + metadata_content = repo_tool.dump_signable_metadata(targets_metadata_file) + + # Test for an invalid targets metadata file.. + self.assertRaises(securesystemslib.exceptions.FormatError, repo_tool.dump_signable_metadata, 1) + self.assertRaises(IOError, repo_tool.dump_signable_metadata, 'bad file path') + + + + def test_append_signature(self): + metadata_directory = os.path.join('repository_data', + 'repository', 'metadata') + targets_metadata_path = os.path.join(metadata_directory, 'targets.json') + + temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory) + tmp_targets_metadata_path = os.path.join(temporary_directory, 'targets.json') + shutil.copyfile(targets_metadata_path, tmp_targets_metadata_path) + + # Test for normal case. + targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path) + num_signatures = len(targets_metadata['signatures']) + signature = targets_metadata['signatures'][0] + + repo_tool.append_signature(signature, tmp_targets_metadata_path) + + targets_metadata = securesystemslib.util.load_json_file(tmp_targets_metadata_path) + self.assertTrue(num_signatures, len(targets_metadata['signatures'])) + + # Test for invalid arguments. + self.assertRaises(securesystemslib.exceptions.FormatError, + repo_tool.append_signature, 1, tmp_targets_metadata_path) + + self.assertRaises(securesystemslib.exceptions.FormatError, + repo_tool.append_signature, signature, 1) + + # Run the test cases. if __name__ == '__main__': unittest.main() diff --git a/tests/test_roledb.py b/tests/test_roledb.py index 88c5c613d9..bb0d09e8cf 100755 --- a/tests/test_roledb.py +++ b/tests/test_roledb.py @@ -542,13 +542,9 @@ def test_create_roledb_from_root_metadata(self): version = 8 consistent_snapshot = False expires = '1985-10-21T01:21:00Z' - compression_algorithms = ['gz'] root_metadata = tuf.formats.RootFile.make_metadata(version, - expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) + expires, keydict, roledict, consistent_snapshot) self.assertEqual(None, tuf.roledb.create_roledb_from_root_metadata(root_metadata)) @@ -594,12 +590,9 @@ def test_create_roledb_from_root_metadata(self): # Generate 'root_metadata' to verify that 'release' and 'root' are added # to the role database. root_metadata = tuf.formats.RootFile.make_metadata(version, - expires, - keydict, roledict, - consistent_snapshot, - compression_algorithms) + expires, keydict, roledict, consistent_snapshot) self.assertEqual(None, - tuf.roledb.create_roledb_from_root_metadata(root_metadata)) + tuf.roledb.create_roledb_from_root_metadata(root_metadata)) # Ensure only 'root' and 'release' were added to the role database. 
self.assertEqual(2, len(tuf.roledb._roledb_dict['default'])) diff --git a/tests/test_root_versioning_integration.py b/tests/test_root_versioning_integration.py index 790706fd03..25b2200e48 100755 --- a/tests/test_root_versioning_integration.py +++ b/tests/test_root_versioning_integration.py @@ -27,12 +27,7 @@ import tempfile import shutil import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest -else: - import unittest2 as unittest +import unittest import tuf import tuf.log @@ -179,7 +174,6 @@ def test_root_role_versioning(self): repository.targets('role1').load_signing_key(role1_privkey) # (6) Write repository. - repository.targets.compressions = ['gz'] repository.writeall() self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json'))) diff --git a/tests/test_sig.py b/tests/test_sig.py index 43c5050518..c68a4de6e7 100755 --- a/tests/test_sig.py +++ b/tests/test_sig.py @@ -70,7 +70,7 @@ def test_get_signature_status_no_role(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) # A valid signable, but non-existent role argument. self.assertRaises(tuf.exceptions.UnknownRoleError, @@ -116,7 +116,7 @@ def test_get_signature_status_bad_sig(self): self.assertEqual([KEYS[0]['keyid']], sig_status['bad_sigs']) self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) self.assertFalse(tuf.sig.verify(signable, 'Root')) @@ -126,20 +126,21 @@ def test_get_signature_status_bad_sig(self): tuf.roledb.remove_role('Root') - def test_get_signature_status_unknown_method(self): + def test_get_signature_status_unknown_signing_scheme(self): signable = {'signed' : 'test', 'signatures' : []} signable['signatures'].append(securesystemslib.keys.create_signature( KEYS[0], signable['signed'])) - signable['signatures'][0]['method'] = 'fake-sig-method' + valid_scheme = KEYS[0]['scheme'] + KEYS[0]['scheme'] = 'unknown_signing_scheme' tuf.keydb.add_key(KEYS[0]) threshold = 1 roleinfo = tuf.formats.make_role_metadata( [KEYS[0]['keyid']], threshold) - tuf.roledb.add_role('Root', roleinfo) + tuf.roledb.add_role('root', roleinfo) - sig_status = tuf.sig.get_signature_status(signable, 'Root') + sig_status = tuf.sig.get_signature_status(signable, 'root') self.assertEqual(1, sig_status['threshold']) self.assertEqual([], sig_status['good_sigs']) @@ -147,14 +148,15 @@ def test_get_signature_status_unknown_method(self): self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) self.assertEqual([KEYS[0]['keyid']], - sig_status['unknown_method_sigs']) + sig_status['unknown_signing_schemes']) - self.assertFalse(tuf.sig.verify(signable, 'Root')) + self.assertFalse(tuf.sig.verify(signable, 'root')) # Done. Let's remove the added key(s) from the key database. + KEYS[0]['scheme'] = valid_scheme tuf.keydb.remove_key(KEYS[0]['keyid']) # Remove the role. 
- tuf.roledb.remove_role('Root') + tuf.roledb.remove_role('root') def test_get_signature_status_single_key(self): @@ -177,7 +179,7 @@ def test_get_signature_status_single_key(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) self.assertTrue(tuf.sig.verify(signable, 'Root')) @@ -189,7 +191,7 @@ def test_get_signature_status_single_key(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([KEYS[0]['keyid']], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) # Done. Let's remove the added key(s) from the key database. tuf.keydb.remove_key(KEYS[0]['keyid']) @@ -217,7 +219,7 @@ def test_get_signature_status_below_threshold(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) self.assertFalse(tuf.sig.verify(signable, 'Root')) @@ -252,7 +254,7 @@ def test_get_signature_status_below_threshold_unrecognized_sigs(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([KEYS[2]['keyid']], sig_status['unknown_sigs']) self.assertEqual([], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) self.assertFalse(tuf.sig.verify(signable, 'Root')) @@ -291,7 +293,7 @@ def test_get_signature_status_below_threshold_unauthorized_sigs(self): self.assertEqual([], sig_status['bad_sigs']) self.assertEqual([], sig_status['unknown_sigs']) self.assertEqual([KEYS[1]['keyid']], sig_status['untrusted_sigs']) - self.assertEqual([], sig_status['unknown_method_sigs']) + self.assertEqual([], sig_status['unknown_signing_schemes']) self.assertFalse(tuf.sig.verify(signable, 'Root')) diff --git a/tests/test_slow_retrieval_attack.py b/tests/test_slow_retrieval_attack.py index 11e468982a..4779185eb4 100755 --- a/tests/test_slow_retrieval_attack.py +++ b/tests/test_slow_retrieval_attack.py @@ -48,13 +48,7 @@ import subprocess import logging import sys - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf.log import tuf.client.updater as updater diff --git a/tests/test_unittest_toolbox.py b/tests/test_unittest_toolbox.py new file mode 100755 index 0000000000..18b39b3aea --- /dev/null +++ b/tests/test_unittest_toolbox.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +""" + + test_unittest_toolbox.py + + + Vladimir Diaz + + + July 14, 2017. + + + See LICENSE for licensing information. + + + Test cases for unittest_toolbox.py. +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. 
+from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import unittest +import logging +import shutil + +import tuf.unittest_toolbox as unittest_toolbox + +logger = logging.getLogger('test_unittest_toolbox') + + +class TestUnittestToolbox(unittest_toolbox.Modified_TestCase): + def setUp(self): + unittest_toolbox.Modified_TestCase.setUp(self) + + def tearDown(self): + unittest_toolbox.Modified_TestCase.tearDown(self) + + + def test_tear_down_already_deleted_dir(self): + temp_directory = self.make_temp_directory() + + # Delete the temp directory to make sure unittest_toolbox doesn't + # complain about the missing temp_directory. + shutil.rmtree(temp_directory) + + +# Run the unit tests. +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_updater.py b/tests/test_updater.py old mode 100755 new mode 100644 index 1163afa5c2..903702d265 --- a/tests/test_updater.py +++ b/tests/test_updater.py @@ -56,22 +56,16 @@ import subprocess import sys import errno - -# 'unittest2' required for testing under Python < 2.7. -if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.exceptions - import tuf.log import tuf.formats import tuf.keydb import tuf.roledb import tuf.repository_tool as repo_tool +import tuf.repository_lib as repo_lib import tuf.unittest_toolbox as unittest_toolbox import tuf.client.updater as updater @@ -133,6 +127,8 @@ def tearDownClass(cls): def setUp(self): # We are inheriting from custom class. unittest_toolbox.Modified_TestCase.setUp(self) + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) self.repository_name = 'test_repository' @@ -312,6 +308,14 @@ def test_1__load_metadata_from_file(self): self.assertEqual(self.repository_updater.metadata['current']['role1'], role1_meta['signed']) + # Verify that _load_metadata_from_file() doesn't raise an exception for + # improperly formatted metadata, and doesn't load the bad file. + with open(role1_filepath, 'ab') as file_object: + file_object.write(b'bad JSON data') + + self.repository_updater._load_metadata_from_file('current', 'role1') + self.assertEqual(len(self.repository_updater.metadata['current']), 5) + # Test if we fail gracefully if we can't deserialize a meta file self.repository_updater._load_metadata_from_file('current', 'empty_file') self.assertFalse('empty_file' in self.repository_updater.metadata['current']) @@ -325,29 +329,29 @@ def test_1__load_metadata_from_file(self): - """ def test_1__rebuild_key_and_role_db(self): # Setup root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) root_metadata = self.repository_updater.metadata['current']['root'] root_threshold = root_metadata['roles']['root']['threshold'] - print('\nnumber of root keys: ' + str(len(root_metadata['keys'].keys()))) - print('\nKeys in root metadata: ' + repr(root_metadata['keys'].keys())) number_of_root_keys = len(root_metadata['keys']) self.assertEqual(root_roleinfo['threshold'], root_threshold) - # Ensure we add 1 to the number of root keys (actually, the number of root + + # Ensure we add 2 to the number of root keys (actually, the number of root # keys multiplied by the number of keyid hash algorithms), to include the - # delegated targets key. The delegated roles of 'targets.json' are also - # loaded when the repository object is instantiated. 
- print('\ndifference: ' + repr(list(set(tuf.keydb._keydb_dict[self.repository_name].keys()) - set(root_metadata['keys'].keys())))) - self.assertEqual(number_of_root_keys * 2 + 1, len(tuf.keydb._keydb_dict[self.repository_name])) + # delegated targets key (+1 for its sha512 keyid). The delegated roles of + # 'targets.json' are also loaded when the repository object is + # instantiated. + + self.assertEqual(number_of_root_keys * 2 + 2, len(tuf.keydb._keydb_dict[self.repository_name])) # Test: normal case. self.repository_updater._rebuild_key_and_role_db() root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) self.assertEqual(root_roleinfo['threshold'], root_threshold) + # _rebuild_key_and_role_db() will only rebuild the keys and roles specified # in the 'root.json' file, unlike __init__(). Instantiating an updater # object calls both _rebuild_key_and_role_db() and _import_delegations(). @@ -363,7 +367,6 @@ def test_1__rebuild_key_and_role_db(self): root_roleinfo = tuf.roledb.get_roleinfo('root', self.repository_name) self.assertEqual(root_roleinfo['threshold'], 8) self.assertEqual(number_of_root_keys * 2 - 2, len(tuf.keydb._keydb_dict[self.repository_name])) - """ @@ -403,6 +406,13 @@ def test_1__update_versioninfo(self): self.assertEqual(len(versioninfo_dict), 3) self.assertEqual(versioninfo_dict['bad_role.json'], None) + # Verify that the versioninfo specified in Timestamp is used if the Snapshot + # role hasn't been downloaded yet. + del self.repository_updater.metadata['current']['snapshot'] + #self.assertRaises(self.repository_updater._update_versioninfo('snapshot.json')) + self.repository_updater._update_versioninfo('snapshot.json') + self.assertEqual(versioninfo_dict['snapshot.json']['version'], 1) + @@ -456,17 +466,31 @@ def test_2__fileinfo_has_changed(self): self.assertTrue(self.repository_updater._fileinfo_has_changed('root.json', new_root_fileinfo)) + # Verify that _fileinfo_has_changed() returns True if no fileinfo (or set + # to None) exists for some role. + self.assertTrue(self.repository_updater._fileinfo_has_changed('bad.json', + new_root_fileinfo)) + + saved_fileinfo = self.repository_updater.fileinfo['root.json'] + self.repository_updater.fileinfo['root.json'] = None + self.assertTrue(self.repository_updater._fileinfo_has_changed('root.json', + new_root_fileinfo)) + + + self.repository_updater.fileinfo['root.json'] = saved_fileinfo + new_root_fileinfo['hashes']['sha666'] = '666' + self.repository_updater._fileinfo_has_changed('root.json', + new_root_fileinfo) - """ def test_2__import_delegations(self): # Setup. # In order to test '_import_delegations' the parent of the delegation # has to be in Repository.metadata['current'], but it has to be inserted # there without using '_load_metadata_from_file()' since it calls # '_import_delegations()'. - repository_name = self.repository_updater.updater_name + repository_name = self.repository_updater.repository_name tuf.keydb.clear_keydb(repository_name) tuf.roledb.clear_roledb(repository_name) @@ -476,10 +500,9 @@ def test_2__import_delegations(self): self.repository_updater._rebuild_key_and_role_db() self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 4) + # Take into account the number of keyids algorithms supported by default, # which this test condition expects to be two (sha256 and sha512). 
- print('\nkeydb_dict len: ' + repr(len(tuf.keydb._keydb_dict[repository_name].keys()))) - print('\nkeydb_dict: ' + repr(tuf.keydb._keydb_dict[repository_name].keys())) self.assertEqual(4 * 2, len(tuf.keydb._keydb_dict[repository_name])) # Test: pass a role without delegations. @@ -497,8 +520,8 @@ def test_2__import_delegations(self): self.assertEqual(len(tuf.roledb._roledb_dict[repository_name]), 5) # The number of root keys (times the number of key hash algorithms) + - # delegation's key. - self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4 * 2 + 1) + # delegation's key (+1 for its sha512 keyid). + self.assertEqual(len(tuf.keydb._keydb_dict[repository_name]), 4 * 2 + 2) # Verify that roledb dictionary was added. self.assertTrue('role1' in tuf.roledb._roledb_dict[repository_name]) @@ -526,24 +549,23 @@ def test_2__import_delegations(self): self.repository_updater.metadata['current']['targets']\ ['delegations']['keys'][existing_keyid]['keytype'] = 'ed25519' - # Verify that _import_delegations() raises an exception if any key in - # 'delegations' is improperly formatted (i.e., bad keyid). - tuf.keydb.clear_keydb(repository_name) - - self.repository_updater.metadata['current']['targets']['delegations']\ - ['keys'].update({'123': self.repository_updater.metadata['current']\ - ['targets']['delegations']['keys'][existing_keyid]}) - self.assertRaises(securesystemslib.exceptions.Error, self.repository_updater._import_delegations, - 'targets') + # Verify that _import_delegations() raises an exception if one of the + # delegated keys is malformed. + valid_keyval = self.repository_updater.metadata['current']['targets']\ + ['delegations']['keys'][existing_keyid]['keyval'] - # Restore the keyid of 'existing_keyids2'. self.repository_updater.metadata['current']['targets']\ - ['delegations']['keys'][existing_keyid]['keyid'] = existing_keyid + ['delegations']['keys'][existing_keyid]['keyval'] = 1 + self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') - # Verify that _import_delegations() raises an exception if it fails to add - # one of the roles loaded from parent role's 'delegations'. - """ + self.repository_updater.metadata['current']['targets']\ + ['delegations']['keys'][existing_keyid]['keyval'] = valid_keyval + # Verify that _import_delegations() raises an exception if one of the + # delegated roles is malformed. + self.repository_updater.metadata['current']['targets']\ + ['delegations']['roles'][0]['name'] = 1 + self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._import_delegations, 'targets') @@ -682,18 +704,16 @@ def test_3__update_metadata(self): # version is installed if the compressed one is downloaded. self.assertFalse('targets' in self.repository_updater.metadata['current']) self.repository_updater._update_metadata('targets', - DEFAULT_TARGETS_FILELENGTH, - targets_versioninfo['version'], - 'gzip') + DEFAULT_TARGETS_FILELENGTH, targets_versioninfo['version']) self.assertTrue('targets' in self.repository_updater.metadata['current']) self.assertEqual(targets_versioninfo['version'], - self.repository_updater.metadata['current']['targets']['version']) + self.repository_updater.metadata['current']['targets']['version']) # Test: Invalid / untrusted version numbers. - # Invalid version number for the uncompressed version of 'targets.json'. + # Invalid version number for 'targets.json'. 
self.assertRaises(tuf.exceptions.NoWorkingMirrorError, - self.repository_updater._update_metadata, - 'targets', DEFAULT_TARGETS_FILELENGTH, 88) + self.repository_updater._update_metadata, + 'targets', DEFAULT_TARGETS_FILELENGTH, 88) # Verify that the specific exception raised is correct for the previous # case. @@ -705,19 +725,13 @@ def test_3__update_metadata(self): for mirror_error in six.itervalues(e.mirror_errors): assert isinstance(mirror_error, securesystemslib.exceptions.BadVersionNumberError) - # Invalid version number for the compressed version of 'targets.json' - self.assertRaises(tuf.exceptions.NoWorkingMirrorError, - self.repository_updater._update_metadata, - 'targets', DEFAULT_TARGETS_FILELENGTH, 88, - 'gzip') - # Verify that the specific exception raised is correct for the previous # case. The version number is checked, so the specific error in # this case should be 'securesystemslib.exceptions.BadVersionNumberError'. try: self.repository_updater._update_metadata('targets', DEFAULT_TARGETS_FILELENGTH, - 88, 'gzip') + 88) except tuf.exceptions.NoWorkingMirrorError as e: for mirror_error in six.itervalues(e.mirror_errors): @@ -727,6 +741,56 @@ def test_3__update_metadata(self): + def test_3__get_metadata_file(self): + + valid_tuf_version = tuf.formats.TUF_VERSION_NUMBER + tuf.formats.TUF_VERSION_NUMBER = '2' + + repository = repo_tool.load_repository(self.repository_directory) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + repository.writeall() + + # Move the staged metadata to the "live" metadata. + shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + + upperbound_filelength = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH + try: + self.repository_updater._get_metadata_file('timestamp', 'timestamp.json', + upperbound_filelength, 1) + + except tuf.exceptions.NoWorkingMirrorError as e: + for mirror_error in six.itervalues(e.mirror_errors): + assert isinstance(mirror_error, securesystemslib.exceptions.BadVersionNumberError) + + # Test for an improperly formatted TUF version number. + tuf.formats.TUF_VERSION_NUMBER = 'BAD' + repository = repo_tool.load_repository(self.repository_directory) + repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) + repository.writeall() + + # Move the staged metadata to the "live" metadata. + shutil.rmtree(os.path.join(self.repository_directory, 'metadata')) + shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'), + os.path.join(self.repository_directory, 'metadata')) + + try: + self.repository_updater._get_metadata_file('timestamp', 'timestamp.json', + upperbound_filelength, 1) + + except tuf.exceptions.NoWorkingMirrorError as e: + for mirror_error in six.itervalues(e.mirror_errors): + assert isinstance(mirror_error, securesystemslib.exceptions.FormatError) + + # Reset the TUF_VERSION_NUMBER so that subsequent unit tests use the + # expected value. + tuf.formats.TUF_VERSION_NUMBER = valid_tuf_version + + + + + def test_3__update_metadata_if_changed(self): # Setup. # The client repository is initially loaded with only four top-level roles. 
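Editor's note: the updater tests above repeatedly unwrap tuf.exceptions.NoWorkingMirrorError to check which exception each mirror produced. A small sketch of that pattern, assuming an already-constructed tuf.client.updater.Updater instance named 'repository_updater' (hypothetical here) and that 'mirror_errors' maps each mirror file URL to the exception it raised:

```python
import six
import tuf.exceptions
import securesystemslib.exceptions

try:
  # Any call that contacts the mirrors can fail this way; refresh() is the
  # usual entry point.
  repository_updater.refresh()

except tuf.exceptions.NoWorkingMirrorError as exception:
  # One exception is recorded per mirror URL that was tried.
  for mirror_url, mirror_error in six.iteritems(exception.mirror_errors):
    if isinstance(mirror_error,
        securesystemslib.exceptions.BadVersionNumberError):
      print('Mirror ' + repr(mirror_url) + ' served an unexpected version.')
```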
@@ -750,6 +814,8 @@ def test_3__update_metadata_if_changed(self): target3 = os.path.join(self.repository_directory, 'targets', 'file3.txt') repository.targets.add_target(target3) + repository.root.version = repository.root.version + 1 + repository.root.load_signing_key(self.role_keys['root']['private']) repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) repository.timestamp.load_signing_key(self.role_keys['timestamp']['private']) @@ -768,6 +834,7 @@ def test_3__update_metadata_if_changed(self): self.repository_updater._update_metadata('timestamp', DEFAULT_TIMESTAMP_FILELENGTH) self.repository_updater._update_metadata_if_changed('snapshot', 'timestamp') self.repository_updater._update_metadata_if_changed('targets') + self.repository_updater._update_metadata_if_changed('root') targets_path = os.path.join(self.client_metadata_current, 'targets.json') self.assertTrue(os.path.exists(targets_path)) self.assertTrue(self.repository_updater.metadata['current']['targets']) @@ -775,8 +842,7 @@ def test_3__update_metadata_if_changed(self): # Test for an invalid 'referenced_metadata' argument. self.assertRaises(tuf.exceptions.RepositoryError, - self.repository_updater._update_metadata_if_changed, - 'snapshot', 'bad_role') + self.repository_updater._update_metadata_if_changed, 'snapshot', 'bad_role') @@ -881,12 +947,13 @@ def test_4__refresh_targets_metadata(self): # Verify that client's metadata files were refreshed successfully. self.assertEqual(len(self.repository_updater.metadata['current']), 6) - # Test for compressed metadata roles. - self.repository_updater.metadata['current']['snapshot']['meta']['targets.json.gz'] = \ - self.repository_updater.metadata['current']['snapshot']['meta']['targets.json'] - self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True) - + # Test for non-existing rolename. + self.repository_updater._refresh_targets_metadata('bad_rolename', + refresh_all_delegated_roles=False) + # Test that non-json metadata in Snapshot is ignored. + self.repository_updater.metadata['current']['snapshot']['meta']['bad_role.xml'] = {} + self.repository_updater._refresh_targets_metadata(refresh_all_delegated_roles=True) @@ -1252,8 +1319,8 @@ def test_7_updated_targets(self): repository.targets.load_signing_key(self.role_keys['targets']['private']) repository.snapshot.load_signing_key(self.role_keys['snapshot']['private']) - with open(target1, 'a') as file_object: - file_object.write('append extra text') + with open(target1, 'ab') as file_object: + file_object.write(b'append extra text') length, hashes = securesystemslib.util.get_file_details(target1) @@ -1393,6 +1460,14 @@ def test_10__soft_check_file_length(self): self.repository_updater._soft_check_file_length, temp_file_object, 1) + # Verify that an exception is not raised if the file length <= the observed + # file length. + temp_file_object.seek(0) + self.repository_updater._soft_check_file_length(temp_file_object, 3) + temp_file_object.seek(0) + self.repository_updater._soft_check_file_length(temp_file_object, 4) + + def test_10__targets_of_role(self): @@ -1411,26 +1486,312 @@ def test_10__targets_of_role(self): 2) + + def test_10__preorder_depth_first_walk(self): + + # Test that an infinite loop is prevented if the target file is not found and + # the max number of delegations is reached.
+ valid_max_number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS + tuf.settings.MAX_NUMBER_OF_DELEGATIONS = 0 + self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('unknown.txt')) + + # Reset the setting for max number of delegations so that subsequent unit + # tests reference the expected setting. + tuf.settings.MAX_NUMBER_OF_DELEGATIONS = valid_max_number_of_delegations + + # Attempt to create a circular delegation, where role1 performs a + # delegation to the top-level Targets role. The updater should ignore the + # delegation and not raise an exception. + targets_path = os.path.join(self.client_metadata_current, 'targets.json') + targets_metadata = securesystemslib.util.load_json_file(targets_path) + targets_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt'] + with open(targets_path, 'wb') as file_object: + file_object.write(repo_lib._get_written_metadata(targets_metadata)) + + role1_path = os.path.join(self.client_metadata_current, 'role1.json') + role1_metadata = securesystemslib.util.load_json_file(role1_path) + role1_metadata['signed']['delegations']['roles'][0]['name'] = 'targets' + role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt'] + with open(role1_path, 'wb') as file_object: + file_object.write(repo_lib._get_written_metadata(role1_metadata)) + + role2_path = os.path.join(self.client_metadata_current, 'role2.json') + role2_metadata = securesystemslib.util.load_json_file(role2_path) + role2_metadata['signed']['delegations']['roles'] = role1_metadata['signed']['delegations']['roles'] + role2_metadata['signed']['delegations']['roles'][0]['paths'] = ['/file8.txt'] + with open(role2_path, 'wb') as file_object: + file_object.write(repo_lib._get_written_metadata(role2_metadata)) + + logger.debug('attempting circular delegation') + self.assertEqual(None, self.repository_updater._preorder_depth_first_walk('/file8.txt')) + + + + + + def test_10__visit_child_role(self): # Call _visit_child_role and test the dict keys: 'paths', # 'path_hash_prefixes', and if both are missing. targets_role = self.repository_updater.metadata['current']['targets'] - + targets_role['delegations']['roles'][0]['paths'] = ['/*.txt', '/target.exe'] child_role = targets_role['delegations']['roles'][0] + + role1_path = os.path.join(self.client_metadata_current, 'role1.json') + role1_metadata = securesystemslib.util.load_json_file(role1_path) + role1_metadata['signed']['delegations']['roles'][0]['paths'] = ['/*.exe'] + with open(role1_path, 'wb') as file_object: + file_object.write(repo_lib._get_written_metadata(role1_metadata)) + + self.assertEqual(self.repository_updater._visit_child_role(child_role, + '/target.exe'), child_role['name']) + + # Test for a valid path hash prefix... + child_role['path_hash_prefixes'] = ['8baf'] self.assertEqual(self.repository_updater._visit_child_role(child_role, - '/file3.txt', targets_role['delegations']), child_role['name']) + '/file3.txt'), child_role['name']) - # Test path hash prefixes. - child_role['path_hash_prefixes'] = ['8baf', '0000'] + # ... and an invalid one, as well. + child_role['path_hash_prefixes'] = ['badd'] self.assertEqual(self.repository_updater._visit_child_role(child_role, - '/file3.txt', targets_role['delegations']), child_role['name']) + '/file3.txt'), None) - # Test if both 'path' and 'path_hash_prefixes' is missing. + # Test for a forbidden target. 
+ del child_role['path_hash_prefixes'] + self.repository_updater._visit_child_role(child_role, '/forbidden.tgz') + + # Verify that unequal path_hash_prefixes are skipped. + child_role['path_hash_prefixes'] = ['bad', 'bad'] + self.assertEqual(None, self.repository_updater._visit_child_role(child_role, + '/unknown.exe')) + + # Test if both 'path' and 'path_hash_prefixes' are missing. del child_role['paths'] del child_role['path_hash_prefixes'] self.assertRaises(securesystemslib.exceptions.FormatError, self.repository_updater._visit_child_role, - child_role, targets_role['delegations'], child_role['name']) + child_role, child_role['name']) + + + + def test_11__verify_uncompressed_metadata_file(self): + # Test for invalid metadata content. + metadata_file_object = securesystemslib.util.TempFile() + metadata_file_object.write(b'X') + metadata_file_object.seek(0) + + self.assertRaises(tuf.exceptions.InvalidMetadataJSONError, + self.repository_updater._verify_uncompressed_metadata_file, + metadata_file_object, 'root') + + + + def test_12__verify_root_chain_link(self): + # Test for an invalid signature in the chain link. + # current = (i.e., 1.root.json) + # next = signable for the next metadata in the chain (i.e., 2.root.json) + rolename = 'root' + current_root = self.repository_updater.metadata['current']['root'] + + targets_path = os.path.join(self.repository_directory, 'metadata', 'targets.json') + + # 'next_invalid_root' is a Targets signable, as written to disk. + # We use the Targets metadata here to ensure the signatures are invalid. + next_invalid_root = securesystemslib.util.load_json_file(targets_path) + + self.assertRaises(securesystemslib.exceptions.BadSignatureError, + self.repository_updater._verify_root_chain_link, rolename, current_root, + next_invalid_root) + + + + def test_13__get_file(self): + # Test for an "unsafe" download, where the file is downloaded up to + # a required length (and no more). The "safe" download approach + # downloads an exact required length. + targets_path = os.path.join(self.repository_directory, 'metadata', 'targets.json') + + file_size, file_hashes = securesystemslib.util.get_file_details(targets_path) + file_type = 'meta' + + def verify_target_file(targets_path): + # Every target file must have its length and hashes inspected. + self.repository_updater._hard_check_file_length(targets_path, file_size) + self.repository_updater._check_hashes(targets_path, file_hashes) + + self.repository_updater._get_file('targets.json', verify_target_file, + file_type, file_size, download_safely=True) + + self.repository_updater._get_file('targets.json', verify_target_file, + file_type, file_size, download_safely=False) + + + + def test_14__targets_of_role(self): + # Test case where a list of targets is given. By default, the 'targets' + # parameter is None. + targets = [{'filepath': 'file1.txt', 'fileinfo': {'length': 1, 'hashes': {'sha256': 'abc'}}}] + self.repository_updater._targets_of_role('targets', + targets=targets, skip_refresh=False) + + + + +class TestMultiRepoUpdater(unittest_toolbox.Modified_TestCase): + + @classmethod + def setUpClass(cls): + # setUpClass() is called before tests in an individual class are executed. + + # Create a temporary directory to store the repository, metadata, and target + # files. 'temporary_directory' must be deleted in TearDownModule() so that + # temporary files are always removed, even when exceptions occur. 
+ cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + + # Launch a SimpleHTTPServer (serves files in the current directory). + # Test cases will request metadata and target files that have been + # pre-generated in 'tuf/tests/repository_data', which will be served + # by the SimpleHTTPServer launched here. The test cases of 'test_updater.py' + # assume the pre-generated metadata files have a specific structure, such + # as a delegated role 'targets/role1', three target files, five key files, + # etc. + cls.SERVER_PORT = 8001 + cls.SERVER_PORT2 = 8002 + command = ['python', 'simple_server.py', str(cls.SERVER_PORT)] + command2 = ['python', 'simple_server.py', str(cls.SERVER_PORT2)] + cls.server_process = subprocess.Popen(command, stderr=subprocess.PIPE) + cls.server_process2 = subprocess.Popen(command2, stderr=subprocess.PIPE) + logger.info('\n\tServer process started.') + logger.info('\tServer process id: ' + str(cls.server_process.pid)) + logger.info('\tServing on port: ' + str(cls.SERVER_PORT)) + cls.url = 'http://localhost:' + str(cls.SERVER_PORT) + os.path.sep + + logger.info('\n\tServer process started.') + logger.info('\tServer process id: ' + str(cls.server_process2.pid)) + logger.info('\tServing on port: ' + str(cls.SERVER_PORT2)) + cls.url2 = 'http://localhost:' + str(cls.SERVER_PORT2) + os.path.sep + + # NOTE: Following error is raised if a delay is not applied: + # + time.sleep(1) + + + + @classmethod + def tearDownClass(cls): + # tearDownModule() is called after all the tests have run. + # http://docs.python.org/2/library/unittest.html#class-and-module-fixtures + + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated for the test cases. + shutil.rmtree(cls.temporary_directory) + + # Kill the SimpleHTTPServer process. + if cls.server_process.returncode is None: + logger.info('\tServer process ' + str(cls.server_process.pid) + ' terminated.') + cls.server_process.kill() + + + + def setUp(self): + # We are inheriting from custom class. + unittest_toolbox.Modified_TestCase.setUp(self) + + self.repository_name = 'test_repository' + self.repository_name2 = 'test_repository2' + + # Copy the original repository files provided in the test folder so that + # any modifications made to repository files are restricted to the copies. + # The 'repository_data' directory is expected to exist in 'tuf.tests/'. + original_repository_files = os.path.join(os.getcwd(), 'repository_data') + temporary_repository_root = \ + self.make_temp_directory(directory=self.temporary_directory) + + # The original repository, keystore, and client directories will be copied + # for each test case. + original_repository = os.path.join(original_repository_files, 'repository') + original_keystore = os.path.join(original_repository_files, 'keystore') + original_client = os.path.join(original_repository_files, 'client') + + # Save references to the often-needed client repository directories. + # Test cases need these references to access metadata and target files. 
+ self.repository_directory = \ + os.path.join(temporary_repository_root, 'repository') + self.keystore_directory = \ + os.path.join(temporary_repository_root, 'keystore') + + self.client_directory = os.path.join(temporary_repository_root, + 'client') + self.client_metadata = os.path.join(self.client_directory, + self.repository_name, 'metadata') + self.client_metadata_current = os.path.join(self.client_metadata, + 'current') + self.client_metadata_previous = os.path.join(self.client_metadata, + 'previous') + + # Copy the original 'repository', 'client', and 'keystore' directories + # to the temporary repository the test cases can use. + shutil.copytree(original_repository, self.repository_directory) + shutil.copytree(original_client, self.client_directory) + shutil.copytree(original_keystore, self.keystore_directory) + + # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. + repository_basepath = self.repository_directory[len(os.getcwd()):] + url_prefix = \ + 'http://localhost:' + str(self.SERVER_PORT) + repository_basepath + + # Setting 'tuf.settings.repository_directory' with the temporary client + # directory copied from the original repository files. + tuf.settings.repositories_directory = self.client_directory + + self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix, + 'metadata_path': 'metadata', + 'targets_path': 'targets', + 'confined_target_dirs': ['']}} + + self.map_file = os.path.join(self.client_directory, 'map.json') + + # Creating a repository instance. The test cases will use this client + # updater to refresh metadata, fetch target files, etc. + self.multi_repo_updater = updater.MultiRepoUpdater(self.map_file) + + # Metadata role keys are needed by the test cases to make changes to the + # repository (e.g., adding a new target file to 'targets.json' and then + # requesting a refresh()). + self.role_keys = _load_role_keys(self.keystore_directory) + + + + def tearDown(self): + # We are inheriting from custom class. + unittest_toolbox.Modified_TestCase.tearDown(self) + tuf.roledb.clear_roledb(clear_all=True) + tuf.keydb.clear_keydb(clear_all=True) + + + + + # UNIT TESTS. + def test__init__(self): + # The client's repository requires a metadata directory (and the 'current' + # and 'previous' sub-directories), and at least the 'root.json' file. + # setUp(), called before each test case, instantiates the required updater + # objects and keys. The needed objects/data is available in + # 'self.repository_updater', 'self.client_directory', etc. + + # Test: Invalid arguments. + # Invalid 'updater_name' argument. String expected. + self.assertRaises(securesystemslib.exceptions.FormatError, + updater.MultiRepoUpdater, 8) + + # Restore 'tuf.settings.repositories_directory' to the original client + # directory. + tuf.settings.repositories_directory = self.client_directory + + # Test for valid instantiation. + map_file = os.path.join(self.client_directory, 'map.json') + multi_repo_updater = updater.MultiRepoUpdater(map_file) + diff --git a/tests/test_updater_root_rotation_integration.py b/tests/test_updater_root_rotation_integration.py index 27a1190c24..2e2c8dc518 100755 --- a/tests/test_updater_root_rotation_integration.py +++ b/tests/test_updater_root_rotation_integration.py @@ -47,13 +47,7 @@ import random import subprocess import sys - -# 'unittest2' required for testing under Python < 2.7. 
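The mirror dictionary built in `setUp()` above is the same structure a client hands to the single-repository updater. A minimal client-side sketch, assuming trusted metadata is already installed under `tuf.settings.repositories_directory` (as the `client` fixture provides); the directory path, URL, repository name, and target name below are illustrative:

```Python
import tuf.settings
import tuf.client.updater

# Directory that holds '<repository_name>/metadata/{current,previous}/'.
tuf.settings.repositories_directory = '/path/to/client'

# Each mirror entry states where metadata and targets live on that mirror.
repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                                  'metadata_path': 'metadata',
                                  'targets_path': 'targets',
                                  'confined_target_dirs': ['']}}

updater = tuf.client.updater.Updater('test_repository', repository_mirrors)
updater.refresh()
targetinfo = updater.get_one_valid_targetinfo('file1.txt')
```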
-if sys.version_info >= (2, 7): - import unittest - -else: - import unittest2 as unittest +import unittest import tuf import tuf.log diff --git a/tox.ini b/tox.ini index e7a3a9fbf7..55e9d10853 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py26, py27, py35 +envlist = py27, py33, py34, py35, py36 [testenv] changedir = tests @@ -18,12 +18,3 @@ deps = -r{toxinidir}/requirements.txt install_command = pip install --pre {opts} {packages} - - -[testenv:py26] -deps = - -r{toxinidir}/requirements.txt - unittest2 - importlib - -install_command = pip install --pre {opts} {packages} diff --git a/tuf/README.md b/tuf/README.md index be7baa6028..2185253e26 100644 --- a/tuf/README.md +++ b/tuf/README.md @@ -51,7 +51,7 @@ in interactive mode: ```Bash $ python -Python 2.7.3 (default, Sep 26 2013, 20:08:41) +Python 2.7.3 (default, Sep 26 2013, 20:08:41) [GCC 4.6.3] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> from tuf.repository_tool import * @@ -115,11 +115,11 @@ text without prepended symbols is the output of a command. # "keystore/root_key" and the public key to "keystore/root_key.pub" (both saved # to the current working directory). The 'keystore' directory can be manually # created in the current directory to store the keys created in these examples. -# If 'keystore' directory does not exist, it will be created. +# If 'keystore' directory does not exist, it will be created. >>> generate_and_write_rsa_keypair("keystore/root_key", bits=2048, password="password") -# If the key length is unspecified, it defaults to 3072 bits. A length of less -# than 2048 bits raises an exception. A password may be supplied as an +# If the key length is unspecified, it defaults to 3072 bits. A length of less +# than 2048 bits raises an exception. A password may be supplied as an # argument, otherwise a user prompt is presented. >>> generate_and_write_rsa_keypair("keystore/root_key2") Enter a password for the RSA key: @@ -161,7 +161,7 @@ which cryptography library is used. # Generate and write an Ed25519 key pair. The private key is saved encrypted. # A 'password' argument may be supplied, otherwise a prompt is presented. >>> generate_and_write_ed25519_keypair('keystore/ed25519_key') -Enter a password for the Ed25519 key: +Enter a password for the Ed25519 key: Confirm: # Import the ed25519 public key just created . . . @@ -169,7 +169,7 @@ Confirm: # and its corresponding private key. >>> private_ed25519_key = import_ed25519_privatekey_from_file('keystore/ed25519_key') -Enter a password for the encrypted Ed25519 key: +Enter a password for the encrypted Ed25519 key: ``` ### Create Top-level Metadata ### @@ -179,7 +179,7 @@ on a TUF repository. The following sub-sections demonstrate the required roles. The top-level roles to be created are `root`, `timestamp`, `snapshot`, and `target`. -We begin with `root`, the locus of trust that specifies the public keys of the +We begin with `root`, the locus of trust that specifies the public keys of the top-level roles, including itself. @@ -202,8 +202,8 @@ top-level roles, including itself. # A role's verification key(s) (to be more precise, the verification key's # keyid) may be queried. Other attributes include: signing_keys, version, -# signatures, expiration, threshold, delegations (attribute available only to a -# Targets role), and compressions. +# signatures, expiration, threshold, and delegations (attribute available only +# to a Targets role). 
>>> repository.root.keys ['b23514431a53676595922e955c2d547293da4a7917e3ca243a175e72bbf718df'] @@ -246,7 +246,7 @@ Dirty roles: ['root'] #### Create Timestamp, Snapshot, Targets Now that `root.json` has been set, the other top-level roles may be created. The signing keys added to these roles must correspond to the public keys -specified by the Root role. +specified by the Root role. On the client side, `root.json` must always exist. The other top-level roles, created next, are requested by repository clients in (Root -> Timestamp -> @@ -260,7 +260,7 @@ secure manner. >>> import datetime # Generate keys for the remaining top-level roles. The root keys have been set above. -# The password argument may be omitted if a password prompt is needed. +# The password argument may be omitted if a password prompt is needed. >>> generate_and_write_rsa_keypair("keystore/targets_key", password="password") >>> generate_and_write_rsa_keypair("keystore/snapshot_key", password="password") >>> generate_and_write_rsa_keypair("keystore/timestamp_key", password="password") @@ -426,6 +426,29 @@ new metadata to disk. >>> repository.writeall() ``` +#### Dump Metadata and Append Signature #### + +The following two functions are intended for those that wish to independently +sign metadata. Repository maintainers can dump the portion of metadata that is +normally signed, sign it with an external signing tool, and append the +signature to already existing metadata. + +First, the signable portion of metadata can be generated +as follows: + +```Python +>>> signable_content = dump_signable_metadata('targets.json') +``` + +The externally generated signature can then be appended to metadata: +```Python +>>> append_signature(signature, 'targets.json') +``` + +Note that the format of the signature is the format expected in metadata, which +is a dictionary that contains a KEYID, the signature itself, etc. See the +specification and METADATA.md for a detailed example. + ### Delegations ### All of the target files available on the software repository created so far have been added to one role (the top-level Targets role). However, what if @@ -494,14 +517,14 @@ In summary, the five steps a repository maintainer follows to create a TUF repository are: 1. Create a directory for the software repository that holds the TUF metadata and the target files. -2. Create top-level roles (`root.json`, `snapshot.json`, `targets.json`, and `timestamp.json`.) +2. Create top-level roles (`root.json`, `snapshot.json`, `targets.json`, and `timestamp.json`.) 3. Add target files to the `targets` role. 4. Optionally, create delegated roles to distribute target files. 5. Write the changes. The repository tool saves repository changes to a `metadata.staged` directory. Repository maintainers may push finalized changes to the "live" repository by -copying the staged directory to its destination. +copying the staged directory to its destination. ```Bash # Copy the staged metadata directory changes to the live repository. $ cp -r "repository/metadata.staged/" "repository/metadata/" @@ -528,7 +551,7 @@ target file names specified in metadata do not contain digests in their names.) The repository maintainer is responsible for the duration of multiple versions of metadata and target files available on a repository. 
Generating consistent metadata and target files on the repository is enabled by setting the -`consistent_snapshot` argument of writeall() or write(): +`consistent_snapshot` argument of writeall() or write(): ```Python >>> repository.writeall(consistent_snapshot=True) ``` @@ -647,7 +670,7 @@ $ mv 'repository/targets/file2.txt' 'repository/targets/file2.txt.backup' $ echo 'bad_target' > 'repository/targets/file2.txt' ``` -We next reset our local timestamp (so that a new update is prompted), and +We next reset our local timestamp (so that a new update is prompted), and the target files previously downloaded by the client. ```Bash $ rm -rf "client/targets/" "client/metadata/current/timestamp.json" @@ -694,7 +717,7 @@ indicates when metadata should no longer be trusted. In the following simulation, the client first tries to perform an update. ```Bash -$ python basic_client.py --repo http://localhost:8001 +$ python basic_client.py --repo http://localhost:8001 ``` According to the logger (`tuf.log` file in the current working directory), @@ -734,7 +757,7 @@ $ cp repository/metadata/timestamp.json /tmp We should next generate a new Timestamp file on the repository side. ```Bash $ python ->>> from tuf.repository_tool import * +>>> from tuf.repository_tool import * >>> repository = load_repository('repository') >>> repository.timestamp.version 1 @@ -742,7 +765,7 @@ $ python >>> repository.dirty_roles() Dirty roles: [u'timestamp'] >>> private_timestamp_key = import_rsa_privatekey_from_file("keystore/timestamp_key") -Enter a password for the encrypted RSA file: +Enter a password for the encrypted RSA file: >>> repository.timestamp.load_signing_key(private_timestamp_key) >>> repository.write('timestamp') @@ -809,7 +832,7 @@ expected size, and no more. The target file available on the software repository does contain more data than expected, though. ```Bash -$ python basic_client.py --repo http://localhost:8001 +$ python basic_client.py --repo http://localhost:8001 ``` At this point, part of the "file1.txt" file should have been fetched. That is, @@ -818,14 +841,14 @@ appended data ignored. If we inspect the logger, we'd disover the following: ```Bash [2016-10-06 21:37:39,092 UTC] [tuf.download] [INFO] [_download_file:235@download.py] -Downloading: u'http://localhost:8001/targets/file1.txt' - +Downloading: u'http://localhost:8001/targets/file1.txt' + [2016-10-06 21:37:39,145 UTC] [tuf.download] [INFO] [_check_downloaded_length:610@download.py] -Downloaded 31 bytes out of the expected 31 bytes. - +Downloaded 31 bytes out of the expected 31 bytes. + [2016-10-06 21:37:39,145 UTC] [tuf.client.updater] [INFO] [_get_file:1372@updater.py] -Not decompressing http://localhost:8001/targets/file1.txt - +Not decompressing http://localhost:8001/targets/file1.txt + [2016-10-06 21:37:39,145 UTC] [tuf.client.updater] [INFO] [_check_hashes:778@updater.py] The file's sha256 hash is correct: 65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da ``` diff --git a/tuf/client/README.md b/tuf/client/README.md index d00872940b..2e69a823fc 100644 --- a/tuf/client/README.md +++ b/tuf/client/README.md @@ -1,4 +1,4 @@ -#updater.py +# updater.py **updater.py** is intended as the only TUF module that software update systems need to utilize for a low-level integration. 
It provides a single class representing an updater that includes methods to download, install, and @@ -152,7 +152,7 @@ for target in updated_target: target_custom_data = target['fileinfo']['custom'] ``` -###A Simple Integration Example with basic_client.py +### A Simple Integration Example with basic_client.py ``` Bash # Assume a simple TUF repository has been setup with 'tuf.repository_tool.py'. $ basic_client.py --repo http://localhost:8001 diff --git a/tuf/client/updater.py b/tuf/client/updater.py index e58c2bd0b7..e90931af2d 100755 --- a/tuf/client/updater.py +++ b/tuf/client/updater.py @@ -140,6 +140,263 @@ iso8601_logger = logging.getLogger('iso8601') iso8601_logger.disabled = True +# Metadata includes the specification version number that it follows. +# All downloaded metadata must be equal to our supported major version of 1. +# For example, "1.4.3" and "1.0.0" are supported. "2.0.0" is not supported. +SUPPORTED_MAJOR_VERSION = 1 + +class MultiRepoUpdater(object): + """ + + Provide a way for clients to request a target file from multiple + repositories. Which repositories to query is determined by the map + file (i.e,. map.json). + + See TAP 4 for more information on the map file and requesting updates from + multiple repositories. TAP 4 describes how users may specify that a + certain repository should be used for some targets, while other + repositories should be used for other targets. + + + map_file: + The path of the map file. The map file is needed to determine which + repositories to query given a target file. + + + tuf.exceptions.Error, if the map file cannot be loaded. + + + None. + + + None. + """ + + def __init__(self, map_file): + # Does 'map_file' have the correct format? + # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. + securesystemslib.formats.PATH_SCHEMA.check_match(map_file) + + # The map file dictionary that associates targets with repositories. + self.map_file = {} + + # A dictionary mapping repositories to TUF updaters. + self.repository_names_to_updaters = {} + + try: + self.map_file = securesystemslib.util.load_json_file(map_file) + + except (securesystemslib.exceptions.Error) as e: + raise tuf.exceptions.Error('Cannot load the map file: ' + str(e)) + + + + + + def get_one_valid_targetinfo(self, target_filename): + """ + + Return the targetinfo, if any, for the given 'target_filename'. + + + target_filename: + The relative path of the target file to update. + + + tuf.FormatError, if the argument is improperly formatted. + + + None. + + + The targetinfo (conformant with tuf.formats.TARGETINFO_SCHEMA) for + 'target_filename', if available. Return None if no targetinfo is + available. + """ + + # {"repository_name": [mirror URLs, ...], ...} + repository_names_to_mirrors = self.map_file['repositories'] + repositories_directory = tuf.settings.repositories_directory + + for repository_name in repository_names_to_mirrors: + logger.debug('Interrogating repository: ' + repr(repository_name)) + # Each repository must cache its metadata in a separate location. + repository_directory = os.path.join(repositories_directory, repository_name) + if not os.path.isdir(repository_directory): + raise tuf.exceptions.Error('The metadata directory' + ' for ' + repr(repository_name) + ' must exist at ' + repr(repository_directory)) + + else: + logger.debug('Found local directory for ' + repr(repository_name)) + + # The latest known root metadata file must already be on disk. 
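`get_one_valid_targetinfo()` above reads two top-level entries from the map file: `repositories`, mapping repository names to lists of mirror URLs, and `mapping`, an ordered list of rules that associate target paths with repositories. A sketch of what a conforming `map.json` could contain, with every name and URL purely illustrative:

```Python
# The dictionary below is what MultiRepoUpdater expects to load with
# securesystemslib.util.load_json_file(map_file).
map_file_contents = {
  'repositories': {
    'repository1': ['http://localhost:8001'],
    'repository2': ['http://localhost:8002']},

  'mapping': [
    # Consult both repositories for text files; they must agree on the
    # targetinfo for it to be returned.
    {'paths': ['*.txt'],
     'repositories': ['repository1', 'repository2'],
     'terminating': False},

    # Fall back to a single repository for everything else.  A terminating
    # rule prevents falling through to later mappings.
    {'paths': ['*'],
     'repositories': ['repository1'],
     'terminating': True}]}
```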
+ root_file = os.path.join(repository_directory, 'metadata', + 'current', 'root.json') + if not os.path.isfile(root_file): + raise tuf.exceptions.Error('The Root file must exist at ' + repr(root_file)) + + else: + logger.debug('Found local Root file at ' + repr(root_file)) + + # Iterate mappings. + # [{"paths": [], "repositories": [], "terminating": Boolean}, ...] + for mapping in self.map_file['mapping']: + logger.debug('Interrogating mappings..' + repr(mapping)) + # If this mapping is relevant to the target... + if self.paths_match_target(mapping['paths'], target_filename): + targetinfos = [] + + # Use the *unmodified* TUF updater for a single repository to fetch the + # targetinfo from each repository. + for repository_name in mapping['repositories']: + logger.debug('Updating from repository...') + targetinfo, updater = self._update_from_repository(repository_name, + repository_names_to_mirrors, target_filename) + logger.debug('Adding targetinfo: ' + repr(targetinfo)) + targetinfos.append(targetinfo) + + # If the targetinfo on each repository is equal to the others, and it + # is not empty, then return the targetinfo. + logger.debug('Verifying that all targetinfo are equal') + if self._targets_are_equal_and_not_empty(targetinfos): + + return targetinfo, updater + + else: + continue + + # If we are here, it means either the mapping is irrelevant to the + # target, or the targets were missing from all repositories in this + # mapping, or the targets on all repositories did not match. In that + # case, are we allowed to continue to the next mapping? Let's check + # the terminating entry. + if mapping['terminating']: + return None + + # If we are here, it means either there were no mappings, or none of the + # mappings provided the target. + logger.debug('Did not find the target.') + return None + + + + + + def paths_match_target(self, paths, target_filename): + for path in paths: + logger.debug('Interrogating path ' + repr(path) + 'for target: ' + repr(target_filename)) + if fnmatch.fnmatch(target_filename, path): + logger.debug('Found a match for ' + repr(target_filename)) + return True + + else: + logger.debug('Continue searching for relevant paths.') + continue + + # If we are here, then none of the paths are relevant to the target. + logger.debug('None of the paths are relevant.') + return False + + + + + + + def get_updater(self, repository_name, repository_names_to_mirrors): + # NOTE: Do not refresh metadata for a repository that has been visited. + updater = self.repository_names_to_updaters.get(repository_name) + + if not updater: + # Create repository mirrors object needed by the tuf.client.updater.Updater(). + # Each 'repository_name' can have more than one mirror. + mirrors = {} + for url in repository_names_to_mirrors[repository_name]: + mirrors[url] = { + 'url_prefix': url, + 'metadata_path': 'metadata', + 'targets_path': 'targets', + 'confined_target_dirs': ['']} + + # NOTE: State (e.g., keys) should NOT be shared across different updater + # instances. + updater = tuf.client.updater.Updater(repository_name, mirrors) + + try: + updater.refresh() + + except: + return None + + else: + self.repository_names_to_updaters[repository_name] = updater + + return updater + + + + + + def _update_from_repository(self, repository_name, repository_names_to_mirrors, + target_filename): + # Set the repository directory containing the metadata. 
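`paths_match_target()` above uses `fnmatch`, so the `paths` listed in a mapping may be explicit filepaths or Unix shell-style glob patterns. For example:

```Python
import fnmatch

paths = ['packages/*.tar.gz', 'README.txt']

# An explicit path matches only itself; a wildcard pattern matches any
# target filepath it covers.
assert fnmatch.fnmatch('README.txt', paths[1])
assert fnmatch.fnmatch('packages/foo-1.0.tar.gz', paths[0])
assert not fnmatch.fnmatch('docs/manual.pdf', paths[0])
```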
+ updater = self.get_updater(repository_name, repository_names_to_mirrors) + + try: + return updater.get_one_valid_targetinfo(target_filename), updater + + except: + return None + + + + + + def _targets_are_equal_and_not_empty(self, targetinfos): + """ + If not empty, check only that length and hashes are equal; ignore custom + targets metadata. + """ + + # Target is empty. + if len(targetinfos) == 0: + return False + + else: + prev_targetinfo = targetinfos[0] + + # Target is empty. + if not prev_targetinfo: + return False + + else: + for curr_targetinfo in targetinfos[1:]: + # Target is empty. + if not curr_targetinfo: + return False + + else: + prev_length = prev_targetinfo['fileinfo']['length'] + curr_length = curr_targetinfo['fileinfo']['length'] + if prev_length != curr_length: + return False + + prev_hashes = prev_targetinfo['fileinfo']['hashes'] + curr_hashes = curr_targetinfo['fileinfo']['hashes'] + if prev_hashes.keys() != curr_hashes.keys(): + return False + + for function, prev_digest in prev_hashes.items(): + if prev_digest != curr_hashes[function]: + return False + + prev_targetinfo = curr_targetinfo + + # If we are here, then all the targets are equal. + return True + + + + class Updater(object): """ @@ -450,7 +707,7 @@ def _load_metadata_from_file(self, metadata_set, metadata_role): self._rebuild_key_and_role_db() self.consistent_snapshot = metadata_object['consistent_snapshot'] - elif metadata_object['_type'] == 'Targets': + elif metadata_object['_type'] == 'targets': # TODO: Should we also remove the keys of the delegated roles? self._import_delegations(metadata_role) @@ -540,12 +797,11 @@ def _import_delegations(self, parent_role): # Iterate the keys of the delegated roles of 'parent_role' and load them. for keyid, keyinfo in six.iteritems(keys_info): if keyinfo['keytype'] in ['rsa', 'ed25519']: - key, keyids = securesystemslib.keys.format_metadata_to_key(keyinfo) # We specify the keyid to ensure that it's the correct keyid # for the key. try: - tuf.keydb.add_key(key, keyid, self.repository_name) + key, keyids = securesystemslib.keys.format_metadata_to_key(keyinfo) for keyid in keyids: key['keyid'] = keyid tuf.keydb.add_key(key, keyid=None, repository_name=self.repository_name) @@ -575,7 +831,7 @@ def _import_delegations(self, parent_role): logger.warning('Role already exists: ' + rolename) except: - logger.exception('Failed to add delegated role: ' + rolename + '.') + logger.exception('Failed to add delegated role: ' + repr(rolename) + '.') raise @@ -688,7 +944,7 @@ def refresh(self, unsafely_update_root_if_necessary=True): - def _update_root_metadata(self, current_root_metadata, compression_algorithm=None): + def _update_root_metadata(self, current_root_metadata): """ The root file must be signed by the current root threshold and keys as @@ -705,9 +961,6 @@ def _update_root_metadata(self, current_root_metadata, compression_algorithm=Non current_root_metadata: The currently held version of root. - compresison_algorithm: - The compression algorithm used to compress remote metadata. - Updates the root metadata files with the latest information. @@ -718,8 +971,7 @@ def _update_root_metadata(self, current_root_metadata, compression_algorithm=Non # Retrieve the latest, remote root.json. 
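`_targets_are_equal_and_not_empty()` above deliberately compares only the `length` and `hashes` entries of each targetinfo's `fileinfo` and ignores any custom targets metadata. A standalone illustration of that comparison (the helper name and the example digests are hypothetical):

```Python
def fileinfo_matches(targetinfo_a, targetinfo_b):
  # Two targetinfo dicts agree if their lengths and every hash digest match.
  fileinfo_a = targetinfo_a['fileinfo']
  fileinfo_b = targetinfo_b['fileinfo']

  if fileinfo_a['length'] != fileinfo_b['length']:
    return False

  if fileinfo_a['hashes'].keys() != fileinfo_b['hashes'].keys():
    return False

  return all(digest == fileinfo_b['hashes'][algorithm]
      for algorithm, digest in fileinfo_a['hashes'].items())

targetinfo1 = {'filepath': 'file1.txt',
    'fileinfo': {'length': 31, 'hashes': {'sha256': 'abc123'}}}
targetinfo2 = {'filepath': 'file1.txt',
    'fileinfo': {'length': 31, 'hashes': {'sha256': 'abc123'}}}

assert fileinfo_matches(targetinfo1, targetinfo2)
```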
latest_root_metadata_file = \ self._get_metadata_file('root', 'root.json', - tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH, None, - compression_algorithm=compression_algorithm) + tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH, None) latest_root_metadata = \ securesystemslib.util.load_json_string(latest_root_metadata_file.read().decode('utf-8')) @@ -737,8 +989,8 @@ def _update_root_metadata(self, current_root_metadata, compression_algorithm=Non # in the latest root.json after running through the intermediates with # _update_metadata(). self.consistent_snapshot = True - self._update_metadata('root', tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH, version=version, - compression_algorithm=compression_algorithm) + self._update_metadata('root', tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH, + version=version) @@ -926,18 +1178,13 @@ def verify_target_file(target_file_object): self._hard_check_file_length(target_file_object, file_length) self._check_hashes(target_file_object, file_hashes) - # Target files, unlike metadata files, are not decompressed; the - # 'compression' argument to _get_file() is needed only for decompression of - # metadata. Target files may be compressed or uncompressed. if self.consistent_snapshot: target_digest = random.choice(list(file_hashes.values())) dirname, basename = os.path.split(target_filepath) target_filepath = os.path.join(dirname, target_digest + '.' + basename) return self._get_file(target_filepath, verify_target_file, - 'target', file_length, compression=None, - verify_compressed_file_function=None, - download_safely=True) + 'target', file_length, download_safely=True) @@ -1016,8 +1263,7 @@ def _verify_uncompressed_metadata_file(self, metadata_file_object, def _get_metadata_file(self, metadata_role, remote_filename, - upperbound_filelength, expected_version, - compression_algorithm): + upperbound_filelength, expected_version): """ Non-public method that tries downloading, up to a certain length, a @@ -1040,10 +1286,6 @@ def _get_metadata_file(self, metadata_role, remote_filename, The expected and required version number of the 'metadata_role' file downloaded. 'expected_version' is an integer. - compression_algorithm: - The name of the compression algorithm (e.g., 'gzip'). The algorithm is - needed if the remote metadata file is compressed. - tuf.exceptions.NoWorkingMirrorError: The metadata could not be fetched. This is raised only when all known @@ -1069,19 +1311,29 @@ def _get_metadata_file(self, metadata_role, remote_filename, file_object = tuf.download.unsafe_download(file_mirror, upperbound_filelength) - if compression_algorithm is not None: - logger.info('Decompressing ' + str(file_mirror)) - file_object.decompress_temp_file_object(compression_algorithm) - - else: - logger.info('Not decompressing ' + str(file_mirror)) - # Verify 'file_object' according to the callable function. # 'file_object' is also verified if decompressed above (i.e., the # uncompressed version). metadata_signable = \ securesystemslib.util.load_json_string(file_object.read().decode('utf-8')) + # Determine if the specification version number is supported. It is + # assumed that "spec_version" is in (major.minor.fix) format, (for + # example: "1.4.3") and that releases with the same major version + # number maintain backwards compatibility. Consequently, if the major + # version number of new metadata equals our expected major version + # number, the new metadata is safe to parse. 
+ try: + spec_version_parsed = metadata_signable['signed']['spec_version'].split('.') + if int(spec_version_parsed[0]) != SUPPORTED_MAJOR_VERSION: + raise securesystemslib.exceptions.BadVersionNumberError('Downloaded' + ' metadata that specifies an unsupported spec_version. Supported' + ' major version number: ' + repr(SUPPORTED_MAJOR_VERSION)) + + except (ValueError, TypeError): + raise securesystemslib.exceptions.FormatError('Improperly' + ' formatted spec_version, which must be in major.minor.fix format') + # If the version number is unspecified, ensure that the version number # downloaded is greater than the currently trusted version number for # 'metadata_role'. @@ -1154,8 +1406,7 @@ def _verify_root_chain_link(self, role, current, next): def _get_file(self, filepath, verify_file_function, file_type, - file_length, compression=None, - verify_compressed_file_function=None, download_safely=True): + file_length, download_safely=True): """ Non-public method that tries downloading, up to a certain length, a @@ -1182,15 +1433,6 @@ def _get_file(self, filepath, verify_file_function, file_type, The expected length, or upper bound, of the target or metadata file to be downloaded. - compression: - The name of the compression algorithm (e.g., 'gzip'), if the metadata - file is compressed. - - verify_compressed_file_function: - If compression is specified, in the case of metadata files, this - callable function may be set to perform verification of the compressed - version of the metadata file. Decompressed metadata is also verified. - download_safely: A boolean switch to toggle safe or unsafe download of the file. @@ -1217,9 +1459,10 @@ def _get_file(self, filepath, verify_file_function, file_type, for file_mirror in file_mirrors: try: - # TODO: Instead of the more fragile 'download_safely' switch, unroll the - # function into two separate ones: one for "safe" download, and the other one - # for "unsafe" download? This should induce safer and more readable code. + # TODO: Instead of the more fragile 'download_safely' switch, unroll + # the function into two separate ones: one for "safe" download, and the + # other one for "unsafe" download? This should induce safer and more + # readable code. if download_safely: file_object = tuf.download.safe_download(file_mirror, file_length) @@ -1227,15 +1470,6 @@ def _get_file(self, filepath, verify_file_function, file_type, file_object = tuf.download.unsafe_download(file_mirror, file_length) - if compression is not None: - if verify_compressed_file_function is not None: - verify_compressed_file_function(file_object) - logger.info('Decompressing ' + str(file_mirror)) - file_object.decompress_temp_file_object(compression) - - else: - logger.info('Not decompressing ' + str(file_mirror)) - # Verify 'file_object' according to the callable function. # 'file_object' is also verified if decompressed above (i.e., the # uncompressed version). @@ -1262,8 +1496,7 @@ def _get_file(self, filepath, verify_file_function, file_type, - def _update_metadata(self, metadata_role, upperbound_filelength, version=None, - compression_algorithm=None): + def _update_metadata(self, metadata_role, upperbound_filelength, version=None): """ Non-public method that downloads, verifies, and 'installs' the metadata @@ -1285,12 +1518,6 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, The expected and required version number of the 'metadata_role' file downloaded. 'expected_version' is an integer. 
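Stepping back to the `spec_version` check added to `_get_metadata_file()` above: only the major number of the `major.minor.fix` string decides whether downloaded metadata may be parsed. A self-contained sketch of the same comparison (the function name is hypothetical):

```Python
SUPPORTED_MAJOR_VERSION = 1

def spec_version_is_supported(spec_version):
  # "1.4.3" -> ["1", "4", "3"]; only the major number decides compatibility.
  try:
    major_version = int(spec_version.split('.')[0])
  except (ValueError, AttributeError):
    raise ValueError('Improperly formatted spec_version: ' + repr(spec_version))
  return major_version == SUPPORTED_MAJOR_VERSION

assert spec_version_is_supported('1.0.0')
assert spec_version_is_supported('1.4.3')
assert not spec_version_is_supported('2.0.0')
```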
- compression_algorithm: - A string designating the compression type of 'metadata_role'. - The 'snapshot' metadata file may be optionally downloaded and stored in - compressed form. Currently, only metadata files compressed with 'gzip' - are considered. Any other string is ignored. - tuf.exceptions.NoWorkingMirrorError: The metadata cannot be updated. This is not specific to a single @@ -1308,12 +1535,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, # Construct the metadata filename as expected by the download/mirror modules. metadata_filename = metadata_role + '.json' - uncompressed_metadata_filename = metadata_filename - - # The 'snapshot' or Targets metadata may be compressed. Add the appropriate - # extension to 'metadata_filename'. - if compression_algorithm == 'gzip': - metadata_filename = metadata_filename + '.gz' + metadata_filename = metadata_filename # Attempt a file download from each mirror until the file is downloaded and # verified. If the signature of the downloaded file is valid, proceed, @@ -1330,10 +1552,6 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, # for each other. In this case, we will download the metadata up to the # best length we can get for it, not request a specific version, but # perform the rest of the checks (e.g., signature verification). - # - # Note also that we presently support decompression of only "safe" - # metadata, but this is easily extend to "unsafe" metadata as well as - # "safe" targets. remote_filename = metadata_filename filename_version = '' @@ -1345,8 +1563,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, metadata_file_object = \ self._get_metadata_file(metadata_role, remote_filename, - upperbound_filelength, version, - compression_algorithm) + upperbound_filelength, version) # The metadata has been verified. Move the metadata file into place. # First, move the 'current' metadata file to the 'previous' directory @@ -1371,16 +1588,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, metadata_signable = \ securesystemslib.util.load_json_string(metadata_file_object.read().decode('utf-8')) - if compression_algorithm == 'gzip': - current_uncompressed_filepath = \ - os.path.join(self.metadata_directory['current'], - uncompressed_metadata_filename) - current_uncompressed_filepath = \ - os.path.abspath(current_uncompressed_filepath) - metadata_file_object.move(current_uncompressed_filepath) - - else: - metadata_file_object.move(current_filepath) + metadata_file_object.move(current_filepath) # Extract the metadata object so we can store it to the metadata store. # 'current_metadata_object' set to 'None' if there is not an object @@ -1398,7 +1606,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, logger.debug('Updated ' + repr(current_filepath) + '.') self.metadata['previous'][metadata_role] = current_metadata_object self.metadata['current'][metadata_role] = updated_metadata_object - self._update_versioninfo(uncompressed_metadata_filename) + self._update_versioninfo(metadata_filename) # Ensure the role and key information of the top-level roles is also updated # according to the newly-installed Root metadata. 
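The move into place above follows the client's current/previous metadata layout: the currently trusted file is preserved under `previous/` before the newly verified file replaces it under `current/`. A rough sketch of that rotation, using a hypothetical helper and plain `shutil` calls rather than the updater's temporary-file objects:

```Python
import os
import shutil

def install_metadata(metadata_directory, rolename, new_metadata_path):
  # 'metadata_directory' is expected to contain 'current/' and 'previous/'.
  filename = rolename + '.json'
  current_filepath = os.path.join(metadata_directory, 'current', filename)
  previous_filepath = os.path.join(metadata_directory, 'previous', filename)

  # Preserve the currently trusted copy before overwriting it.
  if os.path.exists(current_filepath):
    shutil.copy2(current_filepath, previous_filepath)

  # Move the newly downloaded and verified metadata into place.
  shutil.move(new_metadata_path, current_filepath)
```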
@@ -1411,7 +1619,7 @@ def _update_metadata(self, metadata_role, upperbound_filelength, version=None, def _update_metadata_if_changed(self, metadata_role, - referenced_metadata='snapshot'): + referenced_metadata='snapshot'): """ Non-public method that updates the metadata for 'metadata_role' if it has @@ -1469,7 +1677,7 @@ def _update_metadata_if_changed(self, metadata_role, None. """ - uncompressed_metadata_filename = metadata_role + '.json' + metadata_filename = metadata_role + '.json' expected_versioninfo = None expected_fileinfo = None @@ -1493,57 +1701,26 @@ def _update_metadata_if_changed(self, metadata_role, # strictly greater than its currently trusted version number. expected_versioninfo = self.metadata['current'][referenced_metadata] \ ['meta'] \ - [uncompressed_metadata_filename] + [metadata_filename] - if not self._versioninfo_has_been_updated(uncompressed_metadata_filename, + if not self._versioninfo_has_been_updated(metadata_filename, expected_versioninfo): - logger.info(repr(uncompressed_metadata_filename) + ' up-to-date.') + logger.info(repr(metadata_filename) + ' up-to-date.') - # Since we have not downloaded a new version of this metadata, we - # should check to see if our local version is stale and notify the user - # if so. This raises tuf.exceptions.ExpiredMetadataError if the metadata we - # have is expired. Resolves issue #322. + # Since we have not downloaded a new version of this metadata, we should + # check to see if our local version is stale and notify the user if so. + # This raises tuf.exceptions.ExpiredMetadataError if the metadata we have + # is expired. Resolves issue #322. self._ensure_not_expired(self.metadata['current'][metadata_role], metadata_role) + # TODO: If 'metadata_role' is root or snapshot, we should verify that # root's hash matches what's in snapshot, and that snapshot hash matches # what's listed in timestamp.json. return - logger.debug('Metadata ' + repr(uncompressed_metadata_filename) + ' has changed.') - - # There might be a compressed version of 'snapshot.json' or Targets - # metadata available for download. Check the 'meta' field of - # 'referenced_metadata' to see if it is listed when 'metadata_role' - # is 'snapshot'. The full rolename for delegated Targets metadata - # must begin with 'targets/'. The snapshot role lists all the Targets - # metadata available on the repository, including any that may be in - # compressed form. - # - # In addition to validating the fileinfo (i.e., file lengths and hashes) - # of the uncompressed metadata, the compressed version is also verified to - # match its respective fileinfo. Verifying the compressed fileinfo ensures - # untrusted data is not decompressed prior to verifying hashes, or - # decompressing a file that may be invalid or partially intact. - compression = None - - # Check for the availability of compressed versions of 'snapshot.json', - # 'targets.json', and delegated Targets (that also start with 'targets'). - # For 'targets.json' and delegated metadata, 'referenced_metata' - # should always be 'snapshot'. 'snapshot.json' specifies all roles - # provided by a repository, including their version numbers. 
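The decision above reduces to a version comparison: the role is re-downloaded only if the referenced metadata (normally `snapshot.json`) lists a version number strictly greater than the one currently trusted; otherwise the local copy is kept and merely checked for expiration. Roughly (the helper name is hypothetical, and each versioninfo is assumed to be a `{'version': N}` dict as listed in the referenced metadata's `meta` field):

```Python
def needs_update(trusted_versioninfo, expected_versioninfo):
  # Download only if the referenced metadata lists a strictly greater version.
  if trusted_versioninfo is None:
    return True
  return expected_versioninfo['version'] > trusted_versioninfo['version']

assert needs_update({'version': 3}, {'version': 4})
assert not needs_update({'version': 4}, {'version': 4})
```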
- if metadata_role == 'snapshot' or metadata_role.startswith('targets'): - if 'gzip' in self.metadata['current']['root']['compression_algorithms']: - compression = 'gzip' - gzip_metadata_filename = uncompressed_metadata_filename + '.gz' - logger.debug('Compressed version of ' + - repr(uncompressed_metadata_filename) + ' is available at ' + - repr(gzip_metadata_filename) + '.') - - else: - logger.debug('Compressed version of ' + - repr(uncompressed_metadata_filename) + ' not available.') + logger.debug('Metadata ' + repr(metadata_filename) + ' has changed.') # The file lengths of metadata are unknown, only their version numbers are # known. Set an upper limit for the length of the downloaded file for each @@ -1561,7 +1738,7 @@ def _update_metadata_if_changed(self, metadata_role, try: self._update_metadata(metadata_role, upperbound_filelength, - expected_versioninfo['version'], compression) + expected_versioninfo['version']) except: # The current metadata we have is not current but we couldn't get new @@ -2047,7 +2224,7 @@ def all_targets(self): def _refresh_targets_metadata(self, rolename='targets', - refresh_all_delegated_roles=False): + refresh_all_delegated_roles=False): """ Non-public method that refreshes the targets metadata of 'rolename'. If @@ -2377,8 +2554,7 @@ def _preorder_depth_first_walk(self, target_filepath): child_roles_to_visit = [] # NOTE: This may be a slow operation if there are many delegated roles. for child_role in child_roles: - child_role_name = self._visit_child_role(child_role, target_filepath, - delegations) + child_role_name = self._visit_child_role(child_role, target_filepath) if child_role['terminating'] and child_role_name is not None: logger.debug('Adding child role ' + repr(child_role_name)) logger.debug('Not backtracking to other roles.') @@ -2463,11 +2639,11 @@ def _get_target_from_targets_role(self, role_name, targets, target_filepath): - def _visit_child_role(self, child_role, target_filepath, parent_delegations): + def _visit_child_role(self, child_role, target_filepath): """ - Non-public method that determines whether the given 'child_role' has been - delegated the target with the name 'target_filepath'. + Non-public method that determines whether the given 'target_filepath' + is an allowed path of 'child_role'. Ensure that we explore only delegated roles trusted with the target. The metadata for 'child_role' should have been refreshed prior to this point, @@ -2475,7 +2651,7 @@ def _visit_child_role(self, child_role, target_filepath, parent_delegations): verified (as intended). The paths/targets that 'child_role' is allowed to specify in its metadata depends on the delegating role, and thus is left to the caller to verify. We verify here that 'target_filepath' - is an allowed path according to its parent role ('parent_delegations'). + is an allowed path according to the delegated 'child_role'. TODO: Should the TUF spec restrict the repository to one particular algorithm? Should we allow the repository to specify in the role @@ -2484,16 +2660,12 @@ def _visit_child_role(self, child_role, target_filepath, parent_delegations): child_role: The delegation targets role object of 'child_role', containing its - paths, path_hash_prefixes, keys and so on. + paths, path_hash_prefixes, keys, and so on. target_filepath: The path to the target file on the repository. This will be relative to the 'targets' (or equivalent) directory on a given mirror. - parent_delegations: - The 'delegations' entry of 'child_role's delegating role. 
A delegating - role specifies the paths/targets that a child role is trusted to sign. - None. @@ -2511,63 +2683,43 @@ def _visit_child_role(self, child_role, target_filepath, parent_delegations): child_role_paths = child_role.get('paths') child_role_path_hash_prefixes = child_role.get('path_hash_prefixes') - # A boolean indicator that tell us whether 'child_role' has been delegated - # the target with the name 'target_filepath'. - child_role_is_relevant = False - if child_role_path_hash_prefixes is not None: target_filepath_hash = self._get_target_hash(target_filepath) for child_role_path_hash_prefix in child_role_path_hash_prefixes: if target_filepath_hash.startswith(child_role_path_hash_prefix): - child_role_is_relevant = True + return child_role_name else: continue elif child_role_paths is not None: + # Is 'child_role_name' allowed to sign for 'target_filepath'? for child_role_path in child_role_paths: # A child role path may be an explicit path or pattern (Unix - # shell-style wildcards). The child role 'child_role_name' is added if - # 'target_filepath' is equal or matches 'child_role_path'. Explicit - # filepaths are also added. + # shell-style wildcards). The child role 'child_role_name' is returned + # if 'target_filepath' is equal to or matches 'child_role_path'. + # Explicit filepaths are also considered matches. if fnmatch.fnmatch(target_filepath, child_role_path): - child_role_is_relevant = True + logger.debug('Child role ' + repr(child_role_name) + ' is allowed to' + ' sign for ' + repr(target_filepath)) + + return child_role_name else: - logger.debug('Target path' + repr(target_filepath) + ' does not' - ' match child role path ' + repr(child_role_path)) + logger.debug('The given target path' + repr(target_filepath) + ' is' + ' not an allowed trusted path of ' + repr(child_role_path)) + + continue else: # 'role_name' should have been validated when it was downloaded. # The 'paths' or 'path_hash_prefixes' fields should not be missing, # so we raise a format error here in case they are both missing. - raise securesystemslib.exceptions.FormatError(repr(child_role_name) + ' has neither ' - '"paths" nor "path_hash_prefixes".') - - if child_role_is_relevant: - # Is the child role allowed by its parent role to specify this path - # in its metadata? - try: - securesystemslib.util.ensure_all_targets_allowed(child_role_name, - [target_filepath], parent_delegations) - - except tuf.exceptions.ForbiddenTargetError: - logger.debug('Child role ' + repr(child_role_name) + ' has target ' + \ - repr(target_filepath) + ', but is not allowed to sign for' - ' it according to its delegating role.') - return None - - else: - logger.debug('Child role ' + repr(child_role_name) + ' has target ' + \ - repr(target_filepath)) - return child_role_name - - else: - logger.debug('Child role ' + repr(child_role_name) + \ - ' does not have target ' + repr(target_filepath)) - return None - + raise securesystemslib.exceptions.FormatError(repr(child_role_name) + ' ' + 'has neither a "paths" nor "path_hash_prefixes". At least' + ' one of these attributes must be present.') + return None diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py index df2215795c..b8463d696f 100755 --- a/tuf/developer_tool.py +++ b/tuf/developer_tool.py @@ -134,9 +134,6 @@ # The full list of supported TUF metadata extensions. from tuf.repository_lib import METADATA_EXTENSIONS -# The recognized compression extensions. -from tuf.repository_lib import SUPPORTED_COMPRESSION_EXTENSIONS - # Supported key types. 
from tuf.repository_lib import SUPPORTED_KEY_TYPES @@ -525,9 +522,8 @@ def _generate_and_write_metadata(rolename, metadata_filename, write_partial, if tuf.sig.verify(signable, rolename, repository_name) or write_partial: _remove_invalid_and_duplicate_signatures(signable, repository_name) - compressions = roleinfo['compressions'] filename = write_metadata_file(signable, metadata_filename, - metadata['version'], compressions, False) + metadata['version'], False) # 'signable' contains an invalid threshold of signatures. else: @@ -764,7 +760,7 @@ def _save_project_configuration(metadata_directory, targets_directory, for key in public_keys: key_info = tuf.keydb.get_key(key) key_metadata = format_keyval_to_metadata(key_info['keytype'], - key_info['keyval']) + key_info['scheme'], key_info['keyval']) project_config['public_keys'][key] = key_metadata # Save the actual file. @@ -912,7 +908,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, for role in targets_metadata['delegations']['roles']: rolename = role['name'] roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], 'compressions': [''], + 'threshold': role['threshold'], 'signing_keyids': [], 'signatures': [], 'partial_loaded':False, 'delegations': {'keys':{}, 'roles':[]} } @@ -967,9 +963,6 @@ def load_project(project_directory, prefix='', new_targets_location=None, roleinfo['delegations'] = metadata_object['delegations'] roleinfo['partial_loaded'] = False - if os.path.exists(metadata_path+'.gz'): - roleinfo['compressions'].append('gz') - # If the metadata was partially loaded, update the roleinfo flag. if _metadata_is_partially_loaded(metadata_name, signable, roleinfo, repository_name=repository_name): @@ -1003,8 +996,7 @@ def load_project(project_directory, prefix='', new_targets_location=None, rolename = role['name'] roleinfo = {'name': role['name'], 'keyids': role['keyids'], 'threshold': role['threshold'], - 'compressions': [''], 'signing_keyids': [], - 'signatures': [], + 'signing_keyids': [], 'signatures': [], 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} diff --git a/tuf/download.py b/tuf/download.py index 48bd77cd18..f9e3f69e6e 100755 --- a/tuf/download.py +++ b/tuf/download.py @@ -44,7 +44,7 @@ import six # 'ssl.match_hostname' was added in Python 3.2. The vendored version is needed -# for Python 2.6 and 2.7. +# for Python 2.7. try: from ssl import match_hostname, CertificateError @@ -682,10 +682,7 @@ class VerifiedHTTPSConnection(six.moves.http_client.HTTPSConnection): def connect(self): self.connection_kwargs = {} - - # for > py2.5 - if hasattr(self, 'timeout'): - self.connection_kwargs.update(timeout = self.timeout) + self.connection_kwargs.update(timeout = self.timeout) # for >= py2.7 if hasattr(self, 'source_address'): diff --git a/tuf/exceptions.py b/tuf/exceptions.py index a964d3b8d4..80e2bb051a 100755 --- a/tuf/exceptions.py +++ b/tuf/exceptions.py @@ -149,18 +149,6 @@ class UnsupportedLibraryError(Error): pass -class DecompressionError(Error): - """Indicate that some error happened while decompressing a file.""" - - def __init__(self, exception): - # Store the original exception. - self.exception = exception - - def __str__(self): - # Show the original exception. 
- return repr(self.exception) - - class DownloadError(Error): """Indicate an error occurred while attempting to download a file.""" pass diff --git a/tuf/formats.py b/tuf/formats.py index 000b247f1d..cc63a43cc5 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -74,7 +74,6 @@ import binascii import calendar import re -import string import datetime import time @@ -86,6 +85,13 @@ import six + +# TUF specification version. The constant should be updated when the version +# number of the specification changes. All metadata should list this version +# number. +TUF_VERSION_NUMBER = '1.0' +SPECIFICATION_VERSION_SCHEMA = SCHEMA.AnyString() + # A datetime in 'YYYY-MM-DDTHH:MM:SSZ' ISO 8601 format. The "Z" zone designator # for the zero UTC offset is always used (i.e., a numerical offset is not # supported.) Example: '2015-10-21T13:20:00Z'. Note: This is a simple format @@ -148,10 +154,6 @@ # A value that is either True or False, on or off, etc. BOOLEAN_SCHEMA = SCHEMA.Boolean() -# List of supported compression extensions. -COMPRESSIONS_SCHEMA = SCHEMA.ListOf( - SCHEMA.OneOf([SCHEMA.String(''), SCHEMA.String('gz')])) - # A string representing a role's name. ROLENAME_SCHEMA = SCHEMA.AnyString() @@ -269,13 +271,6 @@ # as requiring them to be a power of 2. NUMBINS_SCHEMA = SCHEMA.Integer(lo=1) -# Supported compression extension (e.g., 'gz'). -COMPRESSION_SCHEMA = SCHEMA.OneOf([SCHEMA.String(''), SCHEMA.String('gz')]) - -# List of supported compression extensions. -COMPRESSIONS_SCHEMA = SCHEMA.ListOf( - SCHEMA.OneOf([SCHEMA.String(''), SCHEMA.String('gz')])) - # The fileinfo format of targets specified in the repository and # developer tools. The second element of this list holds custom data about the # target, such as file permissions, author(s), last modified, etc. @@ -296,7 +291,6 @@ version = SCHEMA.Optional(METADATAVERSION_SCHEMA), expires = SCHEMA.Optional(ISO8601_DATETIME_SCHEMA), signatures = SCHEMA.Optional(securesystemslib.formats.SIGNATURES_SCHEMA), - compressions = SCHEMA.Optional(COMPRESSIONS_SCHEMA), paths = SCHEMA.Optional(SCHEMA.OneOf([RELPATHS_SCHEMA, PATH_FILEINFO_SCHEMA])), path_hash_prefixes = SCHEMA.Optional(PATH_HASH_PREFIXES_SCHEMA), delegations = SCHEMA.Optional(DELEGATIONS_SCHEMA), @@ -311,10 +305,10 @@ # Root role: indicates root keys and top-level roles. ROOT_SCHEMA = SCHEMA.Object( object_name = 'ROOT_SCHEMA', - _type = SCHEMA.String('Root'), + _type = SCHEMA.String('root'), + spec_version = SPECIFICATION_VERSION_SCHEMA, version = METADATAVERSION_SCHEMA, consistent_snapshot = BOOLEAN_SCHEMA, - compression_algorithms = COMPRESSIONS_SCHEMA, expires = ISO8601_DATETIME_SCHEMA, keys = KEYDICT_SCHEMA, roles = ROLEDICT_SCHEMA) @@ -322,7 +316,8 @@ # Targets role: Indicates targets and delegates target paths to other roles. TARGETS_SCHEMA = SCHEMA.Object( object_name = 'TARGETS_SCHEMA', - _type = SCHEMA.String('Targets'), + _type = SCHEMA.String('targets'), + spec_version = SPECIFICATION_VERSION_SCHEMA, version = METADATAVERSION_SCHEMA, expires = ISO8601_DATETIME_SCHEMA, targets = FILEDICT_SCHEMA, @@ -332,15 +327,16 @@ # timestamp). SNAPSHOT_SCHEMA = SCHEMA.Object( object_name = 'SNAPSHOT_SCHEMA', - _type = SCHEMA.String('Snapshot'), + _type = SCHEMA.String('snapshot'), version = securesystemslib.formats.METADATAVERSION_SCHEMA, expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, + spec_version = SPECIFICATION_VERSION_SCHEMA, meta = FILEINFODICT_SCHEMA) # Timestamp role: indicates the latest version of the snapshot file. 
TIMESTAMP_SCHEMA = SCHEMA.Object( object_name = 'TIMESTAMP_SCHEMA', - _type = SCHEMA.String('Timestamp'), + _type = SCHEMA.String('timestamp'), version = securesystemslib.formats.METADATAVERSION_SCHEMA, expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, meta = securesystemslib.formats.FILEDICT_SCHEMA) @@ -380,7 +376,7 @@ # serve. MIRRORLIST_SCHEMA = SCHEMA.Object( object_name = 'MIRRORLIST_SCHEMA', - _type = SCHEMA.String('Mirrors'), + _type = SCHEMA.String('mirrors'), version = METADATAVERSION_SCHEMA, expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, mirrors = SCHEMA.ListOf(MIRROR_SCHEMA)) @@ -459,7 +455,6 @@ def make_signable(object): - class MetaFile(object): """ @@ -519,7 +514,8 @@ def from_metadata(object): @staticmethod def make_metadata(version, expiration_date, filedict): - result = {'_type' : 'Timestamp'} + result = {'_type' : 'timestamp'} + result['spec_version'] = TUF_VERSION_NUMBER result['version'] = version result['expires'] = expiration_date result['meta'] = filedict @@ -533,16 +529,13 @@ def make_metadata(version, expiration_date, filedict): class RootFile(MetaFile): - def __init__(self, version, expires, keys, roles, consistent_snapshot, - compression_algorithms): + def __init__(self, version, expires, keys, roles, consistent_snapshot): self.info = {} self.info['version'] = version self.info['expires'] = expires self.info['keys'] = keys self.info['roles'] = roles self.info['consistent_snapshot'] = consistent_snapshot - self.info['compression_algorithms'] = compression_algorithms - @staticmethod def from_metadata(object): @@ -555,22 +548,19 @@ def from_metadata(object): keys = object['keys'] roles = object['roles'] consistent_snapshot = object['consistent_snapshot'] - compression_algorithms = object['compression_algorithms'] - return RootFile(version, expires, keys, roles, consistent_snapshot, - compression_algorithms) + return RootFile(version, expires, keys, roles, consistent_snapshot) @staticmethod - def make_metadata(version, expiration_date, keydict, roledict, - consistent_snapshot, compression_algorithms): - result = {'_type' : 'Root'} + def make_metadata(version, expiration_date, keydict, roledict, consistent_snapshot): + result = {'_type' : 'root'} + result['spec_version'] = TUF_VERSION_NUMBER result['version'] = version result['expires'] = expiration_date result['keys'] = keydict result['roles'] = roledict result['consistent_snapshot'] = consistent_snapshot - result['compression_algorithms'] = compression_algorithms # Is 'result' a Root metadata file? # Raise 'securesystemslib.exceptions.FormatError' if not. @@ -604,7 +594,8 @@ def from_metadata(object): @staticmethod def make_metadata(version, expiration_date, versiondict): - result = {'_type' : 'Snapshot'} + result = {'_type' : 'snapshot'} + result['spec_version'] = TUF_VERSION_NUMBER result['version'] = version result['expires'] = expiration_date result['meta'] = versiondict @@ -651,10 +642,12 @@ def make_metadata(version, expiration_date, filedict=None, delegations=None): raise securesystemslib.exceptions.Error('We don\'t allow completely' ' empty targets metadata.') - result = {'_type' : 'Targets'} + result = {'_type' : 'targets'} + result['spec_version'] = TUF_VERSION_NUMBER result['version'] = version result['expires'] = expiration_date result['targets'] = {} + if filedict is not None: result['targets'] = filedict if delegations is not None: @@ -688,11 +681,11 @@ def make_metadata(): # A dict holding the recognized schemas for the top-level roles. 
SCHEMAS_BY_TYPE = { - 'Root' : ROOT_SCHEMA, - 'Targets' : TARGETS_SCHEMA, - 'Snapshot' : SNAPSHOT_SCHEMA, - 'Timestamp' : TIMESTAMP_SCHEMA, - 'Mirrors' : MIRRORLIST_SCHEMA} + 'root' : ROOT_SCHEMA, + 'targets' : TARGETS_SCHEMA, + 'snapshot' : SNAPSHOT_SCHEMA, + 'timestamp' : TIMESTAMP_SCHEMA, + 'mirrors' : MIRRORLIST_SCHEMA} # A dict holding the recognized class names for the top-level roles. # That is, the role classes listed in this module (e.g., class TargetsFile()). @@ -1098,7 +1091,7 @@ def expected_meta_rolename(meta_rolename): # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. securesystemslib.formats.NAME_SCHEMA.check_match(meta_rolename) - return string.capwords(meta_rolename) + return meta_rolename.lower() diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py index 8fee64eb70..e5cbcbfb62 100755 --- a/tuf/repository_lib.py +++ b/tuf/repository_lib.py @@ -51,6 +51,7 @@ import tuf.settings import securesystemslib +import securesystemslib.interface import iso8601 import six @@ -95,9 +96,6 @@ # Supported key types. SUPPORTED_KEY_TYPES = ['rsa', 'ed25519'] -# The recognized compression extensions. -SUPPORTED_COMPRESSION_EXTENSIONS = ['.gz'] - # The full list of supported TUF metadata extensions. METADATA_EXTENSIONS = ['.json.gz', '.json'] @@ -106,12 +104,9 @@ def _generate_and_write_metadata(rolename, metadata_filename, - targets_directory, metadata_directory, - consistent_snapshot=False, filenames=None, - compression_algorithms=['gz'], - allow_partially_signed=False, - increment_version_number=True, - repository_name='default'): + targets_directory, metadata_directory, consistent_snapshot=False, + filenames=None, allow_partially_signed=False, increment_version_number=True, + repository_name='default'): """ Non-public function that can generate and write the metadata for the specified 'rolename'. It also increments the version number of 'rolename' if @@ -130,7 +125,7 @@ def _generate_and_write_metadata(rolename, metadata_filename, # Generate the appropriate role metadata for 'rolename'. if rolename == 'root': metadata = generate_root_metadata(roleinfo['version'], roleinfo['expires'], - consistent_snapshot, compression_algorithms, repository_name) + consistent_snapshot, repository_name) _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], ROOT_EXPIRES_WARN_SECONDS) @@ -220,7 +215,7 @@ def should_write(): if rolename == 'root': consistent_snapshot = True filename = write_metadata_file(signable, metadata_filename, - metadata['version'], compression_algorithms, consistent_snapshot) + metadata['version'], consistent_snapshot) # 'signable' contains an invalid threshold of signatures. else: @@ -246,13 +241,11 @@ def should_write(): # .root.json and root.json). if rolename == 'root': filename = write_metadata_file(signable, metadata_filename, - metadata['version'], compression_algorithms, - consistent_snapshot=True) + metadata['version'], consistent_snapshot=True) else: filename = write_metadata_file(signable, metadata_filename, - metadata['version'], compression_algorithms, - consistent_snapshot) + metadata['version'], consistent_snapshot) return signable, filename @@ -260,45 +253,6 @@ def should_write(): -def _prompt(message, result_type=str): - """ - Non-public function that prompts the user for input by logging 'message', - converting the input to 'result_type', and returning the value to the - caller. 
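With the `_type` values above now lowercase, `SCHEMAS_BY_TYPE` is keyed by the same lowercase names. A small sketch of selecting and applying the schema for a parsed signed object (the helper is illustrative only):

```Python
import tuf.formats
import securesystemslib.exceptions

def check_signed_portion(signed):
  # 'signed' is the inner object of a signable, e.g. {'_type': 'root', ...}.
  role_type = signed['_type']          # Now lowercase: 'root', 'targets', ...
  schema = tuf.formats.SCHEMAS_BY_TYPE.get(role_type)

  if schema is None:
    raise securesystemslib.exceptions.FormatError(
        'Unrecognized metadata type: ' + repr(role_type))

  # Raises securesystemslib.exceptions.FormatError on a mismatch.
  schema.check_match(signed)
```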
- """ - - return result_type(six.moves.input(message)) - - - - - -def _get_password(prompt='Password: ', confirm=False): - """ - Non-public function that returns the password entered by the user. If - 'confirm' is True, the user is asked to enter the previously entered - password once again. If they match, the password is returned to the caller. - """ - - while True: - # getpass() prompts the user for a password without echoing - # the user input. - password = getpass.getpass(prompt, sys.stderr) - - if not confirm: - return password - password2 = getpass.getpass('Confirm: ', sys.stderr) - - if password == password2: - return password - - else: - print('Mismatch; try again.') - - - - - def _metadata_is_partially_loaded(rolename, signable, roleinfo, repository_name): """ Non-public function that determines whether 'rolename' is loaded with @@ -618,9 +572,6 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): logger.debug('Found a Root signature that is already loaded:' ' ' + repr(signature)) - if os.path.exists(root_filename + '.gz'): - roleinfo['compressions'].append('gz') - else: logger.debug('A compressed Root file was not found.') @@ -658,11 +609,6 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) roleinfo['expires'] = timestamp_metadata['expires'] roleinfo['version'] = timestamp_metadata['version'] - if os.path.exists(timestamp_filename + '.gz'): - roleinfo['compressions'].append('gz') - - else: - logger.debug('A compressed Timestamp file was not found.') if _metadata_is_partially_loaded('timestamp', signable, roleinfo, repository_name): roleinfo['partial_loaded'] = True @@ -703,11 +649,6 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) roleinfo['expires'] = snapshot_metadata['expires'] roleinfo['version'] = snapshot_metadata['version'] - if os.path.exists(snapshot_filename + '.gz'): - roleinfo['compressions'].append('gz') - - else: - logger.debug('A compressed Snapshot file was not loaded.') if _metadata_is_partially_loaded('snapshot', signable, roleinfo, repository_name): roleinfo['partial_loaded'] = True @@ -746,11 +687,6 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): roleinfo['version'] = targets_metadata['version'] roleinfo['expires'] = targets_metadata['expires'] roleinfo['delegations'] = targets_metadata['delegations'] - if os.path.exists(targets_filename + '.gz'): - roleinfo['compressions'].append('gz') - - else: - logger.debug('Compressed Targets file cannot be loaded.') if _metadata_is_partially_loaded('targets', signable, roleinfo, repository_name): roleinfo['partial_loaded'] = True @@ -775,7 +711,6 @@ def _load_top_level_metadata(repository, top_level_filenames, repository_name): # repository maintainer should have also been made aware of the duplicate # key when it was added. try: - tuf.keydb.add_key(key_object, repository_name=repository_name) for keyid in keyids: #pragma: no branch key_object['keyid'] = keyid tuf.keydb.add_key(key_object, keyid=None, @@ -854,50 +789,8 @@ def generate_and_write_rsa_keypair(filepath, bits=DEFAULT_RSA_KEY_BITS, None. """ - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of - # objects and object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # Does 'bits' have the correct format? - securesystemslib.formats.RSAKEYBITS_SCHEMA.check_match(bits) - - # If the caller does not provide a password argument, prompt for one. - if password is None: # pragma: no cover - message = 'Enter a password for the RSA key file: ' - password = _get_password(message, confirm=True) - - # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) - - # Generate public and private RSA keys, encrypted the private portion - # and store them in PEM format. - rsa_key = securesystemslib.keys.generate_rsa_key(bits) - public = rsa_key['keyval']['public'] - private = rsa_key['keyval']['private'] - encrypted_pem = securesystemslib.keys.create_rsa_encrypted_pem(private, password) - - # Write public key (i.e., 'public', which is in PEM format) to - # '.pub'. If the parent directory of filepath does not exist, - # create it (and all its parent directories, if necessary). - securesystemslib.util.ensure_parent_dir(filepath) - - # Create a tempororary file, write the contents of the public key, and move - # to final destination. - file_object = securesystemslib.util.TempFile() - file_object.write(public.encode('utf-8')) - - # The temporary file is closed after the final move. - file_object.move(filepath + '.pub') - - # Write the private key in encrypted PEM format to ''. - # Unlike the public key file, the private key does not have a file - # extension. - file_object = securesystemslib.util.TempFile() - file_object.write(encrypted_pem.encode('utf-8')) - file_object.move(filepath) - + securesystemslib.interface.generate_and_write_rsa_keypair( + filepath, bits, password) @@ -937,35 +830,8 @@ def import_rsa_privatekey_from_file(filepath, password=None): An RSA key object, conformant to 'securesystemslib.RSAKEY_SCHEMA'. """ - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # If the caller does not provide a password argument, prompt for one. - # Password confirmation disabled here, which should ideally happen only - # when creating encrypted key files (i.e., improve usability). - if password is None: # pragma: no cover - message = 'Enter a password for the encrypted RSA file: ' - password = _get_password(message, confirm=False) - - # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) - - encrypted_pem = None - - # Read the contents of 'filepath' that should be an encrypted PEM. - with open(filepath, 'rb') as file_object: - encrypted_pem = file_object.read().decode('utf-8') - - # Convert 'encrypted_pem' to 'securesystemslib.RSAKEY_SCHEMA' format. - # Raise 'securesystemslib.exceptions.CryptoError' if 'encrypted_pem' is - # invalid. - rsa_key = securesystemslib.keys.import_rsakey_from_private_pem(encrypted_pem, - password) - - return rsa_key + return securesystemslib.interface.import_rsa_privatekey_from_file( + filepath, password) @@ -1000,26 +866,7 @@ def import_rsa_publickey_from_file(filepath): An RSA key object conformant to 'securesystemslib.RSAKEY_SCHEMA'. """ - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. 
- # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # Read the contents of the key file that should be in PEM format and contains - # the public portion of the RSA key. - with open(filepath, 'rb') as file_object: - rsa_pubkey_pem = file_object.read().decode('utf-8') - - # Convert 'rsa_pubkey_pem' to 'securesystemslib.RSAKEY_SCHEMA' format. - try: - rsakey_dict = securesystemslib.keys.import_rsakey_from_public_pem(rsa_pubkey_pem) - - except securesystemslib.exceptions.FormatError as e: - raise securesystemslib.exceptions.Error('Cannot import improperly formatted' - ' PEM file.' + repr(str(e))) - - return rsakey_dict + return securesystemslib.interface.import_rsa_publickey_from_file(filepath) @@ -1065,53 +912,8 @@ def generate_and_write_ed25519_keypair(filepath, password=None): None. """ - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # If the caller does not provide a password argument, prompt for one. - if password is None: # pragma: no cover - message = 'Enter a password for the Ed25519 key: ' - password = _get_password(message, confirm=True) - - # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) - - # Generate a new ED25519 key object and encrypt it. The cryptography library - # used is determined by the user, or by default (set in - # 'settings.ED25519_CRYPTO_LIBRARY'). Raise - # 'securesystemslib.exceptions.CryptoError' or - # 'securesystemslib.exceptions.UnsupportedLibraryError', if 'ed25519_key' - # cannot be encrypted. - ed25519_key = securesystemslib.keys.generate_ed25519_key() - encrypted_key = securesystemslib.keys.encrypt_key(ed25519_key, password) - - # ed25519 public key file contents in metadata format (i.e., does not include - # the keyid portion). - keytype = ed25519_key['keytype'] - keyval = ed25519_key['keyval'] - ed25519key_metadata_format = \ - securesystemslib.keys.format_keyval_to_metadata(keytype, keyval, private=False) - - # Write the public key, conformant to 'securesystemslib.KEY_SCHEMA', to - # '.pub'. - securesystemslib.util.ensure_parent_dir(filepath) - - # Create a tempororary file, write the contents of the public key, and move - # to final destination. - file_object = securesystemslib.util.TempFile() - file_object.write(json.dumps(ed25519key_metadata_format).encode('utf-8')) - - # The temporary file is closed after the final move. - file_object.move(filepath + '.pub') - - # Write the encrypted key string, conformant to - # 'securesystemslib.ENCRYPTEDKEY_SCHEMA', to ''. - file_object = securesystemslib.util.TempFile() - file_object.write(encrypted_key.encode('utf-8')) - file_object.move(filepath) + securesystemslib.interface.generate_and_write_ed25519_keypair( + filepath, password) @@ -1142,26 +944,7 @@ def import_ed25519_publickey_from_file(filepath): 'securesystemslib.ED25519KEY_SCHEMA'. """ - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. 
- securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # ED25519 key objects are saved in json and metadata format. Return the - # loaded key object in securesystemslib.ED25519KEY_SCHEMA' format that - # also includes the keyid. - ed25519_key_metadata = securesystemslib.util.load_json_file(filepath) - ed25519_key, junk = securesystemslib.keys.format_metadata_to_key(ed25519_key_metadata) - - # Raise an exception if an unexpected key type is imported. Redundant - # validation of 'keytype'. 'securesystemslib.keys.format_metadata_to_key()' - # should have fully validated 'ed25519_key_metadata'. - if ed25519_key['keytype'] != 'ed25519': # pragma: no cover - message = 'Invalid key type loaded: ' + repr(ed25519_key['keytype']) - raise securesystemslib.exceptions.FormatError(message) - - return ed25519_key + return securesystemslib.interface.import_ed25519_publickey_from_file(filepath) @@ -1208,44 +991,8 @@ def import_ed25519_privatekey_from_file(filepath, password=None): An ed25519 key object of the form: 'securesystemslib.ED25519KEY_SCHEMA'. """ - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filepath) - - # If the caller does not provide a password argument, prompt for one. - # Password confirmation disabled here, which should ideally happen only - # when creating encrypted key files (i.e., improve usability). - if password is None: # pragma: no cover - message = 'Enter a password for the encrypted Ed25519 key: ' - password = _get_password(message, confirm=False) - - # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) - - # Store the encrypted contents of 'filepath' prior to calling the decryption - # routine. - encrypted_key = None - - with open(filepath, 'rb') as file_object: - encrypted_key = file_object.read() - - # Decrypt the loaded key file, calling the appropriate cryptography library - # (i.e., set by the user) and generating the derived encryption key from - # 'password'. Raise 'securesystemslib.exceptions.CryptoError' or - # 'securesystemslib.exceptions.UnsupportedLibraryError' if the decryption - # fails. - key_object = securesystemslib.keys.decrypt_key(encrypted_key.decode('utf-8'), - password) - - # Raise an exception if an unexpected key type is imported. - if key_object['keytype'] != 'ed25519': - message = 'Invalid key type loaded: ' + repr(key_object['keytype']) - raise securesystemslib.exceptions.FormatError(message) - - return key_object - + return securesystemslib.interface.import_ed25519_privatekey_from_file( + filepath, password) @@ -1453,7 +1200,7 @@ def get_target_hash(target_filepath): def generate_root_metadata(version, expiration_date, consistent_snapshot, - compression_algorithms=['gz'], repository_name='default'): + repository_name='default'): """ Create the root metadata. 'tuf.roledb.py' and 'tuf.keydb.py' @@ -1475,11 +1222,6 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, filename of any target file located in the targets directory. Each digest is stripped from the target filename and listed in the snapshot metadata. - compression_algorithms: - A list of compression algorithms to use when generating the compressed - metadata files for the repository. The root file specifies the - algorithms used by the repository. 
- repository_name: The name of the repository. If not supplied, 'rolename' is added to the 'default' repository. @@ -1507,7 +1249,6 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, tuf.formats.METADATAVERSION_SCHEMA.check_match(version) securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_algorithms) securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) # The role and key dictionaries to be saved in the root metadata object. @@ -1545,8 +1286,10 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, if key['keytype'] in ['rsa', 'ed25519']: keytype = key['keytype'] keyval = key['keyval'] + scheme = key['scheme'] keydict[keyid] = \ - securesystemslib.keys.format_keyval_to_metadata(keytype, keyval, private=False) + securesystemslib.keys.format_keyval_to_metadata(keytype, + scheme, keyval, private=False) # This is not a recognized key. Raise an exception. else: @@ -1568,9 +1311,7 @@ def generate_root_metadata(version, expiration_date, consistent_snapshot, # Generate the root metadata object. root_metadata = tuf.formats.RootFile.make_metadata(version, expiration_date, - keydict, roledict, - consistent_snapshot, - compression_algorithms) + keydict, roledict, consistent_snapshot) return root_metadata @@ -1774,14 +1515,11 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, metadata_directory = _check_directory(metadata_directory) - # Set the fileinfo of 'root.json', and the versioninfo of - # 'targets.json'. 'fileinfodict' shall contain the version number of all - # available delegated roles on the repository. + # Snapshot's 'fileinfodict' shall contain the version number of Root, + # Targets, and all delegated roles fo the repository. fileinfodict = {} - root_path = os.path.join(metadata_directory, root_filename + '.json') - length, hashes = securesystemslib.util.get_file_details(root_path) - root_version = get_metadata_versioninfo('root', repository_name) - fileinfodict[ROOT_FILENAME] = tuf.formats.make_fileinfo(length, hashes, version=root_version['version']) + fileinfodict[ROOT_FILENAME] = get_metadata_versioninfo(root_filename, + repository_name) fileinfodict[TARGETS_FILENAME] = get_metadata_versioninfo(targets_filename, repository_name) @@ -1814,6 +1552,11 @@ def generate_snapshot_metadata(metadata_directory, version, expiration_date, fileinfodict[metadata_name] = get_metadata_versioninfo(rolename, repository_name) + else: + logger.debug('Metadata file has an unsupported file' + ' extension: ' + metadata_filename) + continue + # Generate the Snapshot metadata object. snapshot_metadata = tuf.formats.SnapshotFile.make_metadata(version, expiration_date, @@ -1982,16 +1725,10 @@ def sign_metadata(metadata_object, keyids, filename, repository_name): -def write_metadata_file(metadata, filename, version_number, - compression_algorithms, consistent_snapshot): +def write_metadata_file(metadata, filename, version_number, consistent_snapshot): """ - If necessary, write the 'metadata' signable object to 'filename', and the - compressed version of the metadata file if 'compression' is set. - - Note: Compression algorithms like gzip attach a timestamp to compressed - files, so a metadata file compressed multiple times may generate different - digests even though the uncompressed content has not changed. 
+ If necessary, write the 'metadata' signable object to 'filename'. metadata: @@ -2000,27 +1737,22 @@ def write_metadata_file(metadata, filename, version_number, filename: The filename of the metadata to be written (e.g., 'root.json'). - If a compression algorithm is specified in 'compression_algorithms', the - compression extention is appended to 'filename'. version_number: The version number of the metadata file to be written. The version number is needed for consistent snapshots, which prepend the version number to 'filename'. - compression_algorithms: - Specify the algorithms, as a list of strings, used to compress the - 'metadata'; The only currently available compression option is 'gz' - (gzip). - consistent_snapshot: Boolean that determines whether the metadata file's digest should be prepended to the filename. - securesystemslib.exceptions.FormatError, if the arguments are improperly formatted. + securesystemslib.exceptions.FormatError, if the arguments are improperly + formatted. - securesystemslib.exceptions.Error, if the directory of 'filename' does not exist. + securesystemslib.exceptions.Error, if the directory of 'filename' does not + exist. Any other runtime (e.g., IO) exception. @@ -2039,7 +1771,6 @@ def write_metadata_file(metadata, filename, version_number, tuf.formats.SIGNABLE_SCHEMA.check_match(metadata) securesystemslib.formats.PATH_SCHEMA.check_match(filename) tuf.formats.METADATAVERSION_SCHEMA.check_match(version_number) - tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_algorithms) securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) # Verify the directory of 'filename', and convert 'filename' to its absolute @@ -2106,115 +1837,21 @@ def write_metadata_file(metadata, filename, version_number, os.link(written_consistent_filename, written_filename) else: - raise securesystemslib.exceptions.InvalidConfigurationError('The consistent method specified' - ' in tuf.settings.py is not supported, try either "copy" or "hard_link"') + raise securesystemslib.exceptions.InvalidConfigurationError('The' + ' consistent method specified in tuf.settings.py is not supported, try' + ' either "copy" or "hard_link"') else: logger.debug('Not creating a consistent snapshot for ' + repr(written_filename)) logger.debug('Saving ' + repr(written_filename)) file_object.move(written_filename) - # Generate the compressed versions of 'metadata', if necessary. A compressed - # file may be written (without needing to write the uncompressed version) if - # the repository maintainer adds compression after writing the uncompressed - # version. - for compression_algorithm in compression_algorithms: - file_object = None - - # Ignore the empty string that signifies non-compression. The uncompressed - # file was previously written above, if necessary. - if not len(compression_algorithm): - continue - - elif compression_algorithm == 'gz': - file_object = securesystemslib.util.TempFile() - compressed_filename = filename + '.gz' - - # Instantiate a gzip object, but save compressed content to - # 'file_object' (i.e., GzipFile instance is based on its 'fileobj' - # argument). - gzip_object = gzip.GzipFile(fileobj=file_object, mode='wb') - try: - gzip_object.write(file_content) - - finally: - gzip_object.close() - - # This else clause should not be reached because the - # 'compression_algorithms' list is validated against the - # COMPRESSIONS_SCHEMA above. 
- else: # pragma: no cover - raise securesystemslib.exceptions.FormatError('Unknown compression algorithm:' - ' ' + repr(compression_algorithm)) - - # Save the compressed version, ensuring an unchanged file is not re-saved. - # Re-saving the same compressed version may cause its digest to - # unexpectedly change (gzip includes a timestamp) even though content has - # not changed. - _write_compressed_metadata(file_object, compressed_filename, - True, consistent_snapshot, - version_number) return written_filename -def _write_compressed_metadata(file_object, compressed_filename, - write_new_metadata, consistent_snapshot, version_number): - """ - Write compressed versions of metadata, ensuring compressed file that have - not changed are not re-written, the digest of the compressed file is properly - added to the compressed filename, and consistent snapshots are also saved. - Ensure compressed files are written to a temporary location, and then - moved to their destinations. - """ - - # If a consistent snapshot is unneeded, 'file_object' may be simply moved - # 'compressed_filename' if not already written. - if not consistent_snapshot: - if write_new_metadata or not os.path.exists(compressed_filename): - file_object.move(compressed_filename) - - # The temporary file must be closed if 'file_object.move()' is not used. - # securesystemslib.util.TempFile() automatically closes the temp file when move() is - # called - else: - file_object.close_temp_file() - - # consistent snapshots = True. Ensure the version number is included in the - # compressed filename written, provided it does not already exist. - else: - compressed_content = file_object.read() - consistent_filename = None - version_and_filename = None - - # Attach the version number to the compressed, consistent snapshot filename. - dirname, basename = os.path.split(compressed_filename) - - for compression_extension in SUPPORTED_COMPRESSION_EXTENSIONS: - if basename.endswith(compression_extension): - basename = basename.split(compression_extension, 1)[0] - version_and_filename = str(version_number) + '.' + basename + compression_extension - consistent_filename = os.path.join(dirname, version_and_filename) - - else: - logger.debug('Skipping compression extension: ' + repr(compression_extension)) - - # Move the 'securesystemslib.util.TempFile' object to one of the filenames so that it is - # saved and the temporary file closed. - if not os.path.exists(consistent_filename): - logger.debug('Saving ' + repr(consistent_filename)) - file_object.move(consistent_filename) - - else: - logger.debug('Skipping already written compressed file:' - ' ' + repr(consistent_filename)) - - - - - def _log_status_of_top_level_roles(targets_directory, metadata_directory, repository_name): """ diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py index f967b1e9c6..2d95c7f727 100755 --- a/tuf/repository_tool.py +++ b/tuf/repository_tool.py @@ -69,10 +69,9 @@ tuf.log.add_console_handler() tuf.log.set_console_log_level(logging.INFO) -# The algorithm used by the repository to generate the digests of the -# target filepaths, which are included in metadata files and may be prepended -# to the filenames of consistent snapshots. -HASH_FUNCTION = 'sha256' +# The algorithm used by the repository to generate the path hash prefixes +# of hashed bin delegations. Please see delegate_hashed_bins() +HASH_FUNCTION = tuf.settings.DEFAULT_HASH_ALGORITHM # The targets and metadata directory names. 
Metadata files are written # to the staged metadata directory instead of the "live" one. @@ -187,7 +186,7 @@ def __init__(self, repository_directory, metadata_directory, - def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): + def writeall(self, consistent_snapshot=False): """ Write all the JSON Metadata objects to their corresponding files. @@ -203,11 +202,6 @@ def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): .README.json Example: 13.root.json' - compression_algorithms: - A list of compression algorithms. Each of these algorithms will be - used to compress all of the metadata available on the repository. - By default, all metadata is compressed with gzip. - tuf.exceptions.UnsignedMetadataError, if any of the top-level and delegated roles do not have the minimum threshold of signatures. @@ -225,7 +219,6 @@ def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): # 'securesystemslib.exceptions.FormatError' if any are improperly # formatted. securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_algorithms) # At this point, tuf.keydb and tuf.roledb must be fully populated, # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for @@ -253,7 +246,8 @@ def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): dirty_rolename + METADATA_EXTENSION) repo_lib._generate_and_write_metadata(dirty_rolename, dirty_filename, self._targets_directory, self._metadata_directory, - consistent_snapshot, filenames, repository_name=self._repository_name) + consistent_snapshot, filenames, + repository_name=self._repository_name) # Metadata should be written in (delegated targets -> root -> targets -> # snapshot -> timestamp) order. Begin by generating the 'root.json' @@ -263,13 +257,15 @@ def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): if 'root' in dirty_rolenames or consistent_snapshot: repo_lib._generate_and_write_metadata('root', filenames['root'], self._targets_directory, self._metadata_directory, - consistent_snapshot, filenames, repository_name=self._repository_name) + consistent_snapshot, filenames, + repository_name=self._repository_name) # Generate the 'targets.json' metadata file. if 'targets' in dirty_rolenames: repo_lib._generate_and_write_metadata('targets', filenames['targets'], self._targets_directory, self._metadata_directory, - consistent_snapshot, repository_name=self._repository_name) + consistent_snapshot, + repository_name=self._repository_name) # Generate the 'snapshot.json' metadata file. 
if 'snapshot' in dirty_rolenames: @@ -282,7 +278,8 @@ def writeall(self, consistent_snapshot=False, compression_algorithms=['gz']): if 'timestamp' in dirty_rolenames: repo_lib._generate_and_write_metadata('timestamp', filenames['timestamp'], self._targets_directory, self._metadata_directory, consistent_snapshot, - filenames, repository_name=self._repository_name) + filenames, + repository_name=self._repository_name) tuf.roledb.unmark_dirty(dirty_rolenames, self._repository_name) @@ -338,7 +335,8 @@ def write(self, rolename, consistent_snapshot=False, increment_version_number=Tr repo_lib._generate_and_write_metadata(rolename, rolename_filename, self._targets_directory, self._metadata_directory, consistent_snapshot, - filenames=filenames, allow_partially_signed=True, + filenames=filenames, + allow_partially_signed=True, increment_version_number=increment_version_number, repository_name=self._repository_name) @@ -550,8 +548,8 @@ class Metadata(object): top-level roles: Root, Targets, Snapshot, and Timestamp. The Metadata class provides methods that are needed by all top-level roles, such as adding and removing public keys, private keys, and signatures. Metadata - attributes, such as rolename, version, threshold, expiration, key list, and - compressions, is also provided by the Metadata base class. + attributes, such as rolename, version, threshold, expiration, and key list + are also provided by the Metadata base class. None. @@ -1318,87 +1316,6 @@ def signing_keys(self): - @property - def compressions(self): - """ - - A getter method that returns a list of the file compression algorithms - used when the metadata is written to disk. If ['gz'] is set for the - 'targets.json' role, the metadata files 'targets.json' and - 'targets.json.gz' are written. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - A list of compression algorithms, conformant to - 'tuf.formats.COMPRESSIONS_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - compressions = roleinfo['compressions'] - - return compressions - - - - @compressions.setter - def compressions(self, compression_list): - """ - - A setter method for the file compression algorithms used when the - metadata is written to disk. If ['gz'] is set for the 'targets.json' role - the metadata files 'targets.json' and 'targets.json.gz' are written. - - >>> - >>> - >>> - - - compression_list: - A list of file compression algorithms, conformant to - 'tuf.formats.COMPRESSIONS_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if 'compression_list' is - improperly formatted. - - - Updates the role's compression algorithms listed in 'tuf.roledb.py'. - - - None. - """ - - # Does 'compression_name' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.COMPRESSIONS_SCHEMA.check_match(compression_list) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - # Add the compression algorithms of 'compression_list' to the role's - # entry in 'tuf.roledb.py'. 
- for compression in compression_list: - if compression not in roleinfo['compressions']: - roleinfo['compressions'].append(compression) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - class Root(Metadata): @@ -1453,8 +1370,7 @@ def __init__(self, repository_name): roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, 'signatures': [], 'version': 0, 'consistent_snapshot': False, - 'compressions': [''], 'expires': expiration, - 'partial_loaded': False} + 'expires': expiration, 'partial_loaded': False} try: tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) @@ -1521,8 +1437,8 @@ def __init__(self, repository_name): expiration = expiration.isoformat() + 'Z' roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'compressions': [''], - 'expires': expiration, 'partial_loaded': False} + 'signatures': [], 'version': 0, 'expires': expiration, + 'partial_loaded': False} try: tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) @@ -1584,8 +1500,8 @@ def __init__(self, repository_name): expiration = expiration.isoformat() + 'Z' roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'compressions': [''], - 'expires': expiration, 'partial_loaded': False} + 'signatures': [], 'version': 0, 'expires': expiration, + 'partial_loaded': False} try: tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) @@ -1689,7 +1605,7 @@ def __init__(self, targets_directory, rolename='targets', roleinfo=None, # If 'roleinfo' is not provided, set an initial default. if roleinfo is None: roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'version': 0, 'compressions': [''], 'expires': expiration, + 'version': 0, 'expires': expiration, 'signatures': [], 'paths': {}, 'path_hash_prefixes': [], 'partial_loaded': False, 'delegations': {'keys': {}, 'roles': []}} @@ -2322,7 +2238,8 @@ def delegate(self, rolename, public_keys, list_of_targets, threshold=1, for key in public_keys: keyid = key['keyid'] key_metadata_format = securesystemslib.keys.format_keyval_to_metadata(key['keytype'], - key['keyval']) + key['scheme'], key['keyval']) + # Update 'keyids' and 'keydict'. 
new_keydict = {keyid: key_metadata_format} keydict.update(new_keydict) @@ -2363,7 +2280,7 @@ def delegate(self, rolename, public_keys, list_of_targets, threshold=1, expiration = expiration.isoformat() + 'Z' roleinfo = {'name': rolename, 'keyids': keyids, 'signing_keyids': [], - 'threshold': threshold, 'version': 0, 'compressions': [''], + 'threshold': threshold, 'version': 0, 'expires': expiration, 'signatures': [], 'partial_loaded': False, 'paths': relative_targetpaths, 'delegations': {'keys': {}, 'roles': []}} @@ -3088,7 +3005,6 @@ def load_repository(repository_directory, repository_name='default'): 'signing_keyids': [], 'signatures': [], 'partial_loaded': False, - 'compressions': [], 'paths': {}, } @@ -3100,12 +3016,6 @@ def load_repository(repository_directory, repository_name='default'): roleinfo['paths'].update({filepath: fileinfo.get('custom', {})}) roleinfo['delegations'] = metadata_object['delegations'] - if os.path.exists(metadata_path + '.gz'): - roleinfo['compressions'].append('gz') - - else: - logger.debug('A compressed version does not exist.') - tuf.roledb.add_role(metadata_name, roleinfo, repository_name) loaded_metadata.append(metadata_name) @@ -3127,9 +3037,12 @@ def load_repository(repository_directory, repository_name='default'): # The repository maintainer should have also been made aware of the # duplicate key when it was added. for key_metadata in six.itervalues(metadata_object['delegations']['keys']): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) + key_object, keyids = securesystemslib.keys.format_metadata_to_key(key_metadata) try: - tuf.keydb.add_key(key_object, repository_name=repository_name) + for keyid in keyids: # pragma: no branch + key_object['keyid'] = keyid + tuf.keydb.add_key(key_object, keyid=None, + repository_name=repository_name) except securesystemslib.exceptions.KeyAlreadyExistsError: pass @@ -3140,6 +3053,113 @@ def load_repository(repository_directory, repository_name='default'): +def dump_signable_metadata(metadata_filepath): + """ + + Dump the "signed" portion of metadata. It is the portion that is normally + signed by the repository tool, which is in canonicalized JSON form. + This function is intented for external tools that wish to independently + sign metadata. + + The normal workflow for this use case is to: + (1) call dump_signable_metadata(metadata_filepath) + (2) sign the output with an external tool + (3) call append_signature(signature, metadata_filepath) + + + metadata_filepath: + The path to the metadata file. For example, + repository/metadata/root.json. + + + securesystemslib.exceptions.FormatError, if the arguments are improperly + formatted. + + IOError, if 'metadata_filepath' cannot be opened. + + + None. + + + Metadata content that is normally signed by the repository tool (i.e., the + "signed" portion of a metadata file). + """ + + # Are the argument properly formatted? + securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + + signable = securesystemslib.util.load_json_file(metadata_filepath) + + # Is 'signable' a valid metadata file? + tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + + return securesystemslib.formats.encode_canonical(signable['signed']) + + + + + +def append_signature(signature, metadata_filepath): + """ + + Append 'signature' to the metadata at 'metadata_filepath'. The signature + is assumed to be valid, and externally generated by signing the output of + dump_signable_metadata(metadata_filepath). 
This function is intended for + external tools that wish to independently sign metadata. + + The normal workflow for this use case is to: + (1) call dump_signable_metadata(metadata_filepath) + (2) sign the output with an external tool + (3) call append_signature(signature, metadata_filepath) + + + signature: + A TUF signature structure that contains the KEYID, signing method, and + the signature. It conforms to securesystemslib.formats.SIGNATURE_SCHEMA. + + For example: + + { + "keyid": "a0a0f0cf08...", + "method": "ed25519", + "sig": "14f6e6566ec13..." + } + + metadata_filepath: + The path to the metadata file. For example, + repository/metadata/root.json. + + + securesystemslib.exceptions.FormatError, if the arguments are improperly + formatted. + + + 'metadata_filepath' is overwritten. + + + None. + """ + + # Are the arguments properly formatted? + securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) + securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) + + signable = securesystemslib.util.load_json_file(metadata_filepath) + + # Is 'signable' a valid metadata file? + tuf.formats.SIGNABLE_SCHEMA.check_match(signable) + + signable['signatures'].append(signature) + + file_object = securesystemslib.util.TempFile() + + written_metadata_content = json.dumps(signable, indent=1, + separators=(',', ': '), sort_keys=True).encode('utf-8') + + file_object.write(written_metadata_content) + file_object.move(metadata_filepath) + + if __name__ == '__main__': # The interactive sessions of the documentation strings can # be tested by running repository_tool.py as a standalone module: diff --git a/tuf/roledb.py b/tuf/roledb.py index 1ad93cbd4c..9f098cbd47 100755 --- a/tuf/roledb.py +++ b/tuf/roledb.py @@ -134,7 +134,6 @@ def create_roledb_from_root_metadata(root_metadata, repository_name='default'): roleinfo['signatures'] = [] roleinfo['signing_keyids'] = [] - roleinfo['compressions'] = [''] roleinfo['partial_loaded'] = False if rolename.startswith('targets'): @@ -810,8 +809,8 @@ def get_role_threshold(rolename, repository_name='default'): securesystemslib.exceptions.FormatError, if the arguments do not have the correct object format. - securesystemslib.exceptions.UnknownRoleError, if 'rolename' cannot be found - in in the role database. + tuf.exceptions.UnknownRoleError, if 'rolename' cannot be found + in the role database. securesystemslib.exceptions.InvalidNameError, if 'rolename' is incorrectly formatted, or 'repository_name' does not exist in the role database. @@ -822,6 +821,7 @@ def get_role_threshold(rolename, repository_name='default'): A threshold integer value. """ + # Raise 'securesystemslib.exceptions.FormatError' if 'repository_name' is # improperly formatted. securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) @@ -885,10 +885,6 @@ def get_role_paths(rolename, repository_name='default'): global _roledb_dict global _dirty_roles - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' ' exist: ' + - repository_name) - roleinfo = _roledb_dict[repository_name][rolename] # Paths won't exist for non-target roles. 
@@ -949,10 +945,6 @@ def get_delegated_rolenames(rolename, repository_name='default'): global _roledb_dict global _dirty_roles - if repository_name not in _roledb_dict or repository_name not in _dirty_roles: - raise securesystemslib.exceptions.InvalidNameError('Repository name does not' - ' exist: ' + repository_name) - # get_roleinfo() raises a 'securesystemslib.exceptions.InvalidNameError' if # 'repository_name' does not exist in the role database. roleinfo = get_roleinfo(rolename, repository_name) diff --git a/tuf/scripts/simple_server.py b/tuf/scripts/simple_server.py new file mode 100755 index 0000000000..ba2f104e0a --- /dev/null +++ b/tuf/scripts/simple_server.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +""" + + simple_server.py + + + Konstantin Andrianov. + + + February 15, 2012. + + + See LICENSE for licensing information. + + + This is a basic server that was designed to be used in conjunction with + test_download.py to test download.py module. + + + SimpleHTTPServer: + http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import sys +import random + +import six + +PORT = 0 + +def _port_gen(): + return random.randint(30000, 45000) + +if len(sys.argv) > 1: + try: + PORT = int(sys.argv[1]) + if PORT < 30000 or PORT > 45000: + raise ValueError + + except ValueError: + PORT = _port_gen() + +else: + PORT = _port_gen() + +Handler = six.moves.SimpleHTTPServer.SimpleHTTPRequestHandler +httpd = six.moves.socketserver.TCPServer(('', PORT), Handler) + +httpd.serve_forever() diff --git a/tuf/settings.py b/tuf/settings.py index 81eff8ecd8..a545d95d29 100755 --- a/tuf/settings.py +++ b/tuf/settings.py @@ -105,3 +105,9 @@ # a hard link to 2.root.json (for example). This approach is more efficient in # terms of disk space usage. By default, we use 'copy'. CONSISTENT_METHOD = 'copy' + +# A setting for the instances where a default hashing algorithm is needed. +# This setting is currently used to calculate the path hash prefixes of hashed +# bin delegations. The other instances (e.g., digest of files) that require a +# hashing algorithm rely on settings in the securesystemslib external library. +DEFAULT_HASH_ALGORITHM = 'sha256' diff --git a/tuf/sig.py b/tuf/sig.py index d572f92be6..32db62d34e 100755 --- a/tuf/sig.py +++ b/tuf/sig.py @@ -70,14 +70,13 @@ def get_signature_status(signable, role=None, repository_name='default', keys in 'tuf.keydb', a set of roles in 'tuf.roledb', and a role, the status of these signatures can be determined. This method will iterate the signatures in 'signable' and enumerate all the keys that are valid, - invalid, unrecognized, unauthorized, or generated using an unknown method. + invalid, unrecognized, or unauthorized. signable: A dictionary containing a list of signatures and a 'signed' identifier. signable = {'signed': 'signer', 'signatures': [{'keyid': keyid, - 'method': 'evp', 'sig': sig}]} Conformant to tuf.formats.SIGNABLE_SCHEMA. @@ -101,7 +100,7 @@ def get_signature_status(signable, role=None, repository_name='default', securesystemslib.exceptions.FormatError, if 'signable' does not have the correct format. 
- securesystemslib.exceptions.UnknownRoleError, if 'role' is not recognized. + tuf.exceptions.UnknownRoleError, if 'role' is not recognized. None. @@ -132,19 +131,24 @@ def get_signature_status(signable, role=None, repository_name='default', # The fields of the signature_status dict, where each field stores keyids. A # description of each field: - # good_sigs = keys confirmed to have produced 'sig' and 'method' using - # 'signed', which are associated with 'role'; + # + # good_sigs = keys confirmed to have produced 'sig' using 'signed', which are + # associated with 'role'; + # # bad_sigs = negation of good_sigs; + # # unknown_sigs = keys not found in the 'keydb' database; + # # untrusted_sigs = keys that are not in the list of keyids associated with # 'role'; - # unknown_method_sigs = keys found to have used an unsupported method - # of generating signatures. + # + # unknown_signing_scheme = signing schemes specified in keys that are + # unsupported; good_sigs = [] bad_sigs = [] unknown_sigs = [] untrusted_sigs = [] - unknown_method_sigs = [] + unknown_signing_schemes = [] # Extract the relevant fields from 'signable' that will allow us to identify # the different classes of keys (i.e., good_sigs, bad_sigs, etc.). @@ -156,7 +160,6 @@ def get_signature_status(signable, role=None, repository_name='default', for signature in signatures: sig = signature['sig'] keyid = signature['keyid'] - method = signature['method'] # Does the signature use an unrecognized key? try: @@ -166,12 +169,12 @@ def get_signature_status(signable, role=None, repository_name='default', unknown_sigs.append(keyid) continue - # Does the signature use an unknown key signing method? + # Does the signature use an unknown/unsupported signing scheme? try: valid_sig = securesystemslib.keys.verify_signature(key, signature, signed) - except securesystemslib.exceptions.UnknownMethodError: - unknown_method_sigs.append(keyid) + except securesystemslib.exceptions.UnsupportedAlgorithmError: + unknown_signing_schemes.append(keyid) continue # We are now dealing with either a trusted or untrusted key... @@ -188,7 +191,7 @@ def get_signature_status(signable, role=None, repository_name='default', continue # Unknown role, re-raise exception. - except securesystemslib.exceptions.UnknownRoleError: + except tuf.exceptions.UnknownRoleError: raise # This is an unset role, thus an unknown signature. @@ -211,7 +214,7 @@ def get_signature_status(signable, role=None, repository_name='default', threshold = \ tuf.roledb.get_role_threshold(role, repository_name=repository_name) - except securesystemslib.exceptions.UnknownRoleError: + except tuf.exceptions.UnknownRoleError: raise else: @@ -223,7 +226,7 @@ def get_signature_status(signable, role=None, repository_name='default', signature_status['bad_sigs'] = bad_sigs signature_status['unknown_sigs'] = unknown_sigs signature_status['untrusted_sigs'] = untrusted_sigs - signature_status['unknown_method_sigs'] = unknown_method_sigs + signature_status['unknown_signing_schemes'] = unknown_signing_schemes return signature_status diff --git a/tuf/unittest_toolbox.py b/tuf/unittest_toolbox.py index 67ef728630..b9ced97956 100755 --- a/tuf/unittest_toolbox.py +++ b/tuf/unittest_toolbox.py @@ -85,7 +85,7 @@ def tearDown(self): try: # OSError will occur if the directory was already removed. 
cleanup_function() - + except OSError: pass @@ -93,11 +93,15 @@ def tearDown(self): def make_temp_directory(self, directory=None): """Creates and returns an absolute path of a directory.""" + prefix = self.__class__.__name__+'_' temp_directory = tempfile.mkdtemp(prefix=prefix, dir=directory) + def _destroy_temp_directory(): shutil.rmtree(temp_directory) + self._cleanup.append(_destroy_temp_directory) + return temp_directory
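
The docstrings added above for dump_signable_metadata() and append_signature() describe a three-step workflow for signing metadata with an external tool: dump the canonical "signed" portion, sign it out of band, then append the resulting signature. The short sketch below illustrates that call sequence; the metadata path is a hypothetical example, an in-memory ed25519 key stands in for the external signer purely to keep the sketch self-contained, and whether securesystemslib.keys.create_signature() expects the canonical string or encoded bytes may vary by securesystemslib version.

    import securesystemslib.keys
    import tuf.repository_tool as repo_tool

    # Hypothetical path to a metadata file that has already been written.
    metadata_path = 'repository/metadata/root.json'

    # (1) Dump the canonicalized "signed" portion of the metadata.
    signable_content = repo_tool.dump_signable_metadata(metadata_path)

    # (2) Sign the dumped content.  A real deployment would hand this string
    # to an external signing tool or HSM; the in-memory key generated here is
    # only illustrative and would not be trusted by the repository's root
    # metadata.
    ed25519_key = securesystemslib.keys.generate_ed25519_key()
    signature = securesystemslib.keys.create_signature(ed25519_key,
        signable_content)

    # (3) Append the signature object (conformant to
    # securesystemslib.formats.SIGNATURE_SCHEMA) to the metadata file.
    repo_tool.append_signature(signature, metadata_path)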