From 460cf54ece7eba418c50407ee35544c5d63588a7 Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Fri, 8 Apr 2022 08:52:57 -0700 Subject: [PATCH 1/6] Overhaul structs, refactor JSON parser and saver Signed-off-by: Ian Ling --- README.md | 3 +- builder/build.go | 27 +- builder/build_test.go | 337 +++++---- builder/builder2v1/build_creation_info.go | 42 +- .../builder2v1/build_creation_info_test.go | 112 +-- builder/builder2v1/build_file.go | 19 +- builder/builder2v1/build_file_test.go | 33 +- builder/builder2v1/build_package.go | 4 +- builder/builder2v1/build_package_test.go | 47 +- builder/builder2v2/build_creation_info.go | 39 +- .../builder2v2/build_creation_info_test.go | 112 +-- builder/builder2v2/build_file.go | 14 +- builder/builder2v2/build_file_test.go | 12 +- builder/builder2v2/build_package.go | 4 +- builder/builder2v2/build_package_test.go | 26 +- examples/1-load/example_load.go | 22 +- examples/10-jsonloader/example_json_loader.go | 12 +- examples/4-search/example_search.go | 2 +- examples/5-report/example_report.go | 20 +- examples/6-licensediff/example_licensediff.go | 40 +- examples/7-rdfloader/exampleRDFLoader.go | 8 +- examples/8-jsontotv/examplejsontotv.go | 4 +- examples/9-tvtojson/exampletvtojson.go | 4 +- .../json/SPDXJSONExample-v2.2.spdx.json | 561 +++++++-------- .../sample-docs/tv/SPDXTagExample-v2.2.spdx | 657 +++++++++--------- .../xls/SPDXSpreadsheetExample-v2.2.xlsx | Bin 0 -> 14949 bytes .../xml/SPDXXMLExample-v2.2.spdx.xml | 443 ++++++++++++ .../yaml/SPDXYAMLExample-2.2.spdx.yaml | 390 +++++++++++ go.mod | 5 +- go.sum | 4 + idsearcher/idsearcher.go | 20 +- idsearcher/idsearcher_test.go | 318 +++++---- json/json_test.go | 449 ++++++++++++ json/parser.go | 29 + json/writer.go | 25 + jsonloader/jsonloader.go | 24 - jsonloader/jsonloader_test.go | 70 -- .../parser2v2/jsonfiles/jsonloadertest.json | 6 - .../jsonfiles/otherlicensestest.json | 21 - jsonloader/parser2v2/jsonfiles/test.json | 278 -------- jsonloader/parser2v2/parse_annotations.go | 48 -- .../parser2v2/parse_annotations_test.go | 156 ----- jsonloader/parser2v2/parse_creation_info.go | 122 ---- .../parser2v2/parse_creation_info_test.go | 227 ------ jsonloader/parser2v2/parse_files.go | 122 ---- jsonloader/parser2v2/parse_files_test.go | 210 ------ jsonloader/parser2v2/parse_other_license.go | 45 -- .../parser2v2/parse_other_license_test.go | 113 --- jsonloader/parser2v2/parse_package.go | 211 ------ jsonloader/parser2v2/parse_package_test.go | 376 ---------- jsonloader/parser2v2/parse_relationship.go | 51 -- .../parser2v2/parse_relationship_test.go | 145 ---- jsonloader/parser2v2/parse_reviews.go | 45 -- jsonloader/parser2v2/parse_reviews_test.go | 86 --- jsonloader/parser2v2/parse_snippets.go | 89 --- jsonloader/parser2v2/parse_snippets_test.go | 123 ---- jsonloader/parser2v2/parser.go | 132 ---- jsonloader/parser2v2/parser_test.go | 419 ----------- jsonloader/parser2v2/types.go | 9 - jsonloader/parser2v2/util.go | 115 --- jsonloader/parser2v2/util_test.go | 156 ----- jsonsaver/jsonsaver.go | 25 - jsonsaver/jsonsaver_test.go | 228 ------ jsonsaver/saver2v2/save_annotations.go | 27 - jsonsaver/saver2v2/save_annotations_test.go | 95 --- jsonsaver/saver2v2/save_creation_info.go | 77 -- jsonsaver/saver2v2/save_creation_info_test.go | 90 --- jsonsaver/saver2v2/save_document.go | 110 --- jsonsaver/saver2v2/save_document_test.go | 436 ------------ jsonsaver/saver2v2/save_files.go | 89 --- jsonsaver/saver2v2/save_files_test.go | 160 ----- jsonsaver/saver2v2/save_other_license.go | 35 - 
jsonsaver/saver2v2/save_other_license_test.go | 85 --- jsonsaver/saver2v2/save_package.go | 158 ----- jsonsaver/saver2v2/save_package_test.go | 233 ------- jsonsaver/saver2v2/save_relationships.go | 32 - jsonsaver/saver2v2/save_relationships_test.go | 94 --- jsonsaver/saver2v2/save_reviews.go | 33 - jsonsaver/saver2v2/save_reviews_test.go | 72 -- jsonsaver/saver2v2/save_snippets.go | 87 --- jsonsaver/saver2v2/save_snippets_test.go | 127 ---- licensediff/licensediff_test.go | 381 +++++----- rdfloader/parser2v2/license_utils.go | 5 +- rdfloader/parser2v2/parse_annotation.go | 4 +- rdfloader/parser2v2/parse_annotation_test.go | 10 +- rdfloader/parser2v2/parse_creation_info.go | 18 +- .../parser2v2/parse_creation_info_test.go | 14 +- rdfloader/parser2v2/parse_file.go | 17 +- rdfloader/parser2v2/parse_file_test.go | 52 +- rdfloader/parser2v2/parse_package.go | 65 +- rdfloader/parser2v2/parse_package_test.go | 38 +- rdfloader/parser2v2/parse_snippet_info.go | 16 +- rdfloader/parser2v2/parse_spdx_document.go | 20 +- rdfloader/parser2v2/parser.go | 23 +- rdfloader/parser2v2/parser_test.go | 4 +- reporter/reporter_test.go | 104 +-- spdx/annotation.go | 69 +- spdx/checksum.go | 26 +- spdx/creation_info.go | 151 ++-- spdx/document.go | 114 ++- spdx/file.go | 72 +- spdx/identifier.go | 62 ++ spdx/other_license.go | 22 +- spdx/package.go | 246 ++++--- spdx/relationship.go | 16 +- spdx/snippet.go | 73 +- spdxlib/described_elements.go | 8 +- spdxlib/described_elements_test.go | 180 +++-- spdxlib/documents.go | 4 +- spdxlib/documents_test.go | 76 +- spdxlib/relationships_test.go | 42 +- tvloader/parser2v1/parse_annotation.go | 4 +- tvloader/parser2v1/parse_annotation_test.go | 7 +- tvloader/parser2v1/parse_creation_info.go | 46 +- .../parser2v1/parse_creation_info_test.go | 128 +--- tvloader/parser2v1/parse_file.go | 30 +- tvloader/parser2v1/parse_file_test.go | 260 +++---- .../parser2v1/parse_other_license_test.go | 58 +- tvloader/parser2v1/parse_package.go | 48 +- tvloader/parser2v1/parse_package_test.go | 185 ++--- tvloader/parser2v1/parse_review_test.go | 84 ++- tvloader/parser2v1/parse_snippet.go | 18 +- tvloader/parser2v1/parse_snippet_test.go | 152 ++-- tvloader/parser2v1/parser.go | 42 +- tvloader/parser2v1/parser_test.go | 24 +- tvloader/parser2v2/parse_annotation.go | 4 +- tvloader/parser2v2/parse_annotation_test.go | 8 +- tvloader/parser2v2/parse_creation_info.go | 46 +- .../parser2v2/parse_creation_info_test.go | 128 +--- tvloader/parser2v2/parse_file.go | 24 +- tvloader/parser2v2/parse_file_test.go | 238 +++---- .../parser2v2/parse_other_license_test.go | 58 +- tvloader/parser2v2/parse_package.go | 44 +- tvloader/parser2v2/parse_package_test.go | 160 ++--- tvloader/parser2v2/parse_review_test.go | 78 +-- tvloader/parser2v2/parse_snippet.go | 18 +- tvloader/parser2v2/parse_snippet_test.go | 152 ++-- tvloader/parser2v2/parser.go | 40 +- tvloader/parser2v2/parser_test.go | 24 +- tvsaver/saver2v1/save_annotation.go | 4 +- tvsaver/saver2v1/save_annotation_test.go | 9 +- tvsaver/saver2v1/save_creation_info.go | 43 +- tvsaver/saver2v1/save_creation_info_test.go | 86 +-- tvsaver/saver2v1/save_document.go | 54 +- tvsaver/saver2v1/save_document_test.go | 70 +- tvsaver/saver2v1/save_file.go | 17 +- tvsaver/saver2v1/save_file_test.go | 56 +- tvsaver/saver2v1/save_package.go | 61 +- tvsaver/saver2v1/save_package_test.go | 151 ++-- tvsaver/saver2v1/save_snippet.go | 15 +- tvsaver/saver2v1/save_snippet_test.go | 46 +- tvsaver/saver2v2/save_annotation.go | 4 +- tvsaver/saver2v2/save_annotation_test.go | 9 +- 
tvsaver/saver2v2/save_creation_info.go | 43 +- tvsaver/saver2v2/save_creation_info_test.go | 86 +-- tvsaver/saver2v2/save_document.go | 54 +- tvsaver/saver2v2/save_document_test.go | 104 ++- tvsaver/saver2v2/save_file.go | 18 +- tvsaver/saver2v2/save_file_test.go | 69 +- tvsaver/saver2v2/save_package.go | 61 +- tvsaver/saver2v2/save_package_test.go | 114 +-- tvsaver/saver2v2/save_snippet.go | 15 +- tvsaver/saver2v2/save_snippet_test.go | 46 +- utils/verification.go | 34 +- utils/verification_test.go | 154 ++-- 165 files changed, 5264 insertions(+), 9735 deletions(-) create mode 100644 examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx create mode 100644 examples/sample-docs/xml/SPDXXMLExample-v2.2.spdx.xml create mode 100644 examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml create mode 100644 json/json_test.go create mode 100644 json/parser.go create mode 100644 json/writer.go delete mode 100644 jsonloader/jsonloader.go delete mode 100644 jsonloader/jsonloader_test.go delete mode 100644 jsonloader/parser2v2/jsonfiles/jsonloadertest.json delete mode 100644 jsonloader/parser2v2/jsonfiles/otherlicensestest.json delete mode 100644 jsonloader/parser2v2/jsonfiles/test.json delete mode 100644 jsonloader/parser2v2/parse_annotations.go delete mode 100644 jsonloader/parser2v2/parse_annotations_test.go delete mode 100644 jsonloader/parser2v2/parse_creation_info.go delete mode 100644 jsonloader/parser2v2/parse_creation_info_test.go delete mode 100644 jsonloader/parser2v2/parse_files.go delete mode 100644 jsonloader/parser2v2/parse_files_test.go delete mode 100644 jsonloader/parser2v2/parse_other_license.go delete mode 100644 jsonloader/parser2v2/parse_other_license_test.go delete mode 100644 jsonloader/parser2v2/parse_package.go delete mode 100644 jsonloader/parser2v2/parse_package_test.go delete mode 100644 jsonloader/parser2v2/parse_relationship.go delete mode 100644 jsonloader/parser2v2/parse_relationship_test.go delete mode 100644 jsonloader/parser2v2/parse_reviews.go delete mode 100644 jsonloader/parser2v2/parse_reviews_test.go delete mode 100644 jsonloader/parser2v2/parse_snippets.go delete mode 100644 jsonloader/parser2v2/parse_snippets_test.go delete mode 100644 jsonloader/parser2v2/parser.go delete mode 100644 jsonloader/parser2v2/parser_test.go delete mode 100644 jsonloader/parser2v2/types.go delete mode 100644 jsonloader/parser2v2/util.go delete mode 100644 jsonloader/parser2v2/util_test.go delete mode 100644 jsonsaver/jsonsaver.go delete mode 100644 jsonsaver/jsonsaver_test.go delete mode 100644 jsonsaver/saver2v2/save_annotations.go delete mode 100644 jsonsaver/saver2v2/save_annotations_test.go delete mode 100644 jsonsaver/saver2v2/save_creation_info.go delete mode 100644 jsonsaver/saver2v2/save_creation_info_test.go delete mode 100644 jsonsaver/saver2v2/save_document.go delete mode 100644 jsonsaver/saver2v2/save_document_test.go delete mode 100644 jsonsaver/saver2v2/save_files.go delete mode 100644 jsonsaver/saver2v2/save_files_test.go delete mode 100644 jsonsaver/saver2v2/save_other_license.go delete mode 100644 jsonsaver/saver2v2/save_other_license_test.go delete mode 100644 jsonsaver/saver2v2/save_package.go delete mode 100644 jsonsaver/saver2v2/save_package_test.go delete mode 100644 jsonsaver/saver2v2/save_relationships.go delete mode 100644 jsonsaver/saver2v2/save_relationships_test.go delete mode 100644 jsonsaver/saver2v2/save_reviews.go delete mode 100644 jsonsaver/saver2v2/save_reviews_test.go delete mode 100644 jsonsaver/saver2v2/save_snippets.go delete mode 100644 
jsonsaver/saver2v2/save_snippets_test.go diff --git a/README.md b/README.md index f0c39bda..3dd0e732 100644 --- a/README.md +++ b/README.md @@ -26,8 +26,7 @@ tools-golang provides the following packages: * *tvloader* - tag-value document loader * *tvsaver* - tag-value document saver * *rdfloader* - RDF document loader -* *jsonloader* - JSON document loader -* *jsonsaver* - JSON document saver +* *json* - JSON document parser and writer * *builder* - builds "empty" SPDX document (with hashes) for directory contents * *idsearcher* - searches for [SPDX short-form IDs](https://spdx.org/ids/) and builds SPDX document * *licensediff* - compares concluded licenses between files in two packages diff --git a/builder/build.go b/builder/build.go index f70f9112..235e91d0 100644 --- a/builder/build.go +++ b/builder/build.go @@ -5,6 +5,7 @@ package builder import ( + "fmt" "github.com/spdx/tools-golang/builder/builder2v1" "github.com/spdx/tools-golang/builder/builder2v2" "github.com/spdx/tools-golang/spdx" @@ -55,7 +56,7 @@ func Build2_1(packageName string, dirRoot string, config *Config2_1) (*spdx.Docu return nil, err } - ci, err := builder2v1.BuildCreationInfoSection2_1(packageName, pkg.PackageVerificationCode, config.NamespacePrefix, config.CreatorType, config.Creator, config.TestValues) + ci, err := builder2v1.BuildCreationInfoSection2_1(config.CreatorType, config.Creator, config.TestValues) if err != nil { return nil, err } @@ -66,9 +67,14 @@ func Build2_1(packageName string, dirRoot string, config *Config2_1) (*spdx.Docu } doc := &spdx.Document2_1{ - CreationInfo: ci, - Packages: map[spdx.ElementID]*spdx.Package2_1{pkg.PackageSPDXIdentifier: pkg}, - Relationships: []*spdx.Relationship2_1{rln}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + DocumentName: packageName, + DocumentNamespace: fmt.Sprintf("%s%s-%s", config.NamespacePrefix, packageName, pkg.PackageVerificationCode), + CreationInfo: ci, + Packages: []*spdx.Package2_1{pkg}, + Relationships: []*spdx.Relationship2_1{rln}, } return doc, nil @@ -119,7 +125,7 @@ func Build2_2(packageName string, dirRoot string, config *Config2_2) (*spdx.Docu return nil, err } - ci, err := builder2v2.BuildCreationInfoSection2_2(packageName, pkg.PackageVerificationCode, config.NamespacePrefix, config.CreatorType, config.Creator, config.TestValues) + ci, err := builder2v2.BuildCreationInfoSection2_2(config.CreatorType, config.Creator, config.TestValues) if err != nil { return nil, err } @@ -130,9 +136,14 @@ func Build2_2(packageName string, dirRoot string, config *Config2_2) (*spdx.Docu } doc := &spdx.Document2_2{ - CreationInfo: ci, - Packages: map[spdx.ElementID]*spdx.Package2_2{pkg.PackageSPDXIdentifier: pkg}, - Relationships: []*spdx.Relationship2_2{rln}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + DocumentName: packageName, + DocumentNamespace: fmt.Sprintf("%s%s-%s", config.NamespacePrefix, packageName, pkg.PackageVerificationCode), + CreationInfo: ci, + Packages: []*spdx.Package2_2{pkg}, + Relationships: []*spdx.Relationship2_2{rln}, } return doc, nil diff --git a/builder/build_test.go b/builder/build_test.go index 62be42bc..be4fd42b 100644 --- a/builder/build_test.go +++ b/builder/build_test.go @@ -21,7 +21,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { } config.TestValues["Created"] = "2018-10-19T04:38:00Z" - wantVerificationCode := "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b" + wantVerificationCode := spdx.PackageVerificationCode{Value: 
"fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b"} doc, err := Build2_1("project1", dirRoot, config) if err != nil { @@ -35,33 +35,30 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if doc.CreationInfo == nil { t.Fatalf("expected non-nil CreationInfo section, got nil") } - if doc.CreationInfo.SPDXVersion != "SPDX-2.1" { - t.Errorf("expected %s, got %s", "SPDX-2.1", doc.CreationInfo.SPDXVersion) + if doc.SPDXVersion != "SPDX-2.1" { + t.Errorf("expected %s, got %s", "SPDX-2.1", doc.SPDXVersion) } - if doc.CreationInfo.DataLicense != "CC0-1.0" { - t.Errorf("expected %s, got %s", "CC0-1.0", doc.CreationInfo.DataLicense) + if doc.DataLicense != "CC0-1.0" { + t.Errorf("expected %s, got %s", "CC0-1.0", doc.DataLicense) } - if doc.CreationInfo.SPDXIdentifier != spdx.ElementID("DOCUMENT") { - t.Errorf("expected %s, got %v", "DOCUMENT", doc.CreationInfo.SPDXIdentifier) + if doc.SPDXIdentifier != spdx.ElementID("DOCUMENT") { + t.Errorf("expected %s, got %v", "DOCUMENT", doc.SPDXIdentifier) } - if doc.CreationInfo.DocumentName != "project1" { - t.Errorf("expected %s, got %s", "project1", doc.CreationInfo.DocumentName) + if doc.DocumentName != "project1" { + t.Errorf("expected %s, got %s", "project1", doc.DocumentName) } wantNamespace := fmt.Sprintf("https://github.com/swinslow/spdx-docs/spdx-go/testdata-project1-%s", wantVerificationCode) - if doc.CreationInfo.DocumentNamespace != wantNamespace { - t.Errorf("expected %s, got %s", wantNamespace, doc.CreationInfo.DocumentNamespace) + if doc.DocumentNamespace != wantNamespace { + t.Errorf("expected %s, got %s", wantNamespace, doc.DocumentNamespace) } - if len(doc.CreationInfo.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(doc.CreationInfo.CreatorPersons)) + if len(doc.CreationInfo.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(doc.CreationInfo.Creators)) } - if doc.CreationInfo.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", doc.CreationInfo.CreatorPersons[0]) + if doc.CreationInfo.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %+v", "John Doe", doc.CreationInfo.Creators[1]) } - if len(doc.CreationInfo.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(doc.CreationInfo.CreatorTools)) - } - if doc.CreationInfo.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", doc.CreationInfo.CreatorTools[0]) + if doc.CreationInfo.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %+v", "github.com/spdx/tools-golang/builder", doc.CreationInfo.Creators[0]) } if doc.CreationInfo.Created != "2018-10-19T04:38:00Z" { t.Errorf("expected %s, got %s", "2018-10-19T04:38:00Z", doc.CreationInfo.Created) @@ -74,7 +71,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if len(doc.Packages) != 1 { t.Fatalf("expected %d, got %d", 1, len(doc.Packages)) } - pkg := doc.Packages[spdx.ElementID("Package-project1")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -90,7 +87,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if pkg.FilesAnalyzed != true { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } - if pkg.PackageVerificationCode != wantVerificationCode { + if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } if pkg.PackageLicenseConcluded != "NOASSERTION" { @@ -119,7 +116,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { 
// emptyfile, file1, file3, folder/file4, lastfile // check emptyfile.testdata.txt - fileEmpty := pkg.Files[spdx.ElementID("File0")] + fileEmpty := pkg.Files[0] if fileEmpty == nil { t.Fatalf("expected non-nil file, got nil") } @@ -129,23 +126,32 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if fileEmpty.FileSPDXIdentifier != spdx.ElementID("File0") { t.Errorf("expected %v, got %v", "File0", fileEmpty.FileSPDXIdentifier) } - if fileEmpty.FileChecksumSHA1 != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { - t.Errorf("expected %v, got %v", "da39a3ee5e6b4b0d3255bfef95601890afd80709", fileEmpty.FileChecksumSHA1) - } - if fileEmpty.FileChecksumSHA256 != "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" { - t.Errorf("expected %v, got %v", "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", fileEmpty.FileChecksumSHA256) - } - if fileEmpty.FileChecksumMD5 != "d41d8cd98f00b204e9800998ecf8427e" { - t.Errorf("expected %v, got %v", "d41d8cd98f00b204e9800998ecf8427e", fileEmpty.FileChecksumMD5) + + for _, checksum := range fileEmpty.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { + t.Errorf("expected %v, got %v", "da39a3ee5e6b4b0d3255bfef95601890afd80709", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" { + t.Errorf("expected %v, got %v", "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "d41d8cd98f00b204e9800998ecf8427e" { + t.Errorf("expected %v, got %v", "d41d8cd98f00b204e9800998ecf8427e", checksum.Value) + } + } } + if fileEmpty.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseConcluded) } - if len(fileEmpty.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFile)) + if len(fileEmpty.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFiles)) } else { - if fileEmpty.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFile[0]) + if fileEmpty.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFiles[0]) } } if fileEmpty.FileCopyrightText != "NOASSERTION" { @@ -153,7 +159,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { } // check file1.testdata.txt - file1 := pkg.Files[spdx.ElementID("File1")] + file1 := pkg.Files[1] if file1 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -163,23 +169,31 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if file1.FileSPDXIdentifier != spdx.ElementID("File1") { t.Errorf("expected %v, got %v", "File1", file1.FileSPDXIdentifier) } - if file1.FileChecksumSHA1 != "024f870eb6323f532515f7a09d5646a97083b819" { - t.Errorf("expected %v, got %v", "024f870eb6323f532515f7a09d5646a97083b819", file1.FileChecksumSHA1) - } - if file1.FileChecksumSHA256 != "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf" { - t.Errorf("expected %v, got %v", "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf", file1.FileChecksumSHA256) - } - if file1.FileChecksumMD5 != "37c8208479dfe42d2bb29debd6e32d4a" { - t.Errorf("expected %v, got %v", "37c8208479dfe42d2bb29debd6e32d4a", file1.FileChecksumMD5) + + for _, checksum := range file1.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != 
"024f870eb6323f532515f7a09d5646a97083b819" { + t.Errorf("expected %v, got %v", "024f870eb6323f532515f7a09d5646a97083b819", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf" { + t.Errorf("expected %v, got %v", "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "37c8208479dfe42d2bb29debd6e32d4a" { + t.Errorf("expected %v, got %v", "37c8208479dfe42d2bb29debd6e32d4a", checksum.Value) + } + } } if file1.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseConcluded) } - if len(file1.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFile)) + if len(file1.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFiles)) } else { - if file1.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFile[0]) + if file1.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFiles[0]) } } if file1.FileCopyrightText != "NOASSERTION" { @@ -187,7 +201,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { } // check file3.testdata.txt - file3 := pkg.Files[spdx.ElementID("File2")] + file3 := pkg.Files[2] if file3 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -197,23 +211,31 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if file3.FileSPDXIdentifier != spdx.ElementID("File2") { t.Errorf("expected %v, got %v", "File2", file3.FileSPDXIdentifier) } - if file3.FileChecksumSHA1 != "a46114b70e163614f01c64adf44cdd438f158fce" { - t.Errorf("expected %v, got %v", "a46114b70e163614f01c64adf44cdd438f158fce", file3.FileChecksumSHA1) - } - if file3.FileChecksumSHA256 != "9fc181b9892720a15df1a1e561860318db40621bd4040ccdf18e110eb01d04b4" { - t.Errorf("expected %v, got %v", "9fc181b9892720a15df1a1e561860318db40621bd4040ccdf18e110eb01d04b4", file3.FileChecksumSHA256) - } - if file3.FileChecksumMD5 != "3e02d3ab9c58eec6911dbba37570934f" { - t.Errorf("expected %v, got %v", "3e02d3ab9c58eec6911dbba37570934f", file3.FileChecksumMD5) + + for _, checksum := range file3.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "a46114b70e163614f01c64adf44cdd438f158fce" { + t.Errorf("expected %v, got %v", "a46114b70e163614f01c64adf44cdd438f158fce", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "9fc181b9892720a15df1a1e561860318db40621bd4040ccdf18e110eb01d04b4" { + t.Errorf("expected %v, got %v", "9fc181b9892720a15df1a1e561860318db40621bd4040ccdf18e110eb01d04b4", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "3e02d3ab9c58eec6911dbba37570934f" { + t.Errorf("expected %v, got %v", "3e02d3ab9c58eec6911dbba37570934f", checksum.Value) + } + } } if file3.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseConcluded) } - if len(file3.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file3.LicenseInfoInFile)) + if len(file3.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file3.LicenseInfoInFiles)) } else { - if file3.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseInfoInFile[0]) + if file3.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseInfoInFiles[0]) } } if file3.FileCopyrightText != "NOASSERTION" { @@ -221,7 +243,7 @@ func 
TestBuild2_1CreatesDocument(t *testing.T) { } // check folder1/file4.testdata.txt - file4 := pkg.Files[spdx.ElementID("File3")] + file4 := pkg.Files[3] if file4 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -231,23 +253,31 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if file4.FileSPDXIdentifier != spdx.ElementID("File3") { t.Errorf("expected %v, got %v", "File3", file4.FileSPDXIdentifier) } - if file4.FileChecksumSHA1 != "e623d7d7d782a7c8323c4d436acee4afab34320f" { - t.Errorf("expected %v, got %v", "e623d7d7d782a7c8323c4d436acee4afab34320f", file4.FileChecksumSHA1) - } - if file4.FileChecksumSHA256 != "574fa42c5e0806c0f8906a44884166540206f021527729407cd5326838629c59" { - t.Errorf("expected %v, got %v", "574fa42c5e0806c0f8906a44884166540206f021527729407cd5326838629c59", file4.FileChecksumSHA256) - } - if file4.FileChecksumMD5 != "96e6a25d35df5b1c477710ef4d0c7210" { - t.Errorf("expected %v, got %v", "96e6a25d35df5b1c477710ef4d0c7210", file4.FileChecksumMD5) + + for _, checksum := range file4.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "e623d7d7d782a7c8323c4d436acee4afab34320f" { + t.Errorf("expected %v, got %v", "e623d7d7d782a7c8323c4d436acee4afab34320f", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "574fa42c5e0806c0f8906a44884166540206f021527729407cd5326838629c59" { + t.Errorf("expected %v, got %v", "574fa42c5e0806c0f8906a44884166540206f021527729407cd5326838629c59", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "96e6a25d35df5b1c477710ef4d0c7210" { + t.Errorf("expected %v, got %v", "96e6a25d35df5b1c477710ef4d0c7210", checksum.Value) + } + } } if file4.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseConcluded) } - if len(file4.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file4.LicenseInfoInFile)) + if len(file4.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file4.LicenseInfoInFiles)) } else { - if file4.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseInfoInFile[0]) + if file4.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseInfoInFiles[0]) } } if file4.FileCopyrightText != "NOASSERTION" { @@ -255,7 +285,7 @@ func TestBuild2_1CreatesDocument(t *testing.T) { } // check lastfile.testdata.txt - lastfile := pkg.Files[spdx.ElementID("File4")] + lastfile := pkg.Files[4] if lastfile == nil { t.Fatalf("expected non-nil file, got nil") } @@ -265,23 +295,31 @@ func TestBuild2_1CreatesDocument(t *testing.T) { if lastfile.FileSPDXIdentifier != spdx.ElementID("File4") { t.Errorf("expected %v, got %v", "File4", lastfile.FileSPDXIdentifier) } - if lastfile.FileChecksumSHA1 != "26d6221d682d9ba59116f9753a701f34271c8ce1" { - t.Errorf("expected %v, got %v", "26d6221d682d9ba59116f9753a701f34271c8ce1", lastfile.FileChecksumSHA1) - } - if lastfile.FileChecksumSHA256 != "0a4bdaf990e9b330ff72022dd78110ae98b60e08337cf2105b89856373416805" { - t.Errorf("expected %v, got %v", "0a4bdaf990e9b330ff72022dd78110ae98b60e08337cf2105b89856373416805", lastfile.FileChecksumSHA256) - } - if lastfile.FileChecksumMD5 != "f60baa793870d9085461ad6bbab50b7f" { - t.Errorf("expected %v, got %v", "f60baa793870d9085461ad6bbab50b7f", lastfile.FileChecksumMD5) + + for _, checksum := range lastfile.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "26d6221d682d9ba59116f9753a701f34271c8ce1" { + t.Errorf("expected %v, got %v", 
"26d6221d682d9ba59116f9753a701f34271c8ce1", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "0a4bdaf990e9b330ff72022dd78110ae98b60e08337cf2105b89856373416805" { + t.Errorf("expected %v, got %v", "0a4bdaf990e9b330ff72022dd78110ae98b60e08337cf2105b89856373416805", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "f60baa793870d9085461ad6bbab50b7f" { + t.Errorf("expected %v, got %v", "f60baa793870d9085461ad6bbab50b7f", checksum.Value) + } + } } if lastfile.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseConcluded) } - if len(lastfile.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(lastfile.LicenseInfoInFile)) + if len(lastfile.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(lastfile.LicenseInfoInFiles)) } else { - if lastfile.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseInfoInFile[0]) + if lastfile.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseInfoInFiles[0]) } } if lastfile.FileCopyrightText != "NOASSERTION" { @@ -343,7 +381,7 @@ func TestBuild2_1CanIgnoreFiles(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - pkg := doc.Packages[spdx.ElementID("Package-project1")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -352,31 +390,31 @@ func TestBuild2_1CanIgnoreFiles(t *testing.T) { } want := "./dontscan.txt" - got := pkg.Files[spdx.ElementID("File0")].FileName + got := pkg.Files[0].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep/keep.txt" - got = pkg.Files[spdx.ElementID("File1")].FileName + got = pkg.Files[1].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep.txt" - got = pkg.Files[spdx.ElementID("File2")].FileName + got = pkg.Files[2].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/dontscan.txt" - got = pkg.Files[spdx.ElementID("File3")].FileName + got = pkg.Files[3].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/keep.txt" - got = pkg.Files[spdx.ElementID("File4")].FileName + got = pkg.Files[4].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } @@ -394,7 +432,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { } config.TestValues["Created"] = "2018-10-19T04:38:00Z" - wantVerificationCode := "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b" + wantVerificationCode := spdx.PackageVerificationCode{Value: "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b"} doc, err := Build2_2("project1", dirRoot, config) if err != nil { @@ -408,33 +446,30 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if doc.CreationInfo == nil { t.Fatalf("expected non-nil CreationInfo section, got nil") } - if doc.CreationInfo.SPDXVersion != "SPDX-2.2" { - t.Errorf("expected %s, got %s", "SPDX-2.2", doc.CreationInfo.SPDXVersion) + if doc.SPDXVersion != "SPDX-2.2" { + t.Errorf("expected %s, got %s", "SPDX-2.2", doc.SPDXVersion) } - if doc.CreationInfo.DataLicense != "CC0-1.0" { - t.Errorf("expected %s, got %s", "CC0-1.0", doc.CreationInfo.DataLicense) + if doc.DataLicense != "CC0-1.0" { + t.Errorf("expected %s, got %s", "CC0-1.0", doc.DataLicense) } - if doc.CreationInfo.SPDXIdentifier != spdx.ElementID("DOCUMENT") { - t.Errorf("expected %s, got %v", "DOCUMENT", doc.CreationInfo.SPDXIdentifier) + if doc.SPDXIdentifier != spdx.ElementID("DOCUMENT") { + 
t.Errorf("expected %s, got %v", "DOCUMENT", doc.SPDXIdentifier) } - if doc.CreationInfo.DocumentName != "project1" { - t.Errorf("expected %s, got %s", "project1", doc.CreationInfo.DocumentName) + if doc.DocumentName != "project1" { + t.Errorf("expected %s, got %s", "project1", doc.DocumentName) } wantNamespace := fmt.Sprintf("https://github.com/swinslow/spdx-docs/spdx-go/testdata-project1-%s", wantVerificationCode) - if doc.CreationInfo.DocumentNamespace != wantNamespace { - t.Errorf("expected %s, got %s", wantNamespace, doc.CreationInfo.DocumentNamespace) - } - if len(doc.CreationInfo.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(doc.CreationInfo.CreatorPersons)) + if doc.DocumentNamespace != wantNamespace { + t.Errorf("expected %s, got %s", wantNamespace, doc.DocumentNamespace) } - if doc.CreationInfo.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", doc.CreationInfo.CreatorPersons[0]) + if len(doc.CreationInfo.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(doc.CreationInfo.Creators)) } - if len(doc.CreationInfo.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(doc.CreationInfo.CreatorTools)) + if doc.CreationInfo.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %+v", "John Doe", doc.CreationInfo.Creators[1]) } - if doc.CreationInfo.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", doc.CreationInfo.CreatorTools[0]) + if doc.CreationInfo.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %+v", "github.com/spdx/tools-golang/builder", doc.CreationInfo.Creators[0]) } if doc.CreationInfo.Created != "2018-10-19T04:38:00Z" { t.Errorf("expected %s, got %s", "2018-10-19T04:38:00Z", doc.CreationInfo.Created) @@ -447,7 +482,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if len(doc.Packages) != 1 { t.Fatalf("expected %d, got %d", 1, len(doc.Packages)) } - pkg := doc.Packages[spdx.ElementID("Package-project1")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -463,7 +498,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if pkg.FilesAnalyzed != true { t.Errorf("expected %v, got %v", true, pkg.FilesAnalyzed) } - if pkg.PackageVerificationCode != wantVerificationCode { + if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } if pkg.PackageLicenseConcluded != "NOASSERTION" { @@ -492,7 +527,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { // emptyfile, file1, file3, folder/file4, lastfile // check emptyfile.testdata.txt - fileEmpty := pkg.Files[spdx.ElementID("File0")] + fileEmpty := pkg.Files[0] if fileEmpty == nil { t.Fatalf("expected non-nil file, got nil") } @@ -502,7 +537,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if fileEmpty.FileSPDXIdentifier != spdx.ElementID("File0") { t.Errorf("expected %v, got %v", "File0", fileEmpty.FileSPDXIdentifier) } - for _, checksum := range fileEmpty.FileChecksums { + for _, checksum := range fileEmpty.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { @@ -521,11 +556,11 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if fileEmpty.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseConcluded) } - if len(fileEmpty.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, 
len(fileEmpty.LicenseInfoInFile)) + if len(fileEmpty.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFiles)) } else { - if fileEmpty.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFile[0]) + if fileEmpty.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFiles[0]) } } if fileEmpty.FileCopyrightText != "NOASSERTION" { @@ -533,7 +568,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { } // check file1.testdata.txt - file1 := pkg.Files[spdx.ElementID("File1")] + file1 := pkg.Files[1] if file1 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -543,7 +578,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file1.FileSPDXIdentifier != spdx.ElementID("File1") { t.Errorf("expected %v, got %v", "File1", file1.FileSPDXIdentifier) } - for _, checksum := range file1.FileChecksums { + for _, checksum := range file1.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "024f870eb6323f532515f7a09d5646a97083b819" { @@ -562,11 +597,11 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file1.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseConcluded) } - if len(file1.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFile)) + if len(file1.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFiles)) } else { - if file1.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFile[0]) + if file1.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFiles[0]) } } if file1.FileCopyrightText != "NOASSERTION" { @@ -574,7 +609,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { } // check file3.testdata.txt - file3 := pkg.Files[spdx.ElementID("File2")] + file3 := pkg.Files[2] if file3 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -584,7 +619,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file3.FileSPDXIdentifier != spdx.ElementID("File2") { t.Errorf("expected %v, got %v", "File2", file3.FileSPDXIdentifier) } - for _, checksum := range file3.FileChecksums { + for _, checksum := range file3.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "a46114b70e163614f01c64adf44cdd438f158fce" { @@ -603,11 +638,11 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file3.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseConcluded) } - if len(file3.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file3.LicenseInfoInFile)) + if len(file3.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file3.LicenseInfoInFiles)) } else { - if file3.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseInfoInFile[0]) + if file3.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file3.LicenseInfoInFiles[0]) } } if file3.FileCopyrightText != "NOASSERTION" { @@ -615,7 +650,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { } // check folder1/file4.testdata.txt - file4 := pkg.Files[spdx.ElementID("File3")] + file4 := pkg.Files[3] if file4 == nil { t.Fatalf("expected non-nil file, got nil") } @@ -625,7 +660,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file4.FileSPDXIdentifier != 
spdx.ElementID("File3") { t.Errorf("expected %v, got %v", "File3", file4.FileSPDXIdentifier) } - for _, checksum := range file4.FileChecksums { + for _, checksum := range file4.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "e623d7d7d782a7c8323c4d436acee4afab34320f" { @@ -644,11 +679,11 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if file4.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseConcluded) } - if len(file4.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file4.LicenseInfoInFile)) + if len(file4.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file4.LicenseInfoInFiles)) } else { - if file4.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseInfoInFile[0]) + if file4.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file4.LicenseInfoInFiles[0]) } } if file4.FileCopyrightText != "NOASSERTION" { @@ -656,7 +691,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { } // check lastfile.testdata.txt - lastfile := pkg.Files[spdx.ElementID("File4")] + lastfile := pkg.Files[4] if lastfile == nil { t.Fatalf("expected non-nil file, got nil") } @@ -666,7 +701,7 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if lastfile.FileSPDXIdentifier != spdx.ElementID("File4") { t.Errorf("expected %v, got %v", "File4", lastfile.FileSPDXIdentifier) } - for _, checksum := range lastfile.FileChecksums { + for _, checksum := range lastfile.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "26d6221d682d9ba59116f9753a701f34271c8ce1" { @@ -685,11 +720,11 @@ func TestBuild2_2CreatesDocument(t *testing.T) { if lastfile.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseConcluded) } - if len(lastfile.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(lastfile.LicenseInfoInFile)) + if len(lastfile.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(lastfile.LicenseInfoInFiles)) } else { - if lastfile.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseInfoInFile[0]) + if lastfile.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", lastfile.LicenseInfoInFiles[0]) } } if lastfile.FileCopyrightText != "NOASSERTION" { @@ -751,7 +786,7 @@ func TestBuild2_2CanIgnoreFiles(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - pkg := doc.Packages[spdx.ElementID("Package-project1")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -760,31 +795,31 @@ func TestBuild2_2CanIgnoreFiles(t *testing.T) { } want := "./dontscan.txt" - got := pkg.Files[spdx.ElementID("File0")].FileName + got := pkg.Files[0].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep/keep.txt" - got = pkg.Files[spdx.ElementID("File1")].FileName + got = pkg.Files[1].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep.txt" - got = pkg.Files[spdx.ElementID("File2")].FileName + got = pkg.Files[2].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/dontscan.txt" - got = pkg.Files[spdx.ElementID("File3")].FileName + got = pkg.Files[3].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/keep.txt" - got = pkg.Files[spdx.ElementID("File4")].FileName + got 
= pkg.Files[4].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } diff --git a/builder/builder2v1/build_creation_info.go b/builder/builder2v1/build_creation_info.go index 1d8c77f9..c838b92a 100644 --- a/builder/builder2v1/build_creation_info.go +++ b/builder/builder2v1/build_creation_info.go @@ -3,7 +3,6 @@ package builder2v1 import ( - "fmt" "time" "github.com/spdx/tools-golang/spdx" @@ -11,29 +10,21 @@ import ( // BuildCreationInfoSection2_1 creates an SPDX Package (version 2.1), returning that // package or error if any is encountered. Arguments: -// - packageName: name of package / directory -// - code: verification code from Package -// - namespacePrefix: prefix for DocumentNamespace (packageName and code will be added) // - creatorType: one of Person, Organization or Tool // - creator: creator string // - testValues: for testing only; call with nil when using in production -func BuildCreationInfoSection2_1(packageName string, code string, namespacePrefix string, creatorType string, creator string, testValues map[string]string) (*spdx.CreationInfo2_1, error) { +func BuildCreationInfoSection2_1(creatorType string, creator string, testValues map[string]string) (*spdx.CreationInfo2_1, error) { // build creator slices - cPersons := []string{} - cOrganizations := []string{} - cTools := []string{} - // add builder as a tool - cTools = append(cTools, "github.com/spdx/tools-golang/builder") - - switch creatorType { - case "Person": - cPersons = append(cPersons, creator) - case "Organization": - cOrganizations = append(cOrganizations, creator) - case "Tool": - cTools = append(cTools, creator) - default: - cPersons = append(cPersons, creator) + creators := []spdx.Creator{ + // add builder as a tool + { + Creator: "github.com/spdx/tools-golang/builder", + CreatorType: "Tool", + }, + { + Creator: creator, + CreatorType: creatorType, + }, } // use test Created time if passing test values @@ -45,15 +36,8 @@ func BuildCreationInfoSection2_1(packageName string, code string, namespacePrefi } ci := &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: packageName, - DocumentNamespace: fmt.Sprintf("%s%s-%s", namespacePrefix, packageName, code), - CreatorPersons: cPersons, - CreatorOrganizations: cOrganizations, - CreatorTools: cTools, - Created: created, + Creators: creators, + Created: created, } return ci, nil } diff --git a/builder/builder2v1/build_creation_info_test.go b/builder/builder2v1/build_creation_info_test.go index b45f2f0e..9684fdeb 100644 --- a/builder/builder2v1/build_creation_info_test.go +++ b/builder/builder2v1/build_creation_info_test.go @@ -3,24 +3,17 @@ package builder2v1 import ( - "fmt" "testing" - - "github.com/spdx/tools-golang/spdx" ) // ===== CreationInfo section builder tests ===== func TestBuilder2_1CanBuildCreationInfoSection(t *testing.T) { - - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Organization" creator := "Jane Doe LLC" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_1(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_1(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -28,36 +21,14 @@ func TestBuilder2_1CanBuildCreationInfoSection(t *testing.T) { if ci 
== nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if ci.SPDXVersion != "SPDX-2.1" { - t.Errorf("expected %s, got %s", "SPDX-2.1", ci.SPDXVersion) - } - if ci.DataLicense != "CC0-1.0" { - t.Errorf("expected %s, got %s", "CC0-1.0", ci.DataLicense) - } - if ci.SPDXIdentifier != spdx.ElementID("DOCUMENT") { - t.Errorf("expected %s, got %v", "DOCUMENT", ci.SPDXIdentifier) - } - if ci.DocumentName != "project1" { - t.Errorf("expected %s, got %s", "project1", ci.DocumentName) - } - wantNamespace := fmt.Sprintf("https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-project1-%s", verificationCode) - if ci.DocumentNamespace != wantNamespace { - t.Errorf("expected %s, got %s", wantNamespace, ci.DocumentNamespace) - } - if len(ci.CreatorPersons) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorPersons)) - } - if len(ci.CreatorOrganizations) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if ci.CreatorOrganizations[0] != "Jane Doe LLC" { - t.Errorf("expected %s, got %s", "Jane Doe LLC", ci.CreatorOrganizations[0]) + if ci.Creators[1].Creator != "Jane Doe LLC" { + t.Errorf("expected %s, got %s", "Jane Doe LLC", ci.Creators[1].Creator) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) - } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } if ci.Created != "2018-10-20T16:48:00Z" { t.Errorf("expected %s, got %s", "2018-10-20T16:48:00Z", ci.Created) @@ -65,15 +36,12 @@ func TestBuilder2_1CanBuildCreationInfoSection(t *testing.T) { } func TestBuilder2_1CanBuildCreationInfoSectionWithCreatorPerson(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Person" creator := "John Doe" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_1(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_1(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -81,33 +49,24 @@ func TestBuilder2_1CanBuildCreationInfoSectionWithCreatorPerson(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if len(ci.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorPersons)) - } - if ci.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", ci.CreatorPersons[0]) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if ci.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %s", "John Doe", ci.Creators[1].Creator) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) - } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + 
t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } } func TestBuilder2_1CanBuildCreationInfoSectionWithCreatorTool(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Tool" creator := "some-other-tool-2.1" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_1(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_1(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -115,33 +74,24 @@ func TestBuilder2_1CanBuildCreationInfoSectionWithCreatorTool(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if len(ci.CreatorPersons) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorPersons)) - } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorTools) != 2 { - t.Fatalf("expected %d, got %d", 2, len(ci.CreatorTools)) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0]) } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) - } - if ci.CreatorTools[1] != "some-other-tool-2.1" { - t.Errorf("expected %s, got %s", "some-other-tool-2.1", ci.CreatorTools[1]) + if ci.Creators[1].Creator != "some-other-tool-2.1" { + t.Errorf("expected %s, got %s", "some-other-tool-2.1", ci.Creators[1]) } } func TestBuilder2_1CanBuildCreationInfoSectionWithInvalidPerson(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Whatever" creator := "John Doe" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_1(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_1(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -149,19 +99,13 @@ func TestBuilder2_1CanBuildCreationInfoSectionWithInvalidPerson(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if len(ci.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorPersons)) - } - if ci.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", ci.CreatorPersons[0]) - } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) + if ci.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %s", "John Doe", ci.Creators[1]) } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", 
"github.com/spdx/tools-golang/builder", ci.Creators[0]) } } diff --git a/builder/builder2v1/build_file.go b/builder/builder2v1/build_file.go index b47268c7..7e9e52e3 100644 --- a/builder/builder2v1/build_file.go +++ b/builder/builder2v1/build_file.go @@ -32,11 +32,22 @@ func BuildFileSection2_1(filePath string, prefix string, fileNumber int) (*spdx. f := &spdx.File2_1{ FileName: filePath, FileSPDXIdentifier: spdx.ElementID(i), - FileChecksumSHA1: ssha1, - FileChecksumSHA256: ssha256, - FileChecksumMD5: smd5, + Checksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: ssha1, + }, + { + Algorithm: spdx.SHA256, + Value: ssha256, + }, + { + Algorithm: spdx.MD5, + Value: smd5, + }, + }, LicenseConcluded: "NOASSERTION", - LicenseInfoInFile: []string{"NOASSERTION"}, + LicenseInfoInFiles: []string{"NOASSERTION"}, FileCopyrightText: "NOASSERTION", } diff --git a/builder/builder2v1/build_file_test.go b/builder/builder2v1/build_file_test.go index 6ef157a6..cea297b2 100644 --- a/builder/builder2v1/build_file_test.go +++ b/builder/builder2v1/build_file_test.go @@ -28,23 +28,32 @@ func TestBuilder2_1CanBuildFileSection(t *testing.T) { if file1.FileSPDXIdentifier != spdx.ElementID("File17") { t.Errorf("expected %v, got %v", "File17", file1.FileSPDXIdentifier) } - if file1.FileChecksumSHA1 != "024f870eb6323f532515f7a09d5646a97083b819" { - t.Errorf("expected %v, got %v", "024f870eb6323f532515f7a09d5646a97083b819", file1.FileChecksumSHA1) - } - if file1.FileChecksumSHA256 != "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf" { - t.Errorf("expected %v, got %v", "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf", file1.FileChecksumSHA256) - } - if file1.FileChecksumMD5 != "37c8208479dfe42d2bb29debd6e32d4a" { - t.Errorf("expected %v, got %v", "37c8208479dfe42d2bb29debd6e32d4a", file1.FileChecksumMD5) + + for _, checksum := range file1.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "024f870eb6323f532515f7a09d5646a97083b819" { + t.Errorf("expected %v, got %v", "024f870eb6323f532515f7a09d5646a97083b819", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf" { + t.Errorf("expected %v, got %v", "b14e44284ca477b4c0db34b15ca4c454b2947cce7883e22321cf2984050e15bf", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "37c8208479dfe42d2bb29debd6e32d4a" { + t.Errorf("expected %v, got %v", "37c8208479dfe42d2bb29debd6e32d4a", checksum.Value) + } + } } + if file1.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseConcluded) } - if len(file1.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFile)) + if len(file1.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFiles)) } else { - if file1.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFile[0]) + if file1.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFiles[0]) } } if file1.FileCopyrightText != "NOASSERTION" { diff --git a/builder/builder2v1/build_package.go b/builder/builder2v1/build_package.go index e59e6763..f39bb536 100644 --- a/builder/builder2v1/build_package.go +++ b/builder/builder2v1/build_package.go @@ -35,7 +35,7 @@ func BuildPackageSection2_1(packageName string, dirRoot string, pathsIgnore []st dirRootLen = len(dirRoot) } - files := map[spdx.ElementID]*spdx.File2_1{} + 
files := []*spdx.File2_1{} fileNumber := 0 for _, fp := range filepaths { newFilePatch := "" @@ -48,7 +48,7 @@ func BuildPackageSection2_1(packageName string, dirRoot string, pathsIgnore []st if err != nil { return nil, err } - files[newFile.FileSPDXIdentifier] = newFile + files = append(files, newFile) fileNumber++ } // get the verification code diff --git a/builder/builder2v1/build_package_test.go b/builder/builder2v1/build_package_test.go index 7128834d..14586371 100644 --- a/builder/builder2v1/build_package_test.go +++ b/builder/builder2v1/build_package_test.go @@ -13,7 +13,7 @@ func TestBuilder2_1CanBuildPackageSection(t *testing.T) { packageName := "project1" dirRoot := "../../testdata/project1/" - wantVerificationCode := "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b" + wantVerificationCode := spdx.PackageVerificationCode{Value: "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b"} pkg, err := BuildPackageSection2_1(packageName, dirRoot, nil) if err != nil { @@ -38,7 +38,7 @@ func TestBuilder2_1CanBuildPackageSection(t *testing.T) { if pkg.IsFilesAnalyzedTagPresent != true { t.Errorf("expected %v, got %v", true, pkg.IsFilesAnalyzedTagPresent) } - if pkg.PackageVerificationCode != wantVerificationCode { + if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } if pkg.PackageLicenseConcluded != "NOASSERTION" { @@ -61,7 +61,7 @@ func TestBuilder2_1CanBuildPackageSection(t *testing.T) { if len(pkg.Files) != 5 { t.Fatalf("expected %d, got %d", 5, len(pkg.Files)) } - fileEmpty := pkg.Files[spdx.ElementID("File0")] + fileEmpty := pkg.Files[0] if fileEmpty == nil { t.Fatalf("expected non-nil file, got nil") } @@ -71,23 +71,30 @@ func TestBuilder2_1CanBuildPackageSection(t *testing.T) { if fileEmpty.FileSPDXIdentifier != spdx.ElementID("File0") { t.Errorf("expected %v, got %v", "File0", fileEmpty.FileSPDXIdentifier) } - if fileEmpty.FileChecksumSHA1 != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { - t.Errorf("expected %v, got %v", "da39a3ee5e6b4b0d3255bfef95601890afd80709", fileEmpty.FileChecksumSHA1) - } - if fileEmpty.FileChecksumSHA256 != "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" { - t.Errorf("expected %v, got %v", "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", fileEmpty.FileChecksumSHA256) - } - if fileEmpty.FileChecksumMD5 != "d41d8cd98f00b204e9800998ecf8427e" { - t.Errorf("expected %v, got %v", "d41d8cd98f00b204e9800998ecf8427e", fileEmpty.FileChecksumMD5) + for _, checksum := range fileEmpty.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { + t.Errorf("expected %v, got %v", "da39a3ee5e6b4b0d3255bfef95601890afd80709", checksum.Value) + } + case spdx.SHA256: + if checksum.Value != "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" { + t.Errorf("expected %v, got %v", "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", checksum.Value) + } + case spdx.MD5: + if checksum.Value != "d41d8cd98f00b204e9800998ecf8427e" { + t.Errorf("expected %v, got %v", "d41d8cd98f00b204e9800998ecf8427e", checksum.Value) + } + } } if fileEmpty.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseConcluded) } - if len(fileEmpty.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFile)) + if len(fileEmpty.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, 
len(fileEmpty.LicenseInfoInFiles)) } else { - if fileEmpty.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFile[0]) + if fileEmpty.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFiles[0]) } } if fileEmpty.FileCopyrightText != "NOASSERTION" { @@ -118,31 +125,31 @@ func TestBuilder2_1CanIgnoreFiles(t *testing.T) { } want := "./dontscan.txt" - got := pkg.Files[spdx.ElementID("File0")].FileName + got := pkg.Files[0].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep/keep.txt" - got = pkg.Files[spdx.ElementID("File1")].FileName + got = pkg.Files[1].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep.txt" - got = pkg.Files[spdx.ElementID("File2")].FileName + got = pkg.Files[2].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/dontscan.txt" - got = pkg.Files[spdx.ElementID("File3")].FileName + got = pkg.Files[3].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/keep.txt" - got = pkg.Files[spdx.ElementID("File4")].FileName + got = pkg.Files[4].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } diff --git a/builder/builder2v2/build_creation_info.go b/builder/builder2v2/build_creation_info.go index 89e1b3cb..c24d2d31 100644 --- a/builder/builder2v2/build_creation_info.go +++ b/builder/builder2v2/build_creation_info.go @@ -3,7 +3,6 @@ package builder2v2 import ( - "fmt" "time" "github.com/spdx/tools-golang/spdx" @@ -17,23 +16,18 @@ import ( // - creatorType: one of Person, Organization or Tool // - creator: creator string // - testValues: for testing only; call with nil when using in production -func BuildCreationInfoSection2_2(packageName string, code string, namespacePrefix string, creatorType string, creator string, testValues map[string]string) (*spdx.CreationInfo2_2, error) { +func BuildCreationInfoSection2_2(creatorType string, creator string, testValues map[string]string) (*spdx.CreationInfo2_2, error) { // build creator slices - cPersons := []string{} - cOrganizations := []string{} - cTools := []string{} - // add builder as a tool - cTools = append(cTools, "github.com/spdx/tools-golang/builder") - - switch creatorType { - case "Person": - cPersons = append(cPersons, creator) - case "Organization": - cOrganizations = append(cOrganizations, creator) - case "Tool": - cTools = append(cTools, creator) - default: - cPersons = append(cPersons, creator) + creators := []spdx.Creator{ + // add builder as a tool + { + Creator: "github.com/spdx/tools-golang/builder", + CreatorType: "Tool", + }, + { + Creator: creator, + CreatorType: creatorType, + }, } // use test Created time if passing test values @@ -45,15 +39,8 @@ func BuildCreationInfoSection2_2(packageName string, code string, namespacePrefi } ci := &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: packageName, - DocumentNamespace: fmt.Sprintf("%s%s-%s", namespacePrefix, packageName, code), - CreatorPersons: cPersons, - CreatorOrganizations: cOrganizations, - CreatorTools: cTools, - Created: created, + Creators: creators, + Created: created, } return ci, nil } diff --git a/builder/builder2v2/build_creation_info_test.go b/builder/builder2v2/build_creation_info_test.go index 188bd74c..48a06540 100644 --- a/builder/builder2v2/build_creation_info_test.go +++ 
b/builder/builder2v2/build_creation_info_test.go @@ -3,24 +3,17 @@ package builder2v2 import ( - "fmt" "testing" - - "github.com/spdx/tools-golang/spdx" ) // ===== CreationInfo section builder tests ===== func TestBuilder2_2CanBuildCreationInfoSection(t *testing.T) { - - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Organization" creator := "Jane Doe LLC" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_2(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_2(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -28,36 +21,14 @@ func TestBuilder2_2CanBuildCreationInfoSection(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if ci.SPDXVersion != "SPDX-2.2" { - t.Errorf("expected %s, got %s", "SPDX-2.2", ci.SPDXVersion) - } - if ci.DataLicense != "CC0-1.0" { - t.Errorf("expected %s, got %s", "CC0-1.0", ci.DataLicense) - } - if ci.SPDXIdentifier != spdx.ElementID("DOCUMENT") { - t.Errorf("expected %s, got %v", "DOCUMENT", ci.SPDXIdentifier) - } - if ci.DocumentName != "project1" { - t.Errorf("expected %s, got %s", "project1", ci.DocumentName) - } - wantNamespace := fmt.Sprintf("https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-project1-%s", verificationCode) - if ci.DocumentNamespace != wantNamespace { - t.Errorf("expected %s, got %s", wantNamespace, ci.DocumentNamespace) - } - if len(ci.CreatorPersons) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorPersons)) - } - if len(ci.CreatorOrganizations) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if ci.CreatorOrganizations[0] != "Jane Doe LLC" { - t.Errorf("expected %s, got %s", "Jane Doe LLC", ci.CreatorOrganizations[0]) + if ci.Creators[1].Creator != "Jane Doe LLC" { + t.Errorf("expected %s, got %s", "Jane Doe LLC", ci.Creators[1].Creator) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) - } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } if ci.Created != "2018-10-20T16:48:00Z" { t.Errorf("expected %s, got %s", "2018-10-20T16:48:00Z", ci.Created) @@ -65,15 +36,12 @@ } func TestBuilder2_2CanBuildCreationInfoSectionWithCreatorPerson(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Person" creator := "John Doe" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_2(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_2(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -81,33 +49,24 @@ func TestBuilder2_2CanBuildCreationInfoSectionWithCreatorPerson(t *testing.T) { if ci == nil
{ t.Fatalf("expected non-nil CreationInfo, got nil") } - if len(ci.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorPersons)) - } - if ci.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", ci.CreatorPersons[0]) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if ci.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %s", "John Doe", ci.Creators[1].Creator) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) - } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } } func TestBuilder2_2CanBuildCreationInfoSectionWithCreatorTool(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Tool" creator := "some-other-tool-2.1" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_2(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_2(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -115,33 +74,24 @@ func TestBuilder2_2CanBuildCreationInfoSectionWithCreatorTool(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil CreationInfo, got nil") } - if len(ci.CreatorPersons) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorPersons)) - } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorTools) != 2 { - t.Fatalf("expected %d, got %d", 2, len(ci.CreatorTools)) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) - } - if ci.CreatorTools[1] != "some-other-tool-2.1" { - t.Errorf("expected %s, got %s", "some-other-tool-2.1", ci.CreatorTools[1]) + if ci.Creators[1].Creator != "some-other-tool-2.1" { + t.Errorf("expected %s, got %s", "some-other-tool-2.1", ci.Creators[1].Creator) } } func TestBuilder2_2CanBuildCreationInfoSectionWithInvalidPerson(t *testing.T) { - namespacePrefix := "https://github.com/swinslow/spdx-docs/spdx-go/testdata-whatever-" creatorType := "Whatever" creator := "John Doe" testValues := make(map[string]string) testValues["Created"] = "2018-10-20T16:48:00Z" - packageName := "project1" - verificationCode := "TESTCODE" - ci, err := BuildCreationInfoSection2_2(packageName, verificationCode, namespacePrefix, creatorType, creator, testValues) + ci, err := BuildCreationInfoSection2_2(creatorType, creator, testValues) if err != nil { t.Fatalf("expected nil error, got %v", err) } @@ -149,19 +99,13 @@ func TestBuilder2_2CanBuildCreationInfoSectionWithInvalidPerson(t *testing.T) { if ci == nil { t.Fatalf("expected non-nil
CreationInfo, got nil") } - if len(ci.CreatorPersons) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorPersons)) - } - if ci.CreatorPersons[0] != "John Doe" { - t.Errorf("expected %s, got %s", "John Doe", ci.CreatorPersons[0]) - } - if len(ci.CreatorOrganizations) != 0 { - t.Fatalf("expected %d, got %d", 0, len(ci.CreatorOrganizations)) + if len(ci.Creators) != 2 { + t.Fatalf("expected %d, got %d", 2, len(ci.Creators)) } - if len(ci.CreatorTools) != 1 { - t.Fatalf("expected %d, got %d", 1, len(ci.CreatorTools)) + if ci.Creators[1].Creator != "John Doe" { + t.Errorf("expected %s, got %s", "John Doe", ci.Creators[1].Creator) } - if ci.CreatorTools[0] != "github.com/spdx/tools-golang/builder" { - t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.CreatorTools[0]) + if ci.Creators[0].Creator != "github.com/spdx/tools-golang/builder" { + t.Errorf("expected %s, got %s", "github.com/spdx/tools-golang/builder", ci.Creators[0].Creator) } } diff --git a/builder/builder2v2/build_file.go b/builder/builder2v2/build_file.go index ec59f6a9..efdd9799 100644 --- a/builder/builder2v2/build_file.go +++ b/builder/builder2v2/build_file.go @@ -32,23 +32,23 @@ func BuildFileSection2_2(filePath string, prefix string, fileNumber int) (*spdx. f := &spdx.File2_2{ FileName: filePath, FileSPDXIdentifier: spdx.ElementID(i), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: ssha1, }, - spdx.SHA256: spdx.Checksum{ + { Algorithm: spdx.SHA256, Value: ssha256, }, - spdx.MD5: spdx.Checksum{ + { Algorithm: spdx.MD5, Value: smd5, }, }, - LicenseConcluded: "NOASSERTION", - LicenseInfoInFile: []string{"NOASSERTION"}, - FileCopyrightText: "NOASSERTION", + LicenseConcluded: "NOASSERTION", + LicenseInfoInFiles: []string{"NOASSERTION"}, + FileCopyrightText: "NOASSERTION", } return f, nil diff --git a/builder/builder2v2/build_file_test.go b/builder/builder2v2/build_file_test.go index 8a1767e8..74a6a6a9 100644 --- a/builder/builder2v2/build_file_test.go +++ b/builder/builder2v2/build_file_test.go @@ -28,7 +28,8 @@ func TestBuilder2_2CanBuildFileSection(t *testing.T) { if file1.FileSPDXIdentifier != spdx.ElementID("File17") { t.Errorf("expected %v, got %v", "File17", file1.FileSPDXIdentifier) } - for _, checksum := range file1.FileChecksums { + + for _, checksum := range file1.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "024f870eb6323f532515f7a09d5646a97083b819" { @@ -44,14 +45,15 @@ func TestBuilder2_2CanBuildFileSection(t *testing.T) { } } } + if file1.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseConcluded) } - if len(file1.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFile)) + if len(file1.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(file1.LicenseInfoInFiles)) } else { - if file1.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFile[0]) + if file1.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", file1.LicenseInfoInFiles[0]) } } if file1.FileCopyrightText != "NOASSERTION" { diff --git a/builder/builder2v2/build_package.go b/builder/builder2v2/build_package.go index 640f9c77..9c460da8 100644 --- a/builder/builder2v2/build_package.go +++ b/builder/builder2v2/build_package.go @@ -35,7 +35,7 @@ func BuildPackageSection2_2(packageName string, dirRoot string, pathsIgnore []st
dirRootLen = len(dirRoot) } - files := map[spdx.ElementID]*spdx.File2_2{} + files := []*spdx.File2_2{} fileNumber := 0 for _, fp := range filepaths { newFilePatch := "" @@ -48,7 +48,7 @@ func BuildPackageSection2_2(packageName string, dirRoot string, pathsIgnore []st if err != nil { return nil, err } - files[newFile.FileSPDXIdentifier] = newFile + files = append(files, newFile) fileNumber++ } diff --git a/builder/builder2v2/build_package_test.go b/builder/builder2v2/build_package_test.go index 85f402e2..3ab88d24 100644 --- a/builder/builder2v2/build_package_test.go +++ b/builder/builder2v2/build_package_test.go @@ -13,7 +13,7 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { packageName := "project1" dirRoot := "../../testdata/project1/" - wantVerificationCode := "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b" + wantVerificationCode := spdx.PackageVerificationCode{Value: "fc9ac4a370af0a471c2e52af66d6b4cf4e2ba12b"} pkg, err := BuildPackageSection2_2(packageName, dirRoot, nil) if err != nil { @@ -38,7 +38,7 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { if pkg.IsFilesAnalyzedTagPresent != true { t.Errorf("expected %v, got %v", true, pkg.IsFilesAnalyzedTagPresent) } - if pkg.PackageVerificationCode != wantVerificationCode { + if pkg.PackageVerificationCode.Value != wantVerificationCode.Value { t.Errorf("expected %v, got %v", wantVerificationCode, pkg.PackageVerificationCode) } if pkg.PackageLicenseConcluded != "NOASSERTION" { @@ -61,7 +61,7 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { if len(pkg.Files) != 5 { t.Fatalf("expected %d, got %d", 5, len(pkg.Files)) } - fileEmpty := pkg.Files[spdx.ElementID("File0")] + fileEmpty := pkg.Files[0] if fileEmpty == nil { t.Fatalf("expected non-nil file, got nil") } @@ -71,7 +71,7 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { if fileEmpty.FileSPDXIdentifier != spdx.ElementID("File0") { t.Errorf("expected %v, got %v", "File0", fileEmpty.FileSPDXIdentifier) } - for _, checksum := range fileEmpty.FileChecksums { + for _, checksum := range fileEmpty.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "da39a3ee5e6b4b0d3255bfef95601890afd80709" { @@ -90,11 +90,11 @@ func TestBuilder2_2CanBuildPackageSection(t *testing.T) { if fileEmpty.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseConcluded) } - if len(fileEmpty.LicenseInfoInFile) != 1 { - t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFile)) + if len(fileEmpty.LicenseInfoInFiles) != 1 { + t.Errorf("expected %v, got %v", 1, len(fileEmpty.LicenseInfoInFiles)) } else { - if fileEmpty.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFile[0]) + if fileEmpty.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileEmpty.LicenseInfoInFiles[0]) } } if fileEmpty.FileCopyrightText != "NOASSERTION" { @@ -125,31 +125,31 @@ func TestBuilder2_2CanIgnoreFiles(t *testing.T) { } want := "./dontscan.txt" - got := pkg.Files[spdx.ElementID("File0")].FileName + got := pkg.Files[0].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep/keep.txt" - got = pkg.Files[spdx.ElementID("File1")].FileName + got = pkg.Files[1].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./keep.txt" - got = pkg.Files[spdx.ElementID("File2")].FileName + got = pkg.Files[2].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = 
"./subdir/keep/dontscan.txt" - got = pkg.Files[spdx.ElementID("File3")].FileName + got = pkg.Files[3].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } want = "./subdir/keep/keep.txt" - got = pkg.Files[spdx.ElementID("File4")].FileName + got = pkg.Files[4].FileName if want != got { t.Errorf("expected %v, got %v", want, got) } diff --git a/examples/1-load/example_load.go b/examples/1-load/example_load.go index 12563d2c..328d349c 100644 --- a/examples/1-load/example_load.go +++ b/examples/1-load/example_load.go @@ -62,14 +62,26 @@ func main() { return } + if len(pkgIDs) == 0 { + return + } + // it does, so we'll go through each one - for _, pkgID := range pkgIDs { - pkg, ok := doc.Packages[pkgID] - if !ok { - fmt.Printf("Package %s has described relationship but ID not found\n", string(pkgID)) + for _, pkg := range doc.Packages { + var documentDescribesPackage bool + for _, describedPackageID := range pkgIDs { + if pkg.PackageSPDXIdentifier == describedPackageID { + documentDescribesPackage = true + break + } + } + + if !documentDescribesPackage { continue } + pkgID := pkg.PackageSPDXIdentifier + // check whether the package had its files analyzed if !pkg.FilesAnalyzed { fmt.Printf("Package %s (%s) had FilesAnalyzed: false\n", string(pkgID), pkg.PackageName) @@ -93,7 +105,7 @@ func main() { // from a map. if we care about order, we should first pull the // IDs into a slice, sort it, and then print the ordered files. fmt.Printf("- File %d: %s\n", i, f.FileName) - fmt.Printf(" License from file: %v\n", f.LicenseInfoInFile) + fmt.Printf(" License from file: %v\n", f.LicenseInfoInFiles) fmt.Printf(" License concluded: %v\n", f.LicenseConcluded) i++ if i > 50 { diff --git a/examples/10-jsonloader/example_json_loader.go b/examples/10-jsonloader/example_json_loader.go index a9422b1d..4de95610 100644 --- a/examples/10-jsonloader/example_json_loader.go +++ b/examples/10-jsonloader/example_json_loader.go @@ -12,7 +12,7 @@ import ( "os" "strings" - "github.com/spdx/tools-golang/jsonloader" + "github.com/spdx/tools-golang/json" ) func main() { @@ -36,7 +36,7 @@ func main() { defer r.Close() // try to load the SPDX file's contents as a json file, version 2.2 - doc, err := jsonloader.Load2_2(r) + doc, err := spdx_json.Load2_2(r) if err != nil { fmt.Printf("Error while parsing %v: %v", args[1], err) return @@ -47,9 +47,9 @@ func main() { fmt.Println(strings.Repeat("=", 80)) fmt.Println("Some Attributes of the Document:") - fmt.Printf("Document Name: %s\n", doc.CreationInfo.DocumentName) - fmt.Printf("DataLicense: %s\n", doc.CreationInfo.DataLicense) - fmt.Printf("Document Namespace: %s\n", doc.CreationInfo.DocumentNamespace) - fmt.Printf("SPDX Version: %s\n", doc.CreationInfo.SPDXVersion) + fmt.Printf("Document Name: %s\n", doc.DocumentName) + fmt.Printf("DataLicense: %s\n", doc.DataLicense) + fmt.Printf("Document Namespace: %s\n", doc.DocumentNamespace) + fmt.Printf("SPDX Version: %s\n", doc.SPDXVersion) fmt.Println(strings.Repeat("=", 80)) } diff --git a/examples/4-search/example_search.go b/examples/4-search/example_search.go index 3f195969..52f8b075 100644 --- a/examples/4-search/example_search.go +++ b/examples/4-search/example_search.go @@ -120,7 +120,7 @@ func main() { // all file hashes and the package verification code have been filled in // appropriately by builder. 
// And, all files with "SPDX-License-Identifier:" tags have had their - // licenses extracted into LicenseInfoInFile and LicenseConcluded for + // licenses extracted into LicenseInfoInFiles and LicenseConcluded for // each file by idsearcher. The PackageLicenseInfoFromFiles field will // also be filled in with all license identifiers. fmt.Printf("Successfully created document and searched for IDs for package %s\n", packageName) diff --git a/examples/5-report/example_report.go b/examples/5-report/example_report.go index bd7971fd..1197547a 100644 --- a/examples/5-report/example_report.go +++ b/examples/5-report/example_report.go @@ -54,14 +54,26 @@ func main() { return } + if len(pkgIDs) == 0 { + return + } + // it does, so we'll go through each one - for _, pkgID := range pkgIDs { - pkg, ok := doc.Packages[pkgID] - if !ok { - fmt.Printf("Package %s has described relationship but ID not found\n", string(pkgID)) + for _, pkg := range doc.Packages { + var documentDescribesPackage bool + for _, describedPackageID := range pkgIDs { + if pkg.PackageSPDXIdentifier == describedPackageID { + documentDescribesPackage = true + break + } + } + + if !documentDescribesPackage { continue } + pkgID := pkg.PackageSPDXIdentifier + // check whether the package had its files analyzed if !pkg.FilesAnalyzed { fmt.Printf("Package %s (%s) had FilesAnalyzed: false\n", string(pkgID), pkg.PackageName) diff --git a/examples/6-licensediff/example_licensediff.go b/examples/6-licensediff/example_licensediff.go index 5205efa5..49d76035 100644 --- a/examples/6-licensediff/example_licensediff.go +++ b/examples/6-licensediff/example_licensediff.go @@ -13,6 +13,7 @@ package main import ( "fmt" + "github.com/spdx/tools-golang/spdx" "os" "github.com/spdx/tools-golang/licensediff" @@ -85,13 +86,30 @@ func main() { // go through the first set first, report if they aren't in the second set for _, pkgID := range pkgIDsFirst { fmt.Printf("================================\n") - p1, okFirst := docFirst.Packages[pkgID] + + var p1, p2 *spdx.Package2_2 + var okFirst, okSecond bool + for _, pkg := range docFirst.Packages { + if pkg.PackageSPDXIdentifier == pkgID { + okFirst = true + p1 = pkg + break + } + } if !okFirst { fmt.Printf("Package %s has described relationship in first document but ID not found\n", string(pkgID)) continue } + fmt.Printf("Package %s (%s)\n", string(pkgID), p1.PackageName) - p2, okSecond := docSecond.Packages[pkgID] + + for _, pkg := range docSecond.Packages { + if pkg.PackageSPDXIdentifier == pkgID { + okSecond = true + p2 = pkg + break + } + } if !okSecond { fmt.Printf(" Found in first document, not found in second\n") continue @@ -121,13 +139,27 @@ func main() { // now report if there are any package IDs in the second set that aren't // in the first for _, pkgID := range pkgIDsSecond { - p2, okSecond := docSecond.Packages[pkgID] + var p2 *spdx.Package2_2 + var okFirst, okSecond bool + for _, pkg := range docSecond.Packages { + if pkg.PackageSPDXIdentifier == pkgID { + okSecond = true + p2 = pkg + break + } + } if !okSecond { fmt.Printf("================================\n") fmt.Printf("Package %s has described relationship in second document but ID not found\n", string(pkgID)) continue } - _, okFirst := docFirst.Packages[pkgID] + + for _, pkg := range docFirst.Packages { + if pkg.PackageSPDXIdentifier == pkgID { + okFirst = true + break + } + } if !okFirst { fmt.Printf("================================\n") fmt.Printf("Package %s (%s)\n", string(pkgID), p2.PackageName) diff --git 
a/examples/7-rdfloader/exampleRDFLoader.go b/examples/7-rdfloader/exampleRDFLoader.go index 4eae8c9a..5258ac2b 100644 --- a/examples/7-rdfloader/exampleRDFLoader.go +++ b/examples/7-rdfloader/exampleRDFLoader.go @@ -40,9 +40,9 @@ func main() { // Printing some of the document Information fmt.Println(strings.Repeat("=", 80)) fmt.Println("Some Attributes of the Document:") - fmt.Printf("Document Name: %s\n", doc.CreationInfo.DocumentName) - fmt.Printf("DataLicense: %s\n", doc.CreationInfo.DataLicense) - fmt.Printf("Document Namespace: %s\n", doc.CreationInfo.DocumentNamespace) - fmt.Printf("SPDX Version: %s\n", doc.CreationInfo.SPDXVersion) + fmt.Printf("Document Name: %s\n", doc.DocumentName) + fmt.Printf("DataLicense: %s\n", doc.DataLicense) + fmt.Printf("Document Namespace: %s\n", doc.DocumentNamespace) + fmt.Printf("SPDX Version: %s\n", doc.SPDXVersion) fmt.Println(strings.Repeat("=", 80)) } diff --git a/examples/8-jsontotv/examplejsontotv.go b/examples/8-jsontotv/examplejsontotv.go index 5ceccc07..9faadcef 100644 --- a/examples/8-jsontotv/examplejsontotv.go +++ b/examples/8-jsontotv/examplejsontotv.go @@ -11,7 +11,7 @@ import ( "fmt" "os" - "github.com/spdx/tools-golang/jsonloader" + "github.com/spdx/tools-golang/json" "github.com/spdx/tools-golang/tvsaver" ) @@ -36,7 +36,7 @@ func main() { defer r.Close() // try to load the SPDX file's contents as a json file, version 2.2 - doc, err := jsonloader.Load2_2(r) + doc, err := spdx_json.Load2_2(r) if err != nil { fmt.Printf("Error while parsing %v: %v", args[1], err) return diff --git a/examples/9-tvtojson/exampletvtojson.go b/examples/9-tvtojson/exampletvtojson.go index e75afc43..fac1c980 100644 --- a/examples/9-tvtojson/exampletvtojson.go +++ b/examples/9-tvtojson/exampletvtojson.go @@ -11,7 +11,7 @@ import ( "fmt" "os" - "github.com/spdx/tools-golang/jsonsaver" + "github.com/spdx/tools-golang/json" "github.com/spdx/tools-golang/tvloader" ) @@ -57,7 +57,7 @@ func main() { defer w.Close() // try to save the document to disk as an SPDX json file, version 2.2 - err = jsonsaver.Save2_2(doc, w) + err = spdx_json.Save2_2(doc, w) if err != nil { fmt.Printf("Error while saving %v: %v", fileOut, err) return diff --git a/examples/sample-docs/json/SPDXJSONExample-v2.2.spdx.json b/examples/sample-docs/json/SPDXJSONExample-v2.2.spdx.json index 546e9483..89171a14 100644 --- a/examples/sample-docs/json/SPDXJSONExample-v2.2.spdx.json +++ b/examples/sample-docs/json/SPDXJSONExample-v2.2.spdx.json @@ -1,277 +1,284 @@ -{ - "SPDXID" : "SPDXRef-DOCUMENT", - "spdxVersion" : "SPDX-2.2", - "creationInfo" : { - "comment" : "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - "created" : "2010-01-29T18:30:22Z", - "creators" : [ "Tool: LicenseFind-1.0", "Organization: ExampleCodeInspect ()", "Person: Jane Doe ()" ], - "licenseListVersion" : "3.9" - }, - "name" : "SPDX-Tools-v2.0", - "dataLicense" : "CC0-1.0", - "comment" : "This document was created using SPDX 2.0 using licenses from the web site.", - "externalDocumentRefs" : [ { - "externalDocumentId" : "DocumentRef-spdx-tool-1.2", - "checksum" : { - "algorithm" : "SHA1", - "checksumValue" : "d6a770ba38583ed4bb4525bd96e50461655d2759" - }, - "spdxDocument" : "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" - } ], - "hasExtractedLicensingInfos" : [ { - "extractedText" : "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. 
As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp This document was created using SPDX 2.0 using licenses from the web site. - -## External Document References -ExternalDocumentRef: DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759 -## Creation Information -Creator: Tool: LicenseFind-1.0 -Creator: Organization: ExampleCodeInspect () -Creator: Person: Jane Doe () -Created: 2010-01-29T18:30:22Z -CreatorComment: This package has been shipped in source and binary form. -The binaries were created with gcc 4.5.1 and expect to link to -compatible system run time libraries. -LicenseListVersion: 3.9 -## Annotations -Annotator: Person: Jane Doe () -AnnotationDate: 2010-01-29T18:30:22Z -AnnotationComment: Document level annotation -AnnotationType: OTHER -SPDXREF: SPDXRef-DOCUMENT -Annotator: Person: Joe Reviewer -AnnotationDate: 2010-02-10T00:00:00Z -AnnotationComment: This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses -AnnotationType: REVIEW -SPDXREF: SPDXRef-DOCUMENT -Annotator: Person: Suzanne Reviewer -AnnotationDate: 2011-03-13T00:00:00Z -AnnotationComment: Another example reviewer. -AnnotationType: REVIEW -SPDXREF: SPDXRef-DOCUMENT -## Relationships -Relationship: SPDXRef-DOCUMENT CONTAINS SPDXRef-Package -Relationship: SPDXRef-DOCUMENT COPY_OF DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement -Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-File -Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-Package - -FileName: ./package/foo.c -SPDXID: SPDXRef-File -FileComment: The concluded license was taken from the package level that the file was included in. -This information was found in the COPYING.txt file in the xyz directory. -FileType: SOURCE -FileChecksum: SHA1: d6a770ba38583ed4bb4525bd96e50461655d2758 -FileChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 -LicenseConcluded: (LGPL-2.0-only OR LicenseRef-2) -LicenseInfoInFile: GPL-2.0-only -LicenseInfoInFile: LicenseRef-2 -LicenseComments: The concluded license was taken from the package level that the file was included in. -FileCopyrightText: Copyright 2008-2010 John Smith -FileNotice: Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-FileContributor: The Regents of the University of California -FileContributor: Modified by Paul Mundt lethal@linux-sh.org -FileContributor: IBM Corporation -## Annotations -Annotator: Person: File Commenter -AnnotationDate: 2011-01-29T18:30:22Z -AnnotationComment: File level annotation -AnnotationType: OTHER -SPDXREF: SPDXRef-File -## Relationships -Relationship: SPDXRef-File GENERATED_FROM SPDXRef-fromDoap-0 -## Package Information -PackageName: glibc -SPDXID: SPDXRef-Package -PackageVersion: 2.11.1 -PackageFileName: glibc-2.11.1.tar.gz -PackageSupplier: Person: Jane Doe (jane.doe@example.com) -PackageOriginator: Organization: ExampleCodeInspect (contact@example.com) -PackageDownloadLocation: http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz -PackageVerificationCode: d6a770ba38583ed4bb4525bd96e50461655d2758(./package.spdx) -PackageChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 -PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c -PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd -PackageHomePage: http://ftp.gnu.org/gnu/glibc -PackageSourceInfo: uses glibc-2_11-branch from git://sourceware.org/git/glibc.git. -PackageLicenseConcluded: (LGPL-2.0-only OR LicenseRef-3) -## License information from files -PackageLicenseInfoFromFiles: GPL-2.0-only -PackageLicenseInfoFromFiles: LicenseRef-2 -PackageLicenseInfoFromFiles: LicenseRef-1 -PackageLicenseDeclared: (LGPL-2.0-only AND LicenseRef-3) -PackageLicenseComments: The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change. -PackageCopyrightText: Copyright 2008-2010 John Smith -PackageSummary: GNU C library. -PackageDescription: The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems. -PackageAttributionText: The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually. -ExternalRef: SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:* -ExternalRef: OTHER LocationRef-acmeforge acmecorp/acmenator/4.1.3-alpha -ExternalRefComment: This is the external ref for Acme -## Annotations -Annotator: Person: Package Commenter -AnnotationDate: 2011-01-29T18:30:22Z -AnnotationComment: Package level annotation -AnnotationType: OTHER -SPDXREF: SPDXRef-Package -## Relationships -Relationship: SPDXRef-Package CONTAINS SPDXRef-JenaLib -Relationship: SPDXRef-Package DYNAMIC_LINK SPDXRef-Saxon - -## File Information -FileName: ./lib-source/commons-lang3-3.1-sources.jar -SPDXID: SPDXRef-CommonsLangSrc -FileComment: This file is used by Jena -FileType: ARCHIVE -FileChecksum: SHA1: c2b4e1c67a2d28fced849ee1bb76e7391b93f125 -LicenseConcluded: Apache-2.0 -LicenseInfoInFile: Apache-2.0 -FileCopyrightText: Copyright 2001-2011 The Apache Software Foundation -FileNotice: Apache Commons Lang -Copyright 2001-2011 The Apache Software Foundation - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). 
- -This product includes software from the Spring Framework, -under the Apache License 2.0 (see: StringUtils.containsWhitespace()) -FileContributor: Apache Software Foundation -## Relationships -Relationship: SPDXRef-CommonsLangSrc GENERATED_FROM NOASSERTION - -FileName: ./lib-source/jena-2.6.3-sources.jar -SPDXID: SPDXRef-JenaLib -FileComment: This file belongs to Jena -FileType: ARCHIVE -FileChecksum: SHA1: 3ab4e1c67a2d28fced849ee1bb76e7391b93f125 -LicenseConcluded: LicenseRef-1 -LicenseInfoInFile: LicenseRef-1 -LicenseComments: This license is used by Jena -FileCopyrightText: (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP -FileContributor: Apache Software Foundation -FileContributor: Hewlett Packard Inc. -## Relationships -Relationship: SPDXRef-JenaLib CONTAINS SPDXRef-Package - -FileName: ./src/org/spdx/parser/DOAPProject.java -SPDXID: SPDXRef-DoapSource -FileType: SOURCE -FileChecksum: SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12 -LicenseConcluded: Apache-2.0 -LicenseInfoInFile: Apache-2.0 -FileCopyrightText: Copyright 2010, 2011 Source Auditor Inc. -FileContributor: Protecode Inc. -FileContributor: SPDX Technical Team Members -FileContributor: Open Logic Inc. -FileContributor: Source Auditor Inc. -FileContributor: Black Duck Software In.c - -## Package Information -PackageName: Apache Commons Lang -SPDXID: SPDXRef-fromDoap-1 -PackageDownloadLocation: NOASSERTION -PackageHomePage: http://commons.apache.org/proper/commons-lang/ -PackageLicenseConcluded: NOASSERTION -PackageLicenseDeclared: NOASSERTION -PackageCopyrightText: NOASSERTION -FilesAnalyzed: false - -## Package Information -PackageName: Jena -SPDXID: SPDXRef-fromDoap-0 -PackageVersion: 3.12.0 -PackageDownloadLocation: https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz -PackageHomePage: http://www.openjena.org/ -PackageLicenseConcluded: NOASSERTION -PackageLicenseDeclared: NOASSERTION -PackageCopyrightText: NOASSERTION -ExternalRef: PACKAGE-MANAGER purl pkg:maven/org.apache.jena/apache-jena@3.12.0 -FilesAnalyzed: false - -## Package Information -PackageName: Saxons -SPDXID: SPDXRef-Saxonas -PackageVersion: 8.8 -PackageFileName: saxonB-8.8.zip -PackageDownloadLocation: https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download -PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c -PackageHomePage: http://saxon.sourceforge.net/ -PackageLicenseConcluded: MPL-1.0 -PackageLicenseDeclared: MPL-1.0 -PackageLicenseComments: Other versions available for a commercial license -PackageCopyrightText: Copyright Saxonica Ltd -PackageDescription: The Saxon package is a collection of tools for processing XML documents. -FilesAnalyzed: false - -## Snippet Information -SnippetSPDXID: SPDXRef-Snippet -SnippetFromFileSPDXID: SPDXRef-DoapSource -SnippetByteRange: 310:420 -SnippetLineRange: 5:23 -SnippetLicenseConcluded: GPL-2.0-only -LicenseInfoInSnippet: GPL-2.0-only -SnippetLicenseComments: The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz. -SnippetCopyrightText: Copyright 2008-2010 John Smith -SnippetComment: This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0. 
-SnippetName: from linux kernel - - -## License Information -LicenseID: LicenseRef-1 -ExtractedText: /* - * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR - * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -*/ - -LicenseID: LicenseRef-2 -ExtractedText: This package includes the GRDDL parser developed by Hewlett Packard under the following license: -� Copyright 2007 Hewlett-Packard Development Company, LP - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. -The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. -THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -LicenseID: LicenseRef-4 -ExtractedText: /* - * (c) Copyright 2009 University of Bristol - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. 
Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR - * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -*/ - -LicenseID: LicenseRef-Beerware-4.2 -ExtractedText: "THE BEER-WARE LICENSE" (Revision 42): -phk@FreeBSD.ORG wrote this file. As long as you retain this notice you -can do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp -LicenseName: Beer-Ware License (Version 42) -LicenseCrossReference: http://people.freebsd.org/~phk/ -LicenseComment: The beerware license has a couple of other standard variants. - -LicenseID: LicenseRef-3 -ExtractedText: The CyberNeko Software License, Version 1.0 - - -(C) Copyright 2002-2005, Andy Clark. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - -3. The end-user documentation included with the redistribution, - if any, must include the following acknowledgment: - "This product includes software developed by Andy Clark." - Alternately, this acknowledgment may appear in the software itself, - if and wherever such third-party acknowledgments normally appear. - -4. The names "CyberNeko" and "NekoHTML" must not be used to endorse - or promote products derived from this software without prior - written permission. For written permission, please contact - andyc@cyberneko.net. - -5. Products derived from this software may not be called "CyberNeko", - nor may "CyberNeko" appear in their name, without prior written - permission of the author. - -THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT -OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -LicenseName: CyberNeko License -LicenseCrossReference: http://people.apache.org/~andyc/neko/LICENSE, http://justasample.url.com -LicenseComment: This is tye CyperNeko License \ No newline at end of file +SPDXVersion: SPDX-2.2 +DataLicense: CC0-1.0 +DocumentNamespace: http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301 +DocumentName: SPDX-Tools-v2.0 +SPDXID: SPDXRef-DOCUMENT +DocumentComment: This document was created using SPDX 2.0 using licenses from the web site. + +## External Document References +ExternalDocumentRef: DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759 +## Creation Information +Creator: Tool: LicenseFind-1.0 +Creator: Organization: ExampleCodeInspect () +Creator: Person: Jane Doe () +Created: 2010-01-29T18:30:22Z +CreatorComment: This package has been shipped in source and binary form. +The binaries were created with gcc 4.5.1 and expect to link to +compatible system run time libraries. +LicenseListVersion: 3.9 +## Annotations +Annotator: Person: Jane Doe () +AnnotationDate: 2010-01-29T18:30:22Z +AnnotationComment: Document level annotation +AnnotationType: OTHER +SPDXREF: SPDXRef-DOCUMENT +Annotator: Person: Joe Reviewer +AnnotationDate: 2010-02-10T00:00:00Z +AnnotationComment: This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses +AnnotationType: REVIEW +SPDXREF: SPDXRef-DOCUMENT +Annotator: Person: Suzanne Reviewer +AnnotationDate: 2011-03-13T00:00:00Z +AnnotationComment: Another example reviewer. +AnnotationType: REVIEW +SPDXREF: SPDXRef-DOCUMENT +## Relationships +Relationship: SPDXRef-DOCUMENT CONTAINS SPDXRef-Package +Relationship: SPDXRef-DOCUMENT COPY_OF DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement +Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-File +Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-Package + +FileName: ./package/foo.c +SPDXID: SPDXRef-File +FileComment: The concluded license was taken from the package level that the file was included in. +This information was found in the COPYING.txt file in the xyz directory. +FileType: SOURCE +FileChecksum: SHA1: d6a770ba38583ed4bb4525bd96e50461655d2758 +FileChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 +LicenseConcluded: (LGPL-2.0-only OR LicenseRef-2) +LicenseInfoInFile: GPL-2.0-only +LicenseInfoInFile: LicenseRef-2 +LicenseComments: The concluded license was taken from the package level that the file was included in. 
+FileCopyrightText: Copyright 2008-2010 John Smith +FileNotice: Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +FileContributor: The Regents of the University of California +FileContributor: Modified by Paul Mundt lethal@linux-sh.org +FileContributor: IBM Corporation +## Annotations +Annotator: Person: File Commenter +AnnotationDate: 2011-01-29T18:30:22Z +AnnotationComment: File level annotation +AnnotationType: OTHER +SPDXREF: SPDXRef-File +## Relationships +Relationship: SPDXRef-File GENERATED_FROM SPDXRef-fromDoap-0 +## Package Information +PackageName: glibc +SPDXID: SPDXRef-Package +PackageVersion: 2.11.1 +PackageFileName: glibc-2.11.1.tar.gz +PackageSupplier: Person: Jane Doe (jane.doe@example.com) +PackageOriginator: Organization: ExampleCodeInspect (contact@example.com) +PackageDownloadLocation: http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz +PackageVerificationCode: d6a770ba38583ed4bb4525bd96e50461655d2758(./package.spdx) +PackageChecksum: MD5: 624c1abb3664f4b35547e7c73864ad24 +PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c +PackageChecksum: SHA256: 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd +PackageHomePage: http://ftp.gnu.org/gnu/glibc +PackageSourceInfo: uses glibc-2_11-branch from git://sourceware.org/git/glibc.git. +PackageLicenseConcluded: (LGPL-2.0-only OR LicenseRef-3) +## License information from files +PackageLicenseInfoFromFiles: GPL-2.0-only +PackageLicenseInfoFromFiles: LicenseRef-2 +PackageLicenseInfoFromFiles: LicenseRef-1 +PackageLicenseDeclared: (LGPL-2.0-only AND LicenseRef-3) +PackageLicenseComments: The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change. +PackageCopyrightText: Copyright 2008-2010 John Smith +PackageSummary: GNU C library. +PackageDescription: The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems. +PackageAttributionText: The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually. 
+ExternalRef: SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:* +ExternalRef: OTHER LocationRef-acmeforge acmecorp/acmenator/4.1.3-alpha +ExternalRefComment: This is the external ref for Acme +## Annotations +Annotator: Person: Package Commenter +AnnotationDate: 2011-01-29T18:30:22Z +AnnotationComment: Package level annotation +AnnotationType: OTHER +SPDXREF: SPDXRef-Package +## Relationships +Relationship: SPDXRef-Package CONTAINS SPDXRef-JenaLib +Relationship: SPDXRef-Package DYNAMIC_LINK SPDXRef-Saxon + +## File Information +FileName: ./lib-source/commons-lang3-3.1-sources.jar +SPDXID: SPDXRef-CommonsLangSrc +FileComment: This file is used by Jena +FileType: ARCHIVE +FileChecksum: SHA1: c2b4e1c67a2d28fced849ee1bb76e7391b93f125 +LicenseConcluded: Apache-2.0 +LicenseInfoInFile: Apache-2.0 +FileCopyrightText: Copyright 2001-2011 The Apache Software Foundation +FileNotice: Apache Commons Lang +Copyright 2001-2011 The Apache Software Foundation + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +This product includes software from the Spring Framework, +under the Apache License 2.0 (see: StringUtils.containsWhitespace()) +FileContributor: Apache Software Foundation +## Relationships +Relationship: SPDXRef-CommonsLangSrc GENERATED_FROM NOASSERTION + +FileName: ./lib-source/jena-2.6.3-sources.jar +SPDXID: SPDXRef-JenaLib +FileComment: This file belongs to Jena +FileType: ARCHIVE +FileChecksum: SHA1: 3ab4e1c67a2d28fced849ee1bb76e7391b93f125 +LicenseConcluded: LicenseRef-1 +LicenseInfoInFile: LicenseRef-1 +LicenseComments: This license is used by Jena +FileCopyrightText: (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP +FileContributor: Apache Software Foundation +FileContributor: Hewlett Packard Inc. +## Relationships +Relationship: SPDXRef-JenaLib CONTAINS SPDXRef-Package + +FileName: ./src/org/spdx/parser/DOAPProject.java +SPDXID: SPDXRef-DoapSource +FileType: SOURCE +FileChecksum: SHA1: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12 +LicenseConcluded: Apache-2.0 +LicenseInfoInFile: Apache-2.0 +FileCopyrightText: Copyright 2010, 2011 Source Auditor Inc. +FileContributor: Protecode Inc. +FileContributor: SPDX Technical Team Members +FileContributor: Open Logic Inc. +FileContributor: Source Auditor Inc. 
+FileContributor: Black Duck Software In.c + +## Package Information +PackageName: Apache Commons Lang +SPDXID: SPDXRef-fromDoap-1 +PackageDownloadLocation: NOASSERTION +PackageHomePage: http://commons.apache.org/proper/commons-lang/ +PackageLicenseConcluded: NOASSERTION +PackageLicenseDeclared: NOASSERTION +PackageCopyrightText: NOASSERTION +FilesAnalyzed: false + +## Package Information +PackageName: Jena +SPDXID: SPDXRef-fromDoap-0 +PackageVersion: 3.12.0 +PackageDownloadLocation: https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz +PackageHomePage: http://www.openjena.org/ +PackageLicenseConcluded: NOASSERTION +PackageLicenseDeclared: NOASSERTION +PackageCopyrightText: NOASSERTION +ExternalRef: PACKAGE-MANAGER purl pkg:maven/org.apache.jena/apache-jena@3.12.0 +FilesAnalyzed: false + +## Package Information +PackageName: Saxon +SPDXID: SPDXRef-Saxon +PackageVersion: 8.8 +PackageFileName: saxonB-8.8.zip +PackageDownloadLocation: https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download +PackageChecksum: SHA1: 85ed0817af83a24ad8da68c2b5094de69833983c +PackageHomePage: http://saxon.sourceforge.net/ +PackageLicenseConcluded: MPL-1.0 +PackageLicenseDeclared: MPL-1.0 +PackageLicenseComments: Other versions available for a commercial license +PackageCopyrightText: Copyright Saxonica Ltd +PackageDescription: The Saxon package is a collection of tools for processing XML documents. +FilesAnalyzed: false + +## Snippet Information +SnippetSPDXID: SPDXRef-Snippet +SnippetFromFileSPDXID: SPDXRef-DoapSource +SnippetByteRange: 310:420 +SnippetLineRange: 5:23 +SnippetLicenseConcluded: GPL-2.0-only +LicenseInfoInSnippet: GPL-2.0-only +SnippetLicenseComments: The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz. +SnippetCopyrightText: Copyright 2008-2010 John Smith +SnippetComment: This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0. +SnippetName: from linux kernel + + +## License Information +LicenseID: LicenseRef-1 +ExtractedText: /* + * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +LicenseID: LicenseRef-2 +ExtractedText: This package includes the GRDDL parser developed by Hewlett Packard under the following license: +� Copyright 2007 Hewlett-Packard Development Company, LP + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +LicenseID: LicenseRef-4 +ExtractedText: /* + * (c) Copyright 2009 University of Bristol + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +LicenseID: LicenseRef-Beerware-4.2 +ExtractedText: "THE BEER-WARE LICENSE" (Revision 42): +phk@FreeBSD.ORG wrote this file. As long as you retain this notice you +can do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp +LicenseName: Beer-Ware License (Version 42) +LicenseCrossReference: http://people.freebsd.org/~phk/ +LicenseComment: The beerware license has a couple of other standard variants. + +LicenseID: LicenseRef-3 +ExtractedText: The CyberNeko Software License, Version 1.0 + + +(C) Copyright 2002-2005, Andy Clark. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +3. The end-user documentation included with the redistribution, + if any, must include the following acknowledgment: + "This product includes software developed by Andy Clark." + Alternately, this acknowledgment may appear in the software itself, + if and wherever such third-party acknowledgments normally appear. + +4. The names "CyberNeko" and "NekoHTML" must not be used to endorse + or promote products derived from this software without prior + written permission. For written permission, please contact + andyc@cyberneko.net. + +5. Products derived from this software may not be called "CyberNeko", + nor may "CyberNeko" appear in their name, without prior written + permission of the author. + +THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT +OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+LicenseName: CyberNeko License
+LicenseCrossReference: http://people.apache.org/~andyc/neko/LICENSE, http://justasample.url.com
+LicenseComment: This is tye CyperNeko License
+
diff --git a/examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx b/examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..171ba8e248b0b4b052960cb8ea807fe17617684a
GIT binary patch
literal 14949
[... base85-encoded binary payload for SPDXSpreadsheetExample-v2.2.xlsx (14949 bytes) omitted ...]
literal 0
HcmV?d00001

diff --git
a/examples/sample-docs/xml/SPDXXMLExample-v2.2.spdx.xml b/examples/sample-docs/xml/SPDXXMLExample-v2.2.spdx.xml new file mode 100644 index 00000000..80e0527a --- /dev/null +++ b/examples/sample-docs/xml/SPDXXMLExample-v2.2.spdx.xml @@ -0,0 +1,443 @@ + + + SPDXRef-DOCUMENT + SPDX-2.2 + + This package has been shipped in source and binary form. +The binaries were created with gcc 4.5.1 and expect to link to +compatible system run time libraries. + 2010-01-29T18:30:22Z + Tool: LicenseFind-1.0 + Organization: ExampleCodeInspect () + Person: Jane Doe () + 3.9 + + SPDX-Tools-v2.0 + CC0-1.0 + This document was created using SPDX 2.0 using licenses from the web site. + + DocumentRef-spdx-tool-1.2 + + SHA1 + d6a770ba38583ed4bb4525bd96e50461655d2759 + + http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 + + + LicenseRef-1 + /* + * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + + + LicenseRef-2 + This package includes the GRDDL parser developed by Hewlett Packard under the following license: +� Copyright 2007 Hewlett-Packard Development Company, LP + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + LicenseRef-4 + /* + * (c) Copyright 2009 University of Bristol + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + + + LicenseRef-Beerware-4.2 + The beerware license has a couple of other standard variants. + "THE BEER-WARE LICENSE" (Revision 42): +phk@FreeBSD.ORG wrote this file. As long as you retain this notice you +can do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp + Beer-Ware License (Version 42) + http://people.freebsd.org/~phk/ + + + LicenseRef-3 + This is tye CyperNeko License + The CyberNeko Software License, Version 1.0 + + +(C) Copyright 2002-2005, Andy Clark. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +3. The end-user documentation included with the redistribution, + if any, must include the following acknowledgment: + "This product includes software developed by Andy Clark." + Alternately, this acknowledgment may appear in the software itself, + if and wherever such third-party acknowledgments normally appear. + +4. The names "CyberNeko" and "NekoHTML" must not be used to endorse + or promote products derived from this software without prior + written permission. 
For written permission, please contact + andyc@cyberneko.net. + +5. Products derived from this software may not be called "CyberNeko", + nor may "CyberNeko" appear in their name, without prior written + permission of the author. + +THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT +OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + CyberNeko License + http://people.apache.org/~andyc/neko/LICENSE + http://justasample.url.com + + + 2010-01-29T18:30:22Z + OTHER + Person: Jane Doe () + Document level annotation + + + 2010-02-10T00:00:00Z + REVIEW + Person: Joe Reviewer + This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses + + + 2011-03-13T00:00:00Z + REVIEW + Person: Suzanne Reviewer + Another example reviewer. + + http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301 + SPDXRef-File + SPDXRef-Package + + SPDXRef-Package + + 2011-01-29T18:30:22Z + OTHER + Person: Package Commenter + Package level annotation + + The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually. + + MD5 + 624c1abb3664f4b35547e7c73864ad24 + + + SHA1 + 85ed0817af83a24ad8da68c2b5094de69833983c + + + SHA256 + 11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd + + Copyright 2008-2010 John Smith + The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems. + http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz + + SECURITY + cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:* + cpe23Type + + + This is the external ref for Acme + OTHER + acmecorp/acmenator/4.1.3-alpha + http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge + + true + SPDXRef-CommonsLangSrc + SPDXRef-JenaLib + SPDXRef-DoapSource + http://ftp.gnu.org/gnu/glibc + The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change. + (LGPL-2.0-only OR LicenseRef-3) + (LGPL-2.0-only AND LicenseRef-3) + GPL-2.0-only + LicenseRef-2 + LicenseRef-1 + glibc + Organization: ExampleCodeInspect (contact@example.com) + glibc-2.11.1.tar.gz + + ./package.spdx + d6a770ba38583ed4bb4525bd96e50461655d2758 + + uses glibc-2_11-branch from git://sourceware.org/git/glibc.git. + GNU C library. 
+ Person: Jane Doe (jane.doe@example.com) + 2.11.1 + + + SPDXRef-fromDoap-1 + NOASSERTION + NOASSERTION + false + http://commons.apache.org/proper/commons-lang/ + NOASSERTION + NOASSERTION + Apache Commons Lang + + + SPDXRef-fromDoap-0 + NOASSERTION + https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz + + PACKAGE_MANAGER + pkg:maven/org.apache.jena/apache-jena@3.12.0 + purl + + false + http://www.openjena.org/ + NOASSERTION + NOASSERTION + Jena + 3.12.0 + + + SPDXRef-Saxon + + SHA1 + 85ed0817af83a24ad8da68c2b5094de69833983c + + Copyright Saxonica Ltd + The Saxon package is a collection of tools for processing XML documents. + https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download + false + http://saxon.sourceforge.net/ + Other versions available for a commercial license + MPL-1.0 + MPL-1.0 + Saxon + saxonB-8.8.zip + 8.8 + + + SPDXRef-DoapSource + + SHA1 + 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12 + + Copyright 2010, 2011 Source Auditor Inc. + Protecode Inc. + SPDX Technical Team Members + Open Logic Inc. + Source Auditor Inc. + Black Duck Software In.c + ./src/org/spdx/parser/DOAPProject.java + SOURCE + Apache-2.0 + Apache-2.0 + + + SPDXRef-CommonsLangSrc + + SHA1 + c2b4e1c67a2d28fced849ee1bb76e7391b93f125 + + This file is used by Jena + Copyright 2001-2011 The Apache Software Foundation + Apache Software Foundation + ./lib-source/commons-lang3-3.1-sources.jar + ARCHIVE + Apache-2.0 + Apache-2.0 + Apache Commons Lang +Copyright 2001-2011 The Apache Software Foundation + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +This product includes software from the Spring Framework, +under the Apache License 2.0 (see: StringUtils.containsWhitespace()) + + + SPDXRef-JenaLib + + SHA1 + 3ab4e1c67a2d28fced849ee1bb76e7391b93f125 + + This file belongs to Jena + (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP + Apache Software Foundation + Hewlett Packard Inc. + ./lib-source/jena-2.6.3-sources.jar + ARCHIVE + This license is used by Jena + LicenseRef-1 + LicenseRef-1 + + + SPDXRef-File + + 2011-01-29T18:30:22Z + OTHER + Person: File Commenter + File level annotation + + + SHA1 + d6a770ba38583ed4bb4525bd96e50461655d2758 + + + MD5 + 624c1abb3664f4b35547e7c73864ad24 + + The concluded license was taken from the package level that the file was included in. +This information was found in the COPYING.txt file in the xyz directory. + Copyright 2008-2010 John Smith + The Regents of the University of California + Modified by Paul Mundt lethal@linux-sh.org + IBM Corporation + ./package/foo.c + SOURCE + The concluded license was taken from the package level that the file was included in. + (LGPL-2.0-only OR LicenseRef-2) + GPL-2.0-only + LicenseRef-2 + Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + SPDXRef-Snippet + This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0. + Copyright 2008-2010 John Smith + The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz. + GPL-2.0-only + GPL-2.0-only + from linux kernel + + + 420 + SPDXRef-DoapSource + + + 310 + SPDXRef-DoapSource + + + + + 23 + SPDXRef-DoapSource + + + 5 + SPDXRef-DoapSource + + + SPDXRef-DoapSource + + + SPDXRef-DOCUMENT + SPDXRef-Package + CONTAINS + + + SPDXRef-DOCUMENT + DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement + COPY_OF + + + SPDXRef-DOCUMENT + SPDXRef-File + DESCRIBES + + + SPDXRef-DOCUMENT + SPDXRef-Package + DESCRIBES + + + SPDXRef-Package + SPDXRef-JenaLib + CONTAINS + + + SPDXRef-Package + SPDXRef-Saxon + DYNAMIC_LINK + + + SPDXRef-CommonsLangSrc + NOASSERTION + GENERATED_FROM + + + SPDXRef-JenaLib + SPDXRef-Package + CONTAINS + + + SPDXRef-File + SPDXRef-fromDoap-0 + GENERATED_FROM + + diff --git a/examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml b/examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml new file mode 100644 index 00000000..d58cf229 --- /dev/null +++ b/examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml @@ -0,0 +1,390 @@ +--- +SPDXID: "SPDXRef-DOCUMENT" +spdxVersion: "SPDX-2.2" +creationInfo: + comment: "This package has been shipped in source and binary form.\nThe binaries\ + \ were created with gcc 4.5.1 and expect to link to\ncompatible system run time\ + \ libraries." + created: "2010-01-29T18:30:22Z" + creators: + - "Tool: LicenseFind-1.0" + - "Organization: ExampleCodeInspect ()" + - "Person: Jane Doe ()" + licenseListVersion: "3.9" +name: "SPDX-Tools-v2.0" +dataLicense: "CC0-1.0" +comment: "This document was created using SPDX 2.0 using licenses from the web site." +externalDocumentRefs: +- externalDocumentId: "DocumentRef-spdx-tool-1.2" + checksum: + algorithm: "SHA1" + checksumValue: "d6a770ba38583ed4bb4525bd96e50461655d2759" + spdxDocument: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" +hasExtractedLicensingInfos: +- licenseId: "LicenseRef-1" + extractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,\ + \ 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n\ + \ *\n * Redistribution and use in source and binary forms, with or without\n *\ + \ modification, are permitted provided that the following conditions\n * are met:\n\ + \ * 1. Redistributions of source code must retain the above copyright\n * notice,\ + \ this list of conditions and the following disclaimer.\n * 2. Redistributions\ + \ in binary form must reproduce the above copyright\n * notice, this list of\ + \ conditions and the following disclaimer in the\n * documentation and/or other\ + \ materials provided with the distribution.\n * 3. 
The name of the author may\ + \ not be used to endorse or promote products\n * derived from this software\ + \ without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED\ + \ BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING,\ + \ BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS\ + \ FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE\ + \ LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\ + \ DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS\ + \ OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\ + \ CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\ + \ OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE\ + \ USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ + */" +- licenseId: "LicenseRef-2" + extractedText: "This package includes the GRDDL parser developed by Hewlett Packard\ + \ under the following license:\n� Copyright 2007 Hewlett-Packard Development Company,\ + \ LP\n\nRedistribution and use in source and binary forms, with or without modification,\ + \ are permitted provided that the following conditions are met: \n\nRedistributions\ + \ of source code must retain the above copyright notice, this list of conditions\ + \ and the following disclaimer. \nRedistributions in binary form must reproduce\ + \ the above copyright notice, this list of conditions and the following disclaimer\ + \ in the documentation and/or other materials provided with the distribution.\ + \ \nThe name of the author may not be used to endorse or promote products derived\ + \ from this software without specific prior written permission. \nTHIS SOFTWARE\ + \ IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,\ + \ BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\ + \ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE\ + \ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\ + \ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\ + \ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\ + \ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\ + \ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\ + \ EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +- licenseId: "LicenseRef-4" + extractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n\ + \ *\n * Redistribution and use in source and binary forms, with or without\n *\ + \ modification, are permitted provided that the following conditions\n * are met:\n\ + \ * 1. Redistributions of source code must retain the above copyright\n * notice,\ + \ this list of conditions and the following disclaimer.\n * 2. Redistributions\ + \ in binary form must reproduce the above copyright\n * notice, this list of\ + \ conditions and the following disclaimer in the\n * documentation and/or other\ + \ materials provided with the distribution.\n * 3. 
The name of the author may\ + \ not be used to endorse or promote products\n * derived from this software\ + \ without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED\ + \ BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING,\ + \ BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS\ + \ FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE\ + \ LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\ + \ DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS\ + \ OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\ + \ CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\ + \ OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE\ + \ USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ + */" +- licenseId: "LicenseRef-Beerware-4.2" + comment: "The beerware license has a couple of other standard variants." + extractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote\ + \ this file. As long as you retain this notice you\ncan do whatever you want with\ + \ this stuff. If we meet some day, and you think this stuff is worth it, you can\ + \ buy me a beer in return Poul-Henning Kamp" + name: "Beer-Ware License (Version 42)" + seeAlsos: + - "http://people.freebsd.org/~phk/" +- licenseId: "LicenseRef-3" + comment: "This is tye CyperNeko License" + extractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright\ + \ 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source\ + \ and binary forms, with or without\nmodification, are permitted provided that\ + \ the following conditions\nare met:\n\n1. Redistributions of source code must\ + \ retain the above copyright\n notice, this list of conditions and the following\ + \ disclaimer. \n\n2. Redistributions in binary form must reproduce the above copyright\n\ + \ notice, this list of conditions and the following disclaimer in\n the documentation\ + \ and/or other materials provided with the\n distribution.\n\n3. The end-user\ + \ documentation included with the redistribution,\n if any, must include the\ + \ following acknowledgment: \n \"This product includes software developed\ + \ by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software\ + \ itself,\n if and wherever such third-party acknowledgments normally appear.\n\ + \n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n \ + \ or promote products derived from this software without prior \n written permission.\ + \ For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products\ + \ derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\"\ + \ appear in their name, without prior written\n permission of the author.\n\n\ + THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES,\ + \ INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND\ + \ FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR\ + \ OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\ + \ EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT\ + \ \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS\ + \ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT,\ + \ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY\ + \ WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF\ + \ SUCH DAMAGE." + name: "CyberNeko License" + seeAlsos: + - "http://people.apache.org/~andyc/neko/LICENSE" + - "http://justasample.url.com" +annotations: +- annotationDate: "2010-01-29T18:30:22Z" + annotationType: "OTHER" + annotator: "Person: Jane Doe ()" + comment: "Document level annotation" +- annotationDate: "2010-02-10T00:00:00Z" + annotationType: "REVIEW" + annotator: "Person: Joe Reviewer" + comment: "This is just an example. Some of the non-standard licenses look like\ + \ they are actually BSD 3 clause licenses" +- annotationDate: "2011-03-13T00:00:00Z" + annotationType: "REVIEW" + annotator: "Person: Suzanne Reviewer" + comment: "Another example reviewer." +documentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301" +documentDescribes: +- "SPDXRef-File" +- "SPDXRef-Package" +packages: +- SPDXID: "SPDXRef-Package" + annotations: + - annotationDate: "2011-01-29T18:30:22Z" + annotationType: "OTHER" + annotator: "Person: Package Commenter" + comment: "Package level annotation" + attributionTexts: + - "The GNU C Library is free software. See the file COPYING.LIB for copying conditions,\ + \ and LICENSES for notices about a few contributions that require these additional\ + \ notices to be distributed. License copyright years may be listed using range\ + \ notation, e.g., 1996-2015, indicating that every year in the range, inclusive,\ + \ is a copyrightable year that would otherwise be listed individually." + checksums: + - algorithm: "MD5" + checksumValue: "624c1abb3664f4b35547e7c73864ad24" + - algorithm: "SHA1" + checksumValue: "85ed0817af83a24ad8da68c2b5094de69833983c" + - algorithm: "SHA256" + checksumValue: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd" + copyrightText: "Copyright 2008-2010 John Smith" + description: "The GNU C Library defines functions that are specified by the ISO\ + \ C standard, as well as additional features specific to POSIX and other derivatives\ + \ of the Unix operating system, and extensions specific to GNU systems." + downloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz" + externalRefs: + - referenceCategory: "SECURITY" + referenceLocator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*" + referenceType: "cpe23Type" + - comment: "This is the external ref for Acme" + referenceCategory: "OTHER" + referenceLocator: "acmecorp/acmenator/4.1.3-alpha" + referenceType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge" + filesAnalyzed: true + hasFiles: + - "SPDXRef-CommonsLangSrc" + - "SPDXRef-JenaLib" + - "SPDXRef-DoapSource" + homepage: "http://ftp.gnu.org/gnu/glibc" + licenseComments: "The license for this project changed with the release of version\ + \ x.y. The version of the project included here post-dates the license change." 
+ licenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)" + licenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)" + licenseInfoFromFiles: + - "GPL-2.0-only" + - "LicenseRef-2" + - "LicenseRef-1" + name: "glibc" + originator: "Organization: ExampleCodeInspect (contact@example.com)" + packageFileName: "glibc-2.11.1.tar.gz" + packageVerificationCode: + packageVerificationCodeExcludedFiles: + - "./package.spdx" + packageVerificationCodeValue: "d6a770ba38583ed4bb4525bd96e50461655d2758" + sourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git." + summary: "GNU C library." + supplier: "Person: Jane Doe (jane.doe@example.com)" + versionInfo: "2.11.1" +- SPDXID: "SPDXRef-fromDoap-1" + copyrightText: "NOASSERTION" + downloadLocation: "NOASSERTION" + filesAnalyzed: false + homepage: "http://commons.apache.org/proper/commons-lang/" + licenseConcluded: "NOASSERTION" + licenseDeclared: "NOASSERTION" + name: "Apache Commons Lang" +- SPDXID: "SPDXRef-fromDoap-0" + copyrightText: "NOASSERTION" + downloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz" + externalRefs: + - referenceCategory: "PACKAGE_MANAGER" + referenceLocator: "pkg:maven/org.apache.jena/apache-jena@3.12.0" + referenceType: "purl" + filesAnalyzed: false + homepage: "http://www.openjena.org/" + licenseConcluded: "NOASSERTION" + licenseDeclared: "NOASSERTION" + name: "Jena" + versionInfo: "3.12.0" +- SPDXID: "SPDXRef-Saxon" + checksums: + - algorithm: "SHA1" + checksumValue: "85ed0817af83a24ad8da68c2b5094de69833983c" + copyrightText: "Copyright Saxonica Ltd" + description: "The Saxon package is a collection of tools for processing XML documents." + downloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download" + filesAnalyzed: false + homepage: "http://saxon.sourceforge.net/" + licenseComments: "Other versions available for a commercial license" + licenseConcluded: "MPL-1.0" + licenseDeclared: "MPL-1.0" + name: "Saxon" + packageFileName: "saxonB-8.8.zip" + versionInfo: "8.8" +files: +- SPDXID: "SPDXRef-DoapSource" + checksums: + - algorithm: "SHA1" + checksumValue: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12" + copyrightText: "Copyright 2010, 2011 Source Auditor Inc." + fileContributors: + - "Protecode Inc." + - "SPDX Technical Team Members" + - "Open Logic Inc." + - "Source Auditor Inc." 
+ - "Black Duck Software In.c" + fileName: "./src/org/spdx/parser/DOAPProject.java" + fileTypes: + - "SOURCE" + licenseConcluded: "Apache-2.0" + licenseInfoInFiles: + - "Apache-2.0" +- SPDXID: "SPDXRef-CommonsLangSrc" + checksums: + - algorithm: "SHA1" + checksumValue: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125" + comment: "This file is used by Jena" + copyrightText: "Copyright 2001-2011 The Apache Software Foundation" + fileContributors: + - "Apache Software Foundation" + fileName: "./lib-source/commons-lang3-3.1-sources.jar" + fileTypes: + - "ARCHIVE" + licenseConcluded: "Apache-2.0" + licenseInfoInFiles: + - "Apache-2.0" + noticeText: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\ + \nThis product includes software developed by\nThe Apache Software Foundation\ + \ (http://www.apache.org/).\n\nThis product includes software from the Spring\ + \ Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())" +- SPDXID: "SPDXRef-JenaLib" + checksums: + - algorithm: "SHA1" + checksumValue: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125" + comment: "This file belongs to Jena" + copyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,\ + \ 2009 Hewlett-Packard Development Company, LP" + fileContributors: + - "Apache Software Foundation" + - "Hewlett Packard Inc." + fileName: "./lib-source/jena-2.6.3-sources.jar" + fileTypes: + - "ARCHIVE" + licenseComments: "This license is used by Jena" + licenseConcluded: "LicenseRef-1" + licenseInfoInFiles: + - "LicenseRef-1" +- SPDXID: "SPDXRef-File" + annotations: + - annotationDate: "2011-01-29T18:30:22Z" + annotationType: "OTHER" + annotator: "Person: File Commenter" + comment: "File level annotation" + checksums: + - algorithm: "SHA1" + checksumValue: "d6a770ba38583ed4bb4525bd96e50461655d2758" + - algorithm: "MD5" + checksumValue: "624c1abb3664f4b35547e7c73864ad24" + comment: "The concluded license was taken from the package level that the file was\ + \ included in.\nThis information was found in the COPYING.txt file in the xyz\ + \ directory." + copyrightText: "Copyright 2008-2010 John Smith" + fileContributors: + - "The Regents of the University of California" + - "Modified by Paul Mundt lethal@linux-sh.org" + - "IBM Corporation" + fileName: "./package/foo.c" + fileTypes: + - "SOURCE" + licenseComments: "The concluded license was taken from the package level that the\ + \ file was included in." + licenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)" + licenseInfoInFiles: + - "GPL-2.0-only" + - "LicenseRef-2" + noticeText: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is\ + \ hereby granted, free of charge, to any person obtaining a copy of this software\ + \ and associated documentation files (the �Software�), to deal in the Software\ + \ without restriction, including without limitation the rights to use, copy, modify,\ + \ merge, publish, distribute, sublicense, and/or sell copies of the Software,\ + \ and to permit persons to whom the Software is furnished to do so, subject to\ + \ the following conditions: \nThe above copyright notice and this permission notice\ + \ shall be included in all copies or substantial portions of the Software.\n\n\ + THE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\ + \ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR\ + \ A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\ + \ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\ + \ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\ + \ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." +snippets: +- SPDXID: "SPDXRef-Snippet" + comment: "This snippet was identified as significant and highlighted in this Apache-2.0\ + \ file, when a commercial scanner identified it as being derived from file foo.c\ + \ in package xyz which is licensed under GPL-2.0." + copyrightText: "Copyright 2008-2010 John Smith" + licenseComments: "The concluded license was taken from package xyz, from which the\ + \ snippet was copied into the current file. The concluded license information\ + \ was found in the COPYING.txt file in package xyz." + licenseConcluded: "GPL-2.0-only" + licenseInfoInSnippets: + - "GPL-2.0-only" + name: "from linux kernel" + ranges: + - endPointer: + offset: 420 + reference: "SPDXRef-DoapSource" + startPointer: + offset: 310 + reference: "SPDXRef-DoapSource" + - endPointer: + lineNumber: 23 + reference: "SPDXRef-DoapSource" + startPointer: + lineNumber: 5 + reference: "SPDXRef-DoapSource" + snippetFromFile: "SPDXRef-DoapSource" +relationships: +- spdxElementId: "SPDXRef-DOCUMENT" + relatedSpdxElement: "SPDXRef-Package" + relationshipType: "CONTAINS" +- spdxElementId: "SPDXRef-DOCUMENT" + relatedSpdxElement: "DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement" + relationshipType: "COPY_OF" +- spdxElementId: "SPDXRef-DOCUMENT" + relatedSpdxElement: "SPDXRef-File" + relationshipType: "DESCRIBES" +- spdxElementId: "SPDXRef-DOCUMENT" + relatedSpdxElement: "SPDXRef-Package" + relationshipType: "DESCRIBES" +- spdxElementId: "SPDXRef-Package" + relatedSpdxElement: "SPDXRef-JenaLib" + relationshipType: "CONTAINS" +- spdxElementId: "SPDXRef-Package" + relatedSpdxElement: "SPDXRef-Saxon" + relationshipType: "DYNAMIC_LINK" +- spdxElementId: "SPDXRef-CommonsLangSrc" + relatedSpdxElement: "NOASSERTION" + relationshipType: "GENERATED_FROM" +- spdxElementId: "SPDXRef-JenaLib" + relatedSpdxElement: "SPDXRef-Package" + relationshipType: "CONTAINS" +- spdxElementId: "SPDXRef-File" + relatedSpdxElement: "SPDXRef-fromDoap-0" + relationshipType: "GENERATED_FROM" diff --git a/go.mod b/go.mod index 416c181b..213c1297 100644 --- a/go.mod +++ b/go.mod @@ -2,4 +2,7 @@ module github.com/spdx/tools-golang go 1.13 -require github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb +require ( + github.com/google/go-cmp v0.5.7 + github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb +) diff --git a/go.sum b/go.sum index 9e2d8b56..4355c263 100644 --- a/go.sum +++ b/go.sum @@ -1,2 +1,6 @@ +github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb h1:bLo8hvc8XFm9J47r690TUKBzcjSWdJDxmjXJZ+/f92U= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/idsearcher/idsearcher.go b/idsearcher/idsearcher.go index 3982109c..bb8bbd1a 100644 --- a/idsearcher/idsearcher.go +++ b/idsearcher/idsearcher.go @@ -73,7 +73,7 @@ func BuildIDsDocument2_1(packageName string, dirRoot string, idconfig *Config2_1 } // 
now, walk through each file and find its licenses (if any) - pkg := doc.Packages[spdx.ElementID("Package-"+packageName)] + pkg := doc.Packages[0] if pkg == nil { return nil, fmt.Errorf("builder returned nil Package") } @@ -83,7 +83,7 @@ func BuildIDsDocument2_1(packageName string, dirRoot string, idconfig *Config2_1 licsForPackage := map[string]int{} for _, f := range pkg.Files { // start by initializing / clearing values - f.LicenseInfoInFile = []string{"NOASSERTION"} + f.LicenseInfoInFiles = []string{"NOASSERTION"} f.LicenseConcluded = "NOASSERTION" // check whether the searcher should ignore this file @@ -114,11 +114,11 @@ func BuildIDsDocument2_1(packageName string, dirRoot string, idconfig *Config2_1 // OK -- now we can fill in the file's details, or NOASSERTION if none if len(licsForFile) > 0 { - f.LicenseInfoInFile = []string{} + f.LicenseInfoInFiles = []string{} for lic := range licsForFile { - f.LicenseInfoInFile = append(f.LicenseInfoInFile, lic) + f.LicenseInfoInFiles = append(f.LicenseInfoInFiles, lic) } - sort.Strings(f.LicenseInfoInFile) + sort.Strings(f.LicenseInfoInFiles) // avoid adding parens and joining for single-ID items if len(licsParens) == 1 { f.LicenseConcluded = ids[0] @@ -197,7 +197,7 @@ func BuildIDsDocument2_2(packageName string, dirRoot string, idconfig *Config2_2 } // now, walk through each file and find its licenses (if any) - pkg := doc.Packages[spdx.ElementID("Package-"+packageName)] + pkg := doc.Packages[0] if pkg == nil { return nil, fmt.Errorf("builder returned nil Package") } @@ -207,7 +207,7 @@ func BuildIDsDocument2_2(packageName string, dirRoot string, idconfig *Config2_2 licsForPackage := map[string]int{} for _, f := range pkg.Files { // start by initializing / clearing values - f.LicenseInfoInFile = []string{"NOASSERTION"} + f.LicenseInfoInFiles = []string{"NOASSERTION"} f.LicenseConcluded = "NOASSERTION" // check whether the searcher should ignore this file @@ -238,11 +238,11 @@ func BuildIDsDocument2_2(packageName string, dirRoot string, idconfig *Config2_2 // OK -- now we can fill in the file's details, or NOASSERTION if none if len(licsForFile) > 0 { - f.LicenseInfoInFile = []string{} + f.LicenseInfoInFiles = []string{} for lic := range licsForFile { - f.LicenseInfoInFile = append(f.LicenseInfoInFile, lic) + f.LicenseInfoInFiles = append(f.LicenseInfoInFiles, lic) } - sort.Strings(f.LicenseInfoInFile) + sort.Strings(f.LicenseInfoInFiles) // avoid adding parens and joining for single-ID items if len(licsParens) == 1 { f.LicenseConcluded = ids[0] diff --git a/idsearcher/idsearcher_test.go b/idsearcher/idsearcher_test.go index ab0adc93..00e52065 100644 --- a/idsearcher/idsearcher_test.go +++ b/idsearcher/idsearcher_test.go @@ -4,8 +4,6 @@ package idsearcher import ( "testing" - - "github.com/spdx/tools-golang/spdx" ) // ===== 2.1 Searcher top-level function tests ===== @@ -33,7 +31,7 @@ func Test2_1SearcherCanFillInIDs(t *testing.T) { if len(doc.Packages) != 1 { t.Fatalf("expected Packages len to be 1, got %d", len(doc.Packages)) } - pkg := doc.Packages[spdx.ElementID("Package-project2")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -45,101 +43,101 @@ func Test2_1SearcherCanFillInIDs(t *testing.T) { t.Fatalf("expected Files len to be 6, got %d", len(pkg.Files)) } - fileInFolder := pkg.Files[spdx.ElementID("File0")] - if fileInFolder.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileInFolder := pkg.Files[0] + if fileInFolder.LicenseInfoInFiles == nil { + 
t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileInFolder.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileInFolder.LicenseInfoInFile)) + if len(fileInFolder.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileInFolder.LicenseInfoInFiles)) } - if fileInFolder.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseInfoInFile[0]) + if fileInFolder.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseInfoInFiles[0]) } if fileInFolder.LicenseConcluded != "MIT" { t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseConcluded) } - fileTrailingComment := pkg.Files[spdx.ElementID("File1")] - if fileTrailingComment.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileTrailingComment := pkg.Files[1] + if fileTrailingComment.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileTrailingComment.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileTrailingComment.LicenseInfoInFile)) + if len(fileTrailingComment.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileTrailingComment.LicenseInfoInFiles)) } - if fileTrailingComment.LicenseInfoInFile[0] != "GPL-2.0-or-later" { - t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseInfoInFile[0]) + if fileTrailingComment.LicenseInfoInFiles[0] != "GPL-2.0-or-later" { + t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseInfoInFiles[0]) } if fileTrailingComment.LicenseConcluded != "GPL-2.0-or-later" { t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseConcluded) } - fileHasDuplicateID := pkg.Files[spdx.ElementID("File2")] - if fileHasDuplicateID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileHasDuplicateID := pkg.Files[2] + if fileHasDuplicateID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileHasDuplicateID.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileHasDuplicateID.LicenseInfoInFile)) + if len(fileHasDuplicateID.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileHasDuplicateID.LicenseInfoInFiles)) } - if fileHasDuplicateID.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseInfoInFile[0]) + if fileHasDuplicateID.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseInfoInFiles[0]) } if fileHasDuplicateID.LicenseConcluded != "MIT" { t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseConcluded) } - fileHasID := pkg.Files[spdx.ElementID("File3")] - if fileHasID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileHasID := pkg.Files[3] + if fileHasID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileHasID.LicenseInfoInFile) != 2 { - t.Fatalf("expected LicenseInfoInFile len to be 2, got %d", len(fileHasID.LicenseInfoInFile)) + if len(fileHasID.LicenseInfoInFiles) != 2 { + t.Fatalf("expected LicenseInfoInFiles len to be 2, got %d", len(fileHasID.LicenseInfoInFiles)) } - if fileHasID.LicenseInfoInFile[0] != "Apache-2.0" { - 
t.Errorf("expected %v, got %v", "Apache-2.0", fileHasID.LicenseInfoInFile[0]) + if fileHasID.LicenseInfoInFiles[0] != "Apache-2.0" { + t.Errorf("expected %v, got %v", "Apache-2.0", fileHasID.LicenseInfoInFiles[0]) } - if fileHasID.LicenseInfoInFile[1] != "GPL-2.0-or-later" { - t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileHasID.LicenseInfoInFile[1]) + if fileHasID.LicenseInfoInFiles[1] != "GPL-2.0-or-later" { + t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileHasID.LicenseInfoInFiles[1]) } if fileHasID.LicenseConcluded != "Apache-2.0 OR GPL-2.0-or-later" { t.Errorf("expected %v, got %v", "Apache-2.0 OR GPL-2.0-or-later", fileHasID.LicenseConcluded) } - fileMultipleIDs := pkg.Files[spdx.ElementID("File4")] - if fileMultipleIDs.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileMultipleIDs := pkg.Files[4] + if fileMultipleIDs.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileMultipleIDs.LicenseInfoInFile) != 5 { - t.Fatalf("expected LicenseInfoInFile len to be 5, got %d", len(fileMultipleIDs.LicenseInfoInFile)) + if len(fileMultipleIDs.LicenseInfoInFiles) != 5 { + t.Fatalf("expected LicenseInfoInFiles len to be 5, got %d", len(fileMultipleIDs.LicenseInfoInFiles)) } - if fileMultipleIDs.LicenseInfoInFile[0] != "BSD-2-Clause" { - t.Errorf("expected %v, got %v", "BSD-2-Clause", fileMultipleIDs.LicenseInfoInFile[0]) + if fileMultipleIDs.LicenseInfoInFiles[0] != "BSD-2-Clause" { + t.Errorf("expected %v, got %v", "BSD-2-Clause", fileMultipleIDs.LicenseInfoInFiles[0]) } - if fileMultipleIDs.LicenseInfoInFile[1] != "BSD-3-Clause" { - t.Errorf("expected %v, got %v", "BSD-3-Clause", fileMultipleIDs.LicenseInfoInFile[1]) + if fileMultipleIDs.LicenseInfoInFiles[1] != "BSD-3-Clause" { + t.Errorf("expected %v, got %v", "BSD-3-Clause", fileMultipleIDs.LicenseInfoInFiles[1]) } // here, DO NOT keep the + - if fileMultipleIDs.LicenseInfoInFile[2] != "EPL-1.0" { - t.Errorf("expected %v, got %v", "EPL-1.0", fileMultipleIDs.LicenseInfoInFile[2]) + if fileMultipleIDs.LicenseInfoInFiles[2] != "EPL-1.0" { + t.Errorf("expected %v, got %v", "EPL-1.0", fileMultipleIDs.LicenseInfoInFiles[2]) } - if fileMultipleIDs.LicenseInfoInFile[3] != "ISC" { - t.Errorf("expected %v, got %v", "ISC", fileMultipleIDs.LicenseInfoInFile[3]) + if fileMultipleIDs.LicenseInfoInFiles[3] != "ISC" { + t.Errorf("expected %v, got %v", "ISC", fileMultipleIDs.LicenseInfoInFiles[3]) } - if fileMultipleIDs.LicenseInfoInFile[4] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileMultipleIDs.LicenseInfoInFile[4]) + if fileMultipleIDs.LicenseInfoInFiles[4] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileMultipleIDs.LicenseInfoInFiles[4]) } if fileMultipleIDs.LicenseConcluded != "((MIT AND BSD-3-Clause) OR ISC) AND BSD-2-Clause AND EPL-1.0+" { t.Errorf("expected %v, got %v", "((MIT AND BSD-3-Clause) OR ISC) AND BSD-2-Clause AND EPL-1.0+", fileMultipleIDs.LicenseConcluded) } - fileNoID := pkg.Files[spdx.ElementID("File5")] - if fileNoID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileNoID := pkg.Files[5] + if fileNoID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileNoID.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileNoID.LicenseInfoInFile)) + if len(fileNoID.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileNoID.LicenseInfoInFiles)) } - if 
fileNoID.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseInfoInFile[0]) + if fileNoID.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseInfoInFiles[0]) } if fileNoID.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseConcluded) @@ -205,7 +203,7 @@ func Test2_1SearcherCanFillInIDsAndIgnorePaths(t *testing.T) { // get the package and its files, checking licenses for each, and // confirming NOASSERTION for those that are skipped - pkg := doc.Packages[spdx.ElementID("Package-project3")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -213,71 +211,71 @@ func Test2_1SearcherCanFillInIDsAndIgnorePaths(t *testing.T) { t.Fatalf("expected len %d, got %d", 5, len(pkg.Files)) } - f := pkg.Files[spdx.ElementID("File0")] + f := pkg.Files[0] if f.FileName != "./dontscan.txt" { t.Errorf("expected %v, got %v", "./dontscan.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File1")] + f = pkg.Files[1] if f.FileName != "./keep/keep.txt" { t.Errorf("expected %v, got %v", "./keep/keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "MIT" { t.Errorf("expected %s, got %s", "MIT", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File2")] + f = pkg.Files[2] if f.FileName != "./keep.txt" { t.Errorf("expected %v, got %v", "./keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File3")] + f = pkg.Files[3] if f.FileName != "./subdir/keep/dontscan.txt" { t.Errorf("expected %v, got %v", "./subdir/keep/dontscan.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", 
f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File4")] + f = pkg.Files[4] if f.FileName != "./subdir/keep/keep.txt" { t.Errorf("expected %v, got %v", "./subdir/keep/keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "MIT" { t.Errorf("expected %s, got %s", "MIT", f.LicenseConcluded) @@ -322,7 +320,7 @@ func Test2_2SearcherCanFillInIDs(t *testing.T) { if len(doc.Packages) != 1 { t.Fatalf("expected Packages len to be 1, got %d", len(doc.Packages)) } - pkg := doc.Packages[spdx.ElementID("Package-project2")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -334,101 +332,101 @@ func Test2_2SearcherCanFillInIDs(t *testing.T) { t.Fatalf("expected Files len to be 6, got %d", len(pkg.Files)) } - fileInFolder := pkg.Files[spdx.ElementID("File0")] - if fileInFolder.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileInFolder := pkg.Files[0] + if fileInFolder.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileInFolder.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileInFolder.LicenseInfoInFile)) + if len(fileInFolder.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileInFolder.LicenseInfoInFiles)) } - if fileInFolder.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseInfoInFile[0]) + if fileInFolder.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseInfoInFiles[0]) } if fileInFolder.LicenseConcluded != "MIT" { t.Errorf("expected %v, got %v", "MIT", fileInFolder.LicenseConcluded) } - fileTrailingComment := pkg.Files[spdx.ElementID("File1")] - if fileTrailingComment.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileTrailingComment := pkg.Files[1] + if fileTrailingComment.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileTrailingComment.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileTrailingComment.LicenseInfoInFile)) + if len(fileTrailingComment.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileTrailingComment.LicenseInfoInFiles)) } - if fileTrailingComment.LicenseInfoInFile[0] != "GPL-2.0-or-later" { - t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseInfoInFile[0]) + if fileTrailingComment.LicenseInfoInFiles[0] != "GPL-2.0-or-later" { + t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseInfoInFiles[0]) } if fileTrailingComment.LicenseConcluded != "GPL-2.0-or-later" { t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileTrailingComment.LicenseConcluded) } - fileHasDuplicateID := 
pkg.Files[spdx.ElementID("File2")] - if fileHasDuplicateID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileHasDuplicateID := pkg.Files[2] + if fileHasDuplicateID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileHasDuplicateID.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileHasDuplicateID.LicenseInfoInFile)) + if len(fileHasDuplicateID.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileHasDuplicateID.LicenseInfoInFiles)) } - if fileHasDuplicateID.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseInfoInFile[0]) + if fileHasDuplicateID.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseInfoInFiles[0]) } if fileHasDuplicateID.LicenseConcluded != "MIT" { t.Errorf("expected %v, got %v", "MIT", fileHasDuplicateID.LicenseConcluded) } - fileHasID := pkg.Files[spdx.ElementID("File3")] - if fileHasID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileHasID := pkg.Files[3] + if fileHasID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileHasID.LicenseInfoInFile) != 2 { - t.Fatalf("expected LicenseInfoInFile len to be 2, got %d", len(fileHasID.LicenseInfoInFile)) + if len(fileHasID.LicenseInfoInFiles) != 2 { + t.Fatalf("expected LicenseInfoInFiles len to be 2, got %d", len(fileHasID.LicenseInfoInFiles)) } - if fileHasID.LicenseInfoInFile[0] != "Apache-2.0" { - t.Errorf("expected %v, got %v", "Apache-2.0", fileHasID.LicenseInfoInFile[0]) + if fileHasID.LicenseInfoInFiles[0] != "Apache-2.0" { + t.Errorf("expected %v, got %v", "Apache-2.0", fileHasID.LicenseInfoInFiles[0]) } - if fileHasID.LicenseInfoInFile[1] != "GPL-2.0-or-later" { - t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileHasID.LicenseInfoInFile[1]) + if fileHasID.LicenseInfoInFiles[1] != "GPL-2.0-or-later" { + t.Errorf("expected %v, got %v", "GPL-2.0-or-later", fileHasID.LicenseInfoInFiles[1]) } if fileHasID.LicenseConcluded != "Apache-2.0 OR GPL-2.0-or-later" { t.Errorf("expected %v, got %v", "Apache-2.0 OR GPL-2.0-or-later", fileHasID.LicenseConcluded) } - fileMultipleIDs := pkg.Files[spdx.ElementID("File4")] - if fileMultipleIDs.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileMultipleIDs := pkg.Files[4] + if fileMultipleIDs.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileMultipleIDs.LicenseInfoInFile) != 5 { - t.Fatalf("expected LicenseInfoInFile len to be 5, got %d", len(fileMultipleIDs.LicenseInfoInFile)) + if len(fileMultipleIDs.LicenseInfoInFiles) != 5 { + t.Fatalf("expected LicenseInfoInFiles len to be 5, got %d", len(fileMultipleIDs.LicenseInfoInFiles)) } - if fileMultipleIDs.LicenseInfoInFile[0] != "BSD-2-Clause" { - t.Errorf("expected %v, got %v", "BSD-2-Clause", fileMultipleIDs.LicenseInfoInFile[0]) + if fileMultipleIDs.LicenseInfoInFiles[0] != "BSD-2-Clause" { + t.Errorf("expected %v, got %v", "BSD-2-Clause", fileMultipleIDs.LicenseInfoInFiles[0]) } - if fileMultipleIDs.LicenseInfoInFile[1] != "BSD-3-Clause" { - t.Errorf("expected %v, got %v", "BSD-3-Clause", fileMultipleIDs.LicenseInfoInFile[1]) + if fileMultipleIDs.LicenseInfoInFiles[1] != "BSD-3-Clause" { + t.Errorf("expected %v, got %v", "BSD-3-Clause", fileMultipleIDs.LicenseInfoInFiles[1]) } // here, 
DO NOT keep the + - if fileMultipleIDs.LicenseInfoInFile[2] != "EPL-1.0" { - t.Errorf("expected %v, got %v", "EPL-1.0", fileMultipleIDs.LicenseInfoInFile[2]) + if fileMultipleIDs.LicenseInfoInFiles[2] != "EPL-1.0" { + t.Errorf("expected %v, got %v", "EPL-1.0", fileMultipleIDs.LicenseInfoInFiles[2]) } - if fileMultipleIDs.LicenseInfoInFile[3] != "ISC" { - t.Errorf("expected %v, got %v", "ISC", fileMultipleIDs.LicenseInfoInFile[3]) + if fileMultipleIDs.LicenseInfoInFiles[3] != "ISC" { + t.Errorf("expected %v, got %v", "ISC", fileMultipleIDs.LicenseInfoInFiles[3]) } - if fileMultipleIDs.LicenseInfoInFile[4] != "MIT" { - t.Errorf("expected %v, got %v", "MIT", fileMultipleIDs.LicenseInfoInFile[4]) + if fileMultipleIDs.LicenseInfoInFiles[4] != "MIT" { + t.Errorf("expected %v, got %v", "MIT", fileMultipleIDs.LicenseInfoInFiles[4]) } if fileMultipleIDs.LicenseConcluded != "((MIT AND BSD-3-Clause) OR ISC) AND BSD-2-Clause AND EPL-1.0+" { t.Errorf("expected %v, got %v", "((MIT AND BSD-3-Clause) OR ISC) AND BSD-2-Clause AND EPL-1.0+", fileMultipleIDs.LicenseConcluded) } - fileNoID := pkg.Files[spdx.ElementID("File5")] - if fileNoID.LicenseInfoInFile == nil { - t.Fatalf("expected non-nil LicenseInfoInFile, got nil") + fileNoID := pkg.Files[5] + if fileNoID.LicenseInfoInFiles == nil { + t.Fatalf("expected non-nil LicenseInfoInFiles, got nil") } - if len(fileNoID.LicenseInfoInFile) != 1 { - t.Fatalf("expected LicenseInfoInFile len to be 1, got %d", len(fileNoID.LicenseInfoInFile)) + if len(fileNoID.LicenseInfoInFiles) != 1 { + t.Fatalf("expected LicenseInfoInFiles len to be 1, got %d", len(fileNoID.LicenseInfoInFiles)) } - if fileNoID.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseInfoInFile[0]) + if fileNoID.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseInfoInFiles[0]) } if fileNoID.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %v, got %v", "NOASSERTION", fileNoID.LicenseConcluded) @@ -494,7 +492,7 @@ func Test2_2SearcherCanFillInIDsAndIgnorePaths(t *testing.T) { // get the package and its files, checking licenses for each, and // confirming NOASSERTION for those that are skipped - pkg := doc.Packages[spdx.ElementID("Package-project3")] + pkg := doc.Packages[0] if pkg == nil { t.Fatalf("expected non-nil pkg, got nil") } @@ -502,71 +500,71 @@ func Test2_2SearcherCanFillInIDsAndIgnorePaths(t *testing.T) { t.Fatalf("expected len %d, got %d", 5, len(pkg.Files)) } - f := pkg.Files[spdx.ElementID("File0")] + f := pkg.Files[0] if f.FileName != "./dontscan.txt" { t.Errorf("expected %v, got %v", "./dontscan.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File1")] + f = pkg.Files[1] if f.FileName != "./keep/keep.txt" { t.Errorf("expected %v, got %v", "./keep/keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if 
len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "MIT" { t.Errorf("expected %s, got %s", "MIT", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File2")] + f = pkg.Files[2] if f.FileName != "./keep.txt" { t.Errorf("expected %v, got %v", "./keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File3")] + f = pkg.Files[3] if f.FileName != "./subdir/keep/dontscan.txt" { t.Errorf("expected %v, got %v", "./subdir/keep/dontscan.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "NOASSERTION" { - t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "NOASSERTION" { + t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "NOASSERTION" { t.Errorf("expected %s, got %s", "NOASSERTION", f.LicenseConcluded) } - f = pkg.Files[spdx.ElementID("File4")] + f = pkg.Files[4] if f.FileName != "./subdir/keep/keep.txt" { t.Errorf("expected %v, got %v", "./subdir/keep/keep.txt", f.FileName) } - if len(f.LicenseInfoInFile) != 1 { - t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFile)) + if len(f.LicenseInfoInFiles) != 1 { + t.Errorf("expected len to be %d, got %d", 1, len(f.LicenseInfoInFiles)) } - if f.LicenseInfoInFile[0] != "MIT" { - t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFile[0]) + if f.LicenseInfoInFiles[0] != "MIT" { + t.Errorf("expected %s, got %s", "MIT", f.LicenseInfoInFiles[0]) } if f.LicenseConcluded != "MIT" { t.Errorf("expected %s, got %s", "MIT", f.LicenseConcluded) diff --git a/json/json_test.go b/json/json_test.go new file mode 100644 index 00000000..c78013cb --- /dev/null +++ b/json/json_test.go @@ -0,0 +1,449 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_json + +import ( + "bytes" + "fmt" + "github.com/google/go-cmp/cmp" + "os" + "testing" + + "github.com/spdx/tools-golang/spdx" +) + +func TestLoad2_2(t *testing.T) { + file, err := os.Open("../examples/sample-docs/json/SPDXJSONExample-v2.2.spdx.json") + if err != nil { + panic(fmt.Errorf("error opening File: %s", err)) + } + + got, err := Load2_2(file) + if err != nil { + t.Errorf("json.parser.Load2_2() error = %v", err) + return + } + + // get a copy of the handwritten struct so we don't mutate it on accident + handwrittenExample := want + + if cmp.Equal(handwrittenExample, got) { + t.Errorf("Got incorrect struct after parsing JSON example") + return + } +} + +func TestWrite2_2(t *testing.T) { + w := 
&bytes.Buffer{} + // get a copy of the handwritten struct so we don't mutate it on accident + handwrittenExample := want + if err := Save2_2(&handwrittenExample, w); err != nil { + t.Errorf("Save2_2() error = %v", err.Error()) + return + } + + // we should be able to parse what the writer wrote, and it should be identical to the original struct we wrote + parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes())) + if err != nil { + t.Errorf("failed to parse written document: %v", err.Error()) + return + } + + if cmp.Equal(handwrittenExample, parsedDoc) { + t.Errorf("Got incorrect struct after writing and re-parsing JSON example") + return + } +} + +// want is handwritten translation of the official example JSON SPDX v2.2 document into a Go struct. +// We expect that the result of parsing the official document should be this value. +// We expect that the result of writing this struct should match the official example document. +var want = spdx.Document2_2{ + DataLicense: "CC0-1.0", + SPDXVersion: "SPDX-2.2", + SPDXIdentifier: "SPDXRef-DOCUMENT", + DocumentName: "SPDX-Tools-v2.0", + DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", + CreationInfo: &spdx.CreationInfo2_2{ + LicenseListVersion: "3.9", + Creators: []spdx.Creator{ + {CreatorType: "Tool", Creator: "LicenseFind-1.0"}, + {CreatorType: "Organization", Creator: "ExampleCodeInspect ()"}, + {CreatorType: "Person", Creator: "Jane Doe ()"}, + }, + Created: "2010-01-29T18:30:22Z", + CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", + }, + DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ + { + DocumentRefID: "DocumentRef-spdx-tool-1.2", + URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", + Checksum: spdx.Checksum{ + Algorithm: spdx.SHA1, + Value: "d6a770ba38583ed4bb4525bd96e50461655d2759", + }, + }, + }, + OtherLicenses: []*spdx.OtherLicense2_2{ + { + LicenseIdentifier: "LicenseRef-1", + ExtractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. 
The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-2", + ExtractedText: "This package includes the GRDDL parser developed by Hewlett Packard under the following license:\n� Copyright 2007 Hewlett-Packard Development Company, LP\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: \n\nRedistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. \nRedistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. \nThe name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. \nTHIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + }, + { + LicenseIdentifier: "LicenseRef-4", + ExtractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. 
The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-Beerware-4.2", + ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp", + LicenseComment: "The beerware license has a couple of other standard variants.", + LicenseName: "Beer-Ware License (Version 42)", + LicenseCrossReferences: []string{"http://people.freebsd.org/~phk/"}, + }, + { + LicenseIdentifier: "LicenseRef-3", + ExtractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer. \n\n2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n3. The end-user documentation included with the redistribution,\n if any, must include the following acknowledgment: \n \"This product includes software developed by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software itself,\n if and wherever such third-party acknowledgments normally appear.\n\n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n or promote products derived from this software without prior \n written permission. For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\" appear in their name, without prior written\n permission of the author.\n\nTHIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + LicenseName: "CyberNeko License", + LicenseCrossReferences: []string{ + "http://people.apache.org/~andyc/neko/LICENSE", + "http://justasample.url.com", + }, + LicenseComment: "This is tye CyperNeko License", + }, + }, + Annotations: []*spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Jane Doe ()", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Document level annotation", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Joe Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-02-10T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Suzanne Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-03-13T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "Another example reviewer.", + }, + }, + Packages: []*spdx.Package2_2{ + { + PackageName: "glibc", + PackageSPDXIdentifier: "SPDXRef-Package", + PackageVersion: "2.11.1", + PackageFileName: "glibc-2.11.1.tar.gz", + PackageSupplier: &spdx.Supplier{ + Supplier: "Jane Doe (jane.doe@example.com)", + SupplierType: "Person", + }, + PackageOriginator: &spdx.Originator{ + Originator: "ExampleCodeInspect (contact@example.com)", + OriginatorType: "Organization", + }, + PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", + FilesAnalyzed: true, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + ExcludedFiles: []string{"./package.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: "SHA256", + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + }, + PackageHomePage: "http://ftp.gnu.org/gnu/glibc", + PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", + PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", + PackageLicenseInfoFromFiles: []string{ + "GPL-2.0-only", + "LicenseRef-2", + "LicenseRef-1", + }, + PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", + PackageLicenseComments: "The license for this project changed with the release of version x.y. 
The version of the project included here post-dates the license change.", + PackageCopyrightText: "Copyright 2008-2010 John Smith", + PackageSummary: "GNU C library.", + PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", + PackageComment: "", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "SECURITY", + RefType: "cpe23Type", + Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", + }, + { + Category: "OTHER", + RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", + Locator: "acmecorp/acmenator/4.1.3-alpha", + ExternalRefComment: "This is the external ref for Acme", + }, + }, + PackageAttributionTexts: []string{ + "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.", + }, + Files: nil, + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Package Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Package level annotation", + }, + }, + }, + { + PackageSPDXIdentifier: "SPDXRef-fromDoap-1", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: false, + PackageHomePage: "http://commons.apache.org/proper/commons-lang/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageName: "Apache Commons Lang", + }, + { + PackageName: "Jena", + PackageSPDXIdentifier: "SPDXRef-fromDoap-0", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "PACKAGE_MANAGER", + RefType: "purl", + Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", + }, + }, + FilesAnalyzed: false, + PackageHomePage: "http://www.openjena.org/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageVersion: "3.12.0", + }, + { + PackageSPDXIdentifier: "SPDXRef-Saxon", + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, + PackageCopyrightText: "Copyright Saxonica Ltd", + PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", + PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", + FilesAnalyzed: false, + PackageHomePage: "http://saxon.sourceforge.net/", + PackageLicenseComments: "Other versions available for a commercial license", + PackageLicenseConcluded: "MPL-1.0", + PackageLicenseDeclared: "MPL-1.0", + PackageName: "Saxon", + PackageFileName: "saxonB-8.8.zip", + PackageVersion: "8.8", + }, + }, + Files: []*spdx.File2_2{ + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "SPDXRef-DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + 
Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + { + FileSPDXIdentifier: "SPDXRef-CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "SPDXRef-JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileSPDXIdentifier: "SPDXRef-File", + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "File Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "File level annotation", + }, + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + }, + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", + FileCopyrightText: "Copyright 2008-2010 John Smith", + FileContributors: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, + FileName: "./package/foo.c", + FileTypes: []string{"SOURCE"}, + LicenseComments: "The concluded license was taken from the package level that the file was included in.", + LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", + LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, + FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the 
following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + }, + }, + Snippets: []spdx.Snippet2_2{ + { + SnippetSPDXIdentifier: "SPDXRef-Snippet", + SnippetFromFileSPDXIdentifier: "SPDXRef-DoapSource", + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{ + Offset: 310, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + Offset: 420, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + }, + { + StartPointer: spdx.SnippetRangePointer{ + LineNumber: 5, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + LineNumber: 23, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-only", + LicenseInfoInSnippet: []string{"GPL-2.0-only"}, + SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", + SnippetCopyrightText: "Copyright 2008-2010 John Smith", + SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", + SnippetName: "from linux kernel", + }, + }, + Relationships: []*spdx.Relationship2_2{ + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("spdx-tool-1.2", "ToolsElement"), + Relationship: "COPY_OF", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "File"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "JenaLib"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "Saxon"), + Relationship: "DYNAMIC_LINK", + }, + { + RefA: spdx.MakeDocElementID("", "CommonsLangSrc"), + RefB: spdx.MakeDocElementSpecial("NOASSERTION"), + Relationship: "GENERATED_FROM", + }, + { + RefA: spdx.MakeDocElementID("", "JenaLib"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "File"), + RefB: spdx.MakeDocElementID("", "fromDoap-0"), + Relationship: "GENERATED_FROM", + }, + }, +} diff --git a/json/parser.go b/json/parser.go new file mode 100644 index 00000000..387b5b06 --- /dev/null +++ b/json/parser.go @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_json + +import ( + "bytes" + "encoding/json" + "io" + + "github.com/spdx/tools-golang/spdx" +) + +// Load2_2 takes in an io.Reader and returns an SPDX document. 
+func Load2_2(content io.Reader) (*spdx.Document2_2, error) { + // convert io.Reader to a slice of bytes and call the parser + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(content) + if err != nil { + return nil, err + } + + var doc spdx.Document2_2 + err = json.Unmarshal(buf.Bytes(), &doc) + if err != nil { + return nil, err + } + + return &doc, nil +} diff --git a/json/writer.go b/json/writer.go new file mode 100644 index 00000000..c5980289 --- /dev/null +++ b/json/writer.go @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_json + +import ( + "encoding/json" + "io" + + "github.com/spdx/tools-golang/spdx" +) + +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer in JSON format. +func Save2_2(doc *spdx.Document2_2, w io.Writer) error { + buf, err := json.Marshal(doc) + if err != nil { + return err + } + + _, err = w.Write(buf) + if err != nil { + return err + } + + return nil +} diff --git a/jsonloader/jsonloader.go b/jsonloader/jsonloader.go deleted file mode 100644 index 8e2646c8..00000000 --- a/jsonloader/jsonloader.go +++ /dev/null @@ -1,24 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package jsonloader - -import ( - "bytes" - "io" - - parser2v2 "github.com/spdx/tools-golang/jsonloader/parser2v2" - "github.com/spdx/tools-golang/spdx" -) - -// Takes in a file Reader and returns the pertaining spdx document -// or the error if any is encountered while setting the doc. -func Load2_2(content io.Reader) (*spdx.Document2_2, error) { - //convert io.Reader to a slice of bytes and call the parser - buf := new(bytes.Buffer) - buf.ReadFrom(content) - var doc, err = parser2v2.Load2_2(buf.Bytes()) - if err != nil { - return nil, err - } - return doc, nil -} diff --git a/jsonloader/jsonloader_test.go b/jsonloader/jsonloader_test.go deleted file mode 100644 index e90a6a58..00000000 --- a/jsonloader/jsonloader_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package jsonloader - -import ( - "bytes" - "fmt" - "io" - "os" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestLoad2_2(t *testing.T) { - - file, err := os.Open("./parser2v2/jsonfiles/jsonloadertest.json") - if err != nil { - panic(fmt.Errorf("error opening File: %s", err)) - } - - type args struct { - content io.Reader - } - tests := []struct { - name string - args args - want *spdx.Document2_2 - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "success test", - args: args{ - content: file, - }, - want: &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - DataLicense: "CC0-1.0", - SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "DOCUMENT", - DocumentName: "SPDX-Tools-v2.0", - ExternalDocumentReferences: make(map[string]spdx.ExternalDocumentRef2_2), - }, - }, - wantErr: false, - }, - { - name: "fail - invalidjson ", - args: args{ - content: bytes.NewReader([]byte(`{"Hello":"HI",}`)), - }, - want: nil, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := Load2_2(tt.args.content) - if (err != nil) != tt.wantErr { - t.Errorf("Load2_2() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !tt.wantErr && !reflect.DeepEqual(got.CreationInfo, tt.want.CreationInfo) { - t.Errorf("Load2_2() = %v, want %v", got.CreationInfo, tt.want.CreationInfo) - } - }) - } -} diff --git a/jsonloader/parser2v2/jsonfiles/jsonloadertest.json b/jsonloader/parser2v2/jsonfiles/jsonloadertest.json deleted file mode 100644 index cf5955a5..00000000 --- a/jsonloader/parser2v2/jsonfiles/jsonloadertest.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "SPDXID" : "SPDXRef-DOCUMENT", - "spdxVersion" : "SPDX-2.2", - "name" : "SPDX-Tools-v2.0", - "dataLicense" : "CC0-1.0" -} \ No newline at end of file diff --git a/jsonloader/parser2v2/jsonfiles/otherlicensestest.json b/jsonloader/parser2v2/jsonfiles/otherlicensestest.json deleted file mode 100644 index 21448601..00000000 --- a/jsonloader/parser2v2/jsonfiles/otherlicensestest.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "hasExtractedLicensingInfos" : [ { - "extractedText" : "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. 
If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp unknown tag", - spec: specs2, - args: args{ - key: "hasExtractedLicensingInfos", - value: specs2["hasExtractedLicensingInfos"], - doc: &spdxDocument2_2{}, - }, - want: nil, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.spec.parseJsonOtherLicenses2_2(tt.args.key, tt.args.value, tt.args.doc); (err != nil) != tt.wantErr { - t.Errorf("JSONSpdxDocument.parseJsonOtherLicenses2_2() error = %v, wantErr %v", err, tt.wantErr) - } - - if !tt.wantErr { - for i := 0; i < len(tt.want); i++ { - if !reflect.DeepEqual(tt.args.doc.OtherLicenses[i], tt.want[i]) { - t.Errorf("Load2_2() = %v, want %v", tt.args.doc.OtherLicenses[i], tt.want[i]) - } - } - } - }) - } -} diff --git a/jsonloader/parser2v2/parse_package.go b/jsonloader/parser2v2/parse_package.go deleted file mode 100644 index f698da5c..00000000 --- a/jsonloader/parser2v2/parse_package.go +++ /dev/null @@ -1,211 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "fmt" - "reflect" - "strings" - - "github.com/spdx/tools-golang/spdx" -) - -func (spec JSONSpdxDocument) parseJsonPackages2_2(key string, value interface{}, doc *spdxDocument2_2) error { - - if doc.Packages == nil { - doc.Packages = map[spdx.ElementID]*spdx.Package2_2{} - } - - if reflect.TypeOf(value).Kind() == reflect.Slice { - packages := reflect.ValueOf(value) - for i := 0; i < packages.Len(); i++ { - pack := packages.Index(i).Interface().(map[string]interface{}) - // create a new package - pkg := &spdx.Package2_2{ - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - } - //extract the SPDXID of the package - eID, err := extractElementID(pack["SPDXID"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - pkg.PackageSPDXIdentifier = eID - //range over all other properties now - for k, v := range pack { - switch k { - case "SPDXID": - //redundant case - case "name": - pkg.PackageName = v.(string) - case "annotations": - packageId, err := extractDocElementID(pack["SPDXID"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - //generalize function to parse annotations - err = spec.parseJsonAnnotations2_2("annotations", v, doc, packageId) - if err != nil { - return err - } - case "attributionTexts": - if reflect.TypeOf(v).Kind() == reflect.Slice { - texts := reflect.ValueOf(v) - for i := 0; i < texts.Len(); i++ { - pkg.PackageAttributionTexts = append(pkg.PackageAttributionTexts, texts.Index(i).Interface().(string)) - } - } - case "checksums": - //general function to parse checksums in utils - if reflect.TypeOf(v).Kind() == reflect.Slice { - checksums := reflect.ValueOf(v) - if pkg.PackageChecksums == nil { - pkg.PackageChecksums = make(map[spdx.ChecksumAlgorithm]spdx.Checksum) - } - for i := 0; i < checksums.Len(); i++ { - checksum := checksums.Index(i).Interface().(map[string]interface{}) - switch checksum["algorithm"].(string) { - case spdx.SHA1, spdx.SHA256, spdx.MD5: - algorithm := spdx.ChecksumAlgorithm(checksum["algorithm"].(string)) - pkg.PackageChecksums[algorithm] = spdx.Checksum{Algorithm: algorithm, Value: checksum["checksumValue"].(string)} - default: - return fmt.Errorf("got unknown checksum type %s", checksum["algorithm"]) - } - } - } - case "copyrightText": - pkg.PackageCopyrightText = v.(string) - case "description": - pkg.PackageDescription = v.(string) - case "downloadLocation": - pkg.PackageDownloadLocation = v.(string) - 
case "externalRefs": - //make a function to parse these - if reflect.TypeOf(v).Kind() == reflect.Slice { - extrefs := reflect.ValueOf(v) - for i := 0; i < extrefs.Len(); i++ { - ref := extrefs.Index(i).Interface().(map[string]interface{}) - newref := &spdx.PackageExternalReference2_2{} - //TODO: if either of these 3 missing then error - newref.RefType = ref["referenceType"].(string) - newref.Locator = ref["referenceLocator"].(string) - newref.Category = ref["referenceCategory"].(string) - if ref["comment"] != nil { - newref.ExternalRefComment = ref["comment"].(string) - } - pkg.PackageExternalReferences = append(pkg.PackageExternalReferences, newref) - } - } - case "filesAnalyzed": - pkg.IsFilesAnalyzedTagPresent = true - if !v.(bool) { - pkg.FilesAnalyzed = false - } else { - pkg.FilesAnalyzed = true - } - case "homepage": - pkg.PackageHomePage = v.(string) - case "licenseComments": - pkg.PackageLicenseComments = v.(string) - case "licenseConcluded": - pkg.PackageLicenseConcluded = v.(string) - case "licenseDeclared": - pkg.PackageLicenseDeclared = v.(string) - case "licenseInfoFromFiles": - if reflect.TypeOf(v).Kind() == reflect.Slice { - info := reflect.ValueOf(v) - for i := 0; i < info.Len(); i++ { - pkg.PackageLicenseInfoFromFiles = append(pkg.PackageLicenseInfoFromFiles, info.Index(i).Interface().(string)) - } - } - case "originator": - if v.(string) == "NOASSERTION" { - pkg.PackageOriginatorNOASSERTION = true - break - } - subkey, subvalue, err := extractSubs(v.(string)) - if err != nil { - return err - } - switch subkey { - case "Person": - pkg.PackageOriginatorPerson = subvalue - case "Organization": - pkg.PackageOriginatorOrganization = subvalue - default: - return fmt.Errorf("unrecognized PackageOriginator type %v", subkey) - } - case "packageFileName": - pkg.PackageFileName = v.(string) - case "packageVerificationCode": - code := v.(map[string]interface{}) - for codekey, codeval := range code { - switch codekey { - case "packageVerificationCodeExcludedFiles": - if reflect.TypeOf(codeval).Kind() == reflect.Slice { - efiles := reflect.ValueOf(codeval) - filename := efiles.Index(0).Interface().(string) - if strings.HasPrefix(filename, "excludes:") { - _, filename, err = extractSubs(efiles.Index(0).Interface().(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - } - pkg.PackageVerificationCodeExcludedFile = strings.Trim(filename, " ") - } - case "packageVerificationCodeValue": - pkg.PackageVerificationCode = code["packageVerificationCodeValue"].(string) - } - } - case "sourceInfo": - pkg.PackageSourceInfo = v.(string) - case "summary": - pkg.PackageSummary = v.(string) - case "supplier": - if v.(string) == "NOASSERTION" { - pkg.PackageSupplierNOASSERTION = true - break - } - subkey, subvalue, err := extractSubs(v.(string)) - if err != nil { - return err - } - switch subkey { - case "Person": - pkg.PackageSupplierPerson = subvalue - case "Organization": - pkg.PackageSupplierOrganization = subvalue - default: - return fmt.Errorf("unrecognized PackageSupplier type %v", subkey) - } - - case "versionInfo": - pkg.PackageVersion = v.(string) - case "comment": - pkg.PackageComment = v.(string) - case "hasFiles": - if pkg.Files == nil { - pkg.Files = make(map[spdx.ElementID]*spdx.File2_2) - } - if reflect.TypeOf(v).Kind() == reflect.Slice { - SpdxIds := reflect.ValueOf(v) - for i := 0; i < SpdxIds.Len(); i++ { - fileId, err := extractElementID(SpdxIds.Index(i).Interface().(string)) - if err != nil { - return err - } - pkg.Files[fileId] = doc.UnpackagedFiles[fileId] - 
delete(doc.UnpackagedFiles, fileId) - } - } - - default: - return fmt.Errorf("received unknown tag %v in Annotation section", k) - } - } - doc.Packages[eID] = pkg - } - - } - return nil -} diff --git a/jsonloader/parser2v2/parse_package_test.go b/jsonloader/parser2v2/parse_package_test.go deleted file mode 100644 index 9217836f..00000000 --- a/jsonloader/parser2v2/parse_package_test.go +++ /dev/null @@ -1,376 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "encoding/json" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestJSONSpdxDocument_parseJsonPackages2_2(t *testing.T) { - - data := []byte(`{ - "packages" : [ { - "SPDXID" : "SPDXRef-Package", - "annotations" : [ { - "annotationDate" : "2011-01-29T18:30:22Z", - "annotationType" : "OTHER", - "annotator" : "Person: Package Commenter", - "comment" : "Package level annotation" - } ], - "attributionTexts" : [ "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually." ], - "checksums" : [ { - "algorithm" : "SHA256", - "checksumValue" : "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd" - }, { - "algorithm" : "SHA1", - "checksumValue" : "85ed0817af83a24ad8da68c2b5094de69833983c" - }, { - "algorithm" : "MD5", - "checksumValue" : "624c1abb3664f4b35547e7c73864ad24" - } ], - "copyrightText" : "Copyright 2008-2010 John Smith", - "description" : "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", - "downloadLocation" : "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - "externalRefs" : [ { - "comment" : "This is the external ref for Acme", - "referenceCategory" : "OTHER", - "referenceLocator" : "acmecorp/acmenator/4.1.3-alpha", - "referenceType" : "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge" - }, { - "referenceCategory" : "SECURITY", - "referenceLocator" : "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", - "referenceType" : "http://spdx.org/rdf/references/cpe23Type" - } ], - "filesAnalyzed" : true, - "hasFiles" : [ "SPDXRef-JenaLib", "SPDXRef-DoapSource", "SPDXRef-CommonsLangSrc" ], - "homepage" : "http://ftp.gnu.org/gnu/glibc", - "licenseComments" : "The license for this project changed with the release of version x.y. 
The version of the project included here post-dates the license change.", - "licenseConcluded" : "(LGPL-2.0-only OR LicenseRef-3)", - "licenseDeclared" : "(LGPL-2.0-only AND LicenseRef-3)", - "licenseInfoFromFiles" : [ "GPL-2.0-only", "LicenseRef-2", "LicenseRef-1" ], - "name" : "glibc", - "originator" : "Organization: ExampleCodeInspect (contact@example.com)", - "packageFileName" : "glibc-2.11.1.tar.gz", - "packageVerificationCode" : { - "packageVerificationCodeExcludedFiles" : [ "excludes: ./package.spdx" ], - "packageVerificationCodeValue" : "d6a770ba38583ed4bb4525bd96e50461655d2758" - }, - "sourceInfo" : "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", - "summary" : "GNU C library.", - "supplier" : "Person: Jane Doe (jane.doe@example.com)", - "versionInfo" : "2.11.1" - }, { - "SPDXID" : "SPDXRef-fromDoap-1", - "comment" : "This package was converted from a DOAP Project by the same name", - "copyrightText" : "NOASSERTION", - "downloadLocation" : "NOASSERTION", - "filesAnalyzed" : false, - "homepage" : "http://commons.apache.org/proper/commons-lang/", - "licenseConcluded" : "NOASSERTION", - "licenseDeclared" : "NOASSERTION", - "name" : "Apache Commons Lang" - }, { - "SPDXID" : "SPDXRef-fromDoap-0", - "comment" : "This package was converted from a DOAP Project by the same name", - "copyrightText" : "NOASSERTION", - "downloadLocation" : "NOASSERTION", - "filesAnalyzed" : false, - "homepage" : "http://www.openjena.org/", - "licenseConcluded" : "NOASSERTION", - "licenseDeclared" : "NOASSERTION", - "name" : "Jena" - }, { - "SPDXID" : "SPDXRef-Saxon", - "checksums" : [ { - "algorithm" : "SHA1", - "checksumValue" : "85ed0817af83a24ad8da68c2b5094de69833983c" - } ], - "description" : "The Saxon package is a collection of tools for processing XML documents.", - "downloadLocation" : "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", - "filesAnalyzed" : false, - "homepage" : "http://saxon.sourceforge.net/", - "licenseComments" : "Other versions available for a commercial license", - "licenseConcluded" : "MPL-1.0", - "licenseDeclared" : "MPL-1.0", - "name" : "Saxon", - "packageFileName" : "saxonB-8.8.zip", - "versionInfo" : "8.8" - } ] - } - `) - - document := spdxDocument2_2{ - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributor: []string{"Protecode Inc.", "SPDX Technical Team Members", "Open Logic Inc.", "Source Auditor Inc.", "Black Duck Software In.c"}, - FileDependencies: []string{"SPDXRef-JenaLib", "SPDXRef-CommonsLangSrc"}, - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileType: []string{"SOURCE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - }, - "CommonsLangSrc": { - FileSPDXIdentifier: "CommonsLangSrc", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file is used by Jena", - FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", - FileContributor: []string{"Apache Software Foundation"}, - FileName: "./lib-source/commons-lang3-3.1-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - 
FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", - }, - "JenaLib": { - FileSPDXIdentifier: "JenaLib", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file belongs to Jena", - FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", - FileContributor: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, - FileDependencies: []string{"SPDXRef-CommonsLangSrc"}, - FileName: "./lib-source/jena-2.6.3-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseComments: "This license is used by Jena", - LicenseConcluded: "LicenseRef-1", - LicenseInfoInFile: []string{"LicenseRef-1"}, - }, - "File": { - FileSPDXIdentifier: "File", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", - FileCopyrightText: "Copyright 2008-2010 John Smith", - FileContributor: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, - FileName: "./package/foo.c", - FileType: []string{"SOURCE"}, - LicenseComments: "The concluded license was taken from the package level that the file was included in.", - LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", - LicenseInfoInFile: []string{"GPL-2.0-only", "LicenseRef-2"}, - FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - }, - }, - } - - packagetest1 := map[spdx.ElementID]*spdx.Package2_2{ - "Package": { - PackageSPDXIdentifier: "Package", - PackageAttributionTexts: []string{"The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. 
License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually."}, - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA256": { - Algorithm: "SHA256", - Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - }, - "SHA1": { - Algorithm: "SHA1", - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - PackageCopyrightText: "Copyright 2008-2010 John Smith", - PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", - PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - PackageExternalReferences: []*spdx.PackageExternalReference2_2{ - { - RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", - ExternalRefComment: "This is the external ref for Acme", - Category: "OTHER", - Locator: "acmecorp/acmenator/4.1.3-alpha", - }, - { - RefType: "http://spdx.org/rdf/references/cpe23Type", - Category: "SECURITY", - Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", - }, - }, - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - Files: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributor: []string{"Protecode Inc.", "SPDX Technical Team Members", "Open Logic Inc.", "Source Auditor Inc.", "Black Duck Software In.c"}, - FileDependencies: []string{"SPDXRef-JenaLib", "SPDXRef-CommonsLangSrc"}, - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileType: []string{"SOURCE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - }, - "CommonsLangSrc": { - FileSPDXIdentifier: "CommonsLangSrc", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file is used by Jena", - FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", - FileContributor: []string{"Apache Software Foundation"}, - FileName: "./lib-source/commons-lang3-3.1-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", - }, - "JenaLib": { - FileSPDXIdentifier: "JenaLib", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file belongs to Jena", - FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", - FileContributor: []string{"Apache Software Foundation", "Hewlett Packard 
Inc."}, - FileDependencies: []string{"SPDXRef-CommonsLangSrc"}, - FileName: "./lib-source/jena-2.6.3-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseComments: "This license is used by Jena", - LicenseConcluded: "LicenseRef-1", - LicenseInfoInFile: []string{"LicenseRef-1"}, - }, - }, - PackageHomePage: "http://ftp.gnu.org/gnu/glibc", - PackageLicenseComments: "The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.", - PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", - PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", - PackageLicenseInfoFromFiles: []string{"GPL-2.0-only", "LicenseRef-2", "LicenseRef-1"}, - PackageName: "glibc", - PackageOriginatorOrganization: "ExampleCodeInspect (contact@example.com)", - PackageFileName: "glibc-2.11.1.tar.gz", - PackageVerificationCodeExcludedFile: "./package.spdx", - PackageVerificationCode: "d6a770ba38583ed4bb4525bd96e50461655d2758", - PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", - PackageSummary: "GNU C library.", - PackageSupplierPerson: "Jane Doe (jane.doe@example.com)", - PackageVersion: "2.11.1", - }, - "fromDoap-1": { - PackageSPDXIdentifier: "fromDoap-1", - PackageComment: "This package was converted from a DOAP Project by the same name", - PackageCopyrightText: "NOASSERTION", - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://commons.apache.org/proper/commons-lang/", - PackageLicenseConcluded: "NOASSERTION", - PackageLicenseDeclared: "NOASSERTION", - PackageName: "Apache Commons Lang", - }, - "fromDoap-0": { - PackageSPDXIdentifier: "fromDoap-0", - PackageComment: "This package was converted from a DOAP Project by the same name", - PackageCopyrightText: "NOASSERTION", - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://www.openjena.org/", - PackageLicenseConcluded: "NOASSERTION", - PackageLicenseDeclared: "NOASSERTION", - PackageName: "Jena", - }, - - "Saxon": { - PackageSPDXIdentifier: "Saxon", - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - }, - PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", - PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://saxon.sourceforge.net/", - PackageLicenseComments: "Other versions available for a commercial license", - PackageLicenseConcluded: "MPL-1.0", - PackageLicenseDeclared: "MPL-1.0", - PackageName: "Saxon", - PackageFileName: "saxonB-8.8.zip", - PackageVersion: "8.8", - }, - } - var specs JSONSpdxDocument - json.Unmarshal(data, &specs) - - type args struct { - key string - value interface{} - doc *spdxDocument2_2 - } - tests := []struct { - name string - spec JSONSpdxDocument - args args - want map[spdx.ElementID]*spdx.Package2_2 - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "successTest", - spec: specs, - args: args{ - key: "packages", - value: specs["packages"], - doc: &document, - }, - want: packagetest1, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.spec.parseJsonPackages2_2(tt.args.key, tt.args.value, tt.args.doc); (err != nil) != tt.wantErr { - t.Errorf("JSONSpdxDocument.parseJsonPackages2_2() error = %v, wantErr %v", err, tt.wantErr) - } - - for k, v := range tt.want { - if !reflect.DeepEqual(tt.args.doc.Packages[k], v) { - t.Errorf("Load2_2() = %v, want %v", tt.args.doc.Packages[k], v) - } - } - - }) - } -} diff --git a/jsonloader/parser2v2/parse_relationship.go b/jsonloader/parser2v2/parse_relationship.go deleted file mode 100644 index b6d2bfa9..00000000 --- a/jsonloader/parser2v2/parse_relationship.go +++ /dev/null @@ -1,51 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "fmt" - "reflect" - - "github.com/spdx/tools-golang/spdx" -) - -func (spec JSONSpdxDocument) parseJsonRelationships2_2(key string, value interface{}, doc *spdxDocument2_2) error { - - //FIXME : NOASSERTION and NONE in relationship B value not compatible - if reflect.TypeOf(value).Kind() == reflect.Slice { - relationships := reflect.ValueOf(value) - for i := 0; i < relationships.Len(); i++ { - relationship := relationships.Index(i).Interface().(map[string]interface{}) - rel := spdx.Relationship2_2{} - // Parse ref A of the relationship - aid, err := extractDocElementID(relationship["spdxElementId"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - rel.RefA = aid - - // Parse the refB of the relationship - // NONE and NOASSERTION are permitted on right side - permittedSpecial := []string{"NONE", "NOASSERTION"} - bid, err := extractDocElementSpecial(relationship["relatedSpdxElement"].(string), permittedSpecial) - if err != nil { - return fmt.Errorf("%s", err) - } - rel.RefB = bid - // Parse relationship type - if relationship["relationshipType"] == nil { - return fmt.Errorf("%s , %d", "RelationshipType propty missing in relationship number", i) - } - rel.Relationship = relationship["relationshipType"].(string) - - // Parse the relationship comment - if relationship["comment"] != nil { - rel.RelationshipComment = relationship["comment"].(string) - } - - doc.Relationships = append(doc.Relationships, &rel) - } - - } - return nil -} diff --git a/jsonloader/parser2v2/parse_relationship_test.go b/jsonloader/parser2v2/parse_relationship_test.go deleted file mode 100644 index 5350a9c1..00000000 --- a/jsonloader/parser2v2/parse_relationship_test.go +++ /dev/null @@ -1,145 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "encoding/json" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestJSONSpdxDocument_parseJsonRelationships2_2(t *testing.T) { - data := []byte(`{ - "relationships" : [ { - "spdxElementId" : "SPDXRef-DOCUMENT", - "relatedSpdxElement" : "DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement", - "relationshipType" : "COPY_OF" - }, { - "spdxElementId" : "SPDXRef-DOCUMENT", - "relatedSpdxElement" : "SPDXRef-Package", - "relationshipType" : "CONTAINS" - }, { - "spdxElementId" : "SPDXRef-DOCUMENT", - "relatedSpdxElement" : "SPDXRef-File", - "relationshipType" : "DESCRIBES" - }, { - "spdxElementId" : "SPDXRef-DOCUMENT", - "relatedSpdxElement" : "SPDXRef-Package", - "relationshipType" : "DESCRIBES" - }, { - "spdxElementId" : "SPDXRef-Package", - "relatedSpdxElement" : 
"SPDXRef-Saxon", - "relationshipType" : "DYNAMIC_LINK" - }, { - "spdxElementId" : "SPDXRef-Package", - "relatedSpdxElement" : "SPDXRef-JenaLib", - "relationshipType" : "CONTAINS" - },{ - "spdxElementId" : "SPDXRef-CommonsLangSrc", - "relatedSpdxElement" : "NOASSERTION", - "relationshipType" : "GENERATED_FROM" - } , { - "spdxElementId" : "SPDXRef-JenaLib", - "relatedSpdxElement" : "SPDXRef-Package", - "relationshipType" : "CONTAINS" - }, { - "spdxElementId" : "SPDXRef-File", - "relatedSpdxElement" : "SPDXRef-fromDoap-0", - "relationshipType" : "GENERATED_FROM" - } ] - } - `) - - Relationship := []*spdx.Relationship2_2{ - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "spdx-tool-1.2", ElementRefID: "ToolsElement"}, - Relationship: "COPY_OF", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - Relationship: "DESCRIBES", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "DESCRIBES", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Saxon"}, - Relationship: "DYNAMIC_LINK", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "JenaLib"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "CommonsLangSrc"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "", SpecialID: "NOASSERTION"}, - Relationship: "GENERATED_FROM", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "JenaLib"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "fromDoap-0"}, - Relationship: "GENERATED_FROM", - }, - } - - var specs JSONSpdxDocument - json.Unmarshal(data, &specs) - - type args struct { - key string - value interface{} - doc *spdxDocument2_2 - } - tests := []struct { - name string - spec JSONSpdxDocument - args args - want []*spdx.Relationship2_2 - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "successTest", - spec: specs, - args: args{ - key: "relationships", - value: specs["relationships"], - doc: &spdxDocument2_2{}, - }, - want: Relationship, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.spec.parseJsonRelationships2_2(tt.args.key, tt.args.value, tt.args.doc); (err != nil) != tt.wantErr { - t.Errorf("JSONSpdxDocument.parseJsonRelationships2_2() error = %v, wantErr %v", err, tt.wantErr) - } - - for i := 0; i < len(tt.want); i++ { - if !reflect.DeepEqual(tt.args.doc.Relationships[i], tt.want[i]) { - t.Errorf("Load2_2() = %v, want %v", tt.args.doc.Relationships[i], tt.want[i]) - } - } - - }) - } -} diff --git a/jsonloader/parser2v2/parse_reviews.go b/jsonloader/parser2v2/parse_reviews.go deleted file mode 100644 index 279e0b2e..00000000 --- a/jsonloader/parser2v2/parse_reviews.go +++ /dev/null @@ -1,45 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "fmt" - "reflect" - - "github.com/spdx/tools-golang/spdx" -) - -func (spec JSONSpdxDocument) parseJsonReviews2_2(key string, value interface{}, doc *spdxDocument2_2) error { - //FIXME: Reviewer type property of review not specified in the spec - if reflect.TypeOf(value).Kind() == reflect.Slice { - reviews := reflect.ValueOf(value) - for i := 0; i < reviews.Len(); i++ { - reviewmap := reviews.Index(i).Interface().(map[string]interface{}) - review := spdx.Review2_2{} - // Remove loop all properties are mandatory in annotations - for k, v := range reviewmap { - switch k { - case "reviewer": - subkey, subvalue, err := extractSubs(v.(string)) - if err != nil { - return err - } - if subkey != "Person" && subkey != "Organization" && subkey != "Tool" { - return fmt.Errorf("unrecognized Reviewer type %v", subkey) - } - review.ReviewerType = subkey - review.Reviewer = subvalue - case "comment": - review.ReviewComment = v.(string) - case "reviewDate": - review.ReviewDate = v.(string) - default: - return fmt.Errorf("received unknown tag %v in Review Section section", k) - } - } - doc.Reviews = append(doc.Reviews, &review) - } - - } - return nil -} diff --git a/jsonloader/parser2v2/parse_reviews_test.go b/jsonloader/parser2v2/parse_reviews_test.go deleted file mode 100644 index d6fa6d0e..00000000 --- a/jsonloader/parser2v2/parse_reviews_test.go +++ /dev/null @@ -1,86 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "encoding/json" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestJSONSpdxDocument_parseJsonReviews2_2(t *testing.T) { - - data := []byte(`{ - "revieweds" : [ { - "reviewDate" : "2010-02-10T00:00:00Z", - "reviewer" : "Person: Joe Reviewer", - "comment" : "This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses" - }, { - "reviewDate" : "2011-03-13T00:00:00Z", - "reviewer" : "Person: Suzanne Reviewer", - "comment" : "Another example reviewer." - }] - } - `) - - reviewstest1 := []*spdx.Review2_2{ - { - ReviewDate: "2010-02-10T00:00:00Z", - ReviewerType: "Person", - Reviewer: "Joe Reviewer", - ReviewComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", - }, - { - ReviewDate: "2011-03-13T00:00:00Z", - ReviewerType: "Person", - Reviewer: "Suzanne Reviewer", - ReviewComment: "Another example reviewer.", - }, - } - - var specs JSONSpdxDocument - json.Unmarshal(data, &specs) - - type args struct { - key string - value interface{} - doc *spdxDocument2_2 - SPDXElementID spdx.DocElementID - } - tests := []struct { - name string - spec JSONSpdxDocument - args args - want []*spdx.Review2_2 - wantErr bool - }{ - // TODO: Add test cases. - { - name: "successTest", - spec: specs, - args: args{ - key: "revieweds", - value: specs["revieweds"], - doc: &spdxDocument2_2{}, - }, - want: reviewstest1, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.spec.parseJsonReviews2_2(tt.args.key, tt.args.value, tt.args.doc); (err != nil) != tt.wantErr { - t.Errorf("JSONSpdxDocument.parseJsonAnnotations2_2() error = %v, wantErr %v", err, tt.wantErr) - } - - for i := 0; i < len(tt.want); i++ { - if !reflect.DeepEqual(tt.args.doc.Reviews[i], tt.want[i]) { - t.Errorf("Load2_2() = %v, want %v", tt.args.doc.Reviews[i], tt.want[i]) - } - } - - }) - } -} diff --git a/jsonloader/parser2v2/parse_snippets.go b/jsonloader/parser2v2/parse_snippets.go deleted file mode 100644 index a49191d7..00000000 --- a/jsonloader/parser2v2/parse_snippets.go +++ /dev/null @@ -1,89 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "fmt" - "reflect" - - "github.com/spdx/tools-golang/spdx" -) - -func (spec JSONSpdxDocument) parseJsonSnippets2_2(key string, value interface{}, doc *spdxDocument2_2) error { - - if reflect.TypeOf(value).Kind() == reflect.Slice { - snippets := reflect.ValueOf(value) - for i := 0; i < snippets.Len(); i++ { - snippetmap := snippets.Index(i).Interface().(map[string]interface{}) - // create a new package - snippet := &spdx.Snippet2_2{} - //extract the SPDXID of the package - eID, err := extractElementID(snippetmap["SPDXID"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - snippet.SnippetSPDXIdentifier = eID - //range over all other properties now - for k, v := range snippetmap { - switch k { - case "SPDXID", "snippetFromFile": - //redundant case - case "name": - snippet.SnippetName = v.(string) - case "copyrightText": - snippet.SnippetCopyrightText = v.(string) - case "licenseComments": - snippet.SnippetLicenseComments = v.(string) - case "licenseConcluded": - snippet.SnippetLicenseConcluded = v.(string) - case "licenseInfoInSnippets": - if reflect.TypeOf(v).Kind() == reflect.Slice { - info := reflect.ValueOf(v) - for i := 0; i < info.Len(); i++ { - snippet.LicenseInfoInSnippet = append(snippet.LicenseInfoInSnippet, info.Index(i).Interface().(string)) - } - } - case "attributionTexts": - if reflect.TypeOf(v).Kind() == reflect.Slice { - info := reflect.ValueOf(v) - for i := 0; i < info.Len(); i++ { - snippet.SnippetAttributionTexts = append(snippet.SnippetAttributionTexts, info.Index(i).Interface().(string)) - } - } - case "comment": - snippet.SnippetComment = v.(string) - case "ranges": - //TODO: optimise this logic - if reflect.TypeOf(v).Kind() == reflect.Slice { - info := reflect.ValueOf(v) - for i := 0; i < info.Len(); i++ { - ranges := info.Index(i).Interface().(map[string]interface{}) - rangeStart := ranges["startPointer"].(map[string]interface{}) - rangeEnd := ranges["endPointer"].(map[string]interface{}) - if rangeStart["lineNumber"] != nil && 
rangeEnd["lineNumber"] != nil { - snippet.SnippetLineRangeStart = int(rangeStart["lineNumber"].(float64)) - snippet.SnippetLineRangeEnd = int(rangeEnd["lineNumber"].(float64)) - } else { - snippet.SnippetByteRangeStart = int(rangeStart["offset"].(float64)) - snippet.SnippetByteRangeEnd = int(rangeEnd["offset"].(float64)) - } - } - } - default: - return fmt.Errorf("received unknown tag %v in snippet section", k) - } - } - fileID, err2 := extractDocElementID(snippetmap["snippetFromFile"].(string)) - if err2 != nil { - return fmt.Errorf("%s", err2) - } - snippet.SnippetFromFileSPDXIdentifier = fileID - if doc.UnpackagedFiles[fileID.ElementRefID].Snippets == nil { - doc.UnpackagedFiles[fileID.ElementRefID].Snippets = make(map[spdx.ElementID]*spdx.Snippet2_2) - } - doc.UnpackagedFiles[fileID.ElementRefID].Snippets[eID] = snippet - } - - } - return nil -} diff --git a/jsonloader/parser2v2/parse_snippets_test.go b/jsonloader/parser2v2/parse_snippets_test.go deleted file mode 100644 index b25bee55..00000000 --- a/jsonloader/parser2v2/parse_snippets_test.go +++ /dev/null @@ -1,123 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "encoding/json" - "log" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestJSONSpdxDocument_parseJsonSnippets2_2(t *testing.T) { - - data := []byte(`{ - "snippets" : [ { - "SPDXID" : "SPDXRef-Snippet", - "comment" : "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - "copyrightText" : "Copyright 2008-2010 John Smith", - "licenseComments" : "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", - "licenseConcluded" : "GPL-2.0-only", - "licenseInfoInSnippets" : [ "GPL-2.0-only" ], - "attributionTexts":["text1"], - "name" : "from linux kernel", - "ranges" : [ { - "endPointer" : { - "lineNumber" : 23, - "reference" : "SPDXRef-DoapSource" - }, - "startPointer" : { - "lineNumber" : 5, - "reference" : "SPDXRef-DoapSource" - } - }, { - "endPointer" : { - "offset" : 420, - "reference" : "SPDXRef-DoapSource" - }, - "startPointer" : { - "offset" : 310, - "reference" : "SPDXRef-DoapSource" - } - } ], - "snippetFromFile" : "SPDXRef-DoapSource" - } ] - } - `) - - filetest1 := spdx.File2_2{ - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{}, - Snippets: map[spdx.ElementID]*spdx.Snippet2_2{ - "Snippet": { - SnippetSPDXIdentifier: "Snippet", - SnippetAttributionTexts: []string{"text1"}, - SnippetFromFileSPDXIdentifier: spdx.DocElementID{ElementRefID: "DoapSource"}, - SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - SnippetCopyrightText: "Copyright 2008-2010 John Smith", - SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. 
The concluded license information was found in the COPYING.txt file in package xyz.", - SnippetLicenseConcluded: "GPL-2.0-only", - LicenseInfoInSnippet: []string{"GPL-2.0-only"}, - SnippetName: "from linux kernel", - SnippetByteRangeStart: 310, - SnippetByteRangeEnd: 420, - SnippetLineRangeStart: 5, - SnippetLineRangeEnd: 23, - }, - }, - } - - var specs JSONSpdxDocument - err := json.Unmarshal(data, &specs) - - if err != nil { - log.Fatal(err) - } - type args struct { - key string - value interface{} - doc *spdxDocument2_2 - } - tests := []struct { - name string - spec JSONSpdxDocument - args args - want *spdx.File2_2 - wantErr bool - }{ - // TODO: Add test cases. - { - name: "successTest", - spec: specs, - args: args{ - key: "snippets", - value: specs["snippets"], - doc: &spdxDocument2_2{UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{}, - Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}, - }, - }}, - }, - want: &filetest1, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.spec.parseJsonSnippets2_2(tt.args.key, tt.args.value, tt.args.doc); (err != nil) != tt.wantErr { - t.Errorf("JSONSpdxDocument.parseJsonSnippets2_2() error = %v, wantErr %v", err, tt.wantErr) - } - - for k, v := range tt.want.Snippets { - if !reflect.DeepEqual(tt.args.doc.UnpackagedFiles["DoapSource"].Snippets[k], v) { - t.Errorf("Load2_2() = %v, want %v", tt.args.doc.UnpackagedFiles["DoapSource"].Snippets[k], v) - } - } - - }) - } -} diff --git a/jsonloader/parser2v2/parser.go b/jsonloader/parser2v2/parser.go deleted file mode 100644 index b1fbe8a7..00000000 --- a/jsonloader/parser2v2/parser.go +++ /dev/null @@ -1,132 +0,0 @@ -// Package jsonloader is used to load and parse SPDX JSON documents -// into tools-golang data structures. 
-// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "encoding/json" - "fmt" - - "github.com/spdx/tools-golang/spdx" -) - -//TODO : return spdx.Document2_2 -func Load2_2(content []byte) (*spdx.Document2_2, error) { - // check whetehr the Json is valid or not - if !json.Valid(content) { - return nil, fmt.Errorf("%s", "Invalid JSON Specification") - } - result := spdxDocument2_2{} - // unmarshall the json into the result struct - err := json.Unmarshal(content, &result) - resultfinal := spdx.Document2_2(result) - - if err != nil { - return nil, fmt.Errorf("%s", err) - } - - return &resultfinal, nil -} - -func (doc *spdxDocument2_2) UnmarshalJSON(data []byte) error { - var specs JSONSpdxDocument - //unmarshall the json into the intermediate stricture map[string]interface{} - err := json.Unmarshal(data, &specs) - if err != nil { - return err - } - // parse the data from the intermediate structure to the spdx.Document2_2{} - err = specs.newDocument(doc) - if err != nil { - return err - } - return nil -} - -func (spec JSONSpdxDocument) newDocument(doc *spdxDocument2_2) error { - // raneg through all the keys in the map and send them to appropriate arsing functions - for key, val := range spec { - switch key { - case "dataLicense", "spdxVersion", "SPDXID", "documentNamespace", "name", "comment", "creationInfo", "externalDocumentRefs": - err := spec.parseJsonCreationInfo2_2(key, val, doc) - if err != nil { - return err - } - case "annotations": - // if the json spec doenn't has any files then only this case will be executed - if spec["files"] == nil { - - id, err := extractDocElementID(spec["SPDXID"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - err = spec.parseJsonAnnotations2_2(key, val, doc, id) - if err != nil { - return err - } - } - case "relationships": - err := spec.parseJsonRelationships2_2(key, val, doc) - if err != nil { - return err - } - case "files": - //first parse all the files - err := spec.parseJsonFiles2_2(key, val, doc) - if err != nil { - return err - } - //then parse the snippets - if spec["snippets"] != nil { - err = spec.parseJsonSnippets2_2("snippets", spec["snippets"], doc) - if err != nil { - return err - } - } - //then parse the packages - if spec["packages"] != nil { - err = spec.parseJsonPackages2_2("packages", spec["packages"], doc) - if err != nil { - return err - } - } - // then parse the annotations - if spec["annotations"] != nil { - id, err := extractDocElementID(spec["SPDXID"].(string)) - if err != nil { - return fmt.Errorf("%s", err) - } - err = spec.parseJsonAnnotations2_2("annotations", spec["annotations"], doc, id) - if err != nil { - return err - } - } - - case "packages": - // if the json spec doesn't has any files to parse then this switch case will be executed - if spec["files"] == nil { - err := spec.parseJsonPackages2_2("packages", spec["packages"], doc) - if err != nil { - return err - } - } - case "hasExtractedLicensingInfos": - err := spec.parseJsonOtherLicenses2_2(key, val, doc) - if err != nil { - return err - } - case "revieweds": - err := spec.parseJsonReviews2_2(key, val, doc) - if err != nil { - return err - } - case "snippets", "documentDescribes": - //redundant case - default: - return fmt.Errorf("unrecognized key here %v", key) - } - - } - return nil -} diff --git a/jsonloader/parser2v2/parser_test.go b/jsonloader/parser2v2/parser_test.go deleted file mode 100644 index 051ff99f..00000000 --- a/jsonloader/parser2v2/parser_test.go +++ /dev/null @@ -1,419 +0,0 @@ -// Package 
jsonloader is used to load and parse SPDX JSON documents -// into tools-golang data structures. -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package parser2v2 - -import ( - "io/ioutil" - "log" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -//TODO: json validity check -//TODO: passsing an unrecornized key - -func TestLoad2_2(t *testing.T) { - - jsonData, err := ioutil.ReadFile("jsonfiles/test.json") // b has type []byte - if err != nil { - log.Fatal(err) - } - - type args struct { - content []byte - } - tests := []struct { - name string - args args - want *spdxDocument2_2 - wantErr bool - }{ - { - name: "True test", - args: args{content: jsonData}, - want: &spdxDocument2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - DataLicense: "CC0-1.0", - SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "DOCUMENT", - DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", - LicenseListVersion: "3.8", - Created: "2010-01-29T18:30:22Z", - CreatorPersons: []string{"Jane Doe ()"}, - CreatorOrganizations: []string{"ExampleCodeInspect ()"}, - CreatorTools: []string{"LicenseFind-1.0"}, - DocumentName: "SPDX-Tools-v2.0", - DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", - CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{ - "spdx-tool-1.2": { - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - }, - }, - }, - Annotations: []*spdx.Annotation2_2{ - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "File Commenter", - AnnotationComment: "File level annotation", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "Package Commenter", - AnnotationComment: "Package level annotation", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2010-02-10T00:00:00Z", - AnnotationType: "REVIEW", - AnnotatorType: "Person", - Annotator: "Joe Reviewer", - AnnotationComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2011-03-13T00:00:00Z", - AnnotationType: "REVIEW", - AnnotatorType: "Person", - Annotator: "Suzanne Reviewer", - AnnotationComment: "Another example reviewer.", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2010-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "Jane Doe ()", - AnnotationComment: "Document level annotation", - }, - }, - Relationships: []*spdx.Relationship2_2{ - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "spdx-tool-1.2", ElementRefID: "ToolsElement"}, - Relationship: "COPY_OF", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - Relationship: "DESCRIBES", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "DESCRIBES", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Saxon"}, - Relationship: "DYNAMIC_LINK", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "JenaLib"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "CommonsLangSrc"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "", SpecialID: "NOASSERTION"}, - Relationship: "GENERATED_FROM", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "JenaLib"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "fromDoap-0"}, - Relationship: "GENERATED_FROM", - }, - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - "Package": { - PackageSPDXIdentifier: "Package", - PackageAttributionTexts: []string{"The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. 
License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually."}, - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA256": { - Algorithm: "SHA256", - Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - }, - "SHA1": { - Algorithm: "SHA1", - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - PackageCopyrightText: "Copyright 2008-2010 John Smith", - PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", - PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - PackageExternalReferences: []*spdx.PackageExternalReference2_2{ - { - RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", - ExternalRefComment: "This is the external ref for Acme", - Category: "OTHER", - Locator: "acmecorp/acmenator/4.1.3-alpha", - }, - { - RefType: "http://spdx.org/rdf/references/cpe23Type", - Category: "SECURITY", - Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", - }, - }, - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - Files: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - Snippets: map[spdx.ElementID]*spdx.Snippet2_2{ - "Snippet": { - SnippetSPDXIdentifier: "Snippet", - SnippetFromFileSPDXIdentifier: spdx.DocElementID{ElementRefID: "DoapSource"}, - SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - SnippetCopyrightText: "Copyright 2008-2010 John Smith", - SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. 
The concluded license information was found in the COPYING.txt file in package xyz.", - SnippetLicenseConcluded: "GPL-2.0-only", - LicenseInfoInSnippet: []string{"GPL-2.0-only"}, - SnippetName: "from linux kernel", - SnippetByteRangeStart: 310, - SnippetByteRangeEnd: 420, - SnippetLineRangeStart: 5, - SnippetLineRangeEnd: 23, - }, - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributor: []string{"Protecode Inc.", "SPDX Technical Team Members", "Open Logic Inc.", "Source Auditor Inc.", "Black Duck Software In.c"}, - FileDependencies: []string{"SPDXRef-JenaLib", "SPDXRef-CommonsLangSrc"}, - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileType: []string{"SOURCE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - }, - "CommonsLangSrc": { - FileSPDXIdentifier: "CommonsLangSrc", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file is used by Jena", - FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", - FileContributor: []string{"Apache Software Foundation"}, - FileName: "./lib-source/commons-lang3-3.1-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", - }, - "JenaLib": { - FileSPDXIdentifier: "JenaLib", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file belongs to Jena", - FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", - FileContributor: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, - FileDependencies: []string{"SPDXRef-CommonsLangSrc"}, - FileName: "./lib-source/jena-2.6.3-sources.jar", - FileType: []string{"ARCHIVE"}, - LicenseComments: "This license is used by Jena", - LicenseConcluded: "LicenseRef-1", - LicenseInfoInFile: []string{"LicenseRef-1"}, - }, - }, - PackageHomePage: "http://ftp.gnu.org/gnu/glibc", - PackageLicenseComments: "The license for this project changed with the release of version x.y. 
The version of the project included here post-dates the license change.", - PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", - PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", - PackageLicenseInfoFromFiles: []string{"GPL-2.0-only", "LicenseRef-2", "LicenseRef-1"}, - PackageName: "glibc", - PackageOriginatorOrganization: "ExampleCodeInspect (contact@example.com)", - PackageFileName: "glibc-2.11.1.tar.gz", - PackageVerificationCodeExcludedFile: "./package.spdx", - PackageVerificationCode: "d6a770ba38583ed4bb4525bd96e50461655d2758", - PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", - PackageSummary: "GNU C library.", - PackageSupplierPerson: "Jane Doe (jane.doe@example.com)", - PackageVersion: "2.11.1", - }, - "fromDoap-1": { - PackageSPDXIdentifier: "fromDoap-1", - PackageComment: "This package was converted from a DOAP Project by the same name", - PackageCopyrightText: "NOASSERTION", - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://commons.apache.org/proper/commons-lang/", - PackageLicenseConcluded: "NOASSERTION", - PackageLicenseDeclared: "NOASSERTION", - PackageName: "Apache Commons Lang", - }, - "fromDoap-0": { - PackageSPDXIdentifier: "fromDoap-0", - PackageComment: "This package was converted from a DOAP Project by the same name", - PackageCopyrightText: "NOASSERTION", - PackageDownloadLocation: "NOASSERTION", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://www.openjena.org/", - PackageLicenseConcluded: "NOASSERTION", - PackageLicenseDeclared: "NOASSERTION", - PackageName: "Jena", - }, - - "Saxon": { - PackageSPDXIdentifier: "Saxon", - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - }, - PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", - PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, - PackageHomePage: "http://saxon.sourceforge.net/", - PackageLicenseComments: "Other versions available for a commercial license", - PackageLicenseConcluded: "MPL-1.0", - PackageLicenseDeclared: "MPL-1.0", - PackageName: "Saxon", - PackageFileName: "saxonB-8.8.zip", - PackageVersion: "8.8", - }, - }, - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "File": { - FileSPDXIdentifier: "File", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", - FileCopyrightText: "Copyright 2008-2010 John Smith", - FileContributor: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, - FileName: "./package/foo.c", - FileType: []string{"SOURCE"}, - LicenseComments: "The concluded license was taken from the package level that the file was included in.", - LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", - LicenseInfoInFile: []string{"GPL-2.0-only", "LicenseRef-2"}, - FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is 
hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - }, - }, - OtherLicenses: []*spdx.OtherLicense2_2{ - { - ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp 2 { - return spdx.DocElementID{}, fmt.Errorf("more than one colon found") - } - - // trim the prefix and confirm non-empty - docRefID = strings.TrimPrefix(strs[0], "DocumentRef-") - if docRefID == "" { - return spdx.DocElementID{}, fmt.Errorf("document identifier has nothing after prefix") - } - // and use remainder for element ID parsing - idStr = strs[1] - } - - // check prefix to confirm it's got the right prefix for element IDs - if !strings.HasPrefix(idStr, "SPDXRef-") { - return spdx.DocElementID{}, fmt.Errorf("missing SPDXRef- prefix for element identifier") - } - - // make sure no colons are present - if strings.Contains(idStr, ":") { - // we know this means there was no DocumentRef- prefix, because - // we would have handled multiple colons above if it was - return spdx.DocElementID{}, fmt.Errorf("invalid colon in element identifier") - } - - // trim the prefix and confirm non-empty - eltRefID := strings.TrimPrefix(idStr, "SPDXRef-") - if eltRefID == "" { - return spdx.DocElementID{}, fmt.Errorf("element identifier has nothing after prefix") - } - - // we're good - return spdx.DocElementID{DocumentRefID: docRefID, ElementRefID: spdx.ElementID(eltRefID)}, nil -} - -// used to extract SPDXRef values from an SPDX Identifier, OR "special" strings -// from a specified set of permitted values. The primary use case for this is -// the right-hand side of Relationships, where beginning in SPDX 2.2 the values -// "NONE" and "NOASSERTION" are permitted. If the value does not match one of -// the specified permitted values, it will fall back to the ordinary -// DocElementID extractor. -func extractDocElementSpecial(value string, permittedSpecial []string) (spdx.DocElementID, error) { - // check value against special set first - for _, sp := range permittedSpecial { - if sp == value { - return spdx.DocElementID{SpecialID: sp}, nil - } - } - // not found, fall back to regular search - return extractDocElementID(value) -} - -// used to extract SPDXRef values only from an SPDX Identifier which can point -// to this document only. Use extractDocElementID for parsing IDs that can -// refer either to this document or a different one. 
-func extractElementID(value string) (spdx.ElementID, error) { - // check prefix to confirm it's got the right prefix for element IDs - if !strings.HasPrefix(value, "SPDXRef-") { - return spdx.ElementID(""), fmt.Errorf("missing SPDXRef- prefix for element identifier") - } - - // make sure no colons are present - if strings.Contains(value, ":") { - return spdx.ElementID(""), fmt.Errorf("invalid colon in element identifier") - } - - // trim the prefix and confirm non-empty - eltRefID := strings.TrimPrefix(value, "SPDXRef-") - if eltRefID == "" { - return spdx.ElementID(""), fmt.Errorf("element identifier has nothing after prefix") - } - - // we're good - return spdx.ElementID(eltRefID), nil -} diff --git a/jsonloader/parser2v2/util_test.go b/jsonloader/parser2v2/util_test.go deleted file mode 100644 index e2f75d7b..00000000 --- a/jsonloader/parser2v2/util_test.go +++ /dev/null @@ -1,156 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later -package parser2v2 - -import ( - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -// ===== Helper function tests ===== - -func TestCanExtractSubvalues(t *testing.T) { - subkey, subvalue, err := extractSubs("SHA1: abc123") - if err != nil { - t.Errorf("got error when calling extractSubs: %v", err) - } - if subkey != "SHA1" { - t.Errorf("got %v for subkey", subkey) - } - if subvalue != "abc123" { - t.Errorf("got %v for subvalue", subvalue) - } -} - -func TestReturnsErrorForInvalidSubvalueFormat(t *testing.T) { - _, _, err := extractSubs("blah") - if err == nil { - t.Errorf("expected error when calling extractSubs for invalid format (0 colons), got nil") - } -} - -func TestCanExtractDocumentAndElementRefsFromID(t *testing.T) { - // test with valid ID in this document - helperForExtractDocElementID(t, "SPDXRef-file1", false, "", "file1") - // test with valid ID in another document - helperForExtractDocElementID(t, "DocumentRef-doc2:SPDXRef-file2", false, "doc2", "file2") - // test with invalid ID in this document - helperForExtractDocElementID(t, "a:SPDXRef-file1", true, "", "") - helperForExtractDocElementID(t, "file1", true, "", "") - helperForExtractDocElementID(t, "SPDXRef-", true, "", "") - helperForExtractDocElementID(t, "SPDXRef-file1:", true, "", "") - // test with invalid ID in another document - helperForExtractDocElementID(t, "DocumentRef-doc2", true, "", "") - helperForExtractDocElementID(t, "DocumentRef-doc2:", true, "", "") - helperForExtractDocElementID(t, "DocumentRef-doc2:SPDXRef-", true, "", "") - helperForExtractDocElementID(t, "DocumentRef-doc2:a", true, "", "") - helperForExtractDocElementID(t, "DocumentRef-:", true, "", "") - helperForExtractDocElementID(t, "DocumentRef-:SPDXRef-file1", true, "", "") - // test with invalid formats - helperForExtractDocElementID(t, "DocumentRef-doc2:SPDXRef-file1:file2", true, "", "") -} - -func helperForExtractDocElementID(t *testing.T, tst string, wantErr bool, wantDoc string, wantElt string) { - deID, err := extractDocElementID(tst) - if err != nil && wantErr == false { - t.Errorf("testing %v: expected nil error, got %v", tst, err) - } - if err == nil && wantErr == true { - t.Errorf("testing %v: expected non-nil error, got nil", tst) - } - if deID.DocumentRefID != wantDoc { - if wantDoc == "" { - t.Errorf("testing %v: want empty string for DocumentRefID, got %v", tst, deID.DocumentRefID) - } else { - t.Errorf("testing %v: want %v for DocumentRefID, got %v", tst, wantDoc, deID.DocumentRefID) - } - } - if deID.ElementRefID != spdx.ElementID(wantElt) { - if wantElt == "" { - 
t.Errorf("testing %v: want empty string for ElementRefID, got %v", tst, deID.ElementRefID) - } else { - t.Errorf("testing %v: want %v for ElementRefID, got %v", tst, wantElt, deID.ElementRefID) - } - } -} - -func TestCanExtractSpecialDocumentIDs(t *testing.T) { - permittedSpecial := []string{"NONE", "NOASSERTION"} - // test with valid special values - helperForExtractDocElementSpecial(t, permittedSpecial, "NONE", false, "", "", "NONE") - helperForExtractDocElementSpecial(t, permittedSpecial, "NOASSERTION", false, "", "", "NOASSERTION") - // test with valid regular IDs - helperForExtractDocElementSpecial(t, permittedSpecial, "SPDXRef-file1", false, "", "file1", "") - helperForExtractDocElementSpecial(t, permittedSpecial, "DocumentRef-doc2:SPDXRef-file2", false, "doc2", "file2", "") - helperForExtractDocElementSpecial(t, permittedSpecial, "a:SPDXRef-file1", true, "", "", "") - helperForExtractDocElementSpecial(t, permittedSpecial, "DocumentRef-doc2", true, "", "", "") - // test with invalid other words not on permitted list - helperForExtractDocElementSpecial(t, permittedSpecial, "FOO", true, "", "", "") -} - -func helperForExtractDocElementSpecial(t *testing.T, permittedSpecial []string, tst string, wantErr bool, wantDoc string, wantElt string, wantSpecial string) { - deID, err := extractDocElementSpecial(tst, permittedSpecial) - if err != nil && wantErr == false { - t.Errorf("testing %v: expected nil error, got %v", tst, err) - } - if err == nil && wantErr == true { - t.Errorf("testing %v: expected non-nil error, got nil", tst) - } - if deID.DocumentRefID != wantDoc { - if wantDoc == "" { - t.Errorf("testing %v: want empty string for DocumentRefID, got %v", tst, deID.DocumentRefID) - } else { - t.Errorf("testing %v: want %v for DocumentRefID, got %v", tst, wantDoc, deID.DocumentRefID) - } - } - if deID.ElementRefID != spdx.ElementID(wantElt) { - if wantElt == "" { - t.Errorf("testing %v: want empty string for ElementRefID, got %v", tst, deID.ElementRefID) - } else { - t.Errorf("testing %v: want %v for ElementRefID, got %v", tst, wantElt, deID.ElementRefID) - } - } - if deID.SpecialID != wantSpecial { - if wantSpecial == "" { - t.Errorf("testing %v: want empty string for SpecialID, got %v", tst, deID.SpecialID) - } else { - t.Errorf("testing %v: want %v for SpecialID, got %v", tst, wantSpecial, deID.SpecialID) - } - } -} - -func TestCanExtractElementRefsOnlyFromID(t *testing.T) { - // test with valid ID in this document - helperForExtractElementID(t, "SPDXRef-file1", false, "file1") - // test with valid ID in another document - helperForExtractElementID(t, "DocumentRef-doc2:SPDXRef-file2", true, "") - // test with invalid ID in this document - helperForExtractElementID(t, "a:SPDXRef-file1", true, "") - helperForExtractElementID(t, "file1", true, "") - helperForExtractElementID(t, "SPDXRef-", true, "") - helperForExtractElementID(t, "SPDXRef-file1:", true, "") - // test with invalid ID in another document - helperForExtractElementID(t, "DocumentRef-doc2", true, "") - helperForExtractElementID(t, "DocumentRef-doc2:", true, "") - helperForExtractElementID(t, "DocumentRef-doc2:SPDXRef-", true, "") - helperForExtractElementID(t, "DocumentRef-doc2:a", true, "") - helperForExtractElementID(t, "DocumentRef-:", true, "") - helperForExtractElementID(t, "DocumentRef-:SPDXRef-file1", true, "") -} - -func helperForExtractElementID(t *testing.T, tst string, wantErr bool, wantElt string) { - eID, err := extractElementID(tst) - if err != nil && wantErr == false { - t.Errorf("testing %v: expected nil error, 
got %v", tst, err) - } - if err == nil && wantErr == true { - t.Errorf("testing %v: expected non-nil error, got nil", tst) - } - if eID != spdx.ElementID(wantElt) { - if wantElt == "" { - t.Errorf("testing %v: want emptyString for ElementRefID, got %v", tst, eID) - } else { - t.Errorf("testing %v: want %v for ElementRefID, got %v", tst, wantElt, eID) - } - } -} diff --git a/jsonsaver/jsonsaver.go b/jsonsaver/jsonsaver.go deleted file mode 100644 index 4748e16c..00000000 --- a/jsonsaver/jsonsaver.go +++ /dev/null @@ -1,25 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package jsonsaver - -import ( - "bytes" - "io" - - "github.com/spdx/tools-golang/jsonsaver/saver2v2" - "github.com/spdx/tools-golang/spdx" -) - -// Save2_2 takes an io.Writer and an SPDX Document (version 2.2), -// and writes it to the writer in json format. It returns error -// if any error is encountered. -func Save2_2(doc *spdx.Document2_2, w io.Writer) error { - var b []byte - buf := bytes.NewBuffer(b) - err := saver2v2.RenderDocument2_2(doc, buf) - if err != nil { - return err - } - w.Write(buf.Bytes()) - return nil -} diff --git a/jsonsaver/jsonsaver_test.go b/jsonsaver/jsonsaver_test.go deleted file mode 100644 index 3d5daa93..00000000 --- a/jsonsaver/jsonsaver_test.go +++ /dev/null @@ -1,228 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package jsonsaver - -import ( - "bytes" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestSave2_2(t *testing.T) { - type args struct { - doc *spdx.Document2_2 - } - test1 := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - DataLicense: "CC0-1.0", - SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "DOCUMENT", - DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", - LicenseListVersion: "3.8", - Created: "2010-01-29T18:30:22Z", - CreatorPersons: []string{"Jane Doe ()"}, - CreatorOrganizations: []string{"ExampleCodeInspect ()"}, - CreatorTools: []string{"LicenseFind-1.0"}, - DocumentName: "SPDX-Tools-v2.0", - DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", - CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{ - "spdx-tool-1.2": { - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - }, - }, - }, - OtherLicenses: []*spdx.OtherLicense2_2{ - { - ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. 
If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp 0 { - jsondocument["externalDocumentRefs"] = refs - } - } - - return nil -} diff --git a/jsonsaver/saver2v2/save_creation_info_test.go b/jsonsaver/saver2v2/save_creation_info_test.go deleted file mode 100644 index 4498025e..00000000 --- a/jsonsaver/saver2v2/save_creation_info_test.go +++ /dev/null @@ -1,90 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderCreationInfo2_2(t *testing.T) { - type args struct { - ci *spdx.CreationInfo2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want map[string]interface{} - wantErr bool - }{ - // TODO: Add test cases. - { - name: "success", - args: args{ - ci: &spdx.CreationInfo2_2{ - DataLicense: "CC0-1.0", - SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "DOCUMENT", - DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", - LicenseListVersion: "3.8", - Created: "2010-01-29T18:30:22Z", - CreatorPersons: []string{"Jane Doe ()"}, - CreatorOrganizations: []string{"ExampleCodeInspect ()"}, - CreatorTools: []string{"LicenseFind-1.0"}, - DocumentName: "SPDX-Tools-v2.0", - DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", - CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{ - "spdx-tool-1.2": { - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - }, - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: map[string]interface{}{ - "dataLicense": "CC0-1.0", - "spdxVersion": "SPDX-2.2", - "SPDXID": "SPDXRef-DOCUMENT", - "documentNamespace": "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", - "name": "SPDX-Tools-v2.0", - "comment": "This document was created using SPDX 2.0 using licenses from the web site.", - "creationInfo": map[string]interface{}{ - "comment": "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - "created": "2010-01-29T18:30:22Z", - "creators": []string{"Tool: LicenseFind-1.0", "Organization: ExampleCodeInspect ()", "Person: Jane Doe ()"}, - "licenseListVersion": "3.8", - }, - "externalDocumentRefs": []interface{}{ - map[string]interface{}{ - "externalDocumentId": "DocumentRef-spdx-tool-1.2", - "spdxDocument": "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - "checksum": map[string]string{ - "algorithm": "SHA1", - "checksumValue": "d6a770ba38583ed4bb4525bd96e50461655d2759", - }, - }, - }, - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := renderCreationInfo2_2(tt.args.ci, tt.args.jsondocument); (err != nil) != tt.wantErr { - t.Errorf("renderCreationInfo2_2() error = %v, wantErr %v", err, tt.wantErr) - } - for k, v := range tt.want { - if !reflect.DeepEqual(tt.args.jsondocument[k], v) { - t.Errorf("renderCreationInfo2_2() = %v, want %v", tt.args.jsondocument[k], v) - } - } - }) - } -} diff --git 
a/jsonsaver/saver2v2/save_document.go b/jsonsaver/saver2v2/save_document.go deleted file mode 100644 index 43ea9072..00000000 --- a/jsonsaver/saver2v2/save_document.go +++ /dev/null @@ -1,110 +0,0 @@ -// Package saver2v2 contains functions to render and write a json -// formatted version of an in-memory SPDX document and its sections -// (version 2.2). -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later -package saver2v2 - -import ( - "bytes" - "encoding/json" - "fmt" - - "github.com/spdx/tools-golang/spdx" - "github.com/spdx/tools-golang/spdxlib" -) - -// RenderDocument2_2 is the main entry point to take an SPDX in-memory -// Document (version 2.2), and render it to the received *bytes.Buffer. -// It is only exported in order to be available to the jsonsaver package, -// and typically does not need to be called by client code. -func RenderDocument2_2(doc *spdx.Document2_2, buf *bytes.Buffer) error { - - jsondocument := make(map[string]interface{}) - // start to parse the creationInfo - if doc.CreationInfo == nil { - return fmt.Errorf("document had nil CreationInfo section") - } - err := renderCreationInfo2_2(doc.CreationInfo, jsondocument) - if err != nil { - return err - } - - // save otherlicenses from sodx struct to json - if doc.OtherLicenses != nil { - _, err = renderOtherLicenses2_2(doc.OtherLicenses, jsondocument) - if err != nil { - return err - } - } - - // save document level annotations - if doc.Annotations != nil { - ann, err := renderAnnotations2_2(doc.Annotations, spdx.MakeDocElementID("", string(doc.CreationInfo.SPDXIdentifier))) - if err != nil { - return err - } - - jsondocument["annotations"] = ann - - } - - // save document describes - describes, _ := spdxlib.GetDescribedPackageIDs2_2(doc) - if describes != nil { - var describesID []string - for _, v := range describes { - describesID = append(describesID, spdx.RenderElementID(v)) - } - jsondocument["documentDescribes"] = describesID - } - allfiles := make(map[spdx.ElementID]*spdx.File2_2) - // save packages from spdx to json - if doc.Packages != nil { - _, err = renderPackage2_2(doc, jsondocument, allfiles) - if err != nil { - return err - } - } - - for k, v := range doc.UnpackagedFiles { - allfiles[k] = v - } - - // save files and snippets from spdx to json - if allfiles != nil { - _, err = renderFiles2_2(doc, jsondocument, allfiles) - if err != nil { - return err - } - _, err = renderSnippets2_2(jsondocument, allfiles) - if err != nil { - return err - } - - } - - // save reviews from spdx to json - if doc.Reviews != nil { - _, err = renderReviews2_2(doc.Reviews, jsondocument) - if err != nil { - return err - } - - } - - // save relationships from spdx to json - if doc.Relationships != nil { - _, err = renderRelationships2_2(doc.Relationships, jsondocument) - if err != nil { - return err - } - - } - - jsonspec, err := json.MarshalIndent(jsondocument, "", "\t") - if err != nil { - return err - } - buf.Write(jsonspec) - return nil -} diff --git a/jsonsaver/saver2v2/save_document_test.go b/jsonsaver/saver2v2/save_document_test.go deleted file mode 100644 index 9651367a..00000000 --- a/jsonsaver/saver2v2/save_document_test.go +++ /dev/null @@ -1,436 +0,0 @@ -// Package saver2v2 contains functions to render and write a json -// formatted version of an in-memory SPDX document and its sections -// (version 2.2). 
-// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later -package saver2v2 - -import ( - "bytes" - "encoding/json" - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func TestRenderDocument2_2(t *testing.T) { - - test1 := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - DataLicense: "CC0-1.0", - SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "DOCUMENT", - DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", - LicenseListVersion: "3.8", - Created: "2010-01-29T18:30:22Z", - CreatorPersons: []string{"Jane Doe ()"}, - CreatorOrganizations: []string{"ExampleCodeInspect ()"}, - CreatorTools: []string{"LicenseFind-1.0"}, - DocumentName: "SPDX-Tools-v2.0", - DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", - CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{ - "spdx-tool-1.2": { - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - }, - }, - }, - OtherLicenses: []*spdx.OtherLicense2_2{ - { - ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp 0 { - jsondocument["files"] = files - } - return files, nil -} diff --git a/jsonsaver/saver2v2/save_files_test.go b/jsonsaver/saver2v2/save_files_test.go deleted file mode 100644 index 1a31b31b..00000000 --- a/jsonsaver/saver2v2/save_files_test.go +++ /dev/null @@ -1,160 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderFiles2_2(t *testing.T) { - type args struct { - doc *spdx.Document2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. - { - name: "success", - args: args{ - doc: &spdx.Document2_2{ - Annotations: []*spdx.Annotation2_2{ - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "File Commenter", - AnnotationComment: "File level annotation", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "Package Commenter", - AnnotationComment: "Package level annotation", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2010-02-10T00:00:00Z", - AnnotationType: "REVIEW", - AnnotatorType: "Person", - Annotator: "Joe Reviewer", - AnnotationComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2011-03-13T00:00:00Z", - AnnotationType: "REVIEW", - AnnotatorType: "Person", - Annotator: "Suzanne Reviewer", - AnnotationComment: "Another example reviewer.", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - AnnotationDate: "2010-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "Jane Doe ()", - AnnotationComment: "Document level annotation", - }, - }, - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "File": { - FileSPDXIdentifier: "File", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - FileComment: "The concluded license was taken from the package level that the file was .", - FileCopyrightText: "Copyright 2008-2010 John Smith", - FileContributor: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, - FileName: "./package/foo.c", - FileType: []string{"SOURCE"}, - LicenseComments: "The concluded license was taken from the package level that the file was included in.", - LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", - LicenseInfoInFile: []string{"GPL-2.0-only", "LicenseRef-2"}, - FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.", - FileAttributionTexts: []string{"text1", "text2 "}, - }, - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{ - "SPDXID": "SPDXRef-File", - "annotations": []interface{}{ - map[string]interface{}{ - "annotationDate": "2011-01-29T18:30:22Z", - "annotationType": "OTHER", - "annotator": "Person: File Commenter", - "comment": "File level annotation", - }, - }, - "checksums": []interface{}{ - map[string]interface{}{ - "algorithm": "MD5", - "checksumValue": "624c1abb3664f4b35547e7c73864ad24", - }, - map[string]interface{}{ - "algorithm": "SHA1", - "checksumValue": "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - }, - "comment": "The concluded license was taken from the package level that the file was .", - "copyrightText": "Copyright 2008-2010 John Smith", - "fileContributors": []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, - "fileName": "./package/foo.c", - "fileTypes": []string{"SOURCE"}, - "licenseComments": "The concluded license was taken from the package level that the file was included in.", - "licenseConcluded": "(LGPL-2.0-only OR LicenseRef-2)", - "licenseInfoInFiles": []string{"GPL-2.0-only", "LicenseRef-2"}, - "noticeText": "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.", - "attributionTexts": []string{"text1", "text2 "}, - }, - }, - }, - { - name: "success empty", - args: args{ - doc: &spdx.Document2_2{ - Annotations: []*spdx.Annotation2_2{}, - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{}, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := renderFiles2_2(tt.args.doc, tt.args.jsondocument, tt.args.doc.UnpackagedFiles) - if (err != nil) != tt.wantErr { - t.Errorf("renderFiles2_2() error = %v, wantErr 
%v", err, tt.wantErr) - } - for k, v := range got { - if !reflect.DeepEqual(v, tt.want[k]) { - t.Errorf("renderFiles2_2() error = %v, want %v", v, tt.want[k]) - } - } - - }) - } -} diff --git a/jsonsaver/saver2v2/save_other_license.go b/jsonsaver/saver2v2/save_other_license.go deleted file mode 100644 index fc8a6b51..00000000 --- a/jsonsaver/saver2v2/save_other_license.go +++ /dev/null @@ -1,35 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "github.com/spdx/tools-golang/spdx" -) - -func renderOtherLicenses2_2(otherlicenses []*spdx.OtherLicense2_2, jsondocument map[string]interface{}) ([]interface{}, error) { - - var licenses []interface{} - for _, v := range otherlicenses { - lic := make(map[string]interface{}) - if v.LicenseIdentifier != "" { - lic["licenseId"] = v.LicenseIdentifier - } - if v.ExtractedText != "" { - lic["extractedText"] = v.ExtractedText - } - if v.LicenseComment != "" { - lic["comment"] = v.LicenseComment - } - if v.LicenseName != "" { - lic["name"] = v.LicenseName - } - if v.LicenseCrossReferences != nil { - lic["seeAlsos"] = v.LicenseCrossReferences - } - licenses = append(licenses, lic) - } - if len(licenses) > 0 { - jsondocument["hasExtractedLicensingInfos"] = licenses - } - return licenses, nil -} diff --git a/jsonsaver/saver2v2/save_other_license_test.go b/jsonsaver/saver2v2/save_other_license_test.go deleted file mode 100644 index bf1a264a..00000000 --- a/jsonsaver/saver2v2/save_other_license_test.go +++ /dev/null @@ -1,85 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderOtherLicenses2_2(t *testing.T) { - type args struct { - otherlicenses []*spdx.OtherLicense2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. - { - name: "success", - args: args{ - otherlicenses: []*spdx.OtherLicense2_2{ - { - ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp 0 { - jsondocument["packages"] = packages - } - return packages, nil -} diff --git a/jsonsaver/saver2v2/save_package_test.go b/jsonsaver/saver2v2/save_package_test.go deleted file mode 100644 index 41a6c9c6..00000000 --- a/jsonsaver/saver2v2/save_package_test.go +++ /dev/null @@ -1,233 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderPackage2_2(t *testing.T) { - type args struct { - doc *spdx.Document2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "success", - args: args{ - doc: &spdx.Document2_2{ - Annotations: []*spdx.Annotation2_2{ - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "File Commenter", - AnnotationComment: "File level annotation", - }, - { - AnnotationSPDXIdentifier: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - AnnotationDate: "2011-01-29T18:30:22Z", - AnnotationType: "OTHER", - AnnotatorType: "Person", - Annotator: "Package Commenter", - AnnotationComment: "Package level annotation", - }, - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - "Package": { - PackageSPDXIdentifier: "Package", - PackageAttributionTexts: []string{"The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually."}, - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - PackageCopyrightText: "Copyright 2008-2010 John Smith", - PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", - PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - PackageExternalReferences: []*spdx.PackageExternalReference2_2{ - { - RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", - ExternalRefComment: "This is the external ref for Acme", - Category: "OTHER", - Locator: "acmecorp/acmenator/4.1.3-alpha", - }, - { - RefType: "http://spdx.org/rdf/references/cpe23Type", - Category: "SECURITY", - Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", - }, - }, - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - Files: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - Snippets: map[spdx.ElementID]*spdx.Snippet2_2{ - "Snippet": { - SnippetSPDXIdentifier: "Snippet", - SnippetFromFileSPDXIdentifier: spdx.DocElementID{ElementRefID: "DoapSource"}, - SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - SnippetCopyrightText: "Copyright 2008-2010 John Smith", - SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. 
The concluded license information was found in the COPYING.txt file in package xyz.", - SnippetLicenseConcluded: "GPL-2.0-only", - LicenseInfoInSnippet: []string{"GPL-2.0-only"}, - SnippetName: "from linux kernel", - SnippetByteRangeStart: 310, - SnippetByteRangeEnd: 420, - SnippetLineRangeStart: 5, - SnippetLineRangeEnd: 23, - }, - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributor: []string{"Protecode Inc.", "SPDX Technical Team Members", "Open Logic Inc.", "Source Auditor Inc.", "Black Duck Software In.c"}, - FileDependencies: []string{"SPDXRef-JenaLib", "SPDXRef-CommonsLangSrc"}, - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileType: []string{"SOURCE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - }, - }, - PackageHomePage: "http://ftp.gnu.org/gnu/glibc", - PackageLicenseComments: "The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.", - PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", - PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", - PackageLicenseInfoFromFiles: []string{"GPL-2.0-only", "LicenseRef-2", "LicenseRef-1"}, - PackageName: "glibc", - PackageOriginatorOrganization: "ExampleCodeInspect (contact@example.com)", - PackageFileName: "glibc-2.11.1.tar.gz", - PackageVerificationCodeExcludedFile: "./package.spdx", - PackageVerificationCode: "d6a770ba38583ed4bb4525bd96e50461655d2758", - PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", - PackageSummary: "GNU C library.", - PackageSupplierPerson: "Jane Doe (jane.doe@example.com)", - PackageVersion: "2.11.1", - }, - }, - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "File": { - FileSPDXIdentifier: "File", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - "MD5": { - Algorithm: "MD5", - Value: "624c1abb3664f4b35547e7c73864ad24", - }, - }, - FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", - FileCopyrightText: "Copyright 2008-2010 John Smith", - FileContributor: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, - FileName: "./package/foo.c", - FileType: []string{"SOURCE"}, - LicenseComments: "The concluded license was taken from the package level that the file was included in.", - LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", - LicenseInfoInFile: []string{"GPL-2.0-only", "LicenseRef-2"}, - FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - }, - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{ - "SPDXID": "SPDXRef-Package", - "annotations": []interface{}{ - map[string]interface{}{ - "annotationDate": "2011-01-29T18:30:22Z", - "annotationType": "OTHER", - "annotator": "Person: Package Commenter", - "comment": "Package level annotation", - }, - }, - "attributionTexts": []string{"The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually."}, - "checksums": []interface{}{ - map[string]interface{}{ - "algorithm": "MD5", - "checksumValue": "624c1abb3664f4b35547e7c73864ad24", - }, - }, - "copyrightText": "Copyright 2008-2010 John Smith", - "description": "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", - "downloadLocation": "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", - "externalRefs": []interface{}{ - map[string]interface{}{ - "comment": "This is the external ref for Acme", - "referenceCategory": "OTHER", - "referenceLocator": "acmecorp/acmenator/4.1.3-alpha", - "referenceType": "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", - }, - map[string]interface{}{ - "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", - "referenceType": "http://spdx.org/rdf/references/cpe23Type", - }, - }, - "filesAnalyzed": true, - "hasFiles": []string{"SPDXRef-DoapSource"}, - "homepage": "http://ftp.gnu.org/gnu/glibc", - "licenseComments": "The license for this project changed with the release of version x.y. 
The version of the project included here post-dates the license change.", - "licenseConcluded": "(LGPL-2.0-only OR LicenseRef-3)", - "licenseDeclared": "(LGPL-2.0-only AND LicenseRef-3)", - "licenseInfoFromFiles": []string{"GPL-2.0-only", "LicenseRef-2", "LicenseRef-1"}, - "name": "glibc", - "originator": "Organization: ExampleCodeInspect (contact@example.com)", - "packageFileName": "glibc-2.11.1.tar.gz", - "packageVerificationCode": map[string]interface{}{ - "packageVerificationCodeExcludedFiles": []string{"./package.spdx"}, - "packageVerificationCodeValue": "d6a770ba38583ed4bb4525bd96e50461655d2758", - }, - "sourceInfo": "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", - "summary": "GNU C library.", - "supplier": "Person: Jane Doe (jane.doe@example.com)", - "versionInfo": "2.11.1", - }, - }, - }, - { - name: "success empty", - args: args{ - doc: &spdx.Document2_2{ - Annotations: []*spdx.Annotation2_2{{}}, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := renderPackage2_2(tt.args.doc, tt.args.jsondocument, make(map[spdx.ElementID]*spdx.File2_2)) - if (err != nil) != tt.wantErr { - t.Errorf("renderPackage2_2() error = %v, wantErr %v", err, tt.wantErr) - return - } - for k, v := range got { - if !reflect.DeepEqual(v, tt.want[k]) { - t.Errorf("renderPackage2_2() = %v, want %v", v, tt.want[k]) - } - } - }) - } -} diff --git a/jsonsaver/saver2v2/save_relationships.go b/jsonsaver/saver2v2/save_relationships.go deleted file mode 100644 index 42382b3f..00000000 --- a/jsonsaver/saver2v2/save_relationships.go +++ /dev/null @@ -1,32 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "github.com/spdx/tools-golang/spdx" -) - -func renderRelationships2_2(relationships []*spdx.Relationship2_2, jsondocument map[string]interface{}) ([]interface{}, error) { - - var rels []interface{} - for _, v := range relationships { - rel := make(map[string]interface{}) - if len(v.RefA.ElementRefID) != 0 || len(v.RefA.DocumentRefID) != 0 { - rel["spdxElementId"] = spdx.RenderDocElementID(v.RefA) - } - if len(v.RefB.ElementRefID) != 0 || len(v.RefB.DocumentRefID) != 0 { - rel["relatedSpdxElement"] = spdx.RenderDocElementID(v.RefB) - } - if v.Relationship != "" { - rel["relationshipType"] = v.Relationship - } - if v.RelationshipComment != "" { - rel["comment"] = v.RelationshipComment - } - rels = append(rels, rel) - } - if len(rels) > 0 { - jsondocument["relationships"] = rels - } - return rels, nil -} diff --git a/jsonsaver/saver2v2/save_relationships_test.go b/jsonsaver/saver2v2/save_relationships_test.go deleted file mode 100644 index bdbde818..00000000 --- a/jsonsaver/saver2v2/save_relationships_test.go +++ /dev/null @@ -1,94 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderRelationships2_2(t *testing.T) { - type args struct { - relationships []*spdx.Relationship2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "success", - args: args{ - relationships: []*spdx.Relationship2_2{ - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "spdx-tool-1.2", ElementRefID: "ToolsElement"}, - Relationship: "COPY_OF", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "Package"}, - Relationship: "CONTAINS", - }, - { - RefA: spdx.DocElementID{DocumentRefID: "", ElementRefID: "DOCUMENT"}, - RefB: spdx.DocElementID{DocumentRefID: "", ElementRefID: "File"}, - Relationship: "DESCRIBES", - RelationshipComment: "This is a comment.", - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{ - "spdxElementId": "SPDXRef-DOCUMENT", - "relatedSpdxElement": "DocumentRef-spdx-tool-1.2:SPDXRef-ToolsElement", - "relationshipType": "COPY_OF", - }, - map[string]interface{}{ - "spdxElementId": "SPDXRef-DOCUMENT", - "relatedSpdxElement": "SPDXRef-Package", - "relationshipType": "CONTAINS", - }, - map[string]interface{}{ - "spdxElementId": "SPDXRef-DOCUMENT", - "relatedSpdxElement": "SPDXRef-File", - "relationshipType": "DESCRIBES", - "comment": "This is a comment.", - }, - }, - }, - { - name: "success empty", - args: args{ - relationships: []*spdx.Relationship2_2{ - {}, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := renderRelationships2_2(tt.args.relationships, tt.args.jsondocument) - if (err != nil) != tt.wantErr { - t.Errorf("renderRelationships2_2() error = %v, wantErr %v", err, tt.wantErr) - return - } - for k, v := range got { - if !reflect.DeepEqual(v, tt.want[k]) { - t.Errorf("renderRelationships2_2() = %v, want %v", v, tt.want[k]) - } - } - - }) - } -} diff --git a/jsonsaver/saver2v2/save_reviews.go b/jsonsaver/saver2v2/save_reviews.go deleted file mode 100644 index 307b9cf8..00000000 --- a/jsonsaver/saver2v2/save_reviews.go +++ /dev/null @@ -1,33 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "fmt" - - "github.com/spdx/tools-golang/spdx" -) - -func renderReviews2_2(reviews []*spdx.Review2_2, jsondocument map[string]interface{}) ([]interface{}, error) { - - var review []interface{} - for _, v := range reviews { - rev := make(map[string]interface{}) - if len(v.ReviewDate) > 0 { - rev["reviewDate"] = v.ReviewDate - } - if len(v.ReviewerType) > 0 || len(v.Reviewer) > 0 { - rev["reviewer"] = fmt.Sprintf("%s: %s", v.ReviewerType, v.Reviewer) - } - if len(v.ReviewComment) > 0 { - rev["comment"] = v.ReviewComment - } - if len(rev) > 0 { - review = append(review, rev) - } - } - if len(review) > 0 { - jsondocument["revieweds"] = review - } - return review, nil -} diff --git a/jsonsaver/saver2v2/save_reviews_test.go b/jsonsaver/saver2v2/save_reviews_test.go deleted file mode 100644 index fa3d4055..00000000 --- a/jsonsaver/saver2v2/save_reviews_test.go +++ /dev/null @@ -1,72 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderReviews2_2(t *testing.T) { - type args struct { - reviews []*spdx.Review2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. 
- { - name: "success", - args: args{ - reviews: []*spdx.Review2_2{ - { - ReviewDate: "2010-02-10T00:00:00Z", - ReviewerType: "Person", - Reviewer: "Joe Reviewer", - ReviewComment: "This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses", - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{ - "reviewDate": "2010-02-10T00:00:00Z", - "reviewer": "Person: Joe Reviewer", - "comment": "This is just an example. Some of the non-standard licenses look like they are actually BSD 3 clause licenses", - }, - }, - }, - { - name: "success empty", - args: args{ - reviews: []*spdx.Review2_2{ - {}, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := renderReviews2_2(tt.args.reviews, tt.args.jsondocument) - if (err != nil) != tt.wantErr { - t.Errorf("renderReviews2_2() error = %v, wantErr %v", err, tt.wantErr) - return - } - for k, v := range got { - if !reflect.DeepEqual(v, tt.want[k]) { - t.Errorf("renderReviews2_2() = %v, want %v", v, tt.want[k]) - } - } - }) - } -} diff --git a/jsonsaver/saver2v2/save_snippets.go b/jsonsaver/saver2v2/save_snippets.go deleted file mode 100644 index ca08e2e5..00000000 --- a/jsonsaver/saver2v2/save_snippets.go +++ /dev/null @@ -1,87 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "sort" - - "github.com/spdx/tools-golang/spdx" -) - -func renderSnippets2_2(jsondocument map[string]interface{}, allfiles map[spdx.ElementID]*spdx.File2_2) ([]interface{}, error) { - - var snippets []interface{} - for _, value := range allfiles { - snippet := make(map[string]interface{}) - - var keys []string - for ke := range value.Snippets { - keys = append(keys, string(ke)) - } - sort.Strings(keys) - for _, k := range keys { - v := value.Snippets[spdx.ElementID(k)] - snippet["SPDXID"] = spdx.RenderElementID(v.SnippetSPDXIdentifier) - if v.SnippetComment != "" { - snippet["comment"] = v.SnippetComment - } - if v.SnippetCopyrightText != "" { - snippet["copyrightText"] = v.SnippetCopyrightText - } - if v.SnippetLicenseComments != "" { - snippet["licenseComments"] = v.SnippetLicenseComments - } - if v.SnippetLicenseConcluded != "" { - snippet["licenseConcluded"] = v.SnippetLicenseConcluded - } - if v.LicenseInfoInSnippet != nil { - snippet["licenseInfoInSnippets"] = v.LicenseInfoInSnippet - } - if v.SnippetName != "" { - snippet["name"] = v.SnippetName - } - if v.SnippetName != "" { - snippet["snippetFromFile"] = spdx.RenderDocElementID(v.SnippetFromFileSPDXIdentifier) - } - if v.SnippetAttributionTexts != nil { - snippet["attributionTexts"] = v.SnippetAttributionTexts - } - - // save snippet ranges - var ranges []interface{} - - byterange := map[string]interface{}{ - "endPointer": map[string]interface{}{ - "offset": v.SnippetByteRangeEnd, - "reference": spdx.RenderDocElementID(v.SnippetFromFileSPDXIdentifier), - }, - "startPointer": map[string]interface{}{ - "offset": v.SnippetByteRangeStart, - "reference": spdx.RenderDocElementID(v.SnippetFromFileSPDXIdentifier), - }, - } - linerange := map[string]interface{}{ - "endPointer": map[string]interface{}{ - "lineNumber": v.SnippetLineRangeEnd, - "reference": spdx.RenderDocElementID(v.SnippetFromFileSPDXIdentifier), - }, - "startPointer": map[string]interface{}{ - "lineNumber": v.SnippetLineRangeStart, - "reference": 
spdx.RenderDocElementID(v.SnippetFromFileSPDXIdentifier), - }, - } - if len(byterange) > 0 { - ranges = append(ranges, byterange) - } - if len(linerange) > 0 { - ranges = append(ranges, linerange) - } - snippet["ranges"] = ranges - snippets = append(snippets, snippet) - } - } - if len(snippets) > 0 { - jsondocument["snippets"] = snippets - } - return snippets, nil -} diff --git a/jsonsaver/saver2v2/save_snippets_test.go b/jsonsaver/saver2v2/save_snippets_test.go deleted file mode 100644 index dfe02133..00000000 --- a/jsonsaver/saver2v2/save_snippets_test.go +++ /dev/null @@ -1,127 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later - -package saver2v2 - -import ( - "reflect" - "testing" - - "github.com/spdx/tools-golang/spdx" -) - -func Test_renderSnippets2_2(t *testing.T) { - type args struct { - doc *spdx.Document2_2 - jsondocument map[string]interface{} - } - tests := []struct { - name string - args args - want []interface{} - wantErr bool - }{ - // TODO: Add test cases. - { - name: "success", - args: args{ - doc: &spdx.Document2_2{ - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{ - "DoapSource": { - FileSPDXIdentifier: "DoapSource", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - "SHA1": { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - Snippets: map[spdx.ElementID]*spdx.Snippet2_2{ - "Snippet": { - SnippetSPDXIdentifier: "Snippet", - SnippetFromFileSPDXIdentifier: spdx.DocElementID{ElementRefID: "DoapSource"}, - SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - SnippetCopyrightText: "Copyright 2008-2010 John Smith", - SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", - SnippetLicenseConcluded: "GPL-2.0-only", - LicenseInfoInSnippet: []string{"GPL-2.0-only"}, - SnippetName: "from linux kernel", - SnippetByteRangeStart: 310, - SnippetByteRangeEnd: 420, - SnippetLineRangeStart: 5, - SnippetLineRangeEnd: 23, - SnippetAttributionTexts: []string{"text1", "text2 "}, - }, - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributor: []string{"Protecode Inc.", "SPDX Technical Team Members", "Open Logic Inc.", "Source Auditor Inc.", "Black Duck Software In.c"}, - FileDependencies: []string{"SPDXRef-JenaLib", "SPDXRef-CommonsLangSrc"}, - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileType: []string{"SOURCE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - }, - }, - }, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{ - "SPDXID": "SPDXRef-Snippet", - "comment": "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", - "copyrightText": "Copyright 2008-2010 John Smith", - "licenseComments": "The concluded license was taken from package xyz, from which the snippet was copied into the current file. 
The concluded license information was found in the COPYING.txt file in package xyz.", - "licenseConcluded": "GPL-2.0-only", - "licenseInfoInSnippets": []string{"GPL-2.0-only"}, - "name": "from linux kernel", - "ranges": []interface{}{ - map[string]interface{}{ - "endPointer": map[string]interface{}{ - "offset": 420, - "reference": "SPDXRef-DoapSource", - }, - "startPointer": map[string]interface{}{ - "offset": 310, - "reference": "SPDXRef-DoapSource", - }, - }, - map[string]interface{}{ - "endPointer": map[string]interface{}{ - "lineNumber": 23, - "reference": "SPDXRef-DoapSource", - }, - "startPointer": map[string]interface{}{ - "lineNumber": 5, - "reference": "SPDXRef-DoapSource", - }, - }, - }, - "snippetFromFile": "SPDXRef-DoapSource", - "attributionTexts": []string{"text1", "text2 "}, - }, - }, - }, - { - name: "success empty", - args: args{ - doc: &spdx.Document2_2{}, - jsondocument: make(map[string]interface{}), - }, - want: []interface{}{ - map[string]interface{}{}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := renderSnippets2_2(tt.args.jsondocument, tt.args.doc.UnpackagedFiles) - if (err != nil) != tt.wantErr { - t.Errorf("renderSnippets2_2() error = %v, wantErr %v", err, tt.wantErr) - return - } - for k, v := range got { - if !reflect.DeepEqual(v, tt.want[k]) { - t.Errorf("renderSnippets2_2() = %v, want %v", v, tt.want[k]) - } - } - }) - } -} diff --git a/licensediff/licensediff_test.go b/licensediff/licensediff_test.go index bd53f315..2142efc0 100644 --- a/licensediff/licensediff_test.go +++ b/licensediff/licensediff_test.go @@ -15,10 +15,10 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f1 := &spdx.File2_1{ FileName: "/project/file1.txt", FileSPDXIdentifier: spdx.ElementID("File561"), - FileChecksumSHA1: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + Checksums: []spdx.Checksum{{Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", Algorithm: spdx.SHA1}}, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ - "LicenseRef-We-will-ignore-LicenseInfoInFile", + LicenseInfoInFiles: []string{ + "LicenseRef-We-will-ignore-LicenseInfoInFiles", }, FileCopyrightText: "We'll ignore copyright values", } @@ -27,9 +27,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f2 := &spdx.File2_1{ FileName: "/project/file2.txt", FileSPDXIdentifier: spdx.ElementID("File562"), - FileChecksumSHA1: "066c5139bd9a43d15812ec1a1755b08ccf199824", + Checksums: []spdx.Checksum{{Value: "066c5139bd9a43d15812ec1a1755b08ccf199824", Algorithm: spdx.SHA1}}, LicenseConcluded: "GPL-2.0-or-later", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -39,9 +39,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f3 := &spdx.File2_1{ FileName: "/project/file3.txt", FileSPDXIdentifier: spdx.ElementID("File563"), - FileChecksumSHA1: "bd0f4863b15fad2b79b35303af54fcb5baaf7c68", + Checksums: []spdx.Checksum{{Value: "bd0f4863b15fad2b79b35303af54fcb5baaf7c68", Algorithm: spdx.SHA1}}, LicenseConcluded: "MPL-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -52,9 +52,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f4_1 := &spdx.File2_1{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksumSHA1: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", + Checksums: []spdx.Checksum{{Value: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", Algorithm: spdx.SHA1}}, LicenseConcluded: "MIT", - 
LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -62,9 +62,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f4_2 := &spdx.File2_1{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksumSHA1: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", + Checksums: []spdx.Checksum{{Value: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", Algorithm: spdx.SHA1}}, LicenseConcluded: "Apache-2.0 AND MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -75,9 +75,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f5_1 := &spdx.File2_1{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksumSHA1: "ba226db943bbbf455da77afab6f16dbab156d000", + Checksums: []spdx.Checksum{{Value: "ba226db943bbbf455da77afab6f16dbab156d000", Algorithm: spdx.SHA1}}, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -85,9 +85,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f5_2 := &spdx.File2_1{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksumSHA1: "b6e0ec7d085c5699b46f6f8d425413702652874d", + Checksums: []spdx.Checksum{{Value: "b6e0ec7d085c5699b46f6f8d425413702652874d", Algorithm: spdx.SHA1}}, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -98,9 +98,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f6_1 := &spdx.File2_1{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksumSHA1: "ba226db943bbbf455da77afab6f16dbab156d000", + Checksums: []spdx.Checksum{{Value: "ba226db943bbbf455da77afab6f16dbab156d000", Algorithm: spdx.SHA1}}, LicenseConcluded: "CC0-1.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -108,9 +108,9 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { f6_2 := &spdx.File2_1{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksumSHA1: "b6e0ec7d085c5699b46f6f8d425413702652874d", + Checksums: []spdx.Checksum{{Value: "b6e0ec7d085c5699b46f6f8d425413702652874d", Algorithm: spdx.SHA1}}, LicenseConcluded: "Unlicense", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -124,19 +124,19 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "abc123abc123", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f2.FileSPDXIdentifier): f2, - spdx.ElementID(f4_1.FileSPDXIdentifier): f4_1, - spdx.ElementID(f5_1.FileSPDXIdentifier): f5_1, - spdx.ElementID(f6_1.FileSPDXIdentifier): f6_1, + Files: []*spdx.File2_1{ + f1, + f2, + f4_1, + f5_1, + f6_1, }, } p2 := &spdx.Package2_1{ @@ -146,19 +146,19 @@ func Test2_1DifferCanCreateDiffPairs(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake 
the verification code for present purposes - PackageVerificationCode: "def456def456", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f3.FileSPDXIdentifier): f3, - spdx.ElementID(f4_2.FileSPDXIdentifier): f4_2, - spdx.ElementID(f5_2.FileSPDXIdentifier): f5_2, - spdx.ElementID(f6_2.FileSPDXIdentifier): f6_2, + Files: []*spdx.File2_1{ + f1, + f3, + f4_2, + f5_2, + f6_2, }, } @@ -254,10 +254,10 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f1 := &spdx.File2_1{ FileName: "/project/file1.txt", FileSPDXIdentifier: spdx.ElementID("File561"), - FileChecksumSHA1: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + Checksums: []spdx.Checksum{{Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", Algorithm: spdx.SHA1}}, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ - "LicenseRef-We-will-ignore-LicenseInfoInFile", + LicenseInfoInFiles: []string{ + "LicenseRef-We-will-ignore-LicenseInfoInFiles", }, FileCopyrightText: "We'll ignore copyright values", } @@ -266,9 +266,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f2 := &spdx.File2_1{ FileName: "/project/file2.txt", FileSPDXIdentifier: spdx.ElementID("File562"), - FileChecksumSHA1: "066c5139bd9a43d15812ec1a1755b08ccf199824", + Checksums: []spdx.Checksum{{Value: "066c5139bd9a43d15812ec1a1755b08ccf199824", Algorithm: spdx.SHA1}}, LicenseConcluded: "GPL-2.0-or-later", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -278,9 +278,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f3 := &spdx.File2_1{ FileName: "/project/file3.txt", FileSPDXIdentifier: spdx.ElementID("File563"), - FileChecksumSHA1: "bd0f4863b15fad2b79b35303af54fcb5baaf7c68", + Checksums: []spdx.Checksum{{Value: "bd0f4863b15fad2b79b35303af54fcb5baaf7c68", Algorithm: spdx.SHA1}}, LicenseConcluded: "MPL-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -291,9 +291,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f4_1 := &spdx.File2_1{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksumSHA1: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", + Checksums: []spdx.Checksum{{Value: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", Algorithm: spdx.SHA1}}, LicenseConcluded: "MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -301,9 +301,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f4_2 := &spdx.File2_1{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksumSHA1: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", + Checksums: []spdx.Checksum{{Value: "bc417a575ceae93435bcb7bfd382ac28cbdaa8b5", Algorithm: spdx.SHA1}}, LicenseConcluded: "Apache-2.0 AND MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -314,9 +314,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f5_1 := &spdx.File2_1{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksumSHA1: 
"ba226db943bbbf455da77afab6f16dbab156d000", + Checksums: []spdx.Checksum{{Value: "ba226db943bbbf455da77afab6f16dbab156d000", Algorithm: spdx.SHA1}}, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -324,9 +324,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f5_2 := &spdx.File2_1{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksumSHA1: "b6e0ec7d085c5699b46f6f8d425413702652874d", + Checksums: []spdx.Checksum{{Value: "b6e0ec7d085c5699b46f6f8d425413702652874d", Algorithm: spdx.SHA1}}, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -337,9 +337,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f6_1 := &spdx.File2_1{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksumSHA1: "ba226db943bbbf455da77afab6f16dbab156d000", + Checksums: []spdx.Checksum{{Value: "ba226db943bbbf455da77afab6f16dbab156d000", Algorithm: spdx.SHA1}}, LicenseConcluded: "CC0-1.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -347,9 +347,9 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { f6_2 := &spdx.File2_1{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksumSHA1: "b6e0ec7d085c5699b46f6f8d425413702652874d", + Checksums: []spdx.Checksum{{Value: "b6e0ec7d085c5699b46f6f8d425413702652874d", Algorithm: spdx.SHA1}}, LicenseConcluded: "Unlicense", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -363,19 +363,19 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "abc123abc123", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f2.FileSPDXIdentifier): f2, - spdx.ElementID(f4_1.FileSPDXIdentifier): f4_1, - spdx.ElementID(f5_1.FileSPDXIdentifier): f5_1, - spdx.ElementID(f6_1.FileSPDXIdentifier): f6_1, + Files: []*spdx.File2_1{ + f1, + f2, + f4_1, + f5_1, + f6_1, }, } p2 := &spdx.Package2_1{ @@ -385,19 +385,19 @@ func Test2_1DifferCanCreateDiffStructuredResults(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "def456def456", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f3.FileSPDXIdentifier): f3, - spdx.ElementID(f4_2.FileSPDXIdentifier): f4_2, - spdx.ElementID(f5_2.FileSPDXIdentifier): f5_2, - spdx.ElementID(f6_2.FileSPDXIdentifier): f6_2, + Files: []*spdx.File2_1{ + f1, + f3, + f4_2, + f5_2, + f6_2, }, } @@ -501,15 +501,14 @@ func 
Test2_2DifferCanCreateDiffPairs(t *testing.T) { f1 := &spdx.File2_2{ FileName: "/project/file1.txt", FileSPDXIdentifier: spdx.ElementID("File561"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ - "LicenseRef-We-will-ignore-LicenseInfoInFile", + LicenseInfoInFiles: []string{ + "LicenseRef-We-will-ignore-LicenseInfoInFiles", }, FileCopyrightText: "We'll ignore copyright values", } @@ -518,14 +517,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f2 := &spdx.File2_2{ FileName: "/project/file2.txt", FileSPDXIdentifier: spdx.ElementID("File562"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "GPL-2.0-or-later", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -535,14 +533,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f3 := &spdx.File2_2{ FileName: "/project/file3.txt", FileSPDXIdentifier: spdx.ElementID("File563"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "MPL-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -553,14 +550,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f4_1 := &spdx.File2_2{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -568,14 +564,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f4_2 := &spdx.File2_2{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Apache-2.0 AND MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -586,14 +581,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f5_1 := &spdx.File2_2{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: 
"6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -601,14 +595,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f5_2 := &spdx.File2_2{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -619,14 +612,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f6_1 := &spdx.File2_2{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "CC0-1.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -634,14 +626,13 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { f6_2 := &spdx.File2_2{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Unlicense", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -655,19 +646,19 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "abc123abc123", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f2.FileSPDXIdentifier): f2, - spdx.ElementID(f4_1.FileSPDXIdentifier): f4_1, - spdx.ElementID(f5_1.FileSPDXIdentifier): f5_1, - spdx.ElementID(f6_1.FileSPDXIdentifier): f6_1, + Files: []*spdx.File2_2{ + f1, + f2, + f4_1, + f5_1, + f6_1, }, } p2 := &spdx.Package2_2{ @@ -677,19 +668,19 @@ func Test2_2DifferCanCreateDiffPairs(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "def456def456", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f3.FileSPDXIdentifier): f3, - spdx.ElementID(f4_2.FileSPDXIdentifier): f4_2, - 
spdx.ElementID(f5_2.FileSPDXIdentifier): f5_2, - spdx.ElementID(f6_2.FileSPDXIdentifier): f6_2, + Files: []*spdx.File2_2{ + f1, + f3, + f4_2, + f5_2, + f6_2, }, } @@ -785,15 +776,14 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f1 := &spdx.File2_2{ FileName: "/project/file1.txt", FileSPDXIdentifier: spdx.ElementID("File561"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ - "LicenseRef-We-will-ignore-LicenseInfoInFile", + LicenseInfoInFiles: []string{ + "LicenseRef-We-will-ignore-LicenseInfoInFiles", }, FileCopyrightText: "We'll ignore copyright values", } @@ -802,14 +792,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f2 := &spdx.File2_2{ FileName: "/project/file2.txt", FileSPDXIdentifier: spdx.ElementID("File562"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "GPL-2.0-or-later", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -819,14 +808,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f3 := &spdx.File2_2{ FileName: "/project/file3.txt", FileSPDXIdentifier: spdx.ElementID("File563"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "MPL-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -837,14 +825,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f4_1 := &spdx.File2_2{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -852,14 +839,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f4_2 := &spdx.File2_2{ FileName: "/project/file4.txt", FileSPDXIdentifier: spdx.ElementID("File564"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Apache-2.0 AND MIT", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -871,7 +857,7 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), LicenseConcluded: 
"BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -879,15 +865,14 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f5_2 := &spdx.File2_2{ FileName: "/project/file5.txt", FileSPDXIdentifier: spdx.ElementID("File565"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -898,14 +883,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f6_1 := &spdx.File2_2{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "CC0-1.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -913,14 +897,13 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { f6_2 := &spdx.File2_2{ FileName: "/project/file6.txt", FileSPDXIdentifier: spdx.ElementID("File566"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", - }, + Checksums: []spdx.Checksum{{ + Algorithm: spdx.SHA1, + Value: "6c92dc8bc462b6889d9b1c0bc16c54d19a2cbdd3", + }, }, LicenseConcluded: "Unlicense", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "NOASSERTION", }, FileCopyrightText: "NOASSERTION", @@ -934,19 +917,19 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "abc123abc123", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "abc123abc123"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f2.FileSPDXIdentifier): f2, - spdx.ElementID(f4_1.FileSPDXIdentifier): f4_1, - spdx.ElementID(f5_1.FileSPDXIdentifier): f5_1, - spdx.ElementID(f6_1.FileSPDXIdentifier): f6_1, + Files: []*spdx.File2_2{ + f1, + f2, + f4_1, + f5_1, + f6_1, }, } p2 := &spdx.Package2_2{ @@ -956,19 +939,19 @@ func Test2_2DifferCanCreateDiffStructuredResults(t *testing.T) { FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, // fake the verification code for present purposes - PackageVerificationCode: "def456def456", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "def456def456"}, PackageLicenseConcluded: "NOASSERTION", PackageLicenseInfoFromFiles: []string{ "NOASSERTION", }, PackageLicenseDeclared: "NOASSERTION", PackageCopyrightText: "NOASSERTION", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID(f1.FileSPDXIdentifier): f1, - spdx.ElementID(f3.FileSPDXIdentifier): f3, - spdx.ElementID(f4_2.FileSPDXIdentifier): f4_2, - 
spdx.ElementID(f5_2.FileSPDXIdentifier): f5_2, - spdx.ElementID(f6_2.FileSPDXIdentifier): f6_2, + Files: []*spdx.File2_2{ + f1, + f3, + f4_2, + f5_2, + f6_2, }, } diff --git a/rdfloader/parser2v2/license_utils.go b/rdfloader/parser2v2/license_utils.go index 80b0a710..0a823efe 100644 --- a/rdfloader/parser2v2/license_utils.go +++ b/rdfloader/parser2v2/license_utils.go @@ -5,6 +5,7 @@ package parser2v2 import ( "fmt" gordfParser "github.com/spdx/gordf/rdfloader/parser" + "github.com/spdx/tools-golang/spdx" "strings" ) @@ -25,7 +26,7 @@ func getLicenseStringFromURI(uri string) string { // returns the checksum algorithm and it's value // In the newer versions, these two strings will be bound to a single checksum struct // whose pointer will be returned. -func (parser *rdfParser2_2) getChecksumFromNode(checksumNode *gordfParser.Node) (algorithm string, value string, err error) { +func (parser *rdfParser2_2) getChecksumFromNode(checksumNode *gordfParser.Node) (algorithm spdx.ChecksumAlgorithm, value string, err error) { var checksumValue, checksumAlgorithm string for _, checksumTriple := range parser.nodeToTriples(checksumNode) { switch checksumTriple.Predicate.ID { @@ -45,7 +46,7 @@ func (parser *rdfParser2_2) getChecksumFromNode(checksumNode *gordfParser.Node) return } } - return checksumAlgorithm, checksumValue, nil + return spdx.ChecksumAlgorithm(checksumAlgorithm), checksumValue, nil } func getAlgorithmFromURI(algorithmURI string) (checksumAlgorithm string, err error) { diff --git a/rdfloader/parser2v2/parse_annotation.go b/rdfloader/parser2v2/parse_annotation.go index 92a56100..18e4533d 100644 --- a/rdfloader/parser2v2/parse_annotation.go +++ b/rdfloader/parser2v2/parse_annotation.go @@ -59,8 +59,8 @@ func setAnnotatorFromString(annotatorString string, ann *spdx.Annotation2_2) err return err } if subkey == "Person" || subkey == "Organization" || subkey == "Tool" { - ann.AnnotatorType = subkey - ann.Annotator = subvalue + ann.Annotator.AnnotatorType = subkey + ann.Annotator.Annotator = subvalue return nil } return fmt.Errorf("unrecognized Annotator type %v while parsing annotation", subkey) diff --git a/rdfloader/parser2v2/parse_annotation_test.go b/rdfloader/parser2v2/parse_annotation_test.go index 49a099d4..0226d856 100644 --- a/rdfloader/parser2v2/parse_annotation_test.go +++ b/rdfloader/parser2v2/parse_annotation_test.go @@ -31,10 +31,10 @@ func Test_setAnnotatorFromString(t *testing.T) { if err != nil { t.Errorf("unexpected error for a valid annotator") } - if ann.AnnotatorType != "Person" { + if ann.Annotator.AnnotatorType != "Person" { t.Errorf("wrnog annotator type: expected: %s, found: %s", "Person", ann.Annotator) } - if ann.Annotator != "Rishabh" { + if ann.Annotator.Annotator != "Rishabh" { t.Errorf("wrong annotator: expected: %s, found: %s", "Rishabh", ann.Annotator) } } @@ -169,11 +169,11 @@ func Test_rdfParser2_2_parseAnnotationFromNode(t *testing.T) { t.Errorf(`expected: "%s", found "%s"`, expectedDate, ann.AnnotationDate) } expectedAnnotator := "Jane Doe" - if expectedAnnotator != ann.Annotator { + if expectedAnnotator != ann.Annotator.Annotator { t.Errorf(`expected: "%s", found "%s"`, expectedAnnotator, ann.Annotator) } - if ann.AnnotatorType != "Person" { - t.Errorf(`expected: "%s", found "%s"`, "Person", ann.AnnotatorType) + if ann.Annotator.AnnotatorType != "Person" { + t.Errorf(`expected: "%s", found "%s"`, "Person", ann.Annotator.AnnotatorType) } expectedAnnotationType := "OTHER" if expectedAnnotationType != ann.AnnotationType { diff --git 
a/rdfloader/parser2v2/parse_creation_info.go b/rdfloader/parser2v2/parse_creation_info.go index 45c39b10..dc4da77e 100644 --- a/rdfloader/parser2v2/parse_creation_info.go +++ b/rdfloader/parser2v2/parse_creation_info.go @@ -35,20 +35,22 @@ func (parser *rdfParser2_2) parseCreationInfoFromNode(ci *spdx.CreationInfo2_2, return nil } -func setCreator(creator string, ci *spdx.CreationInfo2_2) error { - entityType, entity, err := ExtractSubs(creator, ":") +func setCreator(creatorStr string, ci *spdx.CreationInfo2_2) error { + entityType, entity, err := ExtractSubs(creatorStr, ":") if err != nil { return fmt.Errorf("error setting creator of a creation info: %s", err) } + + creator := spdx.Creator{Creator: entity} + switch entityType { - case "Person": - ci.CreatorPersons = append(ci.CreatorPersons, entity) - case "Organization": - ci.CreatorOrganizations = append(ci.CreatorOrganizations, entity) - case "Tool": - ci.CreatorTools = append(ci.CreatorTools, entity) + case "Person", "Organization", "Tool": + creator.CreatorType = entityType default: return fmt.Errorf("unknown creatorType %v in a creation info", entityType) } + + ci.Creators = append(ci.Creators, creator) + return nil } diff --git a/rdfloader/parser2v2/parse_creation_info_test.go b/rdfloader/parser2v2/parse_creation_info_test.go index 9ea62d91..415d18ed 100644 --- a/rdfloader/parser2v2/parse_creation_info_test.go +++ b/rdfloader/parser2v2/parse_creation_info_test.go @@ -29,12 +29,12 @@ func Test_setCreator(t *testing.T) { if err != nil { t.Errorf("error parsing a valid input: %v", err) } - if len(ci.CreatorPersons) != 1 { - t.Errorf("creationInfo should've had 1 creatorPersons, found %d", len(ci.CreatorPersons)) + if len(ci.Creators) != 1 { + t.Errorf("creationInfo should've had 1 creatorPersons, found %d", len(ci.Creators)) } expectedPerson := "Jane Doe" - if ci.CreatorPersons[0] != expectedPerson { - t.Errorf("expected %s, found %s", expectedPerson, ci.CreatorPersons[0]) + if ci.Creators[0].Creator != expectedPerson { + t.Errorf("expected %s, found %s", expectedPerson, ci.Creators[0]) } } @@ -88,12 +88,12 @@ func Test_rdfParser2_2_parseCreationInfoFromNode(t *testing.T) { if ci.LicenseListVersion != "2.6" { t.Errorf(`expected %s, found %s`, "2.6", ci.LicenseListVersion) } - n := len(ci.CreatorPersons) + n := len(ci.Creators) if n != 1 { t.Errorf("expected 1 creatorPersons, found %d", n) } - if ci.CreatorPersons[0] != "fossy" { - t.Errorf("expected %s, found %s", "fossy", ci.CreatorPersons[0]) + if ci.Creators[0].Creator != "fossy" { + t.Errorf("expected %s, found %s", "fossy", ci.Creators[0].Creator) } expectedCreated := "2018-08-24T19:55:34Z" if ci.Created != expectedCreated { diff --git a/rdfloader/parser2v2/parse_file.go b/rdfloader/parser2v2/parse_file.go index b3b0e498..a149712d 100644 --- a/rdfloader/parser2v2/parse_file.go +++ b/rdfloader/parser2v2/parse_file.go @@ -56,7 +56,7 @@ func (parser *rdfParser2_2) getFileFromNode(fileNode *gordfParser.Node) (file *s // cardinality: min 0 fileType := "" fileType, err = parser.getFileTypeFromUri(subTriple.Object.ID) - file.FileType = append(file.FileType, fileType) + file.FileTypes = append(file.FileTypes, fileType) case SPDX_CHECKSUM: // 4.4 // cardinality: min 1 err = parser.setFileChecksumFromNode(file, subTriple.Object) @@ -73,7 +73,7 @@ func (parser *rdfParser2_2) getFileFromNode(fileNode *gordfParser.Node) (file *s if err != nil { return nil, fmt.Errorf("error parsing licenseInfoInFile: %v", err) } - file.LicenseInfoInFile = append(file.LicenseInfoInFile, lic.ToLicenseString()) 
+ file.LicenseInfoInFiles = append(file.LicenseInfoInFiles, lic.ToLicenseString()) case SPDX_LICENSE_COMMENTS: // 4.7 // cardinality: max 1 file.LicenseComments = subTriple.Object.ID @@ -97,7 +97,7 @@ func (parser *rdfParser2_2) getFileFromNode(fileNode *gordfParser.Node) (file *s file.FileNotice = getNoticeTextFromNode(subTriple.Object) case SPDX_FILE_CONTRIBUTOR: // 4.14 // cardinality: min 0 - file.FileContributor = append(file.FileContributor, subTriple.Object.ID) + file.FileContributors = append(file.FileContributors, subTriple.Object.ID) case SPDX_FILE_DEPENDENCY: // cardinality: min 0 newFile, err := parser.getFileFromNode(subTriple.Object) @@ -130,13 +130,12 @@ func (parser *rdfParser2_2) setFileChecksumFromNode(file *spdx.File2_2, checksum if err != nil { return fmt.Errorf("error parsing checksumNode of a file: %v", err) } - if file.FileChecksums == nil { - file.FileChecksums = map[spdx.ChecksumAlgorithm]spdx.Checksum{} + if file.Checksums == nil { + file.Checksums = []spdx.Checksum{} } switch checksumAlgorithm { case spdx.MD5, spdx.SHA1, spdx.SHA256: - algorithm := spdx.ChecksumAlgorithm(checksumAlgorithm) - file.FileChecksums[algorithm] = spdx.Checksum{Algorithm: algorithm, Value: checksumValue} + file.Checksums = append(file.Checksums, spdx.Checksum{Algorithm: checksumAlgorithm, Value: checksumValue}) case "": return fmt.Errorf("empty checksum algorithm and value") default: @@ -176,13 +175,13 @@ func (parser *rdfParser2_2) getFileTypeFromUri(uri string) (string, error) { return strings.TrimPrefix(lastPart, "fileType_"), nil } -// populates parser.doc.UnpackagedFiles by a list of files which are not +// populates parser.doc.Files by a list of files which are not // associated with a package by the hasFile attribute // assumes: all the packages are already parsed. func (parser *rdfParser2_2) setUnpackagedFiles() { for fileID := range parser.files { if !parser.assocWithPackage[fileID] { - parser.doc.UnpackagedFiles[fileID] = parser.files[fileID] + parser.doc.Files = append(parser.doc.Files, parser.files[fileID]) } } } diff --git a/rdfloader/parser2v2/parse_file_test.go b/rdfloader/parser2v2/parse_file_test.go index 069eb268..8c5ea0df 100644 --- a/rdfloader/parser2v2/parse_file_test.go +++ b/rdfloader/parser2v2/parse_file_test.go @@ -157,18 +157,18 @@ func Test_rdfParser2_2_setUnpackagedFiles(t *testing.T) { rdfParser.setUnpackagedFiles() - // after setting unpackaged files, parser.doc.UnpackagedFiles must've file2 and file3 - if n := len(rdfParser.doc.UnpackagedFiles); n != 2 { + // after setting unpackaged files, parser.doc.Files must've file2 and file3 + if n := len(rdfParser.doc.Files); n != 2 { t.Errorf("unpackage files should've had 2 files, found %d files", n) } // checking if the unpackagedFiles contain only file2 & file3. 
- for fileID, _ := range rdfParser.doc.UnpackagedFiles { - switch string(fileID) { + for _, file := range rdfParser.doc.Files { + switch string(file.FileSPDXIdentifier) { case "file2", "file3": continue default: - t.Errorf("unexpected file with id %s found in unpackaged files", fileID) + t.Errorf("unexpected file with id %s found in unpackaged files", file.FileSPDXIdentifier) } } } @@ -207,7 +207,7 @@ func Test_rdfParser2_2_setFileChecksumFromNode(t *testing.T) { t.Errorf("error parsing a valid checksum node") } checksumValue := "d2356e0fe1c0b85285d83c6b2ad51b5f" - for _, checksum := range file.FileChecksums { + for _, checksum := range file.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != "" { @@ -237,7 +237,7 @@ func Test_rdfParser2_2_setFileChecksumFromNode(t *testing.T) { if err != nil { t.Errorf("error parsing a valid checksum node") } - for _, checksum := range file.FileChecksums { + for _, checksum := range file.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != checksumValue { @@ -267,7 +267,7 @@ func Test_rdfParser2_2_setFileChecksumFromNode(t *testing.T) { if err != nil { t.Errorf("error parsing a valid checksum node") } - for _, checksum := range file.FileChecksums { + for _, checksum := range file.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != checksumValue { @@ -508,12 +508,12 @@ func Test_rdfParser2_2_getFileFromNode(t *testing.T) { t.Errorf("expected %s, found %s", expectedLicenseConcluded, file.LicenseConcluded) } expectedFileType := "source" - if file.FileType[0] != expectedFileType { - t.Errorf("expected %s, found %s", expectedFileType, file.FileType) + if file.FileTypes[0] != expectedFileType { + t.Errorf("expected %s, found %s", expectedFileType, file.FileTypes) } expectedLicenseInfoInFile := "NOASSERTION" - if file.LicenseInfoInFile[0] != expectedLicenseInfoInFile { - t.Errorf("expected %s, found %s", expectedLicenseInfoInFile, file.LicenseInfoInFile[0]) + if file.LicenseInfoInFiles[0] != expectedLicenseInfoInFile { + t.Errorf("expected %s, found %s", expectedLicenseInfoInFile, file.LicenseInfoInFiles[0]) } // TestCase 12: checking if recursive dependencies are resolved. @@ -590,17 +590,17 @@ func Test_rdfParser2_2_getFileFromNode(t *testing.T) { t.Errorf("expected %s, found %s", expectedFileName, file.FileName) } - if len(file.FileType) != 1 { - t.Errorf("given file should have 1 fileType attribute. found %d", len(file.FileType)) + if len(file.FileTypes) != 1 { + t.Errorf("given file should have 1 fileType attribute. found %d", len(file.FileTypes)) } expectedFileType = "source" - if file.FileType[0] != expectedFileType { - t.Errorf("expected %s, found %s", expectedFileType, file.FileType) + if file.FileTypes[0] != expectedFileType { + t.Errorf("expected %s, found %s", expectedFileType, file.FileTypes) } expectedChecksum := "0a3a0e1ab72b7c132f5021c538a7a3ea6d539bcd" - for _, checksum := range file.FileChecksums { + for _, checksum := range file.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != expectedChecksum { @@ -614,12 +614,12 @@ func Test_rdfParser2_2_getFileFromNode(t *testing.T) { t.Errorf("expected %s, found %s", expectedLicenseConcluded, file.LicenseConcluded) } - if len(file.LicenseInfoInFile) != 1 { - t.Errorf("given file should have 1 licenseInfoInFile attribute. found %d", len(file.LicenseInfoInFile)) + if len(file.LicenseInfoInFiles) != 1 { + t.Errorf("given file should have 1 licenseInfoInFile attribute. 
found %d", len(file.LicenseInfoInFiles)) } expectedLicenseInfoInFile = "NOASSERTION" - if file.LicenseInfoInFile[0] != expectedLicenseInfoInFile { - t.Errorf("expected %s, found %s", expectedLicenseInfoInFile, file.LicenseInfoInFile[0]) + if file.LicenseInfoInFiles[0] != expectedLicenseInfoInFile { + t.Errorf("expected %s, found %s", expectedLicenseInfoInFile, file.LicenseInfoInFiles[0]) } expectedLicenseComments := "no comments" @@ -657,12 +657,12 @@ func Test_rdfParser2_2_getFileFromNode(t *testing.T) { t.Errorf("expected %s, found %s", expectedNoticeText, file.FileNotice) } - if n := len(file.FileContributor); n != 1 { + if n := len(file.FileContributors); n != 1 { t.Errorf("given file should have 1 fileContributor. found %d", n) } expectedFileContributor := "Some Organization" - if file.FileContributor[0] != expectedFileContributor { - t.Errorf("expected %s, found %s", expectedFileContributor, file.FileContributor) + if file.FileContributors[0] != expectedFileContributor { + t.Errorf("expected %s, found %s", expectedFileContributor, file.FileContributors) } if n := len(file.FileDependencies); n != 1 { @@ -698,12 +698,12 @@ func Test_rdfParser2_2_getFileFromNode(t *testing.T) { t.Errorf("expected %s, found %s", expectedAnnotationType, ann.AnnotationType) } expectedAnnotator := "File Commenter" - if ann.Annotator != expectedAnnotator { + if ann.Annotator.Annotator != expectedAnnotator { t.Errorf("expected %s, found %s", expectedAnnotator, ann.Annotator) } expectedAnnotatorType := "Person" if ann.AnnotationType != expectedAnnotationType { - t.Errorf("expected %s, found %s", expectedAnnotatorType, ann.AnnotatorType) + t.Errorf("expected %s, found %s", expectedAnnotatorType, ann.Annotator.AnnotatorType) } if n := len(parser.doc.Relationships); n != 1 { diff --git a/rdfloader/parser2v2/parse_package.go b/rdfloader/parser2v2/parse_package.go index 47a2af84..41ccab30 100644 --- a/rdfloader/parser2v2/parse_package.go +++ b/rdfloader/parser2v2/parse_package.go @@ -40,8 +40,14 @@ func (parser *rdfParser2_2) getPackageFromNode(packageNode *gordfParser.Node) (p } pkg.PackageSPDXIdentifier = eId // 3.2 - if existingPkg := parser.doc.Packages[eId]; existingPkg != nil { - pkg = existingPkg + // check if we already have a package initialized for this ID + existingPackageIndex := -1 + for ii, existingPkg := range parser.doc.Packages { + if existingPkg != nil && existingPkg.PackageSPDXIdentifier == eId { + existingPackageIndex = ii + pkg = existingPkg + break + } } // iterate over all the triples associated with the provided package packageNode. 
@@ -150,7 +156,12 @@ func (parser *rdfParser2_2) getPackageFromNode(packageNode *gordfParser.Node) (p } } - parser.doc.Packages[pkg.PackageSPDXIdentifier] = pkg + if existingPackageIndex != -1 { + parser.doc.Packages[existingPackageIndex] = pkg + } else { + parser.doc.Packages = append(parser.doc.Packages, pkg) + } + return pkg, nil } @@ -199,10 +210,10 @@ func (parser *rdfParser2_2) setPackageVerificationCode(pkg *spdx.Package2_2, nod switch subTriple.Predicate.ID { case SPDX_PACKAGE_VERIFICATION_CODE_VALUE: // cardinality: exactly 1 - pkg.PackageVerificationCode = subTriple.Object.ID + pkg.PackageVerificationCode.Value = subTriple.Object.ID case SPDX_PACKAGE_VERIFICATION_CODE_EXCLUDED_FILE: // cardinality: min 0 - pkg.PackageVerificationCodeExcludedFile = subTriple.Object.ID + pkg.PackageVerificationCode.ExcludedFiles = append(pkg.PackageVerificationCode.ExcludedFiles, subTriple.Object.ID) case RDF_TYPE: // cardinality: exactly 1 continue @@ -217,9 +228,9 @@ func (parser *rdfParser2_2) setPackageVerificationCode(pkg *spdx.Package2_2, nod // file to indicate the file is associated with a package func (parser *rdfParser2_2) setFileToPackage(pkg *spdx.Package2_2, file *spdx.File2_2) { if pkg.Files == nil { - pkg.Files = map[spdx.ElementID]*spdx.File2_2{} + pkg.Files = []*spdx.File2_2{} } - pkg.Files[file.FileSPDXIdentifier] = file + pkg.Files = append(pkg.Files, file) parser.assocWithPackage[file.FileSPDXIdentifier] = true } @@ -228,22 +239,27 @@ func (parser *rdfParser2_2) setFileToPackage(pkg *spdx.Package2_2, file *spdx.Fi // value: [NOASSERTION | [Person | Organization]: string] func setPackageSupplier(pkg *spdx.Package2_2, value string) error { value = strings.TrimSpace(value) + supplier := &spdx.Supplier{} if strings.ToUpper(value) == "NOASSERTION" { - pkg.PackageSupplierNOASSERTION = true + supplier.Supplier = "NOASSERTION" + pkg.PackageSupplier = supplier return nil } + subKey, subValue, err := ExtractSubs(value, ":") if err != nil { return fmt.Errorf("package supplier must be of the form NOASSERTION or [Person|Organization]: string. found: %s", value) } switch subKey { - case "Person": - pkg.PackageSupplierPerson = subValue - case "Organization": - pkg.PackageSupplierOrganization = subValue + case "Person", "Organization": + supplier.Supplier = subValue + supplier.SupplierType = subKey default: return fmt.Errorf("unknown supplier %s", subKey) } + + pkg.PackageSupplier = supplier + return nil } @@ -252,23 +268,27 @@ func setPackageSupplier(pkg *spdx.Package2_2, value string) error { // value: [NOASSERTION | [Person | Organization]: string] func setPackageOriginator(pkg *spdx.Package2_2, value string) error { value = strings.TrimSpace(value) + originator := &spdx.Originator{} if strings.ToUpper(value) == "NOASSERTION" { - pkg.PackageOriginatorNOASSERTION = true + originator.Originator = "NOASSERTION" + pkg.PackageOriginator = originator return nil } + subKey, subValue, err := ExtractSubs(value, ":") if err != nil { - return fmt.Errorf("package originator must be of the form NOASSERTION or [Person|Organization]: string. found: %s", value) + return fmt.Errorf("package Originator must be of the form NOASSERTION or [Person|Organization]: string. 
found: %s", value) } - switch subKey { - case "Person": - pkg.PackageOriginatorPerson = subValue - case "Organization": - pkg.PackageOriginatorOrganization = subValue + case "Person", "Organization": + originator.Originator = subValue + originator.OriginatorType = subKey default: - return fmt.Errorf("originator can be either a Person or Organization. found %s", subKey) + return fmt.Errorf("unknown Originator %s", subKey) } + + pkg.PackageOriginator = originator + return nil } @@ -302,12 +322,11 @@ func (parser *rdfParser2_2) setPackageChecksum(pkg *spdx.Package2_2, node *gordf return fmt.Errorf("error getting checksum algorithm and value from %v", node) } if pkg.PackageChecksums == nil { - pkg.PackageChecksums = make(map[spdx.ChecksumAlgorithm]spdx.Checksum) + pkg.PackageChecksums = make([]spdx.Checksum, 0, 1) } switch checksumAlgorithm { case spdx.MD5, spdx.SHA1, spdx.SHA256: - algorithm := spdx.ChecksumAlgorithm(checksumAlgorithm) - pkg.PackageChecksums[algorithm] = spdx.Checksum{Algorithm: algorithm, Value: checksumValue} + pkg.PackageChecksums = append(pkg.PackageChecksums, spdx.Checksum{Algorithm: checksumAlgorithm, Value: checksumValue}) default: return fmt.Errorf("unknown checksumAlgorithm %s while parsing a package", checksumAlgorithm) } diff --git a/rdfloader/parser2v2/parse_package_test.go b/rdfloader/parser2v2/parse_package_test.go index 97447608..c1bc7ed6 100644 --- a/rdfloader/parser2v2/parse_package_test.go +++ b/rdfloader/parser2v2/parse_package_test.go @@ -19,8 +19,8 @@ func Test_setPackageSupplier(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } - if !pkg.PackageSupplierNOASSERTION { - t.Errorf("PackageSupplierNOASSERTION must've been set to true") + if pkg.PackageSupplier.Supplier != "NOASSERTION" { + t.Errorf("PackageSupplier must've been set to NOASSERTION") } // TestCase 2: lower-case noassertion must also set the @@ -30,8 +30,8 @@ func Test_setPackageSupplier(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } - if !pkg.PackageSupplierNOASSERTION { - t.Errorf("PackageSupplierNOASSERTION must've been set to true") + if pkg.PackageSupplier.Supplier != "NOASSERTION" { + t.Errorf("PackageSupplier must've been set to NOASSERTION") } // TestCase 3: invalid input without colon separator. must raise an error @@ -50,8 +50,8 @@ func Test_setPackageSupplier(t *testing.T) { if err != nil { t.Errorf("unexpected error: %v", err) } - if pkg.PackageSupplierPerson != personName { - t.Errorf("PackageSupplierPerson should be %s. found %s", personName, pkg.PackageSupplierPerson) + if pkg.PackageSupplier.Supplier != personName { + t.Errorf("PackageSupplierPerson should be %s. found %s", personName, pkg.PackageSupplier.Supplier) } // TestCase 5: Valid Organization @@ -62,8 +62,8 @@ func Test_setPackageSupplier(t *testing.T) { if err != nil { t.Errorf("unexpected error: %v", err) } - if pkg.PackageSupplierOrganization != orgName { - t.Errorf("PackageSupplierPerson should be %s. found %s", orgName, pkg.PackageSupplierOrganization) + if pkg.PackageSupplier.Supplier != orgName { + t.Errorf("PackageSupplierPerson should be %s. 
found %s", orgName, pkg.PackageSupplier.Supplier) } // TestCase 6: Invalid EntityType @@ -84,8 +84,8 @@ func Test_setPackageOriginator(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } - if !pkg.PackageOriginatorNOASSERTION { - t.Errorf("PackageOriginatorNOASSERTION must've been set to true") + if pkg.PackageOriginator.Originator != "NOASSERTION" { + t.Errorf("PackageOriginator must've been set to NOASSERTION") } // TestCase 2: lower-case noassertion must also set the @@ -95,8 +95,8 @@ func Test_setPackageOriginator(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } - if !pkg.PackageOriginatorNOASSERTION { - t.Errorf("PackageOriginatorNOASSERTION must've been set to true") + if pkg.PackageOriginator.Originator != "NOASSERTION" { + t.Errorf("PackageOriginator must've been set to NOASSERTION") } // TestCase 3: invalid input without colon separator. must raise an error @@ -115,8 +115,8 @@ func Test_setPackageOriginator(t *testing.T) { if err != nil { t.Errorf("unexpected error: %v", err) } - if pkg.PackageOriginatorPerson != personName { - t.Errorf("PackageOriginatorPerson should be %s. found %s", personName, pkg.PackageOriginatorPerson) + if pkg.PackageOriginator.Originator != personName { + t.Errorf("PackageOriginatorPerson should be %s. found %s", personName, pkg.PackageOriginator.Originator) } // TestCase 5: Valid Organization @@ -127,8 +127,8 @@ func Test_setPackageOriginator(t *testing.T) { if err != nil { t.Errorf("unexpected error: %v", err) } - if pkg.PackageOriginatorOrganization != orgName { - t.Errorf("PackageOriginatorOrganization should be %s. found %s", orgName, pkg.PackageOriginatorOrganization) + if pkg.PackageOriginator.Originator != orgName { + t.Errorf("PackageOriginatorOrganization should be %s. 
found %s", orgName, pkg.PackageOriginator.Originator) } // TestCase 6: Invalid EntityType @@ -175,12 +175,12 @@ func Test_rdfParser2_2_setPackageVerificationCode(t *testing.T) { t.Errorf("unexpected error: %v", err) } expectedValue := "cbceb8b5689b75a584efe35587b5d41bd48820ce" - if pkg.PackageVerificationCode != expectedValue { + if pkg.PackageVerificationCode.Value != expectedValue { t.Errorf("expected %v, got %v", expectedValue, pkg.PackageVerificationCode) } expectedExcludedFile := "./package.spdx" - if pkg.PackageVerificationCodeExcludedFile != expectedExcludedFile { - t.Errorf("expected %v, got %v", expectedExcludedFile, pkg.PackageVerificationCodeExcludedFile) + if pkg.PackageVerificationCode.ExcludedFiles[0] != expectedExcludedFile { + t.Errorf("expected %v, got %v", expectedExcludedFile, pkg.PackageVerificationCode.ExcludedFiles) } } diff --git a/rdfloader/parser2v2/parse_snippet_info.go b/rdfloader/parser2v2/parse_snippet_info.go index d9c82794..a09d6716 100644 --- a/rdfloader/parser2v2/parse_snippet_info.go +++ b/rdfloader/parser2v2/parse_snippet_info.go @@ -31,7 +31,8 @@ func (parser *rdfParser2_2) getSnippetInformationFromNode2_2(node *gordfParser.N if err != nil { return nil, err } - si.SnippetFromFileSPDXIdentifier, err = ExtractDocElementID(getLastPartOfURI(siTriple.Object.ID)) + docElemID, err := ExtractDocElementID(getLastPartOfURI(siTriple.Object.ID)) + si.SnippetFromFileSPDXIdentifier = docElemID.ElementRefID case SPDX_RANGE: // cardinality: min 1 err = parser.setSnippetRangeFromNode(siTriple.Object, si) @@ -131,12 +132,17 @@ func (parser *rdfParser2_2) setSnippetRangeFromNode(node *gordfParser.Node, si * return fmt.Errorf("start and end range type doesn't match") } + si.Ranges = []spdx.SnippetRange{{ + StartPointer: spdx.SnippetRangePointer{FileSPDXIdentifier: si.SnippetFromFileSPDXIdentifier}, + EndPointer: spdx.SnippetRangePointer{FileSPDXIdentifier: si.SnippetFromFileSPDXIdentifier}, + }} + if startRangeType == LINE_RANGE { - si.SnippetLineRangeStart = start - si.SnippetLineRangeEnd = end + si.Ranges[0].StartPointer.LineNumber = start + si.Ranges[0].EndPointer.LineNumber = end } else { - si.SnippetByteRangeStart = start - si.SnippetByteRangeEnd = end + si.Ranges[0].StartPointer.Offset = start + si.Ranges[0].EndPointer.Offset = end } return nil } diff --git a/rdfloader/parser2v2/parse_spdx_document.go b/rdfloader/parser2v2/parse_spdx_document.go index e98fbf61..61593172 100644 --- a/rdfloader/parser2v2/parse_spdx_document.go +++ b/rdfloader/parser2v2/parse_spdx_document.go @@ -18,8 +18,8 @@ func (parser *rdfParser2_2) parseSpdxDocumentNode(spdxDocNode *gordfParser.Node) if err != nil { return err } - ci.DocumentNamespace = baseUri // 2.5 - ci.SPDXIdentifier = spdx.ElementID(offset) // 2.3 + parser.doc.DocumentNamespace = baseUri // 2.5 + parser.doc.SPDXIdentifier = spdx.ElementID(offset) // 2.3 // parse other associated triples. 
for _, subTriple := range parser.nodeToTriples(spdxDocNode) { @@ -29,17 +29,17 @@ func (parser *rdfParser2_2) parseSpdxDocumentNode(spdxDocNode *gordfParser.Node) continue case SPDX_SPEC_VERSION: // 2.1: specVersion // cardinality: exactly 1 - ci.SPDXVersion = objectValue + parser.doc.SPDXVersion = objectValue case SPDX_DATA_LICENSE: // 2.2: dataLicense // cardinality: exactly 1 dataLicense, err := parser.getAnyLicenseFromNode(subTriple.Object) if err != nil { return err } - ci.DataLicense = dataLicense.ToLicenseString() + parser.doc.DataLicense = dataLicense.ToLicenseString() case SPDX_NAME: // 2.4: DocumentName // cardinality: exactly 1 - ci.DocumentName = objectValue + parser.doc.DocumentName = objectValue case SPDX_EXTERNAL_DOCUMENT_REF: // 2.6: externalDocumentReferences // cardinality: min 0 var extRef spdx.ExternalDocumentRef2_2 @@ -47,13 +47,13 @@ func (parser *rdfParser2_2) parseSpdxDocumentNode(spdxDocNode *gordfParser.Node) if err != nil { return err } - ci.ExternalDocumentReferences[extRef.DocumentRefID] = extRef + parser.doc.ExternalDocumentReferences = append(parser.doc.ExternalDocumentReferences, extRef) case SPDX_CREATION_INFO: // 2.7 - 2.10: // cardinality: exactly 1 err = parser.parseCreationInfoFromNode(ci, subTriple.Object) case RDFS_COMMENT: // 2.11: Document Comment // cardinality: max 1 - ci.DocumentComment = objectValue + parser.doc.DocumentComment = objectValue case SPDX_REVIEWED: // reviewed: // cardinality: min 0 err = parser.setReviewFromNode(subTriple.Object) @@ -64,7 +64,7 @@ func (parser *rdfParser2_2) parseSpdxDocumentNode(spdxDocNode *gordfParser.Node) if err != nil { return err } - parser.doc.Packages[pkg.PackageSPDXIdentifier] = pkg + parser.doc.Packages = append(parser.doc.Packages, pkg) case SPDX_HAS_EXTRACTED_LICENSING_INFO: // hasExtractedLicensingInfo // cardinality: min 0 extractedLicensingInfo, err := parser.getExtractedLicensingInfoFromNode(subTriple.Object) @@ -102,10 +102,12 @@ func (parser *rdfParser2_2) getExternalDocumentRefFromNode(node *gordfParser.Nod edr.URI = triple.Object.ID case SPDX_CHECKSUM: // cardinality: exactly 1 - edr.Alg, edr.Checksum, err = parser.getChecksumFromNode(triple.Object) + alg, checksum, err := parser.getChecksumFromNode(triple.Object) if err != nil { return edr, err } + edr.Checksum.Value = checksum + edr.Checksum.Algorithm = alg case RDF_TYPE: continue default: diff --git a/rdfloader/parser2v2/parser.go b/rdfloader/parser2v2/parser.go index ec4f7ff9..6329dc49 100644 --- a/rdfloader/parser2v2/parser.go +++ b/rdfloader/parser2v2/parser.go @@ -16,19 +16,18 @@ func NewParser2_2(gordfParserObj *gordfParser.Parser, nodeToTriples map[string][ gordfParserObj: gordfParserObj, nodeStringToTriples: nodeToTriples, doc: &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{}, - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{}, - UnpackagedFiles: map[spdx.ElementID]*spdx.File2_2{}, - OtherLicenses: []*spdx.OtherLicense2_2{}, - Relationships: []*spdx.Relationship2_2{}, - Annotations: []*spdx.Annotation2_2{}, - Reviews: []*spdx.Review2_2{}, + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{}, + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{}, + Files: []*spdx.File2_2{}, + OtherLicenses: []*spdx.OtherLicense2_2{}, + Relationships: []*spdx.Relationship2_2{}, + Annotations: []*spdx.Annotation2_2{}, + Reviews: []*spdx.Review2_2{}, }, files: map[spdx.ElementID]*spdx.File2_2{}, assocWithPackage: map[spdx.ElementID]bool{}, - 
cache: map[string]*nodeState{}, + cache: map[string]*nodeState{}, } return &parser } @@ -66,7 +65,7 @@ func LoadFromGoRDFParser(gordfParserObj *gordfParser.Parser) (*spdx.Document2_2, if err != nil { return nil, fmt.Errorf("error parsing a snippet: %v", err) } - err = parser.setSnippetToFileWithID(snippet, snippet.SnippetFromFileSPDXIdentifier.ElementRefID) + err = parser.setSnippetToFileWithID(snippet, snippet.SnippetFromFileSPDXIdentifier) if err != nil { return nil, err } @@ -81,7 +80,7 @@ func LoadFromGoRDFParser(gordfParserObj *gordfParser.Parser) (*spdx.Document2_2, // parsing packages and files sets the files to a files variable which is // associated with the parser and not the document. following method is // necessary to transfer the files which are not set in the packages to the - // UnpackagedFiles attribute of the document + // Files attribute of the document // WARNING: do not relocate following function call. It must be at the end of the function parser.setUnpackagedFiles() return parser.doc, nil diff --git a/rdfloader/parser2v2/parser_test.go b/rdfloader/parser2v2/parser_test.go index 0d9c30d1..11b2da6b 100644 --- a/rdfloader/parser2v2/parser_test.go +++ b/rdfloader/parser2v2/parser_test.go @@ -24,8 +24,8 @@ func TestNewParser2_2(t *testing.T) { if parser.doc.Packages == nil { t.Errorf("doc.Packages should've been initialised, got %v", parser.doc.Packages) } - if parser.doc.UnpackagedFiles == nil { - t.Errorf("doc.UnpackagedFiles should've been initialised, got %v", parser.doc.UnpackagedFiles) + if parser.doc.Files == nil { + t.Errorf("doc.Files should've been initialised, got %v", parser.doc.Files) } } diff --git a/reporter/reporter_test.go b/reporter/reporter_test.go index 9de377e7..eceeb7b0 100644 --- a/reporter/reporter_test.go +++ b/reporter/reporter_test.go @@ -13,19 +13,19 @@ import ( func Test2_1ReporterCanMakeReportFromPackage(t *testing.T) { pkg := &spdx.Package2_1{ FilesAnalyzed: true, - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File0"): &spdx.File2_1{FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, - spdx.ElementID("File1"): &spdx.File2_1{FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File2"): &spdx.File2_1{FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, - spdx.ElementID("File3"): &spdx.File2_1{FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, - spdx.ElementID("File4"): &spdx.File2_1{FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File5"): &spdx.File2_1{FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File6"): &spdx.File2_1{FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File7"): &spdx.File2_1{FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File8"): &spdx.File2_1{FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, - spdx.ElementID("File9"): &spdx.File2_1{FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File10"): &spdx.File2_1{FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File11"): &spdx.File2_1{FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, + Files: []*spdx.File2_1{ + {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, + 
{FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, }, } @@ -71,19 +71,19 @@ func Test2_1ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { func Test2_1CanGetCountsOfLicenses(t *testing.T) { pkg := &spdx.Package2_1{ FilesAnalyzed: true, - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File0"): &spdx.File2_1{FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, - spdx.ElementID("File1"): &spdx.File2_1{FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File2"): &spdx.File2_1{FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, - spdx.ElementID("File3"): &spdx.File2_1{FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, - spdx.ElementID("File4"): &spdx.File2_1{FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File5"): &spdx.File2_1{FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File6"): &spdx.File2_1{FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File7"): &spdx.File2_1{FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File8"): &spdx.File2_1{FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, - spdx.ElementID("File9"): &spdx.File2_1{FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File10"): &spdx.File2_1{FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File11"): &spdx.File2_1{FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, + Files: []*spdx.File2_1{ + {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, }, } @@ -137,19 +137,19 @@ func Test2_1NilPackageReturnsZeroCountsOfLicenses(t *testing.T) { func Test2_2ReporterCanMakeReportFromPackage(t *testing.T) { pkg := &spdx.Package2_2{ FilesAnalyzed: true, - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File0"): &spdx.File2_2{FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, - spdx.ElementID("File1"): &spdx.File2_2{FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File2"): &spdx.File2_2{FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, - spdx.ElementID("File3"): &spdx.File2_2{FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, - spdx.ElementID("File4"): &spdx.File2_2{FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File5"): &spdx.File2_2{FileSPDXIdentifier: "File5", LicenseConcluded: 
"NOASSERTION"}, - spdx.ElementID("File6"): &spdx.File2_2{FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File7"): &spdx.File2_2{FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File8"): &spdx.File2_2{FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, - spdx.ElementID("File9"): &spdx.File2_2{FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File10"): &spdx.File2_2{FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File11"): &spdx.File2_2{FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, + Files: []*spdx.File2_2{ + {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, }, } @@ -195,19 +195,19 @@ func Test2_2ReporterReturnsErrorIfPackageFilesNotAnalyzed(t *testing.T) { func Test2_2CanGetCountsOfLicenses(t *testing.T) { pkg := &spdx.Package2_2{ FilesAnalyzed: true, - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File0"): &spdx.File2_2{FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, - spdx.ElementID("File1"): &spdx.File2_2{FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File2"): &spdx.File2_2{FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, - spdx.ElementID("File3"): &spdx.File2_2{FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, - spdx.ElementID("File4"): &spdx.File2_2{FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File5"): &spdx.File2_2{FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, - spdx.ElementID("File6"): &spdx.File2_2{FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File7"): &spdx.File2_2{FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File8"): &spdx.File2_2{FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, - spdx.ElementID("File9"): &spdx.File2_2{FileSPDXIdentifier: "File9", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File10"): &spdx.File2_2{FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, - spdx.ElementID("File11"): &spdx.File2_2{FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, + Files: []*spdx.File2_2{ + {FileSPDXIdentifier: "File0", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File1", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File2", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File3", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File4", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File5", LicenseConcluded: "NOASSERTION"}, + {FileSPDXIdentifier: "File6", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File7", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File8", LicenseConcluded: "MIT"}, + {FileSPDXIdentifier: "File9", LicenseConcluded: 
"GPL-2.0-only"}, + {FileSPDXIdentifier: "File10", LicenseConcluded: "GPL-2.0-only"}, + {FileSPDXIdentifier: "File11", LicenseConcluded: "NOASSERTION"}, }, } diff --git a/spdx/annotation.go b/spdx/annotation.go index ede9c8ac..560b6f00 100644 --- a/spdx/annotation.go +++ b/spdx/annotation.go @@ -2,54 +2,91 @@ package spdx -// Annotation2_1 is an Annotation section of an SPDX Document for version 2.1 of the spec. -type Annotation2_1 struct { +import ( + "encoding/json" + "fmt" + "strings" +) - // 8.1: Annotator - // Cardinality: conditional (mandatory, one) if there is an Annotation +type Annotator struct { Annotator string // including AnnotatorType: one of "Person", "Organization" or "Tool" AnnotatorType string +} + +// UnmarshalJSON takes an annotator in the typical one-line format and parses it into an Annotator struct. +// This function is also used when unmarshalling YAML +func (a *Annotator) UnmarshalJSON(data []byte) error { + // annotator will simply be a string + annotatorStr := string(data) + annotatorStr = strings.Trim(annotatorStr, "\"") + + annotatorFields := strings.SplitN(annotatorStr, ": ", 2) + + if len(annotatorFields) != 2 { + return fmt.Errorf("failed to parse Annotator '%s'", annotatorStr) + } + + a.AnnotatorType = annotatorFields[0] + a.Annotator = annotatorFields[1] + + return nil +} + +// MarshalJSON converts the receiver into a slice of bytes representing an Annotator in string form. +// This function is also used when marshalling to YAML +func (a Annotator) MarshalJSON() ([]byte, error) { + if a.Annotator != "" { + return json.Marshal(fmt.Sprintf("%s: %s", a.AnnotatorType, a.Annotator)) + } + + return []byte{}, nil +} + +// Annotation2_1 is an Annotation section of an SPDX Document for version 2.1 of the spec. +type Annotation2_1 struct { + // 8.1: Annotator + // Cardinality: conditional (mandatory, one) if there is an Annotation + Annotator Annotator `json:"annotator"` // 8.2: Annotation Date: YYYY-MM-DDThh:mm:ssZ // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationDate string + AnnotationDate string `json:"annotationDate"` // 8.3: Annotation Type: "REVIEW" or "OTHER" // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationType string + AnnotationType string `json:"annotationType"` // 8.4: SPDX Identifier Reference // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationSPDXIdentifier DocElementID + // This field is not used in hierarchical data formats where the referenced element is clear, such as JSON or YAML. + AnnotationSPDXIdentifier DocElementID `json:"-"` // 8.5: Annotation Comment // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationComment string + AnnotationComment string `json:"comment"` } // Annotation2_2 is an Annotation section of an SPDX Document for version 2.2 of the spec. 
type Annotation2_2 struct { - // 8.1: Annotator // Cardinality: conditional (mandatory, one) if there is an Annotation - Annotator string - // including AnnotatorType: one of "Person", "Organization" or "Tool" - AnnotatorType string + Annotator Annotator `json:"annotator"` // 8.2: Annotation Date: YYYY-MM-DDThh:mm:ssZ // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationDate string + AnnotationDate string `json:"annotationDate"` // 8.3: Annotation Type: "REVIEW" or "OTHER" // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationType string + AnnotationType string `json:"annotationType"` // 8.4: SPDX Identifier Reference // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationSPDXIdentifier DocElementID + // This field is not used in hierarchical data formats where the referenced element is clear, such as JSON or YAML. + AnnotationSPDXIdentifier DocElementID `json:"-"` // 8.5: Annotation Comment // Cardinality: conditional (mandatory, one) if there is an Annotation - AnnotationComment string + AnnotationComment string `json:"comment"` } diff --git a/spdx/checksum.go b/spdx/checksum.go index 872aee29..3295969a 100644 --- a/spdx/checksum.go +++ b/spdx/checksum.go @@ -2,25 +2,25 @@ package spdx -// ChecksumAlgorithm2_2 represents the algorithm used to generate the file checksum in the Checksum2_2 struct. +// ChecksumAlgorithm represents the algorithm used to generate the file checksum in the Checksum struct. type ChecksumAlgorithm string // The checksum algorithms mentioned in the spdxv2.2.0 https://spdx.github.io/spdx-spec/4-file-information/#44-file-checksum const ( SHA224 ChecksumAlgorithm = "SHA224" - SHA1 = "SHA1" - SHA256 = "SHA256" - SHA384 = "SHA384" - SHA512 = "SHA512" - MD2 = "MD2" - MD4 = "MD4" - MD5 = "MD5" - MD6 = "MD6" + SHA1 ChecksumAlgorithm = "SHA1" + SHA256 ChecksumAlgorithm = "SHA256" + SHA384 ChecksumAlgorithm = "SHA384" + SHA512 ChecksumAlgorithm = "SHA512" + MD2 ChecksumAlgorithm = "MD2" + MD4 ChecksumAlgorithm = "MD4" + MD5 ChecksumAlgorithm = "MD5" + MD6 ChecksumAlgorithm = "MD6" ) -//Checksum2_2 struct Provide a unique identifier to match analysis information on each specific file in a package. -// The Algorithm field describes the ChecksumAlgorithm2_2 used and the Value represents the file checksum +// Checksum provides a unique identifier to match analysis information on each specific file in a package. +// The Algorithm field describes the ChecksumAlgorithm used and the Value represents the file checksum type Checksum struct { - Algorithm ChecksumAlgorithm - Value string + Algorithm ChecksumAlgorithm `json:"algorithm"` + Value string `json:"checksumValue"` } diff --git a/spdx/creation_info.go b/spdx/creation_info.go index 1bdaaab7..c0b6f636 100644 --- a/spdx/creation_info.go +++ b/spdx/creation_info.go @@ -2,146 +2,85 @@ package spdx -// CreationInfo2_1 is a Document Creation Information section of an -// SPDX Document for version 2.1 of the spec. -type CreationInfo2_1 struct { +import ( + "encoding/json" + "fmt" + "strings" +) + +// Creator is a wrapper around the Creator SPDX field. The SPDX field contains two values, which requires special +// handling in order to marshal/unmarshal it to/from Go data types. 
+type Creator struct { + Creator string + // CreatorType should be one of "Person", "Organization", or "Tool" + CreatorType string +} - // 2.1: SPDX Version; should be in the format "SPDX-2.1" - // Cardinality: mandatory, one - SPDXVersion string +// UnmarshalJSON takes an annotator in the typical one-line format and parses it into a Creator struct. +// This function is also used when unmarshalling YAML +func (c *Creator) UnmarshalJSON(data []byte) error { + str := string(data) + str = strings.Trim(str, "\"") + fields := strings.SplitN(str, ": ", 2) - // 2.2: Data License; should be "CC0-1.0" - // Cardinality: mandatory, one - DataLicense string + if len(fields) != 2 { + return fmt.Errorf("failed to parse Creator '%s'", str) + } - // 2.3: SPDX Identifier; should be "DOCUMENT" to represent - // mandatory identifier of SPDXRef-DOCUMENT - // Cardinality: mandatory, one - SPDXIdentifier ElementID + c.CreatorType = fields[0] + c.Creator = fields[1] - // 2.4: Document Name - // Cardinality: mandatory, one - DocumentName string + return nil +} - // 2.5: Document Namespace - // Cardinality: mandatory, one - DocumentNamespace string +// MarshalJSON converts the receiver into a slice of bytes representing a Creator in string form. +// This function is also used with marshalling to YAML +func (c Creator) MarshalJSON() ([]byte, error) { + if c.Creator != "" { + return json.Marshal(fmt.Sprintf("%s: %s", c.CreatorType, c.Creator)) + } - // 2.6: External Document References - // Cardinality: optional, one or many - ExternalDocumentReferences map[string]ExternalDocumentRef2_1 + return []byte{}, nil +} +// CreationInfo2_1 is a Document Creation Information section of an +// SPDX Document for version 2.1 of the spec. +type CreationInfo2_1 struct { // 2.7: License List Version // Cardinality: optional, one - LicenseListVersion string + LicenseListVersion string `json:"licenseListVersion"` // 2.8: Creators: may have multiple keys for Person, Organization // and/or Tool // Cardinality: mandatory, one or many - CreatorPersons []string - CreatorOrganizations []string - CreatorTools []string + Creators []Creator `json:"creators"` // 2.9: Created: data format YYYY-MM-DDThh:mm:ssZ // Cardinality: mandatory, one - Created string + Created string `json:"created"` // 2.10: Creator Comment // Cardinality: optional, one - CreatorComment string - - // 2.11: Document Comment - // Cardinality: optional, one - DocumentComment string -} - -// ExternalDocumentRef2_1 is a reference to an external SPDX document -// as defined in section 2.6 for version 2.1 of the spec. -type ExternalDocumentRef2_1 struct { - - // DocumentRefID is the ID string defined in the start of the - // reference. It should _not_ contain the "DocumentRef-" part - // of the mandatory ID string. - DocumentRefID string - - // URI is the URI defined for the external document - URI string - - // Alg is the type of hash algorithm used, e.g. "SHA1", "SHA256" - Alg string - - // Checksum is the actual hash data - Checksum string + CreatorComment string `json:"comment"` } // CreationInfo2_2 is a Document Creation Information section of an // SPDX Document for version 2.2 of the spec. 
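With the separate person/organization/tool fields collapsed into a single Creators slice, a JSON creationInfo object can be decoded directly, and each entry keeps its "Person"/"Organization"/"Tool" prefix in CreatorType. A rough sketch, with the tool and organization names invented for illustration:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    raw := []byte(`{
        "created": "2022-04-08T00:00:00Z",
        "creators": ["Tool: example-sbom-generator-1.0", "Organization: Example Corp"],
        "licenseListVersion": "3.16"
    }`)

    var ci spdx.CreationInfo2_1
    if err := json.Unmarshal(raw, &ci); err != nil {
        panic(err)
    }
    for _, c := range ci.Creators {
        fmt.Println(c.CreatorType, "->", c.Creator)
    }
    // Tool -> example-sbom-generator-1.0
    // Organization -> Example Corp
}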
type CreationInfo2_2 struct { - - // 2.1: SPDX Version; should be in the format "SPDX-2.2" - // Cardinality: mandatory, one - SPDXVersion string - - // 2.2: Data License; should be "CC0-1.0" - // Cardinality: mandatory, one - DataLicense string - - // 2.3: SPDX Identifier; should be "DOCUMENT" to represent - // mandatory identifier of SPDXRef-DOCUMENT - // Cardinality: mandatory, one - SPDXIdentifier ElementID - - // 2.4: Document Name - // Cardinality: mandatory, one - DocumentName string - - // 2.5: Document Namespace - // Cardinality: mandatory, one - DocumentNamespace string - - // 2.6: External Document References - // Cardinality: optional, one or many - ExternalDocumentReferences map[string]ExternalDocumentRef2_2 - // 2.7: License List Version // Cardinality: optional, one - LicenseListVersion string + LicenseListVersion string `json:"licenseListVersion"` // 2.8: Creators: may have multiple keys for Person, Organization // and/or Tool // Cardinality: mandatory, one or many - CreatorPersons []string - CreatorOrganizations []string - CreatorTools []string + Creators []Creator `json:"creators"` // 2.9: Created: data format YYYY-MM-DDThh:mm:ssZ // Cardinality: mandatory, one - Created string + Created string `json:"created"` // 2.10: Creator Comment // Cardinality: optional, one - CreatorComment string - - // 2.11: Document Comment - // Cardinality: optional, one - DocumentComment string -} - -// ExternalDocumentRef2_2 is a reference to an external SPDX document -// as defined in section 2.6 for version 2.2 of the spec. -type ExternalDocumentRef2_2 struct { - - // DocumentRefID is the ID string defined in the start of the - // reference. It should _not_ contain the "DocumentRef-" part - // of the mandatory ID string. - DocumentRefID string - - // URI is the URI defined for the external document - URI string - - // Alg is the type of hash algorithm used, e.g. "SHA1", "SHA256" - Alg string - - // Checksum is the actual hash data - Checksum string + CreatorComment string `json:"comment"` } diff --git a/spdx/document.go b/spdx/document.go index 6a7bc3d6..a3117cb7 100644 --- a/spdx/document.go +++ b/spdx/document.go @@ -3,15 +3,75 @@ // SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later package spdx +// ExternalDocumentRef2_1 is a reference to an external SPDX document +// as defined in section 2.6 for version 2.1 of the spec. +type ExternalDocumentRef2_1 struct { + // DocumentRefID is the ID string defined in the start of the + // reference. It should _not_ contain the "DocumentRef-" part + // of the mandatory ID string. + DocumentRefID string `json:"externalDocumentId"` + + // URI is the URI defined for the external document + URI string `json:"spdxDocument"` + + // Checksum is the actual hash data + Checksum Checksum `json:"checksum"` +} + +// ExternalDocumentRef2_2 is a reference to an external SPDX document +// as defined in section 2.6 for version 2.2 of the spec. +type ExternalDocumentRef2_2 struct { + // DocumentRefID is the ID string defined in the start of the + // reference. It should _not_ contain the "DocumentRef-" part + // of the mandatory ID string. + DocumentRefID string `json:"externalDocumentId"` + + // URI is the URI defined for the external document + URI string `json:"spdxDocument"` + + // Checksum is the actual hash data + Checksum Checksum `json:"checksum"` +} + // Document2_1 is an SPDX Document for version 2.1 of the spec. 
// See https://spdx.org/sites/cpstandard/files/pages/files/spdxversion2.1.pdf type Document2_1 struct { - CreationInfo *CreationInfo2_1 - Packages map[ElementID]*Package2_1 - UnpackagedFiles map[ElementID]*File2_1 - OtherLicenses []*OtherLicense2_1 - Relationships []*Relationship2_1 - Annotations []*Annotation2_1 + // 2.1: SPDX Version; should be in the format "SPDX-2.1" + // Cardinality: mandatory, one + SPDXVersion string `json:"spdxVersion"` + + // 2.2: Data License; should be "CC0-1.0" + // Cardinality: mandatory, one + DataLicense string `json:"dataLicense"` + + // 2.3: SPDX Identifier; should be "DOCUMENT" to represent + // mandatory identifier of SPDXRef-DOCUMENT + // Cardinality: mandatory, one + SPDXIdentifier ElementID `json:"SPDXID"` + + // 2.4: Document Name + // Cardinality: mandatory, one + DocumentName string `json:"name"` + + // 2.5: Document Namespace + // Cardinality: mandatory, one + DocumentNamespace string `json:"documentNamespace"` + + // 2.6: External Document References + // Cardinality: optional, one or many + ExternalDocumentReferences []ExternalDocumentRef2_1 `json:"externalDocumentRefs,omitempty"` + + // 2.11: Document Comment + // Cardinality: optional, one + DocumentComment string `json:"comment,omitempty"` + + CreationInfo *CreationInfo2_1 `json:"creationInfo"` + Packages []*Package2_1 `json:"packages"` + Files []*File2_1 `json:"files"` + OtherLicenses []*OtherLicense2_1 `json:"hasExtractedLicensingInfos"` + Relationships []*Relationship2_1 `json:"relationships"` + Annotations []*Annotation2_1 `json:"annotations"` + Snippets []Snippet2_1 `json:"snippets"` // DEPRECATED in version 2.0 of spec Reviews []*Review2_1 @@ -20,12 +80,42 @@ type Document2_1 struct { // Document2_2 is an SPDX Document for version 2.2 of the spec. 
// See https://spdx.github.io/spdx-spec/v2-draft/ (DRAFT) type Document2_2 struct { - CreationInfo *CreationInfo2_2 - Packages map[ElementID]*Package2_2 - UnpackagedFiles map[ElementID]*File2_2 - OtherLicenses []*OtherLicense2_2 - Relationships []*Relationship2_2 - Annotations []*Annotation2_2 + // 2.1: SPDX Version; should be in the format "SPDX-2.2" + // Cardinality: mandatory, one + SPDXVersion string `json:"spdxVersion"` + + // 2.2: Data License; should be "CC0-1.0" + // Cardinality: mandatory, one + DataLicense string `json:"dataLicense"` + + // 2.3: SPDX Identifier; should be "DOCUMENT" to represent + // mandatory identifier of SPDXRef-DOCUMENT + // Cardinality: mandatory, one + SPDXIdentifier ElementID `json:"SPDXID"` + + // 2.4: Document Name + // Cardinality: mandatory, one + DocumentName string `json:"name"` + + // 2.5: Document Namespace + // Cardinality: mandatory, one + DocumentNamespace string `json:"documentNamespace"` + + // 2.6: External Document References + // Cardinality: optional, one or many + ExternalDocumentReferences []ExternalDocumentRef2_2 `json:"externalDocumentRefs,omitempty"` + + // 2.11: Document Comment + // Cardinality: optional, one + DocumentComment string `json:"comment,omitempty"` + + CreationInfo *CreationInfo2_2 `json:"creationInfo"` + Packages []*Package2_2 `json:"packages"` + Files []*File2_2 `json:"files"` + OtherLicenses []*OtherLicense2_2 `json:"hasExtractedLicensingInfos"` + Relationships []*Relationship2_2 `json:"relationships"` + Annotations []*Annotation2_2 `json:"annotations"` + Snippets []Snippet2_2 `json:"snippets"` // DEPRECATED in version 2.0 of spec Reviews []*Review2_2 diff --git a/spdx/file.go b/spdx/file.go index a745dc3b..01dbb368 100644 --- a/spdx/file.go +++ b/spdx/file.go @@ -4,68 +4,67 @@ package spdx // File2_1 is a File section of an SPDX Document for version 2.1 of the spec. 
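Because the version, data license, identifier, name, and namespace now live on the document struct itself rather than on CreationInfo, a minimal document can be serialized with encoding/json and the top-level keys (spdxVersion, dataLicense, SPDXID, name, documentNamespace) come out flat. A sketch using a made-up document name and namespace; the exact output shape beyond those keys is not claimed here:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    doc := spdx.Document2_2{
        SPDXVersion:       "SPDX-2.2",
        DataLicense:       "CC0-1.0",
        SPDXIdentifier:    spdx.ElementID("DOCUMENT"),
        DocumentName:      "example-doc",
        DocumentNamespace: "https://example.com/spdxdocs/example-doc",
        CreationInfo: &spdx.CreationInfo2_2{
            Created: "2022-04-08T00:00:00Z",
        },
    }

    out, err := json.MarshalIndent(doc, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))
}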
type File2_1 struct { - // 4.1: File Name // Cardinality: mandatory, one - FileName string + FileName string `json:"fileName"` // 4.2: File SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - FileSPDXIdentifier ElementID + FileSPDXIdentifier ElementID `json:"SPDXID"` - // 4.3: File Type + // 4.3: File Types // Cardinality: optional, multiple - FileType []string + FileTypes []string `json:"fileTypes,omitempty"` // 4.4: File Checksum: may have keys for SHA1, SHA256 and/or MD5 // Cardinality: mandatory, one SHA1, others may be optionally provided - FileChecksumSHA1 string - FileChecksumSHA256 string - FileChecksumMD5 string + Checksums []Checksum `json:"checksums"` // 4.5: Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - LicenseConcluded string + LicenseConcluded string `json:"licenseConcluded"` // 4.6: License Information in File: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one or many - LicenseInfoInFile []string + LicenseInfoInFiles []string `json:"licenseInfoInFiles"` // 4.7: Comments on License // Cardinality: optional, one - LicenseComments string + LicenseComments string `json:"licenseComments,omitempty"` // 4.8: Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - FileCopyrightText string + FileCopyrightText string `json:"copyrightText"` // DEPRECATED in version 2.1 of spec // 4.9-4.11: Artifact of Project variables (defined below) // Cardinality: optional, one or many - ArtifactOfProjects []*ArtifactOfProject2_1 + ArtifactOfProjects []*ArtifactOfProject2_1 `json:"-"` // 4.12: File Comment // Cardinality: optional, one - FileComment string + FileComment string `json:"comment,omitempty"` // 4.13: File Notice // Cardinality: optional, one - FileNotice string + FileNotice string `json:"noticeText,omitempty"` // 4.14: File Contributor // Cardinality: optional, one or many - FileContributor []string + FileContributors []string `json:"fileContributors,omitempty"` // DEPRECATED in version 2.0 of spec // 4.15: File Dependencies // Cardinality: optional, one or many - FileDependencies []string + FileDependencies []string `json:"-"` // Snippets contained in this File // Note that Snippets could be defined in a different Document! However, - // the only ones that _THIS_ document can contain are this ones that are + // the only ones that _THIS_ document can contain are the ones that are // defined here -- so this should just be an ElementID. - Snippets map[ElementID]*Snippet2_1 + Snippets map[ElementID]*Snippet2_1 `json:"-"` + + Annotations []Annotation2_1 `json:"annotations"` } // ArtifactOfProject2_1 is a DEPRECATED collection of data regarding @@ -90,70 +89,71 @@ type ArtifactOfProject2_1 struct { // File2_2 is a File section of an SPDX Document for version 2.2 of the spec. 
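Callers that previously filled the per-algorithm FileChecksumSHA1/SHA256/MD5 strings now build a Checksums slice instead. A small hypothetical helper (checksumsFromStrings is not part of this patch) illustrates the shape, using a placeholder digest:

package main

import (
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

// checksumsFromStrings is a hypothetical convenience helper, not part of this
// patch: it converts the old per-algorithm string fields into the new slice
// form, skipping any algorithm whose value is empty.
func checksumsFromStrings(sha1, sha256, md5 string) []spdx.Checksum {
    var out []spdx.Checksum
    if sha1 != "" {
        out = append(out, spdx.Checksum{Algorithm: spdx.SHA1, Value: sha1})
    }
    if sha256 != "" {
        out = append(out, spdx.Checksum{Algorithm: spdx.SHA256, Value: sha256})
    }
    if md5 != "" {
        out = append(out, spdx.Checksum{Algorithm: spdx.MD5, Value: md5})
    }
    return out
}

func main() {
    f := spdx.File2_1{
        FileName:           "./hello.c",
        FileSPDXIdentifier: spdx.ElementID("File-hello"),
        // placeholder digest value, for illustration only
        Checksums:          checksumsFromStrings("d6a770ba38583ed4bb4525bd96e50461655d2758", "", ""),
        LicenseConcluded:   "NOASSERTION",
        LicenseInfoInFiles: []string{"NOASSERTION"},
        FileCopyrightText:  "NOASSERTION",
    }
    fmt.Println(len(f.Checksums), f.Checksums[0].Algorithm) // 1 SHA1
}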
type File2_2 struct { - // 4.1: File Name // Cardinality: mandatory, one - FileName string + FileName string `json:"fileName"` // 4.2: File SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - FileSPDXIdentifier ElementID + FileSPDXIdentifier ElementID `json:"SPDXID"` - // 4.3: File Type + // 4.3: File Types // Cardinality: optional, multiple - FileType []string + FileTypes []string `json:"fileTypes,omitempty"` // 4.4: File Checksum: may have keys for SHA1, SHA256 and/or MD5 // Cardinality: mandatory, one SHA1, others may be optionally provided - FileChecksums map[ChecksumAlgorithm]Checksum + Checksums []Checksum `json:"checksums"` // 4.5: Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - LicenseConcluded string + LicenseConcluded string `json:"licenseConcluded"` // 4.6: License Information in File: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one or many - LicenseInfoInFile []string + LicenseInfoInFiles []string `json:"licenseInfoInFiles"` // 4.7: Comments on License // Cardinality: optional, one - LicenseComments string + LicenseComments string `json:"licenseComments,omitempty"` // 4.8: Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - FileCopyrightText string + FileCopyrightText string `json:"copyrightText"` // DEPRECATED in version 2.1 of spec // 4.9-4.11: Artifact of Project variables (defined below) // Cardinality: optional, one or many - ArtifactOfProjects []*ArtifactOfProject2_2 + ArtifactOfProjects []*ArtifactOfProject2_2 `json:"-"` // 4.12: File Comment // Cardinality: optional, one - FileComment string + FileComment string `json:"comment,omitempty"` // 4.13: File Notice // Cardinality: optional, one - FileNotice string + FileNotice string `json:"noticeText,omitempty"` // 4.14: File Contributor // Cardinality: optional, one or many - FileContributor []string + FileContributors []string `json:"fileContributors,omitempty"` // 4.15: File Attribution Text // Cardinality: optional, one or many - FileAttributionTexts []string + FileAttributionTexts []string `json:"attributionTexts,omitempty"` // DEPRECATED in version 2.0 of spec // 4.16: File Dependencies // Cardinality: optional, one or many - FileDependencies []string + FileDependencies []string `json:"-"` // Snippets contained in this File // Note that Snippets could be defined in a different Document! However, // the only ones that _THIS_ document can contain are this ones that are // defined here -- so this should just be an ElementID. - Snippets map[ElementID]*Snippet2_2 + Snippets map[ElementID]*Snippet2_2 `json:"-"` + + Annotations []Annotation2_2 `json:"annotations,omitempty"` } // ArtifactOfProject2_2 is a DEPRECATED collection of data regarding diff --git a/spdx/identifier.go b/spdx/identifier.go index baf44c1c..56f8ffc8 100644 --- a/spdx/identifier.go +++ b/spdx/identifier.go @@ -2,6 +2,12 @@ package spdx +import ( + "encoding/json" + "fmt" + "strings" +) + // ElementID represents the identifier string portion of an SPDX element // identifier. DocElementID should be used for any attributes which can // contain identifiers defined in a different SPDX document. @@ -28,6 +34,62 @@ type DocElementID struct { SpecialID string } +// UnmarshalJSON takes a SPDX Identifier string parses it into a DocElementID struct. 
+// This function is also used when unmarshalling YAML +func (d *DocElementID) UnmarshalJSON(data []byte) error { + // SPDX identifier will simply be a string + idStr := string(data) + idStr = strings.Trim(idStr, "\"") + + // handle special cases + if idStr == "NONE" || idStr == "NOASSERTION" { + d.SpecialID = idStr + return nil + } + + var idFields []string + // handle DocumentRef- if present + if strings.HasPrefix(idStr, "DocumentRef-") { + // strip out the "DocumentRef-" so we can get the value + idFields = strings.SplitN(idStr, "DocumentRef-", 2) + idStr = idFields[1] + + // an SPDXRef can appear after a DocumentRef, separated by a colon + idFields = strings.SplitN(idStr, ":", 2) + d.DocumentRefID = idFields[0] + + if len(idFields) == 2 { + idStr = idFields[1] + } else { + return nil + } + } + + // handle SPDXRef- + idFields = strings.SplitN(idStr, "SPDXRef-", 2) + if len(idFields) != 2 { + return fmt.Errorf("failed to parse SPDX Identifier '%s'", idStr) + } + + d.ElementRefID = ElementID(idFields[1]) + + return nil +} + +// MarshalJSON converts the receiver into a slice of bytes representing a DocElementID in string form. +// This function is also used when marshalling to YAML +func (d DocElementID) MarshalJSON() ([]byte, error) { + if d.DocumentRefID != "" && d.ElementRefID != "" { + return json.Marshal(fmt.Sprintf("DocumentRef-%s:SPDXRef-%s", d.DocumentRefID, d.ElementRefID)) + } else if d.ElementRefID != "" { + return json.Marshal(fmt.Sprintf("SPDXRef-%s", d.ElementRefID)) + } else if d.SpecialID != "" { + return json.Marshal(d.SpecialID) + } + + return []byte{}, fmt.Errorf("failed to marshal empty DocElementID") +} + // TODO: add equivalents for LicenseRef- identifiers // MakeDocElementID takes strings (without prefixes) for the DocumentRef- diff --git a/spdx/other_license.go b/spdx/other_license.go index a509c472..6e43676b 100644 --- a/spdx/other_license.go +++ b/spdx/other_license.go @@ -5,57 +5,55 @@ package spdx // OtherLicense2_1 is an Other License Information section of an // SPDX Document for version 2.1 of the spec. type OtherLicense2_1 struct { - // 6.1: License Identifier: "LicenseRef-[idstring]" // Cardinality: conditional (mandatory, one) if license is not // on SPDX License List - LicenseIdentifier string + LicenseIdentifier string `json:"licenseId"` // 6.2: Extracted Text // Cardinality: conditional (mandatory, one) if there is a // License Identifier assigned - ExtractedText string + ExtractedText string `json:"extractedText"` // 6.3: License Name: single line of text or "NOASSERTION" // Cardinality: conditional (mandatory, one) if license is not // on SPDX License List - LicenseName string + LicenseName string `json:"name,omitempty"` // 6.4: License Cross Reference // Cardinality: conditional (optional, one or many) if license // is not on SPDX License List - LicenseCrossReferences []string + LicenseCrossReferences []string `json:"seeAlsos,omitempty"` // 6.5: License Comment // Cardinality: optional, one - LicenseComment string + LicenseComment string `json:"comment,omitempty"` } // OtherLicense2_2 is an Other License Information section of an // SPDX Document for version 2.2 of the spec. 
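DocElementID now carries its own JSON handling, so the "DocumentRef-...:SPDXRef-..." and bare "SPDXRef-..." spellings, plus NONE/NOASSERTION, parse into the appropriate fields and are reassembled on output. A short sketch with invented identifiers:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    // A cross-document reference splits into DocumentRefID and ElementRefID.
    var cross spdx.DocElementID
    if err := json.Unmarshal([]byte(`"DocumentRef-other-doc:SPDXRef-File1"`), &cross); err != nil {
        panic(err)
    }
    fmt.Println(cross.DocumentRefID, cross.ElementRefID) // other-doc File1

    // NONE / NOASSERTION land in SpecialID.
    var special spdx.DocElementID
    _ = json.Unmarshal([]byte(`"NOASSERTION"`), &special)
    fmt.Println(special.SpecialID) // NOASSERTION

    // Marshalling adds the prefixes back.
    out, _ := json.Marshal(spdx.DocElementID{ElementRefID: "Package-foo"})
    fmt.Println(string(out)) // "SPDXRef-Package-foo"
}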
type OtherLicense2_2 struct { - // 6.1: License Identifier: "LicenseRef-[idstring]" // Cardinality: conditional (mandatory, one) if license is not // on SPDX License List - LicenseIdentifier string + LicenseIdentifier string `json:"licenseId"` // 6.2: Extracted Text // Cardinality: conditional (mandatory, one) if there is a // License Identifier assigned - ExtractedText string + ExtractedText string `json:"extractedText"` // 6.3: License Name: single line of text or "NOASSERTION" // Cardinality: conditional (mandatory, one) if license is not // on SPDX License List - LicenseName string + LicenseName string `json:"name,omitempty"` // 6.4: License Cross Reference // Cardinality: conditional (optional, one or many) if license // is not on SPDX License List - LicenseCrossReferences []string + LicenseCrossReferences []string `json:"seeAlsos,omitempty"` // 6.5: License Comment // Cardinality: optional, one - LicenseComment string + LicenseComment string `json:"comment,omitempty"` } diff --git a/spdx/package.go b/spdx/package.go index 9aeb8a20..e6c45223 100644 --- a/spdx/package.go +++ b/spdx/package.go @@ -2,140 +2,225 @@ package spdx +import ( + "encoding/json" + "fmt" + "strings" +) + +type Supplier struct { + // can be "NOASSERTION" + Supplier string + // SupplierType can be one of "Person", "Organization", or empty if Supplier is "NOASSERTION" + SupplierType string +} + +// UnmarshalJSON takes a supplier in the typical one-line format and parses it into a Supplier struct. +// This function is also used when unmarshalling YAML +func (s *Supplier) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + supplierStr := string(data) + supplierStr = strings.Trim(supplierStr, "\"") + + if supplierStr == "NOASSERTION" { + s.Supplier = supplierStr + return nil + } + + supplierFields := strings.SplitN(supplierStr, ": ", 2) + + if len(supplierFields) != 2 { + return fmt.Errorf("failed to parse Supplier '%s'", supplierStr) + } + + s.SupplierType = supplierFields[0] + s.Supplier = supplierFields[1] + + return nil +} + +// MarshalJSON converts the receiver into a slice of bytes representing a Supplier in string form. +// This function is also used when marshalling to YAML +func (s Supplier) MarshalJSON() ([]byte, error) { + if s.Supplier == "NOASSERTION" { + return json.Marshal(s.Supplier) + } else if s.SupplierType != "" && s.Supplier != "" { + return json.Marshal(fmt.Sprintf("%s: %s", s.SupplierType, s.Supplier)) + } + + return []byte{}, fmt.Errorf("failed to marshal invalid Supplier: %+v", s) +} + +type Originator struct { + // can be "NOASSERTION" + Originator string + // OriginatorType can be one of "Person", "Organization", or empty if Originator is "NOASSERTION" + OriginatorType string +} + +// UnmarshalJSON takes an originator in the typical one-line format and parses it into an Originator struct. 
+// This function is also used when unmarshalling YAML +func (o *Originator) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + originatorStr := string(data) + originatorStr = strings.Trim(originatorStr, "\"") + + if originatorStr == "NOASSERTION" { + o.Originator = originatorStr + return nil + } + + originatorFields := strings.SplitN(originatorStr, ": ", 2) + + if len(originatorFields) != 2 { + return fmt.Errorf("failed to parse Originator '%s'", originatorStr) + } + + o.OriginatorType = originatorFields[0] + o.Originator = originatorFields[1] + + return nil +} + +// MarshalJSON converts the receiver into a slice of bytes representing an Originator in string form. +// This function is also used when marshalling to YAML +func (o Originator) MarshalJSON() ([]byte, error) { + if o.Originator == "NOASSERTION" { + return json.Marshal(o.Originator) + } else if o.Originator != "" { + return json.Marshal(fmt.Sprintf("%s: %s", o.OriginatorType, o.Originator)) + } + + return []byte{}, nil +} + +type PackageVerificationCode struct { + // Cardinality: mandatory, one if filesAnalyzed is true / omitted; + // zero (must be omitted) if filesAnalyzed is false + Value string `json:"packageVerificationCodeValue"` + // Spec also allows specifying files to exclude from the + // verification code algorithm; intended to enable exclusion of + // the SPDX document file itself. + ExcludedFiles []string `json:"packageVerificationCodeExcludedFiles"` +} + // Package2_1 is a Package section of an SPDX Document for version 2.1 of the spec. type Package2_1 struct { - // 3.1: Package Name // Cardinality: mandatory, one - PackageName string + PackageName string `json:"name"` // 3.2: Package SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - PackageSPDXIdentifier ElementID + PackageSPDXIdentifier ElementID `json:"SPDXID"` // 3.3: Package Version // Cardinality: optional, one - PackageVersion string + PackageVersion string `json:"versionInfo,omitempty"` // 3.4: Package File Name // Cardinality: optional, one - PackageFileName string + PackageFileName string `json:"packageFileName,omitempty"` // 3.5: Package Supplier: may have single result for either Person or Organization, // or NOASSERTION // Cardinality: optional, one - PackageSupplierPerson string - PackageSupplierOrganization string - PackageSupplierNOASSERTION bool + PackageSupplier *Supplier `json:"supplier,omitempty"` // 3.6: Package Originator: may have single result for either Person or Organization, // or NOASSERTION // Cardinality: optional, one - PackageOriginatorPerson string - PackageOriginatorOrganization string - PackageOriginatorNOASSERTION bool + PackageOriginator *Originator `json:"originator,omitempty"` // 3.7: Package Download Location // Cardinality: mandatory, one - PackageDownloadLocation string + PackageDownloadLocation string `json:"downloadLocation"` // 3.8: FilesAnalyzed // Cardinality: optional, one; default value is "true" if omitted - FilesAnalyzed bool + FilesAnalyzed bool `json:"filesAnalyzed,omitempty"` // NOT PART OF SPEC: did FilesAnalyzed tag appear? 
- IsFilesAnalyzedTagPresent bool + IsFilesAnalyzedTagPresent bool `json:"-"` // 3.9: Package Verification Code - // Cardinality: mandatory, one if filesAnalyzed is true / omitted; - // zero (must be omitted) if filesAnalyzed is false - PackageVerificationCode string - // Spec also allows specifying a single file to exclude from the - // verification code algorithm; intended to enable exclusion of - // the SPDX document file itself. - PackageVerificationCodeExcludedFile string + PackageVerificationCode PackageVerificationCode `json:"packageVerificationCode"` // 3.10: Package Checksum: may have keys for SHA1, SHA256 and/or MD5 // Cardinality: optional, one or many - PackageChecksumSHA1 string - PackageChecksumSHA256 string - PackageChecksumMD5 string + PackageChecksums []Checksum `json:"checksums,omitempty"` // 3.11: Package Home Page // Cardinality: optional, one - PackageHomePage string + PackageHomePage string `json:"homepage,omitempty"` // 3.12: Source Information // Cardinality: optional, one - PackageSourceInfo string + PackageSourceInfo string `json:"sourceInfo,omitempty"` // 3.13: Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageLicenseConcluded string + PackageLicenseConcluded string `json:"licenseConcluded"` // 3.14: All Licenses Info from Files: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one or many if filesAnalyzed is true / omitted; // zero (must be omitted) if filesAnalyzed is false - PackageLicenseInfoFromFiles []string + PackageLicenseInfoFromFiles []string `json:"licenseInfoFromFiles"` // 3.15: Declared License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageLicenseDeclared string + PackageLicenseDeclared string `json:"licenseDeclared"` // 3.16: Comments on License // Cardinality: optional, one - PackageLicenseComments string + PackageLicenseComments string `json:"licenseComments,omitempty"` // 3.17: Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageCopyrightText string + PackageCopyrightText string `json:"copyrightText"` // 3.18: Package Summary Description // Cardinality: optional, one - PackageSummary string + PackageSummary string `json:"summary,omitempty"` // 3.19: Package Detailed Description // Cardinality: optional, one - PackageDescription string + PackageDescription string `json:"description,omitempty"` // 3.20: Package Comment // Cardinality: optional, one - PackageComment string + PackageComment string `json:"comment,omitempty"` // 3.21: Package External Reference // Cardinality: optional, one or many - PackageExternalReferences []*PackageExternalReference2_1 - - // 3.22: Package External Reference Comment - // Cardinality: conditional (optional, one) for each External Reference - // contained within PackageExternalReference2_1 struct, if present + PackageExternalReferences []*PackageExternalReference2_1 `json:"externalRefs,omitempty"` // Files contained in this Package - Files map[ElementID]*File2_1 + Files []*File2_1 + + Annotations []Annotation2_1 `json:"annotations,omitempty"` } // PackageExternalReference2_1 is an External Reference to additional info // about a Package, as defined in section 3.21 in version 2.1 of the spec. 
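Supplier and Originator fold the old Person/Organization/NOASSERTION field triples into one struct each: "NOASSERTION" passes through unchanged with an empty type, while "Type: value" strings are split and reassembled. A sketch with a made-up organization name:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    // NOASSERTION is kept as-is, with an empty type.
    var s spdx.Supplier
    _ = json.Unmarshal([]byte(`"NOASSERTION"`), &s)
    fmt.Printf("%q %q\n", s.SupplierType, s.Supplier) // "" "NOASSERTION"

    // Otherwise the "Type: value" form is split into the two fields,
    // and marshalling reassembles it.
    var o spdx.Originator
    _ = json.Unmarshal([]byte(`"Organization: Example Corp"`), &o)
    out, _ := json.Marshal(o)
    fmt.Println(string(out)) // "Organization: Example Corp"
}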
type PackageExternalReference2_1 struct { - // category is "SECURITY", "PACKAGE-MANAGER" or "OTHER" - Category string + Category string `json:"referenceCategory"` // type is an [idstring] as defined in Appendix VI; // called RefType here due to "type" being a Golang keyword - RefType string + RefType string `json:"referenceType"` // locator is a unique string to access the package-specific // info, metadata or content within the target location - Locator string + Locator string `json:"referenceLocator"` // 3.22: Package External Reference Comment // Cardinality: conditional (optional, one) for each External Reference - ExternalRefComment string + ExternalRefComment string `json:"comment"` } // Package2_2 is a Package section of an SPDX Document for version 2.2 of the spec. type Package2_2 struct { - // NOT PART OF SPEC // flag: does this "package" contain files that were in fact "unpackaged", // e.g. included directly in the Document without being in a Package? @@ -143,101 +228,91 @@ type Package2_2 struct { // 3.1: Package Name // Cardinality: mandatory, one - PackageName string + PackageName string `json:"name"` // 3.2: Package SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - PackageSPDXIdentifier ElementID + PackageSPDXIdentifier ElementID `json:"SPDXID"` // 3.3: Package Version // Cardinality: optional, one - PackageVersion string + PackageVersion string `json:"versionInfo,omitempty"` // 3.4: Package File Name // Cardinality: optional, one - PackageFileName string + PackageFileName string `json:"packageFileName,omitempty"` // 3.5: Package Supplier: may have single result for either Person or Organization, // or NOASSERTION // Cardinality: optional, one - PackageSupplierPerson string - PackageSupplierOrganization string - PackageSupplierNOASSERTION bool + PackageSupplier *Supplier `json:"supplier,omitempty"` // 3.6: Package Originator: may have single result for either Person or Organization, // or NOASSERTION // Cardinality: optional, one - PackageOriginatorPerson string - PackageOriginatorOrganization string - PackageOriginatorNOASSERTION bool + PackageOriginator *Originator `json:"originator,omitempty"` // 3.7: Package Download Location // Cardinality: mandatory, one - PackageDownloadLocation string + PackageDownloadLocation string `json:"downloadLocation"` // 3.8: FilesAnalyzed // Cardinality: optional, one; default value is "true" if omitted - FilesAnalyzed bool + FilesAnalyzed bool `json:"filesAnalyzed,omitempty"` // NOT PART OF SPEC: did FilesAnalyzed tag appear? IsFilesAnalyzedTagPresent bool // 3.9: Package Verification Code - // Cardinality: mandatory, one if filesAnalyzed is true / omitted; - // zero (must be omitted) if filesAnalyzed is false - PackageVerificationCode string - // Spec also allows specifying a single file to exclude from the - // verification code algorithm; intended to enable exclusion of - // the SPDX document file itself. 
- PackageVerificationCodeExcludedFile string + PackageVerificationCode PackageVerificationCode `json:"packageVerificationCode"` // 3.10: Package Checksum: may have keys for SHA1, SHA256 and/or MD5 // Cardinality: optional, one or many - PackageChecksums map[ChecksumAlgorithm]Checksum + PackageChecksums []Checksum `json:"checksums"` // 3.11: Package Home Page // Cardinality: optional, one - PackageHomePage string + PackageHomePage string `json:"homepage,omitempty"` // 3.12: Source Information // Cardinality: optional, one - PackageSourceInfo string + PackageSourceInfo string `json:"sourceInfo,omitempty"` // 3.13: Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageLicenseConcluded string + PackageLicenseConcluded string `json:"licenseConcluded"` // 3.14: All Licenses Info from Files: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one or many if filesAnalyzed is true / omitted; // zero (must be omitted) if filesAnalyzed is false - PackageLicenseInfoFromFiles []string + PackageLicenseInfoFromFiles []string `json:"licenseInfoFromFiles"` // 3.15: Declared License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageLicenseDeclared string + PackageLicenseDeclared string `json:"licenseDeclared"` // 3.16: Comments on License // Cardinality: optional, one - PackageLicenseComments string + PackageLicenseComments string `json:"licenseComments,omitempty"` // 3.17: Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - PackageCopyrightText string + PackageCopyrightText string `json:"copyrightText"` // 3.18: Package Summary Description // Cardinality: optional, one - PackageSummary string + PackageSummary string `json:"summary,omitempty"` // 3.19: Package Detailed Description // Cardinality: optional, one - PackageDescription string + PackageDescription string `json:"description,omitempty"` // 3.20: Package Comment // Cardinality: optional, one - PackageComment string + PackageComment string `json:"comment,omitempty"` // 3.21: Package External Reference // Cardinality: optional, one or many - PackageExternalReferences []*PackageExternalReference2_2 + PackageExternalReferences []*PackageExternalReference2_2 `json:"externalRefs,omitempty"` // 3.22: Package External Reference Comment // Cardinality: conditional (optional, one) for each External Reference @@ -245,28 +320,29 @@ type Package2_2 struct { // 3.23: Package Attribution Text // Cardinality: optional, one or many - PackageAttributionTexts []string + PackageAttributionTexts []string `json:"attributionTexts,omitempty"` // Files contained in this Package - Files map[ElementID]*File2_2 + Files []*File2_2 + + Annotations []Annotation2_2 `json:"annotations"` } // PackageExternalReference2_2 is an External Reference to additional info // about a Package, as defined in section 3.21 in version 2.2 of the spec. 
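The verification code is likewise a struct now, so a simplified package JSON fragment (not a complete, spec-exact package) with packageVerificationCode and checksums decodes into typed fields rather than loose strings. A rough sketch with placeholder identifiers and digests:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    raw := []byte(`{
        "name": "example-pkg",
        "SPDXID": "Package-example",
        "downloadLocation": "NOASSERTION",
        "filesAnalyzed": true,
        "packageVerificationCode": {
            "packageVerificationCodeValue": "d6a770ba38583ed4bb4525bd96e50461655d2758",
            "packageVerificationCodeExcludedFiles": ["./package.spdx"]
        },
        "checksums": [
            {"algorithm": "SHA1", "checksumValue": "85ed0817af83a24ad8da68c2b5094de69833983c"}
        ],
        "licenseConcluded": "NOASSERTION",
        "licenseDeclared": "NOASSERTION",
        "copyrightText": "NOASSERTION"
    }`)

    var p spdx.Package2_2
    if err := json.Unmarshal(raw, &p); err != nil {
        panic(err)
    }
    fmt.Println(p.PackageVerificationCode.Value)
    fmt.Println(p.PackageVerificationCode.ExcludedFiles) // [./package.spdx]
    fmt.Println(p.PackageChecksums[0].Algorithm)         // SHA1
}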
type PackageExternalReference2_2 struct { - - // category is "SECURITY", "PACKAGE-MANAGER", "PERSISTENT-ID" or "OTHER" - Category string + // category is "SECURITY", "PACKAGE-MANAGER" or "OTHER" + Category string `json:"referenceCategory"` // type is an [idstring] as defined in Appendix VI; // called RefType here due to "type" being a Golang keyword - RefType string + RefType string `json:"referenceType"` // locator is a unique string to access the package-specific // info, metadata or content within the target location - Locator string + Locator string `json:"referenceLocator"` // 3.22: Package External Reference Comment // Cardinality: conditional (optional, one) for each External Reference - ExternalRefComment string + ExternalRefComment string `json:"comment"` } diff --git a/spdx/relationship.go b/spdx/relationship.go index 9e06838c..91277277 100644 --- a/spdx/relationship.go +++ b/spdx/relationship.go @@ -11,13 +11,13 @@ type Relationship2_1 struct { // one mandatory for SPDX Document with multiple packages // RefA and RefB are first and second item // Relationship is type from 7.1.1 - RefA DocElementID - RefB DocElementID - Relationship string + RefA DocElementID `json:"spdxElementId"` + RefB DocElementID `json:"relatedSpdxElement"` + Relationship string `json:"relationshipType"` // 7.2: Relationship Comment // Cardinality: optional, one - RelationshipComment string + RelationshipComment string `json:"comment,omitempty"` } // Relationship2_2 is a Relationship section of an SPDX Document for @@ -29,11 +29,11 @@ type Relationship2_2 struct { // one mandatory for SPDX Document with multiple packages // RefA and RefB are first and second item // Relationship is type from 7.1.1 - RefA DocElementID - RefB DocElementID - Relationship string + RefA DocElementID `json:"spdxElementId"` + RefB DocElementID `json:"relatedSpdxElement"` + Relationship string `json:"relationshipType"` // 7.2: Relationship Comment // Cardinality: optional, one - RelationshipComment string + RelationshipComment string `json:"comment,omitempty"` } diff --git a/spdx/snippet.go b/spdx/snippet.go index 5fe37ca3..6bffb8c8 100644 --- a/spdx/snippet.go +++ b/spdx/snippet.go @@ -2,50 +2,60 @@ package spdx +type SnippetRangePointer struct { + // 5.3: Snippet Byte Range: [start byte]:[end byte] + // Cardinality: mandatory, one + Offset int `json:"offset,omitempty"` + + // 5.4: Snippet Line Range: [start line]:[end line] + // Cardinality: optional, one + LineNumber int `json:"lineNumber,omitempty"` + + FileSPDXIdentifier ElementID `json:"reference"` +} + +type SnippetRange struct { + StartPointer SnippetRangePointer `json:"startPointer"` + EndPointer SnippetRangePointer `json:"endPointer"` +} + // Snippet2_1 is a Snippet section of an SPDX Document for version 2.1 of the spec. 
type Snippet2_1 struct { // 5.1: Snippet SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - SnippetSPDXIdentifier ElementID + SnippetSPDXIdentifier ElementID `json:"SPDXID"` // 5.2: Snippet from File SPDX Identifier // Cardinality: mandatory, one - SnippetFromFileSPDXIdentifier DocElementID + SnippetFromFileSPDXIdentifier ElementID `json:"snippetFromFile"` - // 5.3: Snippet Byte Range: [start byte]:[end byte] - // Cardinality: mandatory, one - SnippetByteRangeStart int - SnippetByteRangeEnd int - - // 5.4: Snippet Line Range: [start line]:[end line] - // Cardinality: optional, one - SnippetLineRangeStart int - SnippetLineRangeEnd int + // Ranges denotes the start/end byte offsets or line numbers that the snippet is relevant to + Ranges []SnippetRange `json:"ranges"` // 5.5: Snippet Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - SnippetLicenseConcluded string + SnippetLicenseConcluded string `json:"licenseConcluded"` // 5.6: License Information in Snippet: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: optional, one or many - LicenseInfoInSnippet []string + LicenseInfoInSnippet []string `json:"licenseInfoInSnippets,omitempty"` // 5.7: Snippet Comments on License // Cardinality: optional, one - SnippetLicenseComments string + SnippetLicenseComments string `json:"licenseComments,omitempty"` // 5.8: Snippet Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - SnippetCopyrightText string + SnippetCopyrightText string `json:"copyrightText"` // 5.9: Snippet Comment // Cardinality: optional, one - SnippetComment string + SnippetComment string `json:"comment,omitempty"` // 5.10: Snippet Name // Cardinality: optional, one - SnippetName string + SnippetName string `json:"name,omitempty"` } // Snippet2_2 is a Snippet section of an SPDX Document for version 2.2 of the spec. 
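Byte and line ranges are now expressed with the shared SnippetRange/SnippetRangePointer pair instead of four separate start/end integers. A sketch of a snippet covering bytes 310 to 420 and lines 5 to 23 of an invented file:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/spdx/tools-golang/spdx"
)

func main() {
    sn := spdx.Snippet2_1{
        SnippetSPDXIdentifier:         spdx.ElementID("Snippet-1"),
        SnippetFromFileSPDXIdentifier: spdx.ElementID("File-hello"),
        Ranges: []spdx.SnippetRange{
            {
                // byte offsets 310..420 within the referenced file
                StartPointer: spdx.SnippetRangePointer{Offset: 310, FileSPDXIdentifier: "File-hello"},
                EndPointer:   spdx.SnippetRangePointer{Offset: 420, FileSPDXIdentifier: "File-hello"},
            },
            {
                // the same pointer type carries line numbers for a line range
                StartPointer: spdx.SnippetRangePointer{LineNumber: 5, FileSPDXIdentifier: "File-hello"},
                EndPointer:   spdx.SnippetRangePointer{LineNumber: 23, FileSPDXIdentifier: "File-hello"},
            },
        },
        SnippetLicenseConcluded: "GPL-2.0-only",
        SnippetCopyrightText:    "NOASSERTION",
    }

    out, err := json.MarshalIndent(sn, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))
}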
@@ -53,47 +63,40 @@ type Snippet2_2 struct { // 5.1: Snippet SPDX Identifier: "SPDXRef-[idstring]" // Cardinality: mandatory, one - SnippetSPDXIdentifier ElementID + SnippetSPDXIdentifier ElementID `json:"SPDXID"` // 5.2: Snippet from File SPDX Identifier // Cardinality: mandatory, one - SnippetFromFileSPDXIdentifier DocElementID + SnippetFromFileSPDXIdentifier ElementID `json:"snippetFromFile"` - // 5.3: Snippet Byte Range: [start byte]:[end byte] - // Cardinality: mandatory, one - SnippetByteRangeStart int - SnippetByteRangeEnd int - - // 5.4: Snippet Line Range: [start line]:[end line] - // Cardinality: optional, one - SnippetLineRangeStart int - SnippetLineRangeEnd int + // Ranges denotes the start/end byte offsets or line numbers that the snippet is relevant to + Ranges []SnippetRange `json:"ranges"` // 5.5: Snippet Concluded License: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - SnippetLicenseConcluded string + SnippetLicenseConcluded string `json:"licenseConcluded"` // 5.6: License Information in Snippet: SPDX License Expression, "NONE" or "NOASSERTION" // Cardinality: optional, one or many - LicenseInfoInSnippet []string + LicenseInfoInSnippet []string `json:"licenseInfoInSnippets,omitempty"` // 5.7: Snippet Comments on License // Cardinality: optional, one - SnippetLicenseComments string + SnippetLicenseComments string `json:"licenseComments,omitempty"` // 5.8: Snippet Copyright Text: copyright notice(s) text, "NONE" or "NOASSERTION" // Cardinality: mandatory, one - SnippetCopyrightText string + SnippetCopyrightText string `json:"copyrightText"` // 5.9: Snippet Comment // Cardinality: optional, one - SnippetComment string + SnippetComment string `json:"comment,omitempty"` // 5.10: Snippet Name // Cardinality: optional, one - SnippetName string + SnippetName string `json:"name,omitempty"` // 5.11: Snippet Attribution Text // Cardinality: optional, one or many - SnippetAttributionTexts []string + SnippetAttributionTexts []string `json:"-"` } diff --git a/spdxlib/described_elements.go b/spdxlib/described_elements.go index e8373dac..21d8e7ed 100644 --- a/spdxlib/described_elements.go +++ b/spdxlib/described_elements.go @@ -23,8 +23,8 @@ func GetDescribedPackageIDs2_1(doc *spdx.Document2_1) ([]spdx.ElementID, error) } if len(doc.Packages) == 1 { // get first (only) one and return its ID - for i := range doc.Packages { - return []spdx.ElementID{i}, nil + for _, pkg := range doc.Packages { + return []spdx.ElementID{pkg.PackageSPDXIdentifier}, nil } } @@ -74,8 +74,8 @@ func GetDescribedPackageIDs2_2(doc *spdx.Document2_2) ([]spdx.ElementID, error) } if len(doc.Packages) == 1 { // get first (only) one and return its ID - for i := range doc.Packages { - return []spdx.ElementID{i}, nil + for _, pkg := range doc.Packages { + return []spdx.ElementID{pkg.PackageSPDXIdentifier}, nil } } diff --git a/spdxlib/described_elements_test.go b/spdxlib/described_elements_test.go index 32fa7269..4c2a1a13 100644 --- a/spdxlib/described_elements_test.go +++ b/spdxlib/described_elements_test.go @@ -13,17 +13,16 @@ import ( func Test2_1CanGetIDsOfDescribedPackages(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): &spdx.Package2_1{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): 
&spdx.Package2_1{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): &spdx.Package2_1{PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): &spdx.Package2_1{PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): &spdx.Package2_1{PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_1{ &spdx.Relationship2_1{ @@ -75,13 +74,12 @@ func Test2_1GetDescribedPackagesReturnsSinglePackageIfOnlyOne(t *testing.T) { // set up document and one package, but no relationships // b/c only one package doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): &spdx.Package2_1{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, }, } @@ -102,17 +100,16 @@ func Test2_1GetDescribedPackagesReturnsSinglePackageIfOnlyOne(t *testing.T) { func Test2_1FailsToGetDescribedPackagesIfMoreThanOneWithoutDescribesRelationship(t *testing.T) { // set up document and multiple packages, but no DESCRIBES relationships doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): &spdx.Package2_1{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): &spdx.Package2_1{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): &spdx.Package2_1{PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): &spdx.Package2_1{PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): &spdx.Package2_1{PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_1{ // different relationship @@ -133,14 +130,13 @@ func Test2_1FailsToGetDescribedPackagesIfMoreThanOneWithoutDescribesRelationship func Test2_1FailsToGetDescribedPackagesIfMoreThanOneWithNilRelationships(t *testing.T) { // set up document and multiple packages, but no relationships slice doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): 
&spdx.Package2_1{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): &spdx.Package2_1{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, }, } @@ -153,12 +149,11 @@ func Test2_1FailsToGetDescribedPackagesIfMoreThanOneWithNilRelationships(t *test func Test2_1FailsToGetDescribedPackagesIfZeroPackagesInMap(t *testing.T) { // set up document but no packages doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{}, } _, err := GetDescribedPackageIDs2_1(doc) @@ -170,11 +165,10 @@ func Test2_1FailsToGetDescribedPackagesIfZeroPackagesInMap(t *testing.T) { func Test2_1FailsToGetDescribedPackagesIfNilMap(t *testing.T) { // set up document but no packages doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, } _, err := GetDescribedPackageIDs2_1(doc) @@ -188,17 +182,16 @@ func Test2_1FailsToGetDescribedPackagesIfNilMap(t *testing.T) { func Test2_2CanGetIDsOfDescribedPackages(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): &spdx.Package2_2{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): &spdx.Package2_2{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): &spdx.Package2_2{PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): &spdx.Package2_2{PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): &spdx.Package2_2{PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_2{ &spdx.Relationship2_2{ @@ -250,13 +243,12 @@ func Test2_2GetDescribedPackagesReturnsSinglePackageIfOnlyOne(t *testing.T) { // set up document and one package, but no relationships // b/c only one package doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): &spdx.Package2_2{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + SPDXVersion: "SPDX-2.2", + 
DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, }, } @@ -277,17 +269,16 @@ func Test2_2GetDescribedPackagesReturnsSinglePackageIfOnlyOne(t *testing.T) { func Test2_2FailsToGetDescribedPackagesIfMoreThanOneWithoutDescribesRelationship(t *testing.T) { // set up document and multiple packages, but no DESCRIBES relationships doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): &spdx.Package2_2{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): &spdx.Package2_2{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): &spdx.Package2_2{PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): &spdx.Package2_2{PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): &spdx.Package2_2{PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_2{ // different relationship @@ -308,14 +299,13 @@ func Test2_2FailsToGetDescribedPackagesIfMoreThanOneWithoutDescribesRelationship func Test2_2FailsToGetDescribedPackagesIfMoreThanOneWithNilRelationships(t *testing.T) { // set up document and multiple packages, but no relationships slice doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): &spdx.Package2_2{PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): &spdx.Package2_2{PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, }, } @@ -328,12 +318,11 @@ func Test2_2FailsToGetDescribedPackagesIfMoreThanOneWithNilRelationships(t *test func Test2_2FailsToGetDescribedPackagesIfZeroPackagesInMap(t *testing.T) { // set up document but no packages doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{}, } _, err := GetDescribedPackageIDs2_2(doc) @@ -345,11 +334,10 @@ func Test2_2FailsToGetDescribedPackagesIfZeroPackagesInMap(t *testing.T) { func Test2_2FailsToGetDescribedPackagesIfNilMap(t *testing.T) { // set up document but no packages doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - 
DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, } _, err := GetDescribedPackageIDs2_2(doc) diff --git a/spdxlib/documents.go b/spdxlib/documents.go index bd689e71..1f7122a7 100644 --- a/spdxlib/documents.go +++ b/spdxlib/documents.go @@ -16,7 +16,7 @@ func ValidateDocument2_1(doc *spdx.Document2_1) error { validElementIDs[docPackage.PackageSPDXIdentifier] = true } - for _, unpackagedFile := range doc.UnpackagedFiles { + for _, unpackagedFile := range doc.Files { validElementIDs[unpackagedFile.FileSPDXIdentifier] = true } @@ -46,7 +46,7 @@ func ValidateDocument2_2(doc *spdx.Document2_2) error { validElementIDs[docPackage.PackageSPDXIdentifier] = true } - for _, unpackagedFile := range doc.UnpackagedFiles { + for _, unpackagedFile := range doc.Files { validElementIDs[unpackagedFile.FileSPDXIdentifier] = true } diff --git a/spdxlib/documents_test.go b/spdxlib/documents_test.go index 60a39b9c..aa1f6c35 100644 --- a/spdxlib/documents_test.go +++ b/spdxlib/documents_test.go @@ -13,17 +13,16 @@ import ( func Test2_1ValidDocumentPassesValidation(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_1{ { @@ -60,15 +59,14 @@ func Test2_1ValidDocumentPassesValidation(t *testing.T) { func Test2_1InvalidDocumentFailsValidation(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, }, Relationships: []*spdx.Relationship2_1{ { @@ -101,17 +99,16 @@ func Test2_1InvalidDocumentFailsValidation(t *testing.T) { func Test2_2ValidDocumentPassesValidation(t *testing.T) { // set up document and 
some packages and relationships doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_2{ { @@ -148,15 +145,14 @@ func Test2_2ValidDocumentPassesValidation(t *testing.T) { func Test2_2InvalidDocumentFailsValidation(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, }, Relationships: []*spdx.Relationship2_2{ { diff --git a/spdxlib/relationships_test.go b/spdxlib/relationships_test.go index 3c0ef240..e710d6e5 100644 --- a/spdxlib/relationships_test.go +++ b/spdxlib/relationships_test.go @@ -13,17 +13,16 @@ import ( func Test2_1FilterForDependencies(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_1{ - CreationInfo: &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_1{}, + Packages: []*spdx.Package2_1{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_1{ { @@ -82,17 +81,16 @@ 
func Test2_1FilterForDependencies(t *testing.T) { func Test2_2FindsDependsOnRelationships(t *testing.T) { // set up document and some packages and relationships doc := &spdx.Document2_2{ - CreationInfo: &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - }, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, - spdx.ElementID("p2"): {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, - spdx.ElementID("p3"): {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, - spdx.ElementID("p4"): {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, - spdx.ElementID("p5"): {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + CreationInfo: &spdx.CreationInfo2_2{}, + Packages: []*spdx.Package2_2{ + {PackageName: "pkg1", PackageSPDXIdentifier: "p1"}, + {PackageName: "pkg2", PackageSPDXIdentifier: "p2"}, + {PackageName: "pkg3", PackageSPDXIdentifier: "p3"}, + {PackageName: "pkg4", PackageSPDXIdentifier: "p4"}, + {PackageName: "pkg5", PackageSPDXIdentifier: "p5"}, }, Relationships: []*spdx.Relationship2_2{ { diff --git a/tvloader/parser2v1/parse_annotation.go b/tvloader/parser2v1/parse_annotation.go index 65680d9a..ca2e8504 100644 --- a/tvloader/parser2v1/parse_annotation.go +++ b/tvloader/parser2v1/parse_annotation.go @@ -18,8 +18,8 @@ func (parser *tvParser2_1) parsePairForAnnotation2_1(tag string, value string) e return err } if subkey == "Person" || subkey == "Organization" || subkey == "Tool" { - parser.ann.AnnotatorType = subkey - parser.ann.Annotator = subvalue + parser.ann.Annotator.AnnotatorType = subkey + parser.ann.Annotator.Annotator = subvalue return nil } return fmt.Errorf("unrecognized Annotator type %v", subkey) diff --git a/tvloader/parser2v1/parse_annotation_test.go b/tvloader/parser2v1/parse_annotation_test.go index 3fdce9f6..eb0f4cf5 100644 --- a/tvloader/parser2v1/parse_annotation_test.go +++ b/tvloader/parser2v1/parse_annotation_test.go @@ -70,11 +70,11 @@ func TestParser2_1CanParseAnnotationTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.ann.Annotator != "John Doe" { + if parser.ann.Annotator.Annotator != "John Doe" { t.Errorf("got %v for Annotator, expected John Doe", parser.ann.Annotator) } - if parser.ann.AnnotatorType != "Person" { - t.Errorf("got %v for AnnotatorType, expected Person", parser.ann.AnnotatorType) + if parser.ann.Annotator.AnnotatorType != "Person" { + t.Errorf("got %v for AnnotatorType, expected Person", parser.ann.Annotator.AnnotatorType) } // Annotation Date @@ -156,4 +156,3 @@ func TestParser2_1FailsIfAnnotationRefInvalid(t *testing.T) { t.Errorf("expected non-nil error, got nil") } } - diff --git a/tvloader/parser2v1/parse_creation_info.go b/tvloader/parser2v1/parse_creation_info.go index 8742bf2a..df16008b 100644 --- a/tvloader/parser2v1/parse_creation_info.go +++ b/tvloader/parser2v1/parse_creation_info.go @@ -17,39 +17,11 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string // create an SPDX Creation Info data struct if we don't have one already if parser.doc.CreationInfo == nil { - parser.doc.CreationInfo = &spdx.CreationInfo2_1{ - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_1{}, - } + parser.doc.CreationInfo = &spdx.CreationInfo2_1{} } ci := parser.doc.CreationInfo switch tag { - case "SPDXVersion": - ci.SPDXVersion = value 
- case "DataLicense": - ci.DataLicense = value - case "SPDXID": - eID, err := extractElementID(value) - if err != nil { - return err - } - ci.SPDXIdentifier = eID - case "DocumentName": - ci.DocumentName = value - case "DocumentNamespace": - ci.DocumentNamespace = value - case "ExternalDocumentRef": - documentRefID, uri, alg, checksum, err := extractExternalDocumentReference(value) - if err != nil { - return err - } - edr := spdx.ExternalDocumentRef2_1{ - DocumentRefID: documentRefID, - URI: uri, - Alg: alg, - Checksum: checksum, - } - ci.ExternalDocumentReferences[documentRefID] = edr case "LicenseListVersion": ci.LicenseListVersion = value case "Creator": @@ -57,22 +29,20 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string if err != nil { return err } + + creator := spdx.Creator{Creator: subvalue} switch subkey { - case "Person": - ci.CreatorPersons = append(ci.CreatorPersons, subvalue) - case "Organization": - ci.CreatorOrganizations = append(ci.CreatorOrganizations, subvalue) - case "Tool": - ci.CreatorTools = append(ci.CreatorTools, subvalue) + case "Person", "Organization", "Tool": + creator.CreatorType = subkey default: return fmt.Errorf("unrecognized Creator type %v", subkey) } + + ci.Creators = append(ci.Creators, creator) case "Created": ci.Created = value case "CreatorComment": ci.CreatorComment = value - case "DocumentComment": - ci.DocumentComment = value // tag for going on to package section case "PackageName": @@ -91,7 +61,7 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string return parser.parsePairFromPackage2_1(tag, value) // tag for going on to _unpackaged_ file section case "FileName": - // leave pkg as nil, so that packages will be placed in UnpackagedFiles + // leave pkg as nil, so that packages will be placed in Files parser.st = psFile2_1 parser.pkg = nil return parser.parsePairFromFile2_1(tag, value) diff --git a/tvloader/parser2v1/parse_creation_info_test.go b/tvloader/parser2v1/parse_creation_info_test.go index 2a8c094d..83058dd8 100644 --- a/tvloader/parser2v1/parse_creation_info_test.go +++ b/tvloader/parser2v1/parse_creation_info_test.go @@ -58,7 +58,7 @@ func TestParser2_1CIMovesToFileAfterParsingFileNameTagWithNoPackages(t *testing. 
t.Errorf("parser is in state %v, expected %v", parser.st, psFile2_1) } // and current package should be nil, meaning Files are placed in the - // UnpackagedFiles map instead of in a Package + // Files map instead of in a Package if parser.pkg != nil { t.Fatalf("expected pkg to be nil, got non-nil pkg") } @@ -179,7 +179,7 @@ func TestParser2_1HasCreationInfoAfterCallToParseFirstTag(t *testing.T) { doc: &spdx.Document2_1{}, st: psCreationInfo2_1, } - err := parser.parsePairFromCreationInfo2_1("SPDXVersion", "SPDX-2.1") + err := parser.parsePairFromCreationInfo2_1("LicenseListVersion", "3.9") if err != nil { t.Errorf("got error when calling parsePairFromCreationInfo2_1: %v", err) } @@ -194,96 +194,8 @@ func TestParser2_1CanParseCreationInfoTags(t *testing.T) { st: psCreationInfo2_1, } - // SPDX Version - err := parser.parsePairFromCreationInfo2_1("SPDXVersion", "SPDX-2.1") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.SPDXVersion != "SPDX-2.1" { - t.Errorf("got %v for SPDXVersion", parser.doc.CreationInfo.SPDXVersion) - } - - // Data License - err = parser.parsePairFromCreationInfo2_1("DataLicense", "CC0-1.0") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DataLicense != "CC0-1.0" { - t.Errorf("got %v for DataLicense", parser.doc.CreationInfo.DataLicense) - } - - // SPDX Identifier - err = parser.parsePairFromCreationInfo2_1("SPDXID", "SPDXRef-DOCUMENT") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.SPDXIdentifier != "DOCUMENT" { - t.Errorf("got %v for SPDXIdentifier", parser.doc.CreationInfo.SPDXIdentifier) - } - - // Document Name - err = parser.parsePairFromCreationInfo2_1("DocumentName", "xyz-2.1.5") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentName != "xyz-2.1.5" { - t.Errorf("got %v for DocumentName", parser.doc.CreationInfo.DocumentName) - } - - // Document Namespace - err = parser.parsePairFromCreationInfo2_1("DocumentNamespace", "http://example.com/xyz-2.1.5.spdx") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentNamespace != "http://example.com/xyz-2.1.5.spdx" { - t.Errorf("got %v for DocumentNamespace", parser.doc.CreationInfo.DocumentNamespace) - } - - // External Document Reference - refs := []string{ - "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759", - "DocumentRef-xyz-2.1.2 http://example.com/xyz-2.1.2 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2760", - } - wantRef0 := spdx.ExternalDocumentRef2_1{ - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - } - wantRef1 := spdx.ExternalDocumentRef2_1{ - DocumentRefID: "xyz-2.1.2", - URI: "http://example.com/xyz-2.1.2", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2760", - } - err = parser.parsePairFromCreationInfo2_1("ExternalDocumentRef", refs[0]) - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - err = parser.parsePairFromCreationInfo2_1("ExternalDocumentRef", refs[1]) - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if len(parser.doc.CreationInfo.ExternalDocumentReferences) != 2 { - t.Errorf("got %d ExternalDocumentReferences, expected %d", 
len(parser.doc.CreationInfo.ExternalDocumentReferences), 2) - } - gotRef0 := parser.doc.CreationInfo.ExternalDocumentReferences["spdx-tool-1.2"] - if gotRef0.DocumentRefID != wantRef0.DocumentRefID || - gotRef0.URI != wantRef0.URI || - gotRef0.Alg != wantRef0.Alg || - gotRef0.Checksum != wantRef0.Checksum { - t.Errorf("got %#v for ExternalDocumentReferences[0], wanted %#v", gotRef0, wantRef0) - } - gotRef1 := parser.doc.CreationInfo.ExternalDocumentReferences["xyz-2.1.2"] - if gotRef1.DocumentRefID != wantRef1.DocumentRefID || - gotRef1.URI != wantRef1.URI || - gotRef1.Alg != wantRef1.Alg || - gotRef1.Checksum != wantRef1.Checksum { - t.Errorf("got %#v for ExternalDocumentReferences[1], wanted %#v", gotRef1, wantRef1) - } - // License List Version - err = parser.parsePairFromCreationInfo2_1("LicenseListVersion", "2.2") + err := parser.parsePairFromCreationInfo2_1("LicenseListVersion", "2.2") if err != nil { t.Errorf("expected nil error, got %v", err) } @@ -304,10 +216,10 @@ func TestParser2_1CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorPersons) != 2 || - parser.doc.CreationInfo.CreatorPersons[0] != "Person A" || - parser.doc.CreationInfo.CreatorPersons[1] != "Person B" { - t.Errorf("got %v for CreatorPersons", parser.doc.CreationInfo.CreatorPersons) + if len(parser.doc.CreationInfo.Creators) != 2 || + parser.doc.CreationInfo.Creators[0].Creator != "Person A" || + parser.doc.CreationInfo.Creators[1].Creator != "Person B" { + t.Errorf("got %+v for Creators", parser.doc.CreationInfo.Creators) } // Creators: Organizations @@ -323,10 +235,10 @@ func TestParser2_1CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorOrganizations) != 2 || - parser.doc.CreationInfo.CreatorOrganizations[0] != "Organization A" || - parser.doc.CreationInfo.CreatorOrganizations[1] != "Organization B" { - t.Errorf("got %v for CreatorOrganizations", parser.doc.CreationInfo.CreatorOrganizations) + if len(parser.doc.CreationInfo.Creators) != 4 || + parser.doc.CreationInfo.Creators[2].Creator != "Organization A" || + parser.doc.CreationInfo.Creators[3].Creator != "Organization B" { + t.Errorf("got %+v for CreatorOrganizations", parser.doc.CreationInfo.Creators) } // Creators: Tools @@ -342,10 +254,10 @@ func TestParser2_1CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorTools) != 2 || - parser.doc.CreationInfo.CreatorTools[0] != "Tool A" || - parser.doc.CreationInfo.CreatorTools[1] != "Tool B" { - t.Errorf("got %v for CreatorTools", parser.doc.CreationInfo.CreatorTools) + if len(parser.doc.CreationInfo.Creators) != 6 || + parser.doc.CreationInfo.Creators[4].Creator != "Tool A" || + parser.doc.CreationInfo.Creators[5].Creator != "Tool B" { + t.Errorf("got %v for CreatorTools", parser.doc.CreationInfo.Creators) } // Created date @@ -365,16 +277,6 @@ func TestParser2_1CanParseCreationInfoTags(t *testing.T) { if parser.doc.CreationInfo.CreatorComment != "Blah whatever" { t.Errorf("got %v for CreatorComment", parser.doc.CreationInfo.CreatorComment) } - - // Document Comment - err = parser.parsePairFromCreationInfo2_1("DocumentComment", "Blah whatever") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentComment != "Blah whatever" { - t.Errorf("got %v for DocumentComment", 
parser.doc.CreationInfo.DocumentComment) - } - } func TestParser2_1InvalidCreatorTagsFail(t *testing.T) { diff --git a/tvloader/parser2v1/parse_file.go b/tvloader/parser2v1/parse_file.go index 73473849..81768bb6 100644 --- a/tvloader/parser2v1/parse_file.go +++ b/tvloader/parser2v1/parse_file.go @@ -49,37 +49,37 @@ func (parser *tvParser2_1) parsePairFromFile2_1(tag string, value string) error } parser.file.FileSPDXIdentifier = eID if parser.pkg == nil { - if parser.doc.UnpackagedFiles == nil { - parser.doc.UnpackagedFiles = map[spdx.ElementID]*spdx.File2_1{} + if parser.doc.Files == nil { + parser.doc.Files = []*spdx.File2_1{} } - parser.doc.UnpackagedFiles[eID] = parser.file + parser.doc.Files = append(parser.doc.Files, parser.file) } else { if parser.pkg.Files == nil { - parser.pkg.Files = map[spdx.ElementID]*spdx.File2_1{} + parser.pkg.Files = []*spdx.File2_1{} } - parser.pkg.Files[eID] = parser.file + parser.pkg.Files = append(parser.pkg.Files, parser.file) } case "FileType": - parser.file.FileType = append(parser.file.FileType, value) + parser.file.FileTypes = append(parser.file.FileTypes, value) case "FileChecksum": subkey, subvalue, err := extractSubs(value) if err != nil { return err } - switch subkey { - case "SHA1": - parser.file.FileChecksumSHA1 = subvalue - case "SHA256": - parser.file.FileChecksumSHA256 = subvalue - case "MD5": - parser.file.FileChecksumMD5 = subvalue + if parser.file.Checksums == nil { + parser.file.Checksums = []spdx.Checksum{} + } + switch spdx.ChecksumAlgorithm(subkey) { + case spdx.SHA1, spdx.SHA256, spdx.MD5: + algorithm := spdx.ChecksumAlgorithm(subkey) + parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) default: return fmt.Errorf("got unknown checksum type %s", subkey) } case "LicenseConcluded": parser.file.LicenseConcluded = value case "LicenseInfoInFile": - parser.file.LicenseInfoInFile = append(parser.file.LicenseInfoInFile, value) + parser.file.LicenseInfoInFiles = append(parser.file.LicenseInfoInFiles, value) case "LicenseComments": parser.file.LicenseComments = value case "FileCopyrightText": @@ -103,7 +103,7 @@ func (parser *tvParser2_1) parsePairFromFile2_1(tag string, value string) error case "FileNotice": parser.file.FileNotice = value case "FileContributor": - parser.file.FileContributor = append(parser.file.FileContributor, value) + parser.file.FileContributors = append(parser.file.FileContributors, value) case "FileDependency": parser.file.FileDependencies = append(parser.file.FileDependencies, value) // for relationship tags, pass along but don't change state diff --git a/tvloader/parser2v1/parse_file_test.go b/tvloader/parser2v1/parse_file_test.go index 9f42b555..375f9677 100644 --- a/tvloader/parser2v1/parse_file_test.go +++ b/tvloader/parser2v1/parse_file_test.go @@ -13,23 +13,23 @@ func TestParser2_1FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { fileOldName := "f1.txt" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: fileOldName, FileSPDXIdentifier: "f1"}, } fileOld := parser.file - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = fileOld + parser.doc.Packages = 
append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, fileOld) // the Package's Files should have this one only if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } // now add a new file @@ -55,11 +55,11 @@ func TestParser2_1FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } // now parse an SPDX identifier tag @@ -71,17 +71,17 @@ func TestParser2_1FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 2 { t.Fatalf("expected 2 files, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } - if parser.pkg.Files["f2ID"] != parser.file { - t.Errorf("expected file %v in Files[f2ID], got %v", parser.file, parser.pkg.Files["f2ID"]) + if parser.pkg.Files[1] != parser.file { + t.Errorf("expected file %v in Files[f2ID], got %v", parser.file, parser.pkg.Files[1]) } - if parser.pkg.Files["f2ID"].FileName != fileName { - t.Errorf("expected file name %s in Files[f2ID], got %s", fileName, parser.pkg.Files["f2ID"].FileName) + if parser.pkg.Files[1].FileName != fileName { + t.Errorf("expected file name %s in Files[f2ID], got %s", fileName, parser.pkg.Files[1].FileName) } } @@ -103,12 +103,12 @@ func TestParser2_1FileAddsToPackageOrUnpackagedFiles(t *testing.T) { t.Errorf("got error when calling parsePair2_1: %v", err) } fileOld := parser.file - // should have been added to UnpackagedFiles - if len(parser.doc.UnpackagedFiles) != 1 { - t.Fatalf("expected 1 file in UnpackagedFiles, got %d", len(parser.doc.UnpackagedFiles)) + // should have been added to Files + if len(parser.doc.Files) != 1 { + t.Fatalf("expected 1 file in Files, got %d", len(parser.doc.Files)) } - if parser.doc.UnpackagedFiles["f2ID"] != fileOld { - t.Errorf("expected file %v in UnpackagedFiles[f2ID], got %v", fileOld, 
parser.doc.UnpackagedFiles["f2ID"]) + if parser.doc.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f2ID], got %v", fileOld, parser.doc.Files[0]) } // now create a package and a new file err = parser.parsePair2_1("PackageName", "package1") @@ -127,19 +127,19 @@ func TestParser2_1FileAddsToPackageOrUnpackagedFiles(t *testing.T) { if err != nil { t.Errorf("got error when calling parsePair2_1: %v", err) } - // UnpackagedFiles should still be size 1 and have old file only - if len(parser.doc.UnpackagedFiles) != 1 { - t.Fatalf("expected 1 file in UnpackagedFiles, got %d", len(parser.doc.UnpackagedFiles)) + // Files should still be size 1 and have old file only + if len(parser.doc.Files) != 1 { + t.Fatalf("expected 1 file in Files, got %d", len(parser.doc.Files)) } - if parser.doc.UnpackagedFiles["f2ID"] != fileOld { - t.Errorf("expected file %v in UnpackagedFiles[f2ID], got %v", fileOld, parser.doc.UnpackagedFiles["f2ID"]) + if parser.doc.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f2ID], got %v", fileOld, parser.doc.Files[0]) } // and new package should have gotten the new file if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file in Files, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f3ID"] != parser.file { - t.Errorf("expected file %v in Files[f3ID], got %v", parser.file, parser.pkg.Files["f3ID"]) + if parser.pkg.Files[0] != parser.file { + t.Errorf("expected file %v in Files[f3ID], got %v", parser.file, parser.pkg.Files[0]) } } @@ -149,15 +149,15 @@ func TestParser2_1FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { f1Name := "f1.txt" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: p1Name, PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: p1Name, PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: f1Name, FileSPDXIdentifier: "f1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = p1 - parser.pkg.Files["f1"] = f1 + parser.doc.Packages = append(parser.doc.Packages, p1) + parser.pkg.Files = append(parser.pkg.Files, f1) // now add a new package p2Name := "package2" @@ -193,21 +193,21 @@ func TestParser2_1FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { if len(parser.doc.Packages) != 1 { t.Fatalf("expected 1 package, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["package1"] != p1 { - t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages["package1"]) + if parser.doc.Packages[0] != p1 { + t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages[0]) } - if parser.doc.Packages["package1"].PackageName != p1Name { - t.Errorf("expected package name %s in Packages[package1], got %s", p1Name, parser.doc.Packages["package1"].PackageName) + if parser.doc.Packages[0].PackageName != p1Name { + t.Errorf("expected package name %s in Packages[package1], got %s", p1Name, parser.doc.Packages[0].PackageName) } // and the first Package's Files should be of size 1 and have f1 only - if len(parser.doc.Packages["package1"].Files) != 1 { - t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages["package1"].Files)) + if len(parser.doc.Packages[0].Files) != 1 { + t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages[0].Files)) 
} - if parser.doc.Packages["package1"].Files["f1"] != f1 { - t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages["package1"].Files["f1"]) + if parser.doc.Packages[0].Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages[0].Files[0]) } - if parser.doc.Packages["package1"].Files["f1"].FileName != f1Name { - t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.doc.Packages["package1"].Files["f1"].FileName) + if parser.doc.Packages[0].Files[0].FileName != f1Name { + t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.doc.Packages[0].Files[0].FileName) } // and the current file should be nil if parser.file != nil { @@ -217,13 +217,13 @@ func TestParser2_1FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { func TestParser2_1FileMovesToSnippetAfterParsingSnippetSPDXIDTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) fileCurrent := parser.file err := parser.parsePair2_1("SnippetSPDXID", "SPDXRef-Test1") @@ -242,13 +242,13 @@ func TestParser2_1FileMovesToSnippetAfterParsingSnippetSPDXIDTag(t *testing.T) { func TestParser2_1FileMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("LicenseID", "LicenseRef-TestLic") if err != nil { @@ -261,13 +261,13 @@ func TestParser2_1FileMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) func TestParser2_1FileMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("Reviewer", "Person: John Doe") if err != nil { @@ -280,13 +280,13 @@ func 
TestParser2_1FileMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_1FileStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") if err != nil { @@ -309,13 +309,13 @@ func TestParser2_1FileStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_1FileStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("Annotator", "Person: John Doe ()") if err != nil { @@ -361,11 +361,11 @@ func TestParser2_1FileStaysAfterParsingAnnotationTags(t *testing.T) { // ===== File data section tests ===== func TestParser2_1CanParseFileTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -393,8 +393,8 @@ func TestParser2_1CanParseFileTags(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Errorf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != parser.file { - t.Errorf("expected Files[f1] to be %v, got %v", parser.file, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != parser.file { + t.Errorf("expected Files[f1] to be %v, got %v", parser.file, parser.pkg.Files[0]) } // File Type @@ -410,18 +410,18 @@ func TestParser2_1CanParseFileTags(t *testing.T) { } for _, typeWant := range fileTypes { flagFound := false - for _, typeCheck := range parser.file.FileType { + for _, typeCheck := range parser.file.FileTypes { if typeWant == typeCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in FileType", typeWant) + t.Errorf("didn't find %s in FileTypes", typeWant) } } - if len(fileTypes) != len(parser.file.FileType) { - t.Errorf("expected %d types in FileType, got %d", len(fileTypes), - 
len(parser.file.FileType)) + if len(fileTypes) != len(parser.file.FileTypes) { + t.Errorf("expected %d types in FileTypes, got %d", len(fileTypes), + len(parser.file.FileTypes)) } // File Checksums @@ -443,16 +443,22 @@ func TestParser2_1CanParseFileTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.file.FileChecksumSHA1 != codeSha1 { - t.Errorf("expected %s for FileChecksumSHA1, got %s", codeSha1, parser.file.FileChecksumSHA1) - } - if parser.file.FileChecksumSHA256 != codeSha256 { - t.Errorf("expected %s for FileChecksumSHA256, got %s", codeSha256, parser.file.FileChecksumSHA256) - } - if parser.file.FileChecksumMD5 != codeMd5 { - t.Errorf("expected %s for FileChecksumMD5, got %s", codeMd5, parser.file.FileChecksumMD5) + for _, checksum := range parser.file.Checksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != codeSha1 { + t.Errorf("expected %s for FileChecksumSHA1, got %s", codeSha1, checksum.Value) + } + case spdx.SHA256: + if checksum.Value != codeSha256 { + t.Errorf("expected %s for FileChecksumSHA1, got %s", codeSha256, checksum.Value) + } + case spdx.MD5: + if checksum.Value != codeMd5 { + t.Errorf("expected %s for FileChecksumSHA1, got %s", codeMd5, checksum.Value) + } + } } - // Concluded License err = parser.parsePairFromFile2_1("LicenseConcluded", "Apache-2.0 OR GPL-2.0-or-later") if err != nil { @@ -476,18 +482,18 @@ func TestParser2_1CanParseFileTags(t *testing.T) { } for _, licWant := range lics { flagFound := false - for _, licCheck := range parser.file.LicenseInfoInFile { + for _, licCheck := range parser.file.LicenseInfoInFiles { if licWant == licCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in LicenseInfoInFile", licWant) + t.Errorf("didn't find %s in LicenseInfoInFiles", licWant) } } - if len(lics) != len(parser.file.LicenseInfoInFile) { - t.Errorf("expected %d licenses in LicenseInfoInFile, got %d", len(lics), - len(parser.file.LicenseInfoInFile)) + if len(lics) != len(parser.file.LicenseInfoInFiles) { + t.Errorf("expected %d licenses in LicenseInfoInFiles, got %d", len(lics), + len(parser.file.LicenseInfoInFiles)) } // Comments on License @@ -625,18 +631,18 @@ func TestParser2_1CanParseFileTags(t *testing.T) { } for _, contribWant := range contribs { flagFound := false - for _, contribCheck := range parser.file.FileContributor { + for _, contribCheck := range parser.file.FileContributors { if contribWant == contribCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in FileContributor", contribWant) + t.Errorf("didn't find %s in FileContributors", contribWant) } } - if len(contribs) != len(parser.file.FileContributor) { - t.Errorf("expected %d contribenses in FileContributor, got %d", len(contribs), - len(parser.file.FileContributor)) + if len(contribs) != len(parser.file.FileContributors) { + t.Errorf("expected %d contribenses in FileContributors, got %d", len(contribs), + len(parser.file.FileContributors)) } // File Dependencies @@ -670,13 +676,13 @@ func TestParser2_1CanParseFileTags(t *testing.T) { func TestParser2_1FileCreatesRelationshipInDocument(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: 
[]*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-whatever") if err != nil { @@ -692,13 +698,13 @@ func TestParser2_1FileCreatesRelationshipInDocument(t *testing.T) { func TestParser2_1FileCreatesAnnotationInDocument(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_1("Annotator", "Person: John Doe ()") if err != nil { @@ -714,13 +720,13 @@ func TestParser2_1FileCreatesAnnotationInDocument(t *testing.T) { func TestParser2_1FileUnknownTagFails(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromFile2_1("blah", "something") if err == nil { @@ -730,13 +736,13 @@ func TestParser2_1FileUnknownTagFails(t *testing.T) { func TestFileAOPPointerChangesAfterTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromFile2_1("ArtifactOfProjectName", "project1") if err != nil { @@ -787,11 +793,11 @@ func TestFileAOPPointerChangesAfterTags(t *testing.T) { func TestParser2_1FailsIfInvalidSPDXIDInFileSection(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", 
PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -807,11 +813,11 @@ func TestParser2_1FailsIfInvalidSPDXIDInFileSection(t *testing.T) { func TestParser2_1FailsIfInvalidChecksumFormatInFileSection(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -827,11 +833,11 @@ func TestParser2_1FailsIfInvalidChecksumFormatInFileSection(t *testing.T) { func TestParser2_1FailsIfUnknownChecksumTypeInFileSection(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -847,11 +853,11 @@ func TestParser2_1FailsIfUnknownChecksumTypeInFileSection(t *testing.T) { func TestParser2_1FailsIfArtifactHomePageBeforeArtifactName(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -867,11 +873,11 @@ func TestParser2_1FailsIfArtifactHomePageBeforeArtifactName(t *testing.T) { func TestParser2_1FailsIfArtifactURIBeforeArtifactName(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_1("FileName", "f1.txt") @@ -888,7 +894,7 @@ func TestParser2_1FailsIfArtifactURIBeforeArtifactName(t *testing.T) { func TestParser2_1FilesWithoutSpdxIdThrowError(t *testing.T) { // case 1: The previous file (packaged or unpackaged) does not contain spdxID parser1 := tvParser2_1{ - doc: &spdx.Document2_1{Packages: 
map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, file: &spdx.File2_1{FileName: "FileName"}, } diff --git a/tvloader/parser2v1/parse_other_license_test.go b/tvloader/parser2v1/parse_other_license_test.go index d97eb1c8..5ae520b9 100644 --- a/tvloader/parser2v1/parse_other_license_test.go +++ b/tvloader/parser2v1/parse_other_license_test.go @@ -14,9 +14,9 @@ func TestParser2_1OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) olname1 := "License 11" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: olid1, @@ -24,8 +24,8 @@ func TestParser2_1OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) }, } olic1 := parser.otherLic - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) // the Document's OtherLicenses should have this one only @@ -90,13 +90,13 @@ func TestParser2_1OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) func TestParser2_1OLMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePair2_1("Reviewer", "Person: John Doe") @@ -110,17 +110,17 @@ func TestParser2_1OLMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_1OtherLicenseStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-whatever", LicenseName: "the whatever license", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err 
:= parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") @@ -152,17 +152,17 @@ func TestParser2_1OtherLicenseStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_1OtherLicenseStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-whatever", LicenseName: "the whatever license", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePair2_1("Annotator", "Person: John Doe ()") @@ -209,24 +209,24 @@ func TestParser2_1OtherLicenseStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } func TestParser2_1OLFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", LicenseName: "License 11", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) // can't go back to old sections @@ -247,13 +247,13 @@ func TestParser2_1OLFailsAfterParsingOtherSectionTags(t *testing.T) { // ===== Other License data section tests ===== func TestParser2_1CanParseOtherLicenseTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, 
parser.otherLic) // License Identifier @@ -323,13 +323,13 @@ func TestParser2_1CanParseOtherLicenseTags(t *testing.T) { func TestParser2_1OLUnknownTagFails(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psOtherLicense2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePairFromOtherLicense2_1("blah", "something") diff --git a/tvloader/parser2v1/parse_package.go b/tvloader/parser2v1/parse_package.go index a8671079..22fc1ed2 100644 --- a/tvloader/parser2v1/parse_package.go +++ b/tvloader/parser2v1/parse_package.go @@ -45,16 +45,17 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err } parser.pkg.PackageSPDXIdentifier = eID if parser.doc.Packages == nil { - parser.doc.Packages = map[spdx.ElementID]*spdx.Package2_1{} + parser.doc.Packages = []*spdx.Package2_1{} } - parser.doc.Packages[eID] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) case "PackageVersion": parser.pkg.PackageVersion = value case "PackageFileName": parser.pkg.PackageFileName = value case "PackageSupplier": + parser.pkg.PackageSupplier = &spdx.Supplier{} if value == "NOASSERTION" { - parser.pkg.PackageSupplierNOASSERTION = true + parser.pkg.PackageSupplier.Supplier = value break } subkey, subvalue, err := extractSubs(value) @@ -62,16 +63,16 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err return err } switch subkey { - case "Person": - parser.pkg.PackageSupplierPerson = subvalue - case "Organization": - parser.pkg.PackageSupplierOrganization = subvalue + case "Person", "Organization": + parser.pkg.PackageSupplier.Supplier = subvalue + parser.pkg.PackageSupplier.SupplierType = subkey default: return fmt.Errorf("unrecognized PackageSupplier type %v", subkey) } case "PackageOriginator": + parser.pkg.PackageOriginator = &spdx.Originator{} if value == "NOASSERTION" { - parser.pkg.PackageOriginatorNOASSERTION = true + parser.pkg.PackageOriginator.Originator = value break } subkey, subvalue, err := extractSubs(value) @@ -79,10 +80,9 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err return err } switch subkey { - case "Person": - parser.pkg.PackageOriginatorPerson = subvalue - case "Organization": - parser.pkg.PackageOriginatorOrganization = subvalue + case "Person", "Organization": + parser.pkg.PackageOriginator.Originator = subvalue + parser.pkg.PackageOriginator.OriginatorType = subkey default: return fmt.Errorf("unrecognized PackageOriginator type %v", subkey) } @@ -96,21 +96,19 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err parser.pkg.FilesAnalyzed = true } case "PackageVerificationCode": - code, excludesFileName := extractCodeAndExcludes(value) - parser.pkg.PackageVerificationCode = code - parser.pkg.PackageVerificationCodeExcludedFile = excludesFileName + parser.pkg.PackageVerificationCode = 
extractCodeAndExcludes(value) case "PackageChecksum": subkey, subvalue, err := extractSubs(value) if err != nil { return err } - switch subkey { - case "SHA1": - parser.pkg.PackageChecksumSHA1 = subvalue - case "SHA256": - parser.pkg.PackageChecksumSHA256 = subvalue - case "MD5": - parser.pkg.PackageChecksumMD5 = subvalue + if parser.pkg.PackageChecksums == nil { + parser.pkg.PackageChecksums = []spdx.Checksum{} + } + switch spdx.ChecksumAlgorithm(subkey) { + case spdx.SHA1, spdx.SHA256, spdx.MD5: + algorithm := spdx.ChecksumAlgorithm(subkey) + parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) default: return fmt.Errorf("got unknown checksum type %s", subkey) } @@ -184,13 +182,13 @@ func (parser *tvParser2_1) parsePairFromPackage2_1(tag string, value string) err // ===== Helper functions ===== -func extractCodeAndExcludes(value string) (string, string) { +func extractCodeAndExcludes(value string) spdx.PackageVerificationCode { // FIXME this should probably be done using regular expressions instead // split by paren + word "excludes:" sp := strings.SplitN(value, "(excludes:", 2) if len(sp) < 2 { // not found; return the whole string as just the code - return value, "" + return spdx.PackageVerificationCode{Value: value, ExcludedFiles: []string{}} } // if we're here, code is in first part and excludes filename is in @@ -198,7 +196,7 @@ func extractCodeAndExcludes(value string) (string, string) { code := strings.TrimSpace(sp[0]) parsedSp := strings.SplitN(sp[1], ")", 2) fileName := strings.TrimSpace(parsedSp[0]) - return code, fileName + return spdx.PackageVerificationCode{Value: code, ExcludedFiles: []string{fileName}} } func extractPackageExternalReference(value string) (string, string, string, error) { diff --git a/tvloader/parser2v1/parse_package_test.go b/tvloader/parser2v1/parse_package_test.go index 3cda3ce4..734fc913 100644 --- a/tvloader/parser2v1/parse_package_test.go +++ b/tvloader/parser2v1/parse_package_test.go @@ -13,19 +13,12 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T pkgOldName := "p1" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: pkgOldName, PackageSPDXIdentifier: "p1"}, } pkgOld := parser.pkg - parser.doc.Packages["p1"] = pkgOld - // the Document's Packages should have this one only - if parser.doc.Packages["p1"] != pkgOld { - t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) - } - if len(parser.doc.Packages) != 1 { - t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) - } + parser.doc.Packages = append(parser.doc.Packages, pkgOld) // now add a new package pkgName := "p2" @@ -57,8 +50,8 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") } // and the Document's Packages should still be of size 1 and have pkgOld only - if parser.doc.Packages["p1"] != pkgOld { - t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != pkgOld { + t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages[0]) } if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) @@ -67,9 +60,9 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T func 
TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpackaged(t *testing.T) { // pkg is nil, so that Files appearing before the first PackageName tag - // are added to UnpackagedFiles instead of Packages + // are added to Files instead of Packages parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psFile2_1, pkg: nil, } @@ -112,11 +105,11 @@ func TestParser2_1PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpack func TestParser2_1PackageMovesToFileAfterParsingFileNameTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) pkgCurrent := parser.pkg err := parser.parsePair2_1("FileName", "testFile") @@ -135,11 +128,11 @@ func TestParser2_1PackageMovesToFileAfterParsingFileNameTag(t *testing.T) { func TestParser2_1PackageMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("LicenseID", "LicenseRef-TestLic") if err != nil { @@ -152,11 +145,11 @@ func TestParser2_1PackageMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing. func TestParser2_1PackageMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("Reviewer", "Person: John Doe") if err != nil { @@ -169,11 +162,11 @@ func TestParser2_1PackageMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_1PackageStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") if err != nil { @@ -196,11 +189,11 @@ func TestParser2_1PackageStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_1PackageStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("Annotator", "Person: John Doe ()") if err != nil { @@ -246,7 +239,7 @@ func 
TestParser2_1PackageStaysAfterParsingAnnotationTags(t *testing.T) { // ===== Package data section tests ===== func TestParser2_1CanParsePackageTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -282,8 +275,8 @@ func TestParser2_1CanParsePackageTags(t *testing.T) { if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["p1"] != parser.pkg { - t.Errorf("expected to point to parser.pkg, got %v", parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != parser.pkg { + t.Errorf("expected to point to parser.pkg, got %v", parser.doc.Packages[0]) } // Package Version @@ -353,14 +346,22 @@ func TestParser2_1CanParsePackageTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageChecksumSHA1 != codeSha1 { - t.Errorf("expected %s for PackageChecksumSHA1, got %s", codeSha1, parser.pkg.PackageChecksumSHA1) - } - if parser.pkg.PackageChecksumSHA256 != codeSha256 { - t.Errorf("expected %s for PackageChecksumSHA256, got %s", codeSha256, parser.pkg.PackageChecksumSHA256) - } - if parser.pkg.PackageChecksumMD5 != codeMd5 { - t.Errorf("expected %s for PackageChecksumMD5, got %s", codeMd5, parser.pkg.PackageChecksumMD5) + + for _, checksum := range parser.pkg.PackageChecksums { + switch checksum.Algorithm { + case spdx.SHA1: + if checksum.Value != codeSha1 { + t.Errorf("expected %s for PackageChecksum SHA1, got %s", codeSha1, checksum.Value) + } + case spdx.SHA256: + if checksum.Value != codeSha256 { + t.Errorf("expected %s for PackageChecksum SHA256, got %s", codeSha256, checksum.Value) + } + case spdx.MD5: + if checksum.Value != codeMd5 { + t.Errorf("expected %s for PackageChecksum MD5, got %s", codeMd5, checksum.Value) + } + } } // Package Home Page @@ -555,119 +556,119 @@ func TestParser2_1CanParsePackageTags(t *testing.T) { func TestParser2_1CanParsePackageSupplierPersonTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: Person err := parser.parsePairFromPackage2_1("PackageSupplier", "Person: John Doe") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierPerson != "John Doe" { - t.Errorf("got %v for PackageSupplierPerson", parser.pkg.PackageSupplierPerson) + if parser.pkg.PackageSupplier.Supplier != "John Doe" { + t.Errorf("got %v for PackageSupplierPerson", parser.pkg.PackageSupplier.Supplier) } } func TestParser2_1CanParsePackageSupplierOrganizationTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: Organization err := parser.parsePairFromPackage2_1("PackageSupplier", "Organization: John Doe, Inc.") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierOrganization 
!= "John Doe, Inc." { - t.Errorf("got %v for PackageSupplierOrganization", parser.pkg.PackageSupplierOrganization) + if parser.pkg.PackageSupplier.Supplier != "John Doe, Inc." { + t.Errorf("got %v for PackageSupplierOrganization", parser.pkg.PackageSupplier.Supplier) } } func TestParser2_1CanParsePackageSupplierNOASSERTIONTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: NOASSERTION err := parser.parsePairFromPackage2_1("PackageSupplier", "NOASSERTION") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierNOASSERTION != true { + if parser.pkg.PackageSupplier.Supplier != "NOASSERTION" { t.Errorf("got false for PackageSupplierNOASSERTION") } } func TestParser2_1CanParsePackageOriginatorPersonTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: Person err := parser.parsePairFromPackage2_1("PackageOriginator", "Person: John Doe") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorPerson != "John Doe" { - t.Errorf("got %v for PackageOriginatorPerson", parser.pkg.PackageOriginatorPerson) + if parser.pkg.PackageOriginator.Originator != "John Doe" { + t.Errorf("got %v for PackageOriginatorPerson", parser.pkg.PackageOriginator.Originator) } } func TestParser2_1CanParsePackageOriginatorOrganizationTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: Organization err := parser.parsePairFromPackage2_1("PackageOriginator", "Organization: John Doe, Inc.") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorOrganization != "John Doe, Inc." { - t.Errorf("got %v for PackageOriginatorOrganization", parser.pkg.PackageOriginatorOrganization) + if parser.pkg.PackageOriginator.Originator != "John Doe, Inc." 
{ + t.Errorf("got %v for PackageOriginatorOrganization", parser.pkg.PackageOriginator.Originator) } } func TestParser2_1CanParsePackageOriginatorNOASSERTIONTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: NOASSERTION err := parser.parsePairFromPackage2_1("PackageOriginator", "NOASSERTION") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorNOASSERTION != true { + if parser.pkg.PackageOriginator.Originator != "NOASSERTION" { t.Errorf("got false for PackageOriginatorNOASSERTION") } } func TestParser2_1CanParsePackageVerificationCodeTagWithExcludes(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Verification Code with excludes parenthetical code := "d6a770ba38583ed4bb4525bd96e50461655d2758" @@ -677,22 +678,22 @@ func TestParser2_1CanParsePackageVerificationCodeTagWithExcludes(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageVerificationCode != code { + if parser.pkg.PackageVerificationCode.Value != code { t.Errorf("got %v for PackageVerificationCode", parser.pkg.PackageVerificationCode) } - if parser.pkg.PackageVerificationCodeExcludedFile != fileName { - t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCodeExcludedFile) + if len(parser.pkg.PackageVerificationCode.ExcludedFiles) != 1 || parser.pkg.PackageVerificationCode.ExcludedFiles[0] != fileName { + t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCode.ExcludedFiles) } } func TestParser2_1CanParsePackageVerificationCodeTagWithoutExcludes(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Verification Code without excludes parenthetical code := "d6a770ba38583ed4bb4525bd96e50461655d2758" @@ -700,22 +701,22 @@ func TestParser2_1CanParsePackageVerificationCodeTagWithoutExcludes(t *testing.T if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageVerificationCode != code { + if parser.pkg.PackageVerificationCode.Value != code { t.Errorf("got %v for PackageVerificationCode", parser.pkg.PackageVerificationCode) } - if parser.pkg.PackageVerificationCodeExcludedFile != "" { - t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCodeExcludedFile) + if len(parser.pkg.PackageVerificationCode.ExcludedFiles) != 0 { + t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCode.ExcludedFiles) } } func TestPackageExternalRefPointerChangesAfterTags(t *testing.T) { parser := tvParser2_1{ - doc: 
&spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) ref1 := "SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*" err := parser.parsePairFromPackage2_1("ExternalRef", ref1) @@ -756,11 +757,11 @@ func TestPackageExternalRefPointerChangesAfterTags(t *testing.T) { func TestParser2_1PackageCreatesRelationshipInDocument(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-whatever") if err != nil { @@ -776,11 +777,11 @@ func TestParser2_1PackageCreatesRelationshipInDocument(t *testing.T) { func TestParser2_1PackageCreatesAnnotationInDocument(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_1("Annotator", "Person: John Doe ()") if err != nil { @@ -796,11 +797,11 @@ func TestParser2_1PackageCreatesAnnotationInDocument(t *testing.T) { func TestParser2_1PackageUnknownTagFails(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePairFromPackage2_1("blah", "something") if err == nil { @@ -810,7 +811,7 @@ func TestParser2_1PackageUnknownTagFails(t *testing.T) { func TestParser2_1FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -829,7 +830,7 @@ func TestParser2_1FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { func TestParser2_1FailsIfInvalidPackageSupplierFormat(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -848,7 +849,7 @@ func TestParser2_1FailsIfInvalidPackageSupplierFormat(t *testing.T) { func TestParser2_1FailsIfUnknownPackageSupplierType(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -867,7 +868,7 @@ func TestParser2_1FailsIfUnknownPackageSupplierType(t *testing.T) { func TestParser2_1FailsIfInvalidPackageOriginatorFormat(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: 
map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -886,7 +887,7 @@ func TestParser2_1FailsIfInvalidPackageOriginatorFormat(t *testing.T) { func TestParser2_1FailsIfUnknownPackageOriginatorType(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -905,7 +906,7 @@ func TestParser2_1FailsIfUnknownPackageOriginatorType(t *testing.T) { func TestParser2_1SetsFilesAnalyzedTagsCorrectly(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -930,7 +931,7 @@ func TestParser2_1SetsFilesAnalyzedTagsCorrectly(t *testing.T) { func TestParser2_1FailsIfInvalidPackageChecksumFormat(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -949,7 +950,7 @@ func TestParser2_1FailsIfInvalidPackageChecksumFormat(t *testing.T) { func TestParser2_1FailsIfInvalidPackageChecksumType(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -968,7 +969,7 @@ func TestParser2_1FailsIfInvalidPackageChecksumType(t *testing.T) { func TestParser2_1FailsIfInvalidExternalRefFormat(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -987,7 +988,7 @@ func TestParser2_1FailsIfInvalidExternalRefFormat(t *testing.T) { func TestParser2_1FailsIfExternalRefCommentBeforeExternalRef(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{}, } @@ -1011,12 +1012,12 @@ func TestCanCheckAndExtractExcludesFilenameAndCode(t *testing.T) { fileName := "./package.spdx" fullCodeValue := "d6a770ba38583ed4bb4525bd96e50461655d2758 (excludes: ./package.spdx)" - gotCode, gotFileName := extractCodeAndExcludes(fullCodeValue) - if gotCode != code { + gotCode := extractCodeAndExcludes(fullCodeValue) + if gotCode.Value != code { t.Errorf("got %v for gotCode", gotCode) } - if gotFileName != fileName { - t.Errorf("got %v for gotFileName", gotFileName) + if len(gotCode.ExcludedFiles) != 1 || gotCode.ExcludedFiles[0] != fileName { + t.Errorf("got %v for gotFileName", gotCode.ExcludedFiles) } } @@ -1073,15 +1074,15 @@ func TestParser2_1PackageWithoutSpdxIdentifierThrowsError(t *testing.T) { // More than one package, the previous package doesn't contain the SPDXID pkgOldName := "p1" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psPackage2_1, pkg: &spdx.Package2_1{PackageName: pkgOldName}, } pkgOld := parser.pkg - parser.doc.Packages["p1"] = pkgOld + parser.doc.Packages = append(parser.doc.Packages, pkgOld) // the Document's Packages should have this one only - if parser.doc.Packages["p1"] 
!= pkgOld { - t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != pkgOld { + t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages[0]) } if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) diff --git a/tvloader/parser2v1/parse_review_test.go b/tvloader/parser2v1/parse_review_test.go index efcde5c4..2ef70067 100644 --- a/tvloader/parser2v1/parse_review_test.go +++ b/tvloader/parser2v1/parse_review_test.go @@ -12,9 +12,9 @@ func TestParser2_1ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { // create the first review rev1 := "John Doe" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -25,8 +25,8 @@ func TestParser2_1ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) r1 := parser.rev @@ -82,9 +82,9 @@ func TestParser2_1ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_1ReviewStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -95,8 +95,8 @@ func TestParser2_1ReviewStaysAfterParsingRelationshipTags(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -129,9 +129,9 @@ func TestParser2_1ReviewStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_1ReviewStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: 
"LicenseRef-Lic11", @@ -142,8 +142,8 @@ func TestParser2_1ReviewStaysAfterParsingAnnotationTags(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -191,16 +191,16 @@ func TestParser2_1ReviewStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } func TestParser2_1ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -208,8 +208,8 @@ func TestParser2_1ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -235,9 +235,9 @@ func TestParser2_1ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { // ===== Review data section tests ===== func TestParser2_1CanParseReviewTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -245,8 +245,8 @@ func TestParser2_1CanParseReviewTags(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -274,9 +274,9 @@ func TestParser2_1CanParseReviewTags(t *testing.T) { func TestParser2_1CanParseReviewerPersonTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: 
"test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -284,8 +284,8 @@ func TestParser2_1CanParseReviewerPersonTag(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -304,9 +304,9 @@ func TestParser2_1CanParseReviewerPersonTag(t *testing.T) { func TestParser2_1CanParseReviewerOrganizationTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -314,8 +314,8 @@ func TestParser2_1CanParseReviewerOrganizationTag(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -334,9 +334,9 @@ func TestParser2_1CanParseReviewerOrganizationTag(t *testing.T) { func TestParser2_1CanParseReviewerToolTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -344,8 +344,8 @@ func TestParser2_1CanParseReviewerToolTag(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -364,8 +364,8 @@ func TestParser2_1CanParseReviewerToolTag(t *testing.T) { func TestParser2_1FailsIfReviewerInvalidFormat(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, - st: psReview2_1, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, + st: psReview2_1, rev: &spdx.Review2_1{}, } parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -378,8 +378,8 @@ func TestParser2_1FailsIfReviewerInvalidFormat(t *testing.T) { func 
TestParser2_1FailsIfReviewerUnknownType(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, - st: psReview2_1, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, + st: psReview2_1, rev: &spdx.Review2_1{}, } parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -392,9 +392,9 @@ func TestParser2_1FailsIfReviewerUnknownType(t *testing.T) { func TestParser2_1ReviewUnknownTagFails(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psReview2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_1{ LicenseIdentifier: "LicenseRef-Lic11", @@ -402,8 +402,8 @@ func TestParser2_1ReviewUnknownTagFails(t *testing.T) { }, rev: &spdx.Review2_1{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -412,5 +412,3 @@ func TestParser2_1ReviewUnknownTagFails(t *testing.T) { t.Errorf("expected error from parsing unknown tag") } } - - diff --git a/tvloader/parser2v1/parse_snippet.go b/tvloader/parser2v1/parse_snippet.go index d5903833..33392d56 100644 --- a/tvloader/parser2v1/parse_snippet.go +++ b/tvloader/parser2v1/parse_snippet.go @@ -51,7 +51,7 @@ func (parser *tvParser2_1) parsePairFromSnippet2_1(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetFromFileSPDXIdentifier = deID + parser.snippet.SnippetFromFileSPDXIdentifier = deID.ElementRefID case "SnippetByteRange": byteStart, byteEnd, err := extractSubs(value) if err != nil { @@ -65,8 +65,12 @@ func (parser *tvParser2_1) parsePairFromSnippet2_1(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetByteRangeStart = bIntStart - parser.snippet.SnippetByteRangeEnd = bIntEnd + + if parser.snippet.Ranges == nil { + parser.snippet.Ranges = []spdx.SnippetRange{} + } + byteRange := spdx.SnippetRange{StartPointer: spdx.SnippetRangePointer{Offset: bIntStart}, EndPointer: spdx.SnippetRangePointer{Offset: bIntEnd}} + parser.snippet.Ranges = append(parser.snippet.Ranges, byteRange) case "SnippetLineRange": lineStart, lineEnd, err := extractSubs(value) if err != nil { @@ -80,8 +84,12 @@ func (parser *tvParser2_1) parsePairFromSnippet2_1(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetLineRangeStart = lInttStart - parser.snippet.SnippetLineRangeEnd = lInttEnd + + if parser.snippet.Ranges == nil { + parser.snippet.Ranges = []spdx.SnippetRange{} + } + lineRange := spdx.SnippetRange{StartPointer: spdx.SnippetRangePointer{LineNumber: lInttStart}, EndPointer: spdx.SnippetRangePointer{LineNumber: lInttEnd}} + parser.snippet.Ranges = append(parser.snippet.Ranges, lineRange) case "SnippetLicenseConcluded": parser.snippet.SnippetLicenseConcluded = value case "LicenseInfoInSnippet": diff --git a/tvloader/parser2v1/parse_snippet_test.go b/tvloader/parser2v1/parse_snippet_test.go index 603abc5f..ea747f48 100644 --- 
a/tvloader/parser2v1/parse_snippet_test.go +++ b/tvloader/parser2v1/parse_snippet_test.go @@ -12,15 +12,15 @@ func TestParser2_1SnippetStartsNewSnippetAfterParsingSnippetSPDXIDTag(t *testing // create the first snippet sid1 := spdx.ElementID("s1") parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: sid1}, } s1 := parser.snippet - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets[sid1] = parser.snippet // the File's Snippets should have this one only @@ -71,16 +71,16 @@ func TestParser2_1SnippetStartsNewSnippetAfterParsingSnippetSPDXIDTag(t *testing func TestParser2_1SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet // now add a new package @@ -113,21 +113,21 @@ func TestParser2_1SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T if len(parser.doc.Packages) != 1 { t.Errorf("Expected len(Packages) to be 1, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["package1"] != p1 { - t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages["package1"]) + if parser.doc.Packages[0] != p1 { + t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages[0]) } - if parser.doc.Packages["package1"].PackageName != "package1" { - t.Errorf("expected package name %s in Packages[package1], got %s", "package1", parser.doc.Packages["package1"].PackageName) + if parser.doc.Packages[0].PackageName != "package1" { + t.Errorf("expected package name %s in Packages[package1], got %s", "package1", parser.doc.Packages[0].PackageName) } // and the first Package's Files should be of size 1 and have f1 only - if len(parser.doc.Packages["package1"].Files) != 1 { - t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages["package1"].Files)) + if len(parser.doc.Packages[0].Files) != 1 { + t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages[0].Files)) } - if parser.doc.Packages["package1"].Files["f1"] != f1 { - t.Errorf("Expected file %v in 
Files[f1], got %v", f1, parser.doc.Packages["package1"].Files["f1"]) + if parser.doc.Packages[0].Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages[0].Files[0]) } - if parser.doc.Packages["package1"].Files["f1"].FileName != "f1.txt" { - t.Errorf("expected file name %s in Files[f1], got %s", "f1.txt", parser.doc.Packages["package1"].Files["f1"].FileName) + if parser.doc.Packages[0].Files[0].FileName != "f1.txt" { + t.Errorf("expected file name %s in Files[f1], got %s", "f1.txt", parser.doc.Packages[0].Files[0].FileName) } // and the new Package should have no files if len(parser.pkg.Files) != 0 { @@ -146,16 +146,16 @@ func TestParser2_1SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T func TestParser2_1SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { f1Name := "f1.txt" parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet f2Name := "f2.txt" @@ -184,11 +184,11 @@ func TestParser2_1SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Errorf("Expected len(Files) to be 1, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != f1 { - t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != f1Name { - t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != f1Name { + t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.pkg.Files[0].FileName) } // and the current snippet should be nil if parser.snippet != nil { @@ -198,14 +198,14 @@ func TestParser2_1SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { func TestParser2_1SnippetMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err 
:= parser.parsePair2_1("LicenseID", "LicenseRef-TestLic") @@ -219,14 +219,14 @@ func TestParser2_1SnippetMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing. func TestParser2_1SnippetMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_1("Reviewer", "Person: John Doe") @@ -240,14 +240,14 @@ func TestParser2_1SnippetMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_1SnippetStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_1("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") @@ -279,14 +279,14 @@ func TestParser2_1SnippetStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_1SnippetStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_1("Annotator", "Person: John Doe ()") @@ -333,7 +333,7 @@ func TestParser2_1SnippetStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if 
parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } @@ -341,14 +341,14 @@ func TestParser2_1SnippetStaysAfterParsingAnnotationTags(t *testing.T) { // ===== Snippet data section tests ===== func TestParser2_1CanParseSnippetTags(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // Snippet SPDX Identifier err := parser.parsePairFromSnippet2_1("SnippetSPDXID", "SPDXRef-s1") @@ -365,7 +365,7 @@ func TestParser2_1CanParseSnippetTags(t *testing.T) { t.Errorf("expected nil error, got %v", err) } wantDeID := spdx.DocElementID{DocumentRefID: "", ElementRefID: spdx.ElementID("f1")} - if parser.snippet.SnippetFromFileSPDXIdentifier != wantDeID { + if parser.snippet.SnippetFromFileSPDXIdentifier != wantDeID.ElementRefID { t.Errorf("got %v for SnippetFromFileSPDXIdentifier", parser.snippet.SnippetFromFileSPDXIdentifier) } @@ -374,11 +374,11 @@ func TestParser2_1CanParseSnippetTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.snippet.SnippetByteRangeStart != 20 { - t.Errorf("got %v for SnippetByteRangeStart", parser.snippet.SnippetByteRangeStart) + if parser.snippet.Ranges[0].StartPointer.Offset != 20 { + t.Errorf("got %v for SnippetByteRangeStart", parser.snippet.Ranges[0].StartPointer.Offset) } - if parser.snippet.SnippetByteRangeEnd != 320 { - t.Errorf("got %v for SnippetByteRangeEnd", parser.snippet.SnippetByteRangeEnd) + if parser.snippet.Ranges[0].EndPointer.Offset != 320 { + t.Errorf("got %v for SnippetByteRangeEnd", parser.snippet.Ranges[0].EndPointer.Offset) } // Snippet Line Range @@ -386,11 +386,11 @@ func TestParser2_1CanParseSnippetTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.snippet.SnippetLineRangeStart != 5 { - t.Errorf("got %v for SnippetLineRangeStart", parser.snippet.SnippetLineRangeStart) + if parser.snippet.Ranges[1].StartPointer.LineNumber != 5 { + t.Errorf("got %v for SnippetLineRangeStart", parser.snippet.Ranges[1].StartPointer.LineNumber) } - if parser.snippet.SnippetLineRangeEnd != 12 { - t.Errorf("got %v for SnippetLineRangeEnd", parser.snippet.SnippetLineRangeEnd) + if parser.snippet.Ranges[1].EndPointer.LineNumber != 12 { + t.Errorf("got %v for SnippetLineRangeEnd", parser.snippet.Ranges[1].EndPointer.LineNumber) } // Snippet Concluded License @@ -469,14 +469,14 @@ func TestParser2_1CanParseSnippetTags(t *testing.T) { func TestParser2_1SnippetUnknownTagFails(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: 
"package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromSnippet2_1("blah", "something") if err == nil { @@ -486,14 +486,14 @@ func TestParser2_1SnippetUnknownTagFails(t *testing.T) { func TestParser2_1FailsForInvalidSnippetSPDXID(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // invalid Snippet SPDX Identifier err := parser.parsePairFromSnippet2_1("SnippetSPDXID", "whoops") @@ -504,14 +504,14 @@ func TestParser2_1FailsForInvalidSnippetSPDXID(t *testing.T) { func TestParser2_1FailsForInvalidSnippetFromFileSPDXID(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_1("SnippetSPDXID", "SPDXRef-s1") @@ -527,14 +527,14 @@ func TestParser2_1FailsForInvalidSnippetFromFileSPDXID(t *testing.T) { func TestParser2_1FailsForInvalidSnippetByteValues(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = 
append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_1("SnippetSPDXID", "SPDXRef-s1") @@ -558,14 +558,14 @@ func TestParser2_1FailsForInvalidSnippetByteValues(t *testing.T) { func TestParser2_1FailsForInvalidSnippetLineValues(t *testing.T) { parser := tvParser2_1{ - doc: &spdx.Document2_1{Packages: map[spdx.ElementID]*spdx.Package2_1{}}, + doc: &spdx.Document2_1{Packages: []*spdx.Package2_1{}}, st: psSnippet2_1, - pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_1{}}, + pkg: &spdx.Package2_1{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_1{}}, file: &spdx.File2_1{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_1{}}, snippet: &spdx.Snippet2_1{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_1("SnippetSPDXID", "SPDXRef-s1") diff --git a/tvloader/parser2v1/parser.go b/tvloader/parser2v1/parser.go index f4a5ae1e..70f4819c 100644 --- a/tvloader/parser2v1/parser.go +++ b/tvloader/parser2v1/parser.go @@ -59,12 +59,44 @@ func (parser *tvParser2_1) parsePairFromStart2_1(tag string, value string) error // create an SPDX Document data struct if we don't have one already if parser.doc == nil { - parser.doc = &spdx.Document2_1{} + parser.doc = &spdx.Document2_1{ + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_1{}, + } } - // move to Creation Info parser state - parser.st = psCreationInfo2_1 + switch tag { + case "SPDXVersion": + parser.doc.SPDXVersion = value + case "DataLicense": + parser.doc.DataLicense = value + case "SPDXID": + eID, err := extractElementID(value) + if err != nil { + return err + } + parser.doc.SPDXIdentifier = eID + case "DocumentName": + parser.doc.DocumentName = value + case "DocumentNamespace": + parser.doc.DocumentNamespace = value + case "ExternalDocumentRef": + documentRefID, uri, alg, checksum, err := extractExternalDocumentReference(value) + if err != nil { + return err + } + edr := spdx.ExternalDocumentRef2_1{ + DocumentRefID: documentRefID, + URI: uri, + Checksum: spdx.Checksum{Algorithm: spdx.ChecksumAlgorithm(alg), Value: checksum}, + } + parser.doc.ExternalDocumentReferences = append(parser.doc.ExternalDocumentReferences, edr) + case "DocumentComment": + parser.doc.DocumentComment = value + default: + // move to Creation Info parser state + parser.st = psCreationInfo2_1 + return parser.parsePairFromCreationInfo2_1(tag, value) + } - // and ask Creation Info subfunc to parse - return parser.parsePairFromCreationInfo2_1(tag, value) + return nil } diff --git a/tvloader/parser2v1/parser_test.go b/tvloader/parser2v1/parser_test.go index 9fe051ff..e8954455 100644 --- a/tvloader/parser2v1/parser_test.go +++ b/tvloader/parser2v1/parser_test.go @@ -24,14 +24,14 @@ func TestParser2_1CanParseTagValues(t *testing.T) { if err != nil { t.Errorf("got error when calling ParseTagValues: %v", err) } - if doc.CreationInfo.SPDXVersion != "SPDX-2.1" { - t.Errorf("expected SPDXVersion to be SPDX-2.1, got %v", doc.CreationInfo.SPDXVersion) + if doc.SPDXVersion != "SPDX-2.1" { + t.Errorf("expected SPDXVersion to be SPDX-2.1, got %v", doc.SPDXVersion) } - if doc.CreationInfo.DataLicense 
!= "CC0-1.0" { - t.Errorf("expected DataLicense to be CC0-1.0, got %v", doc.CreationInfo.DataLicense) + if doc.DataLicense != "CC0-1.0" { + t.Errorf("expected DataLicense to be CC0-1.0, got %v", doc.DataLicense) } - if doc.CreationInfo.SPDXIdentifier != "DOCUMENT" { - t.Errorf("expected SPDXIdentifier to be DOCUMENT, got %v", doc.CreationInfo.SPDXIdentifier) + if doc.SPDXIdentifier != "DOCUMENT" { + t.Errorf("expected SPDXIdentifier to be DOCUMENT, got %v", doc.SPDXIdentifier) } } @@ -58,18 +58,6 @@ func TestParser2_1HasDocumentAfterCallToParseFirstTag(t *testing.T) { } } -// ===== Parser start state change tests ===== -func TestParser2_1StartMovesToCreationInfoStateAfterParsingFirstTag(t *testing.T) { - parser := tvParser2_1{} - err := parser.parsePair2_1("SPDXVersion", "b") - if err != nil { - t.Errorf("got error when calling parsePair2_1: %v", err) - } - if parser.st != psCreationInfo2_1 { - t.Errorf("parser is in state %v, expected %v", parser.st, psCreationInfo2_1) - } -} - func TestParser2_1StartFailsToParseIfInInvalidState(t *testing.T) { parser := tvParser2_1{st: psReview2_1} err := parser.parsePairFromStart2_1("SPDXVersion", "SPDX-2.1") diff --git a/tvloader/parser2v2/parse_annotation.go b/tvloader/parser2v2/parse_annotation.go index 8cd5b76e..4c5188e9 100644 --- a/tvloader/parser2v2/parse_annotation.go +++ b/tvloader/parser2v2/parse_annotation.go @@ -18,8 +18,8 @@ func (parser *tvParser2_2) parsePairForAnnotation2_2(tag string, value string) e return err } if subkey == "Person" || subkey == "Organization" || subkey == "Tool" { - parser.ann.AnnotatorType = subkey - parser.ann.Annotator = subvalue + parser.ann.Annotator.AnnotatorType = subkey + parser.ann.Annotator.Annotator = subvalue return nil } return fmt.Errorf("unrecognized Annotator type %v", subkey) diff --git a/tvloader/parser2v2/parse_annotation_test.go b/tvloader/parser2v2/parse_annotation_test.go index 7a12adfb..cdd05415 100644 --- a/tvloader/parser2v2/parse_annotation_test.go +++ b/tvloader/parser2v2/parse_annotation_test.go @@ -70,11 +70,11 @@ func TestParser2_2CanParseAnnotationTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.ann.Annotator != "John Doe" { - t.Errorf("got %v for Annotator, expected John Doe", parser.ann.Annotator) + if parser.ann.Annotator.Annotator != "John Doe" { + t.Errorf("got %+v for Annotator, expected John Doe", parser.ann.Annotator.Annotator) } - if parser.ann.AnnotatorType != "Person" { - t.Errorf("got %v for AnnotatorType, expected Person", parser.ann.AnnotatorType) + if parser.ann.Annotator.AnnotatorType != "Person" { + t.Errorf("got %v for AnnotatorType, expected Person", parser.ann.Annotator.AnnotatorType) } // Annotation Date diff --git a/tvloader/parser2v2/parse_creation_info.go b/tvloader/parser2v2/parse_creation_info.go index c2bfe40f..f8406fc5 100644 --- a/tvloader/parser2v2/parse_creation_info.go +++ b/tvloader/parser2v2/parse_creation_info.go @@ -17,39 +17,11 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string // create an SPDX Creation Info data struct if we don't have one already if parser.doc.CreationInfo == nil { - parser.doc.CreationInfo = &spdx.CreationInfo2_2{ - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{}, - } + parser.doc.CreationInfo = &spdx.CreationInfo2_2{} } ci := parser.doc.CreationInfo switch tag { - case "SPDXVersion": - ci.SPDXVersion = value - case "DataLicense": - ci.DataLicense = value - case "SPDXID": - eID, err := extractElementID(value) - if err != nil { - 
return err - } - ci.SPDXIdentifier = eID - case "DocumentName": - ci.DocumentName = value - case "DocumentNamespace": - ci.DocumentNamespace = value - case "ExternalDocumentRef": - documentRefID, uri, alg, checksum, err := extractExternalDocumentReference(value) - if err != nil { - return err - } - edr := spdx.ExternalDocumentRef2_2{ - DocumentRefID: documentRefID, - URI: uri, - Alg: alg, - Checksum: checksum, - } - ci.ExternalDocumentReferences[documentRefID] = edr case "LicenseListVersion": ci.LicenseListVersion = value case "Creator": @@ -57,22 +29,20 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string if err != nil { return err } + + creator := spdx.Creator{Creator: subvalue} switch subkey { - case "Person": - ci.CreatorPersons = append(ci.CreatorPersons, subvalue) - case "Organization": - ci.CreatorOrganizations = append(ci.CreatorOrganizations, subvalue) - case "Tool": - ci.CreatorTools = append(ci.CreatorTools, subvalue) + case "Person", "Organization", "Tool": + creator.CreatorType = subkey default: return fmt.Errorf("unrecognized Creator type %v", subkey) } + + ci.Creators = append(ci.Creators, creator) case "Created": ci.Created = value case "CreatorComment": ci.CreatorComment = value - case "DocumentComment": - ci.DocumentComment = value // tag for going on to package section case "PackageName": @@ -91,7 +61,7 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string return parser.parsePairFromPackage2_2(tag, value) // tag for going on to _unpackaged_ file section case "FileName": - // leave pkg as nil, so that packages will be placed in UnpackagedFiles + // leave pkg as nil, so that packages will be placed in Files parser.st = psFile2_2 parser.pkg = nil return parser.parsePairFromFile2_2(tag, value) diff --git a/tvloader/parser2v2/parse_creation_info_test.go b/tvloader/parser2v2/parse_creation_info_test.go index e12fc010..71213460 100644 --- a/tvloader/parser2v2/parse_creation_info_test.go +++ b/tvloader/parser2v2/parse_creation_info_test.go @@ -58,7 +58,7 @@ func TestParser2_2CIMovesToFileAfterParsingFileNameTagWithNoPackages(t *testing. 
t.Errorf("parser is in state %v, expected %v", parser.st, psFile2_2) } // and current package should be nil, meaning Files are placed in the - // UnpackagedFiles map instead of in a Package + // Files map instead of in a Package if parser.pkg != nil { t.Fatalf("expected pkg to be nil, got non-nil pkg") } @@ -179,7 +179,7 @@ func TestParser2_2HasCreationInfoAfterCallToParseFirstTag(t *testing.T) { doc: &spdx.Document2_2{}, st: psCreationInfo2_2, } - err := parser.parsePairFromCreationInfo2_2("SPDXVersion", "SPDX-2.2") + err := parser.parsePairFromCreationInfo2_2("LicenseListVersion", "3.9") if err != nil { t.Errorf("got error when calling parsePairFromCreationInfo2_2: %v", err) } @@ -194,96 +194,8 @@ func TestParser2_2CanParseCreationInfoTags(t *testing.T) { st: psCreationInfo2_2, } - // SPDX Version - err := parser.parsePairFromCreationInfo2_2("SPDXVersion", "SPDX-2.2") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.SPDXVersion != "SPDX-2.2" { - t.Errorf("got %v for SPDXVersion", parser.doc.CreationInfo.SPDXVersion) - } - - // Data License - err = parser.parsePairFromCreationInfo2_2("DataLicense", "CC0-1.0") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DataLicense != "CC0-1.0" { - t.Errorf("got %v for DataLicense", parser.doc.CreationInfo.DataLicense) - } - - // SPDX Identifier - err = parser.parsePairFromCreationInfo2_2("SPDXID", "SPDXRef-DOCUMENT") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.SPDXIdentifier != "DOCUMENT" { - t.Errorf("got %v for SPDXIdentifier", parser.doc.CreationInfo.SPDXIdentifier) - } - - // Document Name - err = parser.parsePairFromCreationInfo2_2("DocumentName", "xyz-2.1.5") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentName != "xyz-2.1.5" { - t.Errorf("got %v for DocumentName", parser.doc.CreationInfo.DocumentName) - } - - // Document Namespace - err = parser.parsePairFromCreationInfo2_2("DocumentNamespace", "http://example.com/xyz-2.1.5.spdx") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentNamespace != "http://example.com/xyz-2.1.5.spdx" { - t.Errorf("got %v for DocumentNamespace", parser.doc.CreationInfo.DocumentNamespace) - } - - // External Document Reference - refs := []string{ - "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759", - "DocumentRef-xyz-2.1.2 http://example.com/xyz-2.1.2 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2760", - } - wantRef0 := spdx.ExternalDocumentRef2_2{ - DocumentRefID: "spdx-tool-1.2", - URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2759", - } - wantRef1 := spdx.ExternalDocumentRef2_2{ - DocumentRefID: "xyz-2.1.2", - URI: "http://example.com/xyz-2.1.2", - Alg: "SHA1", - Checksum: "d6a770ba38583ed4bb4525bd96e50461655d2760", - } - err = parser.parsePairFromCreationInfo2_2("ExternalDocumentRef", refs[0]) - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - err = parser.parsePairFromCreationInfo2_2("ExternalDocumentRef", refs[1]) - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if len(parser.doc.CreationInfo.ExternalDocumentReferences) != 2 { - t.Errorf("got %d ExternalDocumentReferences, expected %d", 
len(parser.doc.CreationInfo.ExternalDocumentReferences), 2) - } - gotRef0 := parser.doc.CreationInfo.ExternalDocumentReferences["spdx-tool-1.2"] - if gotRef0.DocumentRefID != wantRef0.DocumentRefID || - gotRef0.URI != wantRef0.URI || - gotRef0.Alg != wantRef0.Alg || - gotRef0.Checksum != wantRef0.Checksum { - t.Errorf("got %#v for ExternalDocumentReferences[0], wanted %#v", gotRef0, wantRef0) - } - gotRef1 := parser.doc.CreationInfo.ExternalDocumentReferences["xyz-2.1.2"] - if gotRef1.DocumentRefID != wantRef1.DocumentRefID || - gotRef1.URI != wantRef1.URI || - gotRef1.Alg != wantRef1.Alg || - gotRef1.Checksum != wantRef1.Checksum { - t.Errorf("got %#v for ExternalDocumentReferences[1], wanted %#v", gotRef1, wantRef1) - } - // License List Version - err = parser.parsePairFromCreationInfo2_2("LicenseListVersion", "2.2") + err := parser.parsePairFromCreationInfo2_2("LicenseListVersion", "2.2") if err != nil { t.Errorf("expected nil error, got %v", err) } @@ -304,10 +216,10 @@ func TestParser2_2CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorPersons) != 2 || - parser.doc.CreationInfo.CreatorPersons[0] != "Person A" || - parser.doc.CreationInfo.CreatorPersons[1] != "Person B" { - t.Errorf("got %v for CreatorPersons", parser.doc.CreationInfo.CreatorPersons) + if len(parser.doc.CreationInfo.Creators) != 2 || + parser.doc.CreationInfo.Creators[0].Creator != "Person A" || + parser.doc.CreationInfo.Creators[1].Creator != "Person B" { + t.Errorf("got %v for CreatorPersons", parser.doc.CreationInfo.Creators) } // Creators: Organizations @@ -323,10 +235,10 @@ func TestParser2_2CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorOrganizations) != 2 || - parser.doc.CreationInfo.CreatorOrganizations[0] != "Organization A" || - parser.doc.CreationInfo.CreatorOrganizations[1] != "Organization B" { - t.Errorf("got %v for CreatorOrganizations", parser.doc.CreationInfo.CreatorOrganizations) + if len(parser.doc.CreationInfo.Creators) != 4 || + parser.doc.CreationInfo.Creators[2].Creator != "Organization A" || + parser.doc.CreationInfo.Creators[3].Creator != "Organization B" { + t.Errorf("got %v for CreatorOrganizations", parser.doc.CreationInfo.Creators) } // Creators: Tools @@ -342,10 +254,10 @@ func TestParser2_2CanParseCreationInfoTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if len(parser.doc.CreationInfo.CreatorTools) != 2 || - parser.doc.CreationInfo.CreatorTools[0] != "Tool A" || - parser.doc.CreationInfo.CreatorTools[1] != "Tool B" { - t.Errorf("got %v for CreatorTools", parser.doc.CreationInfo.CreatorTools) + if len(parser.doc.CreationInfo.Creators) != 6 || + parser.doc.CreationInfo.Creators[4].Creator != "Tool A" || + parser.doc.CreationInfo.Creators[5].Creator != "Tool B" { + t.Errorf("got %v for CreatorTools", parser.doc.CreationInfo.Creators) } // Created date @@ -365,16 +277,6 @@ func TestParser2_2CanParseCreationInfoTags(t *testing.T) { if parser.doc.CreationInfo.CreatorComment != "Blah whatever" { t.Errorf("got %v for CreatorComment", parser.doc.CreationInfo.CreatorComment) } - - // Document Comment - err = parser.parsePairFromCreationInfo2_2("DocumentComment", "Blah whatever") - if err != nil { - t.Errorf("expected nil error, got %v", err) - } - if parser.doc.CreationInfo.DocumentComment != "Blah whatever" { - t.Errorf("got %v for DocumentComment", 
parser.doc.CreationInfo.DocumentComment) - } - } func TestParser2_2InvalidCreatorTagsFail(t *testing.T) { diff --git a/tvloader/parser2v2/parse_file.go b/tvloader/parser2v2/parse_file.go index 27ec6a40..e564147a 100644 --- a/tvloader/parser2v2/parse_file.go +++ b/tvloader/parser2v2/parse_file.go @@ -49,37 +49,37 @@ func (parser *tvParser2_2) parsePairFromFile2_2(tag string, value string) error } parser.file.FileSPDXIdentifier = eID if parser.pkg == nil { - if parser.doc.UnpackagedFiles == nil { - parser.doc.UnpackagedFiles = map[spdx.ElementID]*spdx.File2_2{} + if parser.doc.Files == nil { + parser.doc.Files = []*spdx.File2_2{} } - parser.doc.UnpackagedFiles[eID] = parser.file + parser.doc.Files = append(parser.doc.Files, parser.file) } else { if parser.pkg.Files == nil { - parser.pkg.Files = map[spdx.ElementID]*spdx.File2_2{} + parser.pkg.Files = []*spdx.File2_2{} } - parser.pkg.Files[eID] = parser.file + parser.pkg.Files = append(parser.pkg.Files, parser.file) } case "FileType": - parser.file.FileType = append(parser.file.FileType, value) + parser.file.FileTypes = append(parser.file.FileTypes, value) case "FileChecksum": subkey, subvalue, err := extractSubs(value) if err != nil { return err } - if parser.file.FileChecksums == nil { - parser.file.FileChecksums = map[spdx.ChecksumAlgorithm]spdx.Checksum{} + if parser.file.Checksums == nil { + parser.file.Checksums = []spdx.Checksum{} } - switch subkey { + switch spdx.ChecksumAlgorithm(subkey) { case spdx.SHA1, spdx.SHA256, spdx.MD5: algorithm := spdx.ChecksumAlgorithm(subkey) - parser.file.FileChecksums[algorithm] = spdx.Checksum{Algorithm: algorithm, Value: subvalue} + parser.file.Checksums = append(parser.file.Checksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) default: return fmt.Errorf("got unknown checksum type %s", subkey) } case "LicenseConcluded": parser.file.LicenseConcluded = value case "LicenseInfoInFile": - parser.file.LicenseInfoInFile = append(parser.file.LicenseInfoInFile, value) + parser.file.LicenseInfoInFiles = append(parser.file.LicenseInfoInFiles, value) case "LicenseComments": parser.file.LicenseComments = value case "FileCopyrightText": @@ -103,7 +103,7 @@ func (parser *tvParser2_2) parsePairFromFile2_2(tag string, value string) error case "FileNotice": parser.file.FileNotice = value case "FileContributor": - parser.file.FileContributor = append(parser.file.FileContributor, value) + parser.file.FileContributors = append(parser.file.FileContributors, value) case "FileDependency": parser.file.FileDependencies = append(parser.file.FileDependencies, value) case "FileAttributionText": diff --git a/tvloader/parser2v2/parse_file_test.go b/tvloader/parser2v2/parse_file_test.go index 689a2df1..30f9f5e7 100644 --- a/tvloader/parser2v2/parse_file_test.go +++ b/tvloader/parser2v2/parse_file_test.go @@ -13,23 +13,23 @@ func TestParser2_2FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { fileOldName := "f1.txt" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: fileOldName, FileSPDXIdentifier: "f1"}, } fileOld := parser.file - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = fileOld + parser.doc.Packages = 
append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, fileOld) // the Package's Files should have this one only if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } // now add a new file @@ -55,11 +55,11 @@ func TestParser2_2FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } // now parse an SPDX identifier tag @@ -71,17 +71,17 @@ func TestParser2_2FileStartsNewFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 2 { t.Fatalf("expected 2 files, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != fileOld { - t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f1], got %v", fileOld, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != fileOldName { - t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != fileOldName { + t.Errorf("expected file name %s in Files[f1], got %s", fileOldName, parser.pkg.Files[0].FileName) } - if parser.pkg.Files["f2ID"] != parser.file { - t.Errorf("expected file %v in Files[f2ID], got %v", parser.file, parser.pkg.Files["f2ID"]) + if parser.pkg.Files[1] != parser.file { + t.Errorf("expected file %v in Files[f2ID], got %v", parser.file, parser.pkg.Files[1]) } - if parser.pkg.Files["f2ID"].FileName != fileName { - t.Errorf("expected file name %s in Files[f2ID], got %s", fileName, parser.pkg.Files["f2ID"].FileName) + if parser.pkg.Files[1].FileName != fileName { + t.Errorf("expected file name %s in Files[f2ID], got %s", fileName, parser.pkg.Files[1].FileName) } } @@ -103,12 +103,12 @@ func TestParser2_2FileAddsToPackageOrUnpackagedFiles(t *testing.T) { t.Errorf("got error when calling parsePair2_2: %v", err) } fileOld := parser.file - // should have been added to UnpackagedFiles - if len(parser.doc.UnpackagedFiles) != 1 { - t.Fatalf("expected 1 file in UnpackagedFiles, got %d", len(parser.doc.UnpackagedFiles)) + // should have been added to Files + if len(parser.doc.Files) != 1 { + t.Fatalf("expected 1 file in Files, got %d", len(parser.doc.Files)) } - if parser.doc.UnpackagedFiles["f2ID"] != fileOld { - t.Errorf("expected file %v in UnpackagedFiles[f2ID], got %v", fileOld, 
parser.doc.UnpackagedFiles["f2ID"]) + if parser.doc.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f2ID], got %v", fileOld, parser.doc.Files[0]) } // now create a package and a new file err = parser.parsePair2_2("PackageName", "package1") @@ -127,19 +127,19 @@ func TestParser2_2FileAddsToPackageOrUnpackagedFiles(t *testing.T) { if err != nil { t.Errorf("got error when calling parsePair2_2: %v", err) } - // UnpackagedFiles should still be size 1 and have old file only - if len(parser.doc.UnpackagedFiles) != 1 { - t.Fatalf("expected 1 file in UnpackagedFiles, got %d", len(parser.doc.UnpackagedFiles)) + // Files should still be size 1 and have old file only + if len(parser.doc.Files) != 1 { + t.Fatalf("expected 1 file in Files, got %d", len(parser.doc.Files)) } - if parser.doc.UnpackagedFiles["f2ID"] != fileOld { - t.Errorf("expected file %v in UnpackagedFiles[f2ID], got %v", fileOld, parser.doc.UnpackagedFiles["f2ID"]) + if parser.doc.Files[0] != fileOld { + t.Errorf("expected file %v in Files[f2ID], got %v", fileOld, parser.doc.Files[0]) } // and new package should have gotten the new file if len(parser.pkg.Files) != 1 { t.Fatalf("expected 1 file in Files, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f3ID"] != parser.file { - t.Errorf("expected file %v in Files[f3ID], got %v", parser.file, parser.pkg.Files["f3ID"]) + if parser.pkg.Files[0] != parser.file { + t.Errorf("expected file %v in Files[f3ID], got %v", parser.file, parser.pkg.Files[0]) } } @@ -149,15 +149,15 @@ func TestParser2_2FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { f1Name := "f1.txt" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: p1Name, PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: p1Name, PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: f1Name, FileSPDXIdentifier: "f1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = p1 - parser.pkg.Files["f1"] = f1 + parser.doc.Packages = append(parser.doc.Packages, p1) + parser.pkg.Files = append(parser.pkg.Files, f1) // now add a new package p2Name := "package2" @@ -193,21 +193,21 @@ func TestParser2_2FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { if len(parser.doc.Packages) != 1 { t.Fatalf("expected 1 package, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["package1"] != p1 { - t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages["package1"]) + if parser.doc.Packages[0] != p1 { + t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages[0]) } - if parser.doc.Packages["package1"].PackageName != p1Name { - t.Errorf("expected package name %s in Packages[package1], got %s", p1Name, parser.doc.Packages["package1"].PackageName) + if parser.doc.Packages[0].PackageName != p1Name { + t.Errorf("expected package name %s in Packages[package1], got %s", p1Name, parser.doc.Packages[0].PackageName) } // and the first Package's Files should be of size 1 and have f1 only - if len(parser.doc.Packages["package1"].Files) != 1 { - t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages["package1"].Files)) + if len(parser.doc.Packages[0].Files) != 1 { + t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages[0].Files)) 
} - if parser.doc.Packages["package1"].Files["f1"] != f1 { - t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages["package1"].Files["f1"]) + if parser.doc.Packages[0].Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages[0].Files[0]) } - if parser.doc.Packages["package1"].Files["f1"].FileName != f1Name { - t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.doc.Packages["package1"].Files["f1"].FileName) + if parser.doc.Packages[0].Files[0].FileName != f1Name { + t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.doc.Packages[0].Files[0].FileName) } // and the current file should be nil if parser.file != nil { @@ -217,13 +217,13 @@ func TestParser2_2FileStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { func TestParser2_2FileMovesToSnippetAfterParsingSnippetSPDXIDTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) fileCurrent := parser.file err := parser.parsePair2_2("SnippetSPDXID", "SPDXRef-Test1") @@ -242,13 +242,13 @@ func TestParser2_2FileMovesToSnippetAfterParsingSnippetSPDXIDTag(t *testing.T) { func TestParser2_2FileMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f2"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("LicenseID", "LicenseRef-TestLic") if err != nil { @@ -261,13 +261,13 @@ func TestParser2_2FileMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) func TestParser2_2FileMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("Reviewer", "Person: John Doe") if err != nil { @@ -280,13 +280,13 @@ func 
TestParser2_2FileMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_2FileStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") if err != nil { @@ -309,13 +309,13 @@ func TestParser2_2FileStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_2FileStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("Annotator", "Person: John Doe ()") if err != nil { @@ -361,11 +361,11 @@ func TestParser2_2FileStaysAfterParsingAnnotationTags(t *testing.T) { // ===== File data section tests ===== func TestParser2_2CanParseFileTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -393,8 +393,8 @@ func TestParser2_2CanParseFileTags(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Errorf("expected 1 file, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != parser.file { - t.Errorf("expected Files[f1] to be %v, got %v", parser.file, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != parser.file { + t.Errorf("expected Files[f1] to be %v, got %v", parser.file, parser.pkg.Files[0]) } // File Type @@ -410,18 +410,18 @@ func TestParser2_2CanParseFileTags(t *testing.T) { } for _, typeWant := range fileTypes { flagFound := false - for _, typeCheck := range parser.file.FileType { + for _, typeCheck := range parser.file.FileTypes { if typeWant == typeCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in FileType", typeWant) + t.Errorf("didn't find %s in FileTypes", typeWant) } } - if len(fileTypes) != len(parser.file.FileType) { - t.Errorf("expected %d types in FileType, got %d", len(fileTypes), - 
len(parser.file.FileType)) + if len(fileTypes) != len(parser.file.FileTypes) { + t.Errorf("expected %d types in FileTypes, got %d", len(fileTypes), + len(parser.file.FileTypes)) } // File Checksums @@ -443,7 +443,7 @@ func TestParser2_2CanParseFileTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - for _, checksum := range parser.file.FileChecksums { + for _, checksum := range parser.file.Checksums { switch checksum.Algorithm { case spdx.SHA1: if checksum.Value != codeSha1 { @@ -482,18 +482,18 @@ func TestParser2_2CanParseFileTags(t *testing.T) { } for _, licWant := range lics { flagFound := false - for _, licCheck := range parser.file.LicenseInfoInFile { + for _, licCheck := range parser.file.LicenseInfoInFiles { if licWant == licCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in LicenseInfoInFile", licWant) + t.Errorf("didn't find %s in LicenseInfoInFiles", licWant) } } - if len(lics) != len(parser.file.LicenseInfoInFile) { - t.Errorf("expected %d licenses in LicenseInfoInFile, got %d", len(lics), - len(parser.file.LicenseInfoInFile)) + if len(lics) != len(parser.file.LicenseInfoInFiles) { + t.Errorf("expected %d licenses in LicenseInfoInFiles, got %d", len(lics), + len(parser.file.LicenseInfoInFiles)) } // Comments on License @@ -631,18 +631,18 @@ func TestParser2_2CanParseFileTags(t *testing.T) { } for _, contribWant := range contribs { flagFound := false - for _, contribCheck := range parser.file.FileContributor { + for _, contribCheck := range parser.file.FileContributors { if contribWant == contribCheck { flagFound = true } } if flagFound == false { - t.Errorf("didn't find %s in FileContributor", contribWant) + t.Errorf("didn't find %s in FileContributors", contribWant) } } - if len(contribs) != len(parser.file.FileContributor) { - t.Errorf("expected %d contribenses in FileContributor, got %d", len(contribs), - len(parser.file.FileContributor)) + if len(contribs) != len(parser.file.FileContributors) { + t.Errorf("expected %d contribenses in FileContributors, got %d", len(contribs), + len(parser.file.FileContributors)) } // File Dependencies @@ -703,13 +703,13 @@ func TestParser2_2CanParseFileTags(t *testing.T) { func TestParser2_2FileCreatesRelationshipInDocument(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-whatever") if err != nil { @@ -725,13 +725,13 @@ func TestParser2_2FileCreatesRelationshipInDocument(t *testing.T) { func TestParser2_2FileCreatesAnnotationInDocument(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: 
"test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePair2_2("Annotator", "Person: John Doe ()") if err != nil { @@ -747,13 +747,13 @@ func TestParser2_2FileCreatesAnnotationInDocument(t *testing.T) { func TestParser2_2FileUnknownTagFails(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromFile2_2("blah", "something") if err == nil { @@ -763,13 +763,13 @@ func TestParser2_2FileUnknownTagFails(t *testing.T) { func TestFileAOPPointerChangesAfterTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromFile2_2("ArtifactOfProjectName", "project1") if err != nil { @@ -820,11 +820,11 @@ func TestFileAOPPointerChangesAfterTags(t *testing.T) { func TestParser2_2FailsIfInvalidSPDXIDInFileSection(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -840,11 +840,11 @@ func TestParser2_2FailsIfInvalidSPDXIDInFileSection(t *testing.T) { func TestParser2_2FailsIfInvalidChecksumFormatInFileSection(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = 
append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -860,11 +860,11 @@ func TestParser2_2FailsIfInvalidChecksumFormatInFileSection(t *testing.T) { func TestParser2_1FailsIfUnknownChecksumTypeInFileSection(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -880,11 +880,11 @@ func TestParser2_1FailsIfUnknownChecksumTypeInFileSection(t *testing.T) { func TestParser2_2FailsIfArtifactHomePageBeforeArtifactName(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -900,11 +900,11 @@ func TestParser2_2FailsIfArtifactHomePageBeforeArtifactName(t *testing.T) { func TestParser2_2FailsIfArtifactURIBeforeArtifactName(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, } - parser.doc.Packages["test"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // start with File Name err := parser.parsePairFromFile2_2("FileName", "f1.txt") @@ -921,7 +921,7 @@ func TestParser2_2FailsIfArtifactURIBeforeArtifactName(t *testing.T) { func TestParser2_2FilesWithoutSpdxIdThrowError(t *testing.T) { // case 1: The previous file (packaged or unpackaged) does not contain spdx ID parser1 := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, file: &spdx.File2_2{FileName: "FileName"}, } diff --git a/tvloader/parser2v2/parse_other_license_test.go b/tvloader/parser2v2/parse_other_license_test.go index ebf4170c..e0607ee6 100644 --- a/tvloader/parser2v2/parse_other_license_test.go +++ b/tvloader/parser2v2/parse_other_license_test.go @@ -14,9 +14,9 @@ func TestParser2_2OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) olname1 := "License 11" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", 
PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: olid1, @@ -24,8 +24,8 @@ func TestParser2_2OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) }, } olic1 := parser.otherLic - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) // the Document's OtherLicenses should have this one only @@ -90,13 +90,13 @@ func TestParser2_2OLStartsNewOtherLicenseAfterParsingLicenseIDTag(t *testing.T) func TestParser2_2OLMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePair2_2("Reviewer", "Person: John Doe") @@ -110,17 +110,17 @@ func TestParser2_2OLMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_2OtherLicenseStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-whatever", LicenseName: "the whatever license", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") @@ -152,17 +152,17 @@ func TestParser2_2OtherLicenseStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_2OtherLicenseStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-whatever", LicenseName: "the whatever license", }, } - 
parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePair2_2("Annotator", "Person: John Doe ()") @@ -209,24 +209,24 @@ func TestParser2_2OtherLicenseStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } func TestParser2_2OLFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", LicenseName: "License 11", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) // can't go back to old sections @@ -247,13 +247,13 @@ func TestParser2_2OLFailsAfterParsingOtherSectionTags(t *testing.T) { // ===== Other License data section tests ===== func TestParser2_2CanParseOtherLicenseTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) // License Identifier @@ -323,13 +323,13 @@ func TestParser2_2CanParseOtherLicenseTags(t *testing.T) { func TestParser2_2OLUnknownTagFails(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psOtherLicense2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) 
parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) err := parser.parsePairFromOtherLicense2_2("blah", "something") diff --git a/tvloader/parser2v2/parse_package.go b/tvloader/parser2v2/parse_package.go index 15f7dc69..4d6caf9d 100644 --- a/tvloader/parser2v2/parse_package.go +++ b/tvloader/parser2v2/parse_package.go @@ -45,47 +45,51 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err } parser.pkg.PackageSPDXIdentifier = eID if parser.doc.Packages == nil { - parser.doc.Packages = map[spdx.ElementID]*spdx.Package2_2{} + parser.doc.Packages = []*spdx.Package2_2{} } - parser.doc.Packages[eID] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) case "PackageVersion": parser.pkg.PackageVersion = value case "PackageFileName": parser.pkg.PackageFileName = value case "PackageSupplier": + supplier := &spdx.Supplier{Supplier: value} if value == "NOASSERTION" { - parser.pkg.PackageSupplierNOASSERTION = true + parser.pkg.PackageSupplier = supplier break } + subkey, subvalue, err := extractSubs(value) if err != nil { return err } switch subkey { - case "Person": - parser.pkg.PackageSupplierPerson = subvalue - case "Organization": - parser.pkg.PackageSupplierOrganization = subvalue + case "Person", "Organization": + supplier.Supplier = subvalue + supplier.SupplierType = subkey default: return fmt.Errorf("unrecognized PackageSupplier type %v", subkey) } + parser.pkg.PackageSupplier = supplier case "PackageOriginator": + originator := &spdx.Originator{Originator: value} if value == "NOASSERTION" { - parser.pkg.PackageOriginatorNOASSERTION = true + parser.pkg.PackageOriginator = originator break } + subkey, subvalue, err := extractSubs(value) if err != nil { return err } switch subkey { - case "Person": - parser.pkg.PackageOriginatorPerson = subvalue - case "Organization": - parser.pkg.PackageOriginatorOrganization = subvalue + case "Person", "Organization": + originator.Originator = subvalue + originator.OriginatorType = subkey default: return fmt.Errorf("unrecognized PackageOriginator type %v", subkey) } + parser.pkg.PackageOriginator = originator case "PackageDownloadLocation": parser.pkg.PackageDownloadLocation = value case "FilesAnalyzed": @@ -96,21 +100,19 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err parser.pkg.FilesAnalyzed = true } case "PackageVerificationCode": - code, excludesFileName := extractCodeAndExcludes(value) - parser.pkg.PackageVerificationCode = code - parser.pkg.PackageVerificationCodeExcludedFile = excludesFileName + parser.pkg.PackageVerificationCode = extractCodeAndExcludes(value) case "PackageChecksum": subkey, subvalue, err := extractSubs(value) if err != nil { return err } if parser.pkg.PackageChecksums == nil { - parser.pkg.PackageChecksums = make(map[spdx.ChecksumAlgorithm]spdx.Checksum, 9) + parser.pkg.PackageChecksums = []spdx.Checksum{} } - switch subkey { + switch spdx.ChecksumAlgorithm(subkey) { case spdx.SHA1, spdx.SHA256, spdx.MD5: algorithm := spdx.ChecksumAlgorithm(subkey) - parser.pkg.PackageChecksums[algorithm] = spdx.Checksum{Algorithm: algorithm, Value: subvalue} + parser.pkg.PackageChecksums = append(parser.pkg.PackageChecksums, spdx.Checksum{Algorithm: algorithm, Value: subvalue}) default: return fmt.Errorf("got unknown checksum type %s", subkey) } @@ -186,13 +188,13 @@ func (parser *tvParser2_2) parsePairFromPackage2_2(tag string, value string) err // ===== Helper functions ===== -func extractCodeAndExcludes(value string) (string, string) { 
+func extractCodeAndExcludes(value string) spdx.PackageVerificationCode { // FIXME this should probably be done using regular expressions instead // split by paren + word "excludes:" sp := strings.SplitN(value, "(excludes:", 2) if len(sp) < 2 { // not found; return the whole string as just the code - return value, "" + return spdx.PackageVerificationCode{Value: value, ExcludedFiles: []string{}} } // if we're here, code is in first part and excludes filename is in @@ -200,7 +202,7 @@ func extractCodeAndExcludes(value string) (string, string) { code := strings.TrimSpace(sp[0]) parsedSp := strings.SplitN(sp[1], ")", 2) fileName := strings.TrimSpace(parsedSp[0]) - return code, fileName + return spdx.PackageVerificationCode{Value: code, ExcludedFiles: []string{fileName}} } func extractPackageExternalReference(value string) (string, string, string, error) { diff --git a/tvloader/parser2v2/parse_package_test.go b/tvloader/parser2v2/parse_package_test.go index 58099314..6b58d0f9 100644 --- a/tvloader/parser2v2/parse_package_test.go +++ b/tvloader/parser2v2/parse_package_test.go @@ -13,15 +13,15 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T pkgOldName := "p1" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: pkgOldName, PackageSPDXIdentifier: "p1"}, } pkgOld := parser.pkg - parser.doc.Packages["p1"] = pkgOld + parser.doc.Packages = append(parser.doc.Packages, pkgOld) // the Document's Packages should have this one only - if parser.doc.Packages["p1"] != pkgOld { - t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != pkgOld { + t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages[0]) } if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) @@ -57,8 +57,8 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T t.Errorf("expected IsFilesAnalyzedTagPresent to default to false, got true") } // and the Document's Packages should still be of size 1 and have pkgOld only - if parser.doc.Packages["p1"] != pkgOld { - t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != pkgOld { + t.Errorf("Expected package %v, got %v", pkgOld, parser.doc.Packages[0]) } if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) @@ -67,9 +67,9 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTag(t *testing.T func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpackaged(t *testing.T) { // pkg is nil, so that Files appearing before the first PackageName tag - // are added to UnpackagedFiles instead of Packages + // are added to Files instead of Packages parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psFile2_2, pkg: nil, } @@ -112,11 +112,11 @@ func TestParser2_2PackageStartsNewPackageAfterParsingPackageNameTagWhileInUnpack func TestParser2_2PackageMovesToFileAfterParsingFileNameTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - 
parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) pkgCurrent := parser.pkg err := parser.parsePair2_2("FileName", "testFile") @@ -135,11 +135,11 @@ func TestParser2_2PackageMovesToFileAfterParsingFileNameTag(t *testing.T) { func TestParser2_2PackageMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("LicenseID", "LicenseRef-TestLic") if err != nil { @@ -152,11 +152,11 @@ func TestParser2_2PackageMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing. func TestParser2_2PackageMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("Reviewer", "Person: John Doe") if err != nil { @@ -169,11 +169,11 @@ func TestParser2_2PackageMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_2PackageStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") if err != nil { @@ -196,11 +196,11 @@ func TestParser2_2PackageStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_2PackageStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("Annotator", "Person: John Doe ()") if err != nil { @@ -246,7 +246,7 @@ func TestParser2_2PackageStaysAfterParsingAnnotationTags(t *testing.T) { // ===== Package data section tests ===== func TestParser2_2CanParsePackageTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -282,8 +282,8 @@ func TestParser2_2CanParsePackageTags(t *testing.T) { if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["p1"] != parser.pkg { - t.Errorf("expected to point to parser.pkg, got %v", parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != parser.pkg { + t.Errorf("expected to point to parser.pkg, got %v", parser.doc.Packages[0]) } // Package Version @@ -589,119 +589,119 @@ func TestParser2_2CanParsePackageTags(t *testing.T) { func 
TestParser2_2CanParsePackageSupplierPersonTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: Person err := parser.parsePairFromPackage2_2("PackageSupplier", "Person: John Doe") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierPerson != "John Doe" { - t.Errorf("got %v for PackageSupplierPerson", parser.pkg.PackageSupplierPerson) + if parser.pkg.PackageSupplier.Supplier != "John Doe" { + t.Errorf("got %v for PackageSupplierPerson", parser.pkg.PackageSupplier.Supplier) } } func TestParser2_2CanParsePackageSupplierOrganizationTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: Organization err := parser.parsePairFromPackage2_2("PackageSupplier", "Organization: John Doe, Inc.") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierOrganization != "John Doe, Inc." { - t.Errorf("got %v for PackageSupplierOrganization", parser.pkg.PackageSupplierOrganization) + if parser.pkg.PackageSupplier.Supplier != "John Doe, Inc." { + t.Errorf("got %v for PackageSupplierOrganization", parser.pkg.PackageSupplier.Supplier) } } func TestParser2_2CanParsePackageSupplierNOASSERTIONTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Supplier: NOASSERTION err := parser.parsePairFromPackage2_2("PackageSupplier", "NOASSERTION") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageSupplierNOASSERTION != true { - t.Errorf("got false for PackageSupplierNOASSERTION") + if parser.pkg.PackageSupplier.Supplier != "NOASSERTION" { + t.Errorf("got value for Supplier, expected NOASSERTION") } } func TestParser2_2CanParsePackageOriginatorPersonTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: Person err := parser.parsePairFromPackage2_2("PackageOriginator", "Person: John Doe") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorPerson != "John Doe" { - t.Errorf("got %v for PackageOriginatorPerson", parser.pkg.PackageOriginatorPerson) + if parser.pkg.PackageOriginator.Originator != "John Doe" { + t.Errorf("got %v for PackageOriginator", parser.pkg.PackageOriginator.Originator) } } func TestParser2_2CanParsePackageOriginatorOrganizationTag(t 
*testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: Organization err := parser.parsePairFromPackage2_2("PackageOriginator", "Organization: John Doe, Inc.") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorOrganization != "John Doe, Inc." { - t.Errorf("got %v for PackageOriginatorOrganization", parser.pkg.PackageOriginatorOrganization) + if parser.pkg.PackageOriginator.Originator != "John Doe, Inc." { + t.Errorf("got %v for PackageOriginator", parser.pkg.PackageOriginator.Originator) } } func TestParser2_2CanParsePackageOriginatorNOASSERTIONTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Originator: NOASSERTION err := parser.parsePairFromPackage2_2("PackageOriginator", "NOASSERTION") if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageOriginatorNOASSERTION != true { - t.Errorf("got false for PackageOriginatorNOASSERTION") + if parser.pkg.PackageOriginator.Originator != "NOASSERTION" { + t.Errorf("got %v for PackageOriginator, expected NOASSERTION", parser.pkg.PackageOriginator.Originator) } } func TestParser2_2CanParsePackageVerificationCodeTagWithExcludes(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Verification Code with excludes parenthetical code := "d6a770ba38583ed4bb4525bd96e50461655d2758" @@ -711,22 +711,22 @@ func TestParser2_2CanParsePackageVerificationCodeTagWithExcludes(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageVerificationCode != code { + if parser.pkg.PackageVerificationCode.Value != code { t.Errorf("got %v for PackageVerificationCode", parser.pkg.PackageVerificationCode) } - if parser.pkg.PackageVerificationCodeExcludedFile != fileName { - t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCodeExcludedFile) + if len(parser.pkg.PackageVerificationCode.ExcludedFiles) != 1 || parser.pkg.PackageVerificationCode.ExcludedFiles[0] != fileName { + t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCode.ExcludedFiles) } } func TestParser2_2CanParsePackageVerificationCodeTagWithoutExcludes(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) // Package Verification Code without excludes parenthetical code := "d6a770ba38583ed4bb4525bd96e50461655d2758" @@ -734,22 +734,22 @@ func
TestParser2_2CanParsePackageVerificationCodeTagWithoutExcludes(t *testing.T if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.pkg.PackageVerificationCode != code { + if parser.pkg.PackageVerificationCode.Value != code { t.Errorf("got %v for PackageVerificationCode", parser.pkg.PackageVerificationCode) } - if parser.pkg.PackageVerificationCodeExcludedFile != "" { - t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCodeExcludedFile) + if len(parser.pkg.PackageVerificationCode.ExcludedFiles) != 0 { + t.Errorf("got %v for PackageVerificationCodeExcludedFile", parser.pkg.PackageVerificationCode.ExcludedFiles) } } func TestParser2_2PackageExternalRefPointerChangesAfterTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) ref1 := "SECURITY cpe23Type cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*" err := parser.parsePairFromPackage2_2("ExternalRef", ref1) @@ -790,11 +790,11 @@ func TestParser2_2PackageExternalRefPointerChangesAfterTags(t *testing.T) { func TestParser2_2PackageCreatesRelationshipInDocument(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-whatever") if err != nil { @@ -810,11 +810,11 @@ func TestParser2_2PackageCreatesRelationshipInDocument(t *testing.T) { func TestParser2_2PackageCreatesAnnotationInDocument(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePair2_2("Annotator", "Person: John Doe ()") if err != nil { @@ -830,11 +830,11 @@ func TestParser2_2PackageCreatesAnnotationInDocument(t *testing.T) { func TestParser2_2PackageUnknownTagFails(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: "p1", PackageSPDXIdentifier: "p1"}, } - parser.doc.Packages["p1"] = parser.pkg + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) err := parser.parsePairFromPackage2_2("blah", "something") if err == nil { @@ -844,7 +844,7 @@ func TestParser2_2PackageUnknownTagFails(t *testing.T) { func TestParser2_2FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -863,7 +863,7 @@ func TestParser2_2FailsIfInvalidSPDXIDInPackageSection(t *testing.T) { func TestParser2_2FailsIfInvalidPackageSupplierFormat(t 
*testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -882,7 +882,7 @@ func TestParser2_2FailsIfInvalidPackageSupplierFormat(t *testing.T) { func TestParser2_2FailsIfUnknownPackageSupplierType(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -901,7 +901,7 @@ func TestParser2_2FailsIfUnknownPackageSupplierType(t *testing.T) { func TestParser2_2FailsIfInvalidPackageOriginatorFormat(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -920,7 +920,7 @@ func TestParser2_2FailsIfInvalidPackageOriginatorFormat(t *testing.T) { func TestParser2_2FailsIfUnknownPackageOriginatorType(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -939,7 +939,7 @@ func TestParser2_2FailsIfUnknownPackageOriginatorType(t *testing.T) { func TestParser2_2SetsFilesAnalyzedTagsCorrectly(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -964,7 +964,7 @@ func TestParser2_2SetsFilesAnalyzedTagsCorrectly(t *testing.T) { func TestParser2_2FailsIfInvalidPackageChecksumFormat(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -983,7 +983,7 @@ func TestParser2_2FailsIfInvalidPackageChecksumFormat(t *testing.T) { func TestParser2_2FailsIfInvalidPackageChecksumType(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -1002,7 +1002,7 @@ func TestParser2_2FailsIfInvalidPackageChecksumType(t *testing.T) { func TestParser2_2FailsIfInvalidExternalRefFormat(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -1021,7 +1021,7 @@ func TestParser2_2FailsIfInvalidExternalRefFormat(t *testing.T) { func TestParser2_2FailsIfExternalRefCommentBeforeExternalRef(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{}, } @@ -1045,12 +1045,12 @@ func TestCanCheckAndExtractExcludesFilenameAndCode(t *testing.T) { fileName := "./package.spdx" fullCodeValue := "d6a770ba38583ed4bb4525bd96e50461655d2758 (excludes: ./package.spdx)" - gotCode, gotFileName := extractCodeAndExcludes(fullCodeValue) - if gotCode != code { + gotCode := extractCodeAndExcludes(fullCodeValue) + if gotCode.Value != code { t.Errorf("got %v for gotCode", gotCode) } - if gotFileName != fileName { - t.Errorf("got 
%v for gotFileName", gotFileName) + if len(gotCode.ExcludedFiles) != 1 || gotCode.ExcludedFiles[0] != fileName { + t.Errorf("got %v for gotFileName", gotCode.ExcludedFiles) } } @@ -1107,15 +1107,15 @@ func TestParser2_2PackageWithoutSpdxIdentifierThrowsError(t *testing.T) { // More than one package, the previous package doesn't contain an SPDX ID pkgOldName := "p1" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psPackage2_2, pkg: &spdx.Package2_2{PackageName: pkgOldName}, } pkgOld := parser.pkg - parser.doc.Packages["p1"] = pkgOld + parser.doc.Packages = append(parser.doc.Packages, pkgOld) // the Document's Packages should have this one only - if parser.doc.Packages["p1"] != pkgOld { - t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages["p1"]) + if parser.doc.Packages[0] != pkgOld { + t.Errorf("expected package %v, got %v", pkgOld, parser.doc.Packages[0]) } if len(parser.doc.Packages) != 1 { t.Errorf("expected 1 package, got %d", len(parser.doc.Packages)) diff --git a/tvloader/parser2v2/parse_review_test.go b/tvloader/parser2v2/parse_review_test.go index f482184c..de73ede0 100644 --- a/tvloader/parser2v2/parse_review_test.go +++ b/tvloader/parser2v2/parse_review_test.go @@ -12,9 +12,9 @@ func TestParser2_2ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { // create the first review rev1 := "John Doe" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -25,8 +25,8 @@ func TestParser2_2ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) r1 := parser.rev @@ -82,9 +82,9 @@ func TestParser2_2ReviewStartsNewReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_2ReviewStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -95,8 +95,8 @@ func TestParser2_2ReviewStaysAfterParsingRelationshipTags(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = 
append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -129,9 +129,9 @@ func TestParser2_2ReviewStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_2ReviewStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -142,8 +142,8 @@ func TestParser2_2ReviewStaysAfterParsingAnnotationTags(t *testing.T) { ReviewerType: "Person", }, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -191,16 +191,16 @@ func TestParser2_2ReviewStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } func TestParser2_2ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -208,8 +208,8 @@ func TestParser2_2ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -235,9 +235,9 @@ func TestParser2_2ReviewFailsAfterParsingOtherSectionTags(t *testing.T) { // ===== Review data section tests ===== func TestParser2_2CanParseReviewTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -245,8 +245,8 @@ func 
TestParser2_2CanParseReviewTags(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -274,9 +274,9 @@ func TestParser2_2CanParseReviewTags(t *testing.T) { func TestParser2_2CanParseReviewerPersonTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -284,8 +284,8 @@ func TestParser2_2CanParseReviewerPersonTag(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -304,9 +304,9 @@ func TestParser2_2CanParseReviewerPersonTag(t *testing.T) { func TestParser2_2CanParseReviewerOrganizationTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -314,8 +314,8 @@ func TestParser2_2CanParseReviewerOrganizationTag(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -334,9 +334,9 @@ func TestParser2_2CanParseReviewerOrganizationTag(t *testing.T) { func TestParser2_2CanParseReviewerToolTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -344,8 +344,8 @@ func TestParser2_2CanParseReviewerToolTag(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + 
parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) @@ -364,7 +364,7 @@ func TestParser2_2CanParseReviewerToolTag(t *testing.T) { func TestParser2_2FailsIfReviewerInvalidFormat(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, rev: &spdx.Review2_2{}, } @@ -378,7 +378,7 @@ func TestParser2_2FailsIfReviewerInvalidFormat(t *testing.T) { func TestParser2_2FailsIfReviewerUnknownType(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, rev: &spdx.Review2_2{}, } @@ -392,9 +392,9 @@ func TestParser2_2FailsIfReviewerUnknownType(t *testing.T) { func TestParser2_2ReviewUnknownTagFails(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psReview2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1"}, otherLic: &spdx.OtherLicense2_2{ LicenseIdentifier: "LicenseRef-Lic11", @@ -402,8 +402,8 @@ func TestParser2_2ReviewUnknownTagFails(t *testing.T) { }, rev: &spdx.Review2_2{}, } - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.doc.OtherLicenses = append(parser.doc.OtherLicenses, parser.otherLic) parser.doc.Reviews = append(parser.doc.Reviews, parser.rev) diff --git a/tvloader/parser2v2/parse_snippet.go b/tvloader/parser2v2/parse_snippet.go index 7f586046..d3bac476 100644 --- a/tvloader/parser2v2/parse_snippet.go +++ b/tvloader/parser2v2/parse_snippet.go @@ -51,7 +51,7 @@ func (parser *tvParser2_2) parsePairFromSnippet2_2(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetFromFileSPDXIdentifier = deID + parser.snippet.SnippetFromFileSPDXIdentifier = deID.ElementRefID case "SnippetByteRange": byteStart, byteEnd, err := extractSubs(value) if err != nil { @@ -65,8 +65,12 @@ func (parser *tvParser2_2) parsePairFromSnippet2_2(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetByteRangeStart = bIntStart - parser.snippet.SnippetByteRangeEnd = bIntEnd + + if parser.snippet.Ranges == nil { + parser.snippet.Ranges = []spdx.SnippetRange{} + } + byteRange := spdx.SnippetRange{StartPointer: spdx.SnippetRangePointer{Offset: bIntStart}, EndPointer: spdx.SnippetRangePointer{Offset: bIntEnd}} + parser.snippet.Ranges = append(parser.snippet.Ranges, byteRange) case "SnippetLineRange": lineStart, lineEnd, err := extractSubs(value) if err != nil { @@ -80,8 +84,12 @@ func (parser *tvParser2_2) parsePairFromSnippet2_2(tag string, value string) err if err != nil { return err } - parser.snippet.SnippetLineRangeStart = lInttStart - parser.snippet.SnippetLineRangeEnd = lInttEnd + + if parser.snippet.Ranges == nil { + parser.snippet.Ranges = []spdx.SnippetRange{} + } + lineRange := 
spdx.SnippetRange{StartPointer: spdx.SnippetRangePointer{LineNumber: lInttStart}, EndPointer: spdx.SnippetRangePointer{LineNumber: lInttEnd}} + parser.snippet.Ranges = append(parser.snippet.Ranges, lineRange) case "SnippetLicenseConcluded": parser.snippet.SnippetLicenseConcluded = value case "LicenseInfoInSnippet": diff --git a/tvloader/parser2v2/parse_snippet_test.go b/tvloader/parser2v2/parse_snippet_test.go index d019a0c5..545595af 100644 --- a/tvloader/parser2v2/parse_snippet_test.go +++ b/tvloader/parser2v2/parse_snippet_test.go @@ -12,15 +12,15 @@ func TestParser2_2SnippetStartsNewSnippetAfterParsingSnippetSPDXIDTag(t *testing // create the first snippet sid1 := spdx.ElementID("s1") parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "test", PackageSPDXIdentifier: "test", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: sid1}, } s1 := parser.snippet - parser.doc.Packages["test"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets[sid1] = parser.snippet // the File's Snippets should have this one only @@ -71,16 +71,16 @@ func TestParser2_2SnippetStartsNewSnippetAfterParsingSnippetSPDXIDTag(t *testing func TestParser2_2SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet // now add a new package @@ -113,21 +113,21 @@ func TestParser2_2SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T if len(parser.doc.Packages) != 1 { t.Errorf("Expected len(Packages) to be 1, got %d", len(parser.doc.Packages)) } - if parser.doc.Packages["package1"] != p1 { - t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages["package1"]) + if parser.doc.Packages[0] != p1 { + t.Errorf("Expected package %v in Packages[package1], got %v", p1, parser.doc.Packages[0]) } - if parser.doc.Packages["package1"].PackageName != "package1" { - t.Errorf("expected package name %s in Packages[package1], got %s", "package1", parser.doc.Packages["package1"].PackageName) + if parser.doc.Packages[0].PackageName != "package1" { + t.Errorf("expected package name %s in Packages[package1], got %s", "package1", parser.doc.Packages[0].PackageName) } // and the first 
Package's Files should be of size 1 and have f1 only - if len(parser.doc.Packages["package1"].Files) != 1 { - t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages["package1"].Files)) + if len(parser.doc.Packages[0].Files) != 1 { + t.Errorf("Expected 1 file in Packages[package1].Files, got %d", len(parser.doc.Packages[0].Files)) } - if parser.doc.Packages["package1"].Files["f1"] != f1 { - t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages["package1"].Files["f1"]) + if parser.doc.Packages[0].Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.doc.Packages[0].Files[0]) } - if parser.doc.Packages["package1"].Files["f1"].FileName != "f1.txt" { - t.Errorf("expected file name %s in Files[f1], got %s", "f1.txt", parser.doc.Packages["package1"].Files["f1"].FileName) + if parser.doc.Packages[0].Files[0].FileName != "f1.txt" { + t.Errorf("expected file name %s in Files[f1], got %s", "f1.txt", parser.doc.Packages[0].Files[0].FileName) } // and the new Package should have no files if len(parser.pkg.Files) != 0 { @@ -146,16 +146,16 @@ func TestParser2_2SnippetStartsNewPackageAfterParsingPackageNameTag(t *testing.T func TestParser2_2SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { f1Name := "f1.txt" parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } p1 := parser.pkg f1 := parser.file - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet f2Name := "f2.txt" @@ -184,11 +184,11 @@ func TestParser2_2SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { if len(parser.pkg.Files) != 1 { t.Errorf("Expected len(Files) to be 1, got %d", len(parser.pkg.Files)) } - if parser.pkg.Files["f1"] != f1 { - t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.pkg.Files["f1"]) + if parser.pkg.Files[0] != f1 { + t.Errorf("Expected file %v in Files[f1], got %v", f1, parser.pkg.Files[0]) } - if parser.pkg.Files["f1"].FileName != f1Name { - t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.pkg.Files["f1"].FileName) + if parser.pkg.Files[0].FileName != f1Name { + t.Errorf("expected file name %s in Files[f1], got %s", f1Name, parser.pkg.Files[0].FileName) } // and the current snippet should be nil if parser.snippet != nil { @@ -198,14 +198,14 @@ func TestParser2_2SnippetMovesToFileAfterParsingFileNameTag(t *testing.T) { func TestParser2_2SnippetMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", 
Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_2("LicenseID", "LicenseRef-TestLic") @@ -219,14 +219,14 @@ func TestParser2_2SnippetMovesToOtherLicenseAfterParsingLicenseIDTag(t *testing. func TestParser2_2SnippetMovesToReviewAfterParsingReviewerTag(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_2("Reviewer", "Person: John Doe") @@ -240,14 +240,14 @@ func TestParser2_2SnippetMovesToReviewAfterParsingReviewerTag(t *testing.T) { func TestParser2_2SnippetStaysAfterParsingRelationshipTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_2("Relationship", "SPDXRef-blah CONTAINS SPDXRef-blah-else") @@ -279,14 +279,14 @@ func TestParser2_2SnippetStaysAfterParsingRelationshipTags(t *testing.T) { func TestParser2_2SnippetStaysAfterParsingAnnotationTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + 
parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) parser.file.Snippets["s1"] = parser.snippet err := parser.parsePair2_2("Annotator", "Person: John Doe ()") @@ -333,7 +333,7 @@ func TestParser2_2SnippetStaysAfterParsingAnnotationTags(t *testing.T) { if len(parser.doc.Annotations) != 1 { t.Fatalf("expected doc.Annotations to have len 1, got %d", len(parser.doc.Annotations)) } - if parser.doc.Annotations[0].Annotator != "John Doe ()" { + if parser.doc.Annotations[0].Annotator.Annotator != "John Doe ()" { t.Errorf("expected Annotator to be %s, got %s", "John Doe ()", parser.doc.Annotations[0].Annotator) } } @@ -341,14 +341,14 @@ func TestParser2_2SnippetStaysAfterParsingAnnotationTags(t *testing.T) { // ===== Snippet data section tests ===== func TestParser2_2CanParseSnippetTags(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // Snippet SPDX Identifier err := parser.parsePairFromSnippet2_2("SnippetSPDXID", "SPDXRef-s1") @@ -365,7 +365,7 @@ func TestParser2_2CanParseSnippetTags(t *testing.T) { t.Errorf("expected nil error, got %v", err) } wantDeID := spdx.DocElementID{DocumentRefID: "", ElementRefID: spdx.ElementID("f1")} - if parser.snippet.SnippetFromFileSPDXIdentifier != wantDeID { + if parser.snippet.SnippetFromFileSPDXIdentifier != wantDeID.ElementRefID { t.Errorf("got %v for SnippetFromFileSPDXIdentifier", parser.snippet.SnippetFromFileSPDXIdentifier) } @@ -374,11 +374,11 @@ func TestParser2_2CanParseSnippetTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.snippet.SnippetByteRangeStart != 20 { - t.Errorf("got %v for SnippetByteRangeStart", parser.snippet.SnippetByteRangeStart) + if parser.snippet.Ranges[0].StartPointer.Offset != 20 { + t.Errorf("got %v for SnippetByteRangeStart", parser.snippet.Ranges[0].StartPointer.Offset) } - if parser.snippet.SnippetByteRangeEnd != 320 { - t.Errorf("got %v for SnippetByteRangeEnd", parser.snippet.SnippetByteRangeEnd) + if parser.snippet.Ranges[0].EndPointer.Offset != 320 { + t.Errorf("got %v for SnippetByteRangeEnd", parser.snippet.Ranges[0].EndPointer.Offset) } // Snippet Line Range @@ -386,11 +386,11 @@ func TestParser2_2CanParseSnippetTags(t *testing.T) { if err != nil { t.Errorf("expected nil error, got %v", err) } - if parser.snippet.SnippetLineRangeStart != 5 { - t.Errorf("got %v for SnippetLineRangeStart", parser.snippet.SnippetLineRangeStart) + if parser.snippet.Ranges[1].StartPointer.LineNumber != 5 { + t.Errorf("got %v for SnippetLineRangeStart", parser.snippet.Ranges[1].StartPointer.LineNumber) } - if parser.snippet.SnippetLineRangeEnd != 12 { - t.Errorf("got %v for SnippetLineRangeEnd", parser.snippet.SnippetLineRangeEnd) + if parser.snippet.Ranges[1].EndPointer.LineNumber != 12 { + t.Errorf("got %v 
for SnippetLineRangeEnd", parser.snippet.Ranges[1].EndPointer.LineNumber) } // Snippet Concluded License @@ -497,14 +497,14 @@ func TestParser2_2CanParseSnippetTags(t *testing.T) { func TestParser2_2SnippetUnknownTagFails(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{SnippetSPDXIdentifier: "s1"}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) err := parser.parsePairFromSnippet2_2("blah", "something") if err == nil { @@ -514,14 +514,14 @@ func TestParser2_2SnippetUnknownTagFails(t *testing.T) { func TestParser2_2FailsForInvalidSnippetSPDXID(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // invalid Snippet SPDX Identifier err := parser.parsePairFromSnippet2_2("SnippetSPDXID", "whoops") @@ -532,14 +532,14 @@ func TestParser2_2FailsForInvalidSnippetSPDXID(t *testing.T) { func TestParser2_2FailsForInvalidSnippetFromFileSPDXID(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_2("SnippetSPDXID", "SPDXRef-s1") @@ -555,14 +555,14 @@ func TestParser2_2FailsForInvalidSnippetFromFileSPDXID(t *testing.T) { func TestParser2_2FailsForInvalidSnippetByteValues(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", 
PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_2("SnippetSPDXID", "SPDXRef-s1") @@ -586,14 +586,14 @@ func TestParser2_2FailsForInvalidSnippetByteValues(t *testing.T) { func TestParser2_2FailsForInvalidSnippetLineValues(t *testing.T) { parser := tvParser2_2{ - doc: &spdx.Document2_2{Packages: map[spdx.ElementID]*spdx.Package2_2{}}, + doc: &spdx.Document2_2{Packages: []*spdx.Package2_2{}}, st: psSnippet2_2, - pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: map[spdx.ElementID]*spdx.File2_2{}}, + pkg: &spdx.Package2_2{PackageName: "package1", PackageSPDXIdentifier: "package1", Files: []*spdx.File2_2{}}, file: &spdx.File2_2{FileName: "f1.txt", FileSPDXIdentifier: "f1", Snippets: map[spdx.ElementID]*spdx.Snippet2_2{}}, snippet: &spdx.Snippet2_2{}, } - parser.doc.Packages["package1"] = parser.pkg - parser.pkg.Files["f1"] = parser.file + parser.doc.Packages = append(parser.doc.Packages, parser.pkg) + parser.pkg.Files = append(parser.pkg.Files, parser.file) // start with Snippet SPDX Identifier err := parser.parsePairFromSnippet2_2("SnippetSPDXID", "SPDXRef-s1") diff --git a/tvloader/parser2v2/parser.go b/tvloader/parser2v2/parser.go index 98868741..1d9f8e9c 100644 --- a/tvloader/parser2v2/parser.go +++ b/tvloader/parser2v2/parser.go @@ -58,12 +58,42 @@ func (parser *tvParser2_2) parsePairFromStart2_2(tag string, value string) error // create an SPDX Document data struct if we don't have one already if parser.doc == nil { - parser.doc = &spdx.Document2_2{} + parser.doc = &spdx.Document2_2{ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{}} } - // move to Creation Info parser state - parser.st = psCreationInfo2_2 + switch tag { + case "DocumentComment": + parser.doc.DocumentComment = value + case "SPDXVersion": + parser.doc.SPDXVersion = value + case "DataLicense": + parser.doc.DataLicense = value + case "SPDXID": + eID, err := extractElementID(value) + if err != nil { + return err + } + parser.doc.SPDXIdentifier = eID + case "DocumentName": + parser.doc.DocumentName = value + case "DocumentNamespace": + parser.doc.DocumentNamespace = value + case "ExternalDocumentRef": + documentRefID, uri, alg, checksum, err := extractExternalDocumentReference(value) + if err != nil { + return err + } + edr := spdx.ExternalDocumentRef2_2{ + DocumentRefID: documentRefID, + URI: uri, + Checksum: spdx.Checksum{Algorithm: spdx.ChecksumAlgorithm(alg), Value: checksum}, + } + parser.doc.ExternalDocumentReferences = append(parser.doc.ExternalDocumentReferences, edr) + default: + // move to Creation Info parser state + parser.st = psCreationInfo2_2 + return parser.parsePairFromCreationInfo2_2(tag, value) + } - // and ask Creation Info subfunc to parse - return parser.parsePairFromCreationInfo2_2(tag, value) + return nil } diff --git a/tvloader/parser2v2/parser_test.go b/tvloader/parser2v2/parser_test.go index 4cd5228f..148264d6 100644 --- a/tvloader/parser2v2/parser_test.go +++ 
b/tvloader/parser2v2/parser_test.go @@ -24,14 +24,14 @@ func TestParser2_2CanParseTagValues(t *testing.T) { if err != nil { t.Errorf("got error when calling ParseTagValues: %v", err) } - if doc.CreationInfo.SPDXVersion != "SPDX-2.2" { - t.Errorf("expected SPDXVersion to be SPDX-2.2, got %v", doc.CreationInfo.SPDXVersion) + if doc.SPDXVersion != "SPDX-2.2" { + t.Errorf("expected SPDXVersion to be SPDX-2.2, got %v", doc.SPDXVersion) } - if doc.CreationInfo.DataLicense != "CC0-1.0" { - t.Errorf("expected DataLicense to be CC0-1.0, got %v", doc.CreationInfo.DataLicense) + if doc.DataLicense != "CC0-1.0" { + t.Errorf("expected DataLicense to be CC0-1.0, got %v", doc.DataLicense) } - if doc.CreationInfo.SPDXIdentifier != "DOCUMENT" { - t.Errorf("expected SPDXIdentifier to be DOCUMENT, got %v", doc.CreationInfo.SPDXIdentifier) + if doc.SPDXIdentifier != "DOCUMENT" { + t.Errorf("expected SPDXIdentifier to be DOCUMENT, got %v", doc.SPDXIdentifier) } } @@ -58,18 +58,6 @@ func TestParser2_2HasDocumentAfterCallToParseFirstTag(t *testing.T) { } } -// ===== Parser start state change tests ===== -func TestParser2_2StartMovesToCreationInfoStateAfterParsingFirstTag(t *testing.T) { - parser := tvParser2_2{} - err := parser.parsePair2_2("SPDXVersion", "b") - if err != nil { - t.Errorf("got error when calling parsePair2_2: %v", err) - } - if parser.st != psCreationInfo2_2 { - t.Errorf("parser is in state %v, expected %v", parser.st, psCreationInfo2_2) - } -} - func TestParser2_2StartFailsToParseIfInInvalidState(t *testing.T) { parser := tvParser2_2{st: psReview2_2} err := parser.parsePairFromStart2_2("SPDXVersion", "SPDX-2.2") diff --git a/tvsaver/saver2v1/save_annotation.go b/tvsaver/saver2v1/save_annotation.go index 8c0ae89b..f7d79538 100644 --- a/tvsaver/saver2v1/save_annotation.go +++ b/tvsaver/saver2v1/save_annotation.go @@ -10,8 +10,8 @@ import ( ) func renderAnnotation2_1(ann *spdx.Annotation2_1, w io.Writer) error { - if ann.Annotator != "" && ann.AnnotatorType != "" { - fmt.Fprintf(w, "Annotator: %s: %s\n", ann.AnnotatorType, ann.Annotator) + if ann.Annotator.Annotator != "" && ann.Annotator.AnnotatorType != "" { + fmt.Fprintf(w, "Annotator: %s: %s\n", ann.Annotator.AnnotatorType, ann.Annotator.Annotator) } if ann.AnnotationDate != "" { fmt.Fprintf(w, "AnnotationDate: %s\n", ann.AnnotationDate) diff --git a/tvsaver/saver2v1/save_annotation_test.go b/tvsaver/saver2v1/save_annotation_test.go index 9cb0277d..3eef5a72 100644 --- a/tvsaver/saver2v1/save_annotation_test.go +++ b/tvsaver/saver2v1/save_annotation_test.go @@ -12,8 +12,7 @@ import ( // ===== Annotation section Saver tests ===== func TestSaver2_1AnnotationSavesTextForPerson(t *testing.T) { ann := &spdx.Annotation2_1{ - Annotator: "John Doe", - AnnotatorType: "Person", + Annotator: spdx.Annotator{AnnotatorType: "Person", Annotator: "John Doe"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -45,8 +44,7 @@ AnnotationComment: This is an annotation about the SPDX document func TestSaver2_1AnnotationSavesTextForOrganization(t *testing.T) { ann := &spdx.Annotation2_1{ - Annotator: "John Doe, Inc.", - AnnotatorType: "Organization", + Annotator: spdx.Annotator{AnnotatorType: "Organization", Annotator: "John Doe, Inc."}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -78,8 +76,7 @@ AnnotationComment: This is an annotation about the SPDX document func 
TestSaver2_1AnnotationSavesTextForTool(t *testing.T) { ann := &spdx.Annotation2_1{ - Annotator: "magictool-1.1", - AnnotatorType: "Tool", + Annotator: spdx.Annotator{AnnotatorType: "Tool", Annotator: "magictool-1.1"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), diff --git a/tvsaver/saver2v1/save_creation_info.go b/tvsaver/saver2v1/save_creation_info.go index 6ea60860..de8b107a 100644 --- a/tvsaver/saver2v1/save_creation_info.go +++ b/tvsaver/saver2v1/save_creation_info.go @@ -4,50 +4,16 @@ package saver2v1 import ( "fmt" - "io" - "sort" - "github.com/spdx/tools-golang/spdx" + "io" ) func renderCreationInfo2_1(ci *spdx.CreationInfo2_1, w io.Writer) error { - if ci.SPDXVersion != "" { - fmt.Fprintf(w, "SPDXVersion: %s\n", ci.SPDXVersion) - } - if ci.DataLicense != "" { - fmt.Fprintf(w, "DataLicense: %s\n", ci.DataLicense) - } - if ci.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(ci.SPDXIdentifier)) - } - if ci.DocumentName != "" { - fmt.Fprintf(w, "DocumentName: %s\n", ci.DocumentName) - } - if ci.DocumentNamespace != "" { - fmt.Fprintf(w, "DocumentNamespace: %s\n", ci.DocumentNamespace) - } - // print EDRs in order sorted by identifier - edrIDs := []string{} - for docRefID := range ci.ExternalDocumentReferences { - edrIDs = append(edrIDs, docRefID) - } - sort.Strings(edrIDs) - for _, edrID := range edrIDs { - edr := ci.ExternalDocumentReferences[edrID] - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", - edr.DocumentRefID, edr.URI, edr.Alg, edr.Checksum) - } if ci.LicenseListVersion != "" { fmt.Fprintf(w, "LicenseListVersion: %s\n", ci.LicenseListVersion) } - for _, s := range ci.CreatorPersons { - fmt.Fprintf(w, "Creator: Person: %s\n", s) - } - for _, s := range ci.CreatorOrganizations { - fmt.Fprintf(w, "Creator: Organization: %s\n", s) - } - for _, s := range ci.CreatorTools { - fmt.Fprintf(w, "Creator: Tool: %s\n", s) + for _, creator := range ci.Creators { + fmt.Fprintf(w, "Creator: %s: %s\n", creator.CreatorType, creator.Creator) } if ci.Created != "" { fmt.Fprintf(w, "Created: %s\n", ci.Created) @@ -55,9 +21,6 @@ func renderCreationInfo2_1(ci *spdx.CreationInfo2_1, w io.Writer) error { if ci.CreatorComment != "" { fmt.Fprintf(w, "CreatorComment: %s\n", textify(ci.CreatorComment)) } - if ci.DocumentComment != "" { - fmt.Fprintf(w, "DocumentComment: %s\n", textify(ci.DocumentComment)) - } // add blank newline b/c end of a main section fmt.Fprintf(w, "\n") diff --git a/tvsaver/saver2v1/save_creation_info_test.go b/tvsaver/saver2v1/save_creation_info_test.go index cec03c7c..1784cf59 100644 --- a/tvsaver/saver2v1/save_creation_info_test.go +++ b/tvsaver/saver2v1/save_creation_info_test.go @@ -12,53 +12,22 @@ import ( // ===== Creation Info section Saver tests ===== func TestSaver2_1CISavesText(t *testing.T) { ci := &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_1{ - "spdx-go-0.0.1a": spdx.ExternalDocumentRef2_1{ - DocumentRefID: "spdx-go-0.0.1a", - URI: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1a.cdefab.whatever", - Alg: "SHA1", - Checksum: "0123456701234567012345670123456701234567", - }, - "time-1.2.3": spdx.ExternalDocumentRef2_1{ - DocumentRefID: 
"time-1.2.3", - URI: "https://github.com/swinslow/spdx-docs/time/time-1.2.3.cdefab.whatever", - Alg: "SHA1", - Checksum: "0123456701234567012345670123456701234568", - }, - }, LicenseListVersion: "2.0", - CreatorPersons: []string{ - "John Doe", - "Jane Doe (janedoe@example.com)", - }, - CreatorOrganizations: []string{ - "John Doe, Inc.", - "Jane Doe LLC", - }, - CreatorTools: []string{ - "magictool1-1.0", - "magictool2-1.0", - "magictool3-1.0", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, + {Creator: "Jane Doe (janedoe@example.com)", CreatorType: "Person"}, + {Creator: "John Doe, Inc.", CreatorType: "Organization"}, + {Creator: "Jane Doe LLC", CreatorType: "Organization"}, + {Creator: "magictool1-1.0", CreatorType: "Tool"}, + {Creator: "magictool2-1.0", CreatorType: "Tool"}, + {Creator: "magictool3-1.0", CreatorType: "Tool"}, }, - Created: "2018-10-10T06:20:00Z", - CreatorComment: "this is a creator comment", - DocumentComment: "this is a document comment", + Created: "2018-10-10T06:20:00Z", + CreatorComment: "this is a creator comment", } // what we want to get, as a buffer of bytes - want := bytes.NewBufferString(`SPDXVersion: SPDX-2.1 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -ExternalDocumentRef: DocumentRef-spdx-go-0.0.1a https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1a.cdefab.whatever SHA1:0123456701234567012345670123456701234567 -ExternalDocumentRef: DocumentRef-time-1.2.3 https://github.com/swinslow/spdx-docs/time/time-1.2.3.cdefab.whatever SHA1:0123456701234567012345670123456701234568 -LicenseListVersion: 2.0 + want := bytes.NewBufferString(`LicenseListVersion: 2.0 Creator: Person: John Doe Creator: Person: Jane Doe (janedoe@example.com) Creator: Organization: John Doe, Inc. 
@@ -68,7 +37,6 @@ Creator: Tool: magictool2-1.0 Creator: Tool: magictool3-1.0 Created: 2018-10-10T06:20:00Z CreatorComment: this is a creator comment -DocumentComment: this is a document comment `) @@ -89,24 +57,14 @@ DocumentComment: this is a document comment func TestSaver2_1CIOmitsOptionalFieldsIfEmpty(t *testing.T) { // --- need at least one creator; do first for Persons --- ci1 := &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - CreatorPersons: []string{ - "John Doe", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, }, Created: "2018-10-10T06:20:00Z", } // what we want to get, as a buffer of bytes - want1 := bytes.NewBufferString(`SPDXVersion: SPDX-2.1 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -Creator: Person: John Doe + want1 := bytes.NewBufferString(`Creator: Person: John Doe Created: 2018-10-10T06:20:00Z `) @@ -126,24 +84,14 @@ Created: 2018-10-10T06:20:00Z // --- need at least one creator; now switch to organization --- ci2 := &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - CreatorOrganizations: []string{ - "John Doe, Inc.", + Creators: []spdx.Creator{ + {Creator: "John Doe, Inc.", CreatorType: "Organization"}, }, Created: "2018-10-10T06:20:00Z", } // what we want to get, as a buffer of bytes - want2 := bytes.NewBufferString(`SPDXVersion: SPDX-2.1 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -Creator: Organization: John Doe, Inc. + want2 := bytes.NewBufferString(`Creator: Organization: John Doe, Inc. 
Created: 2018-10-10T06:20:00Z `) diff --git a/tvsaver/saver2v1/save_document.go b/tvsaver/saver2v1/save_document.go index 67dfddc9..ea17db25 100644 --- a/tvsaver/saver2v1/save_document.go +++ b/tvsaver/saver2v1/save_document.go @@ -21,30 +21,50 @@ func RenderDocument2_1(doc *spdx.Document2_1, w io.Writer) error { return fmt.Errorf("Document had nil CreationInfo section") } + if doc.SPDXVersion != "" { + fmt.Fprintf(w, "SPDXVersion: %s\n", doc.SPDXVersion) + } + if doc.DataLicense != "" { + fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) + } + if doc.SPDXIdentifier != "" { + fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + } + if doc.DocumentName != "" { + fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) + } + if doc.DocumentNamespace != "" { + fmt.Fprintf(w, "DocumentNamespace: %s\n", doc.DocumentNamespace) + } + // print EDRs in order sorted by identifier + sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { + return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + }) + for _, edr := range doc.ExternalDocumentReferences { + fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) + } + if doc.DocumentComment != "" { + fmt.Fprintf(w, "DocumentComment: %s\n", textify(doc.DocumentComment)) + } + renderCreationInfo2_1(doc.CreationInfo, w) - if len(doc.UnpackagedFiles) > 0 { + if len(doc.Files) > 0 { fmt.Fprintf(w, "##### Unpackaged files\n\n") - // get slice of identifiers so we can sort them - unpackagedFileKeys := []string{} - for k := range doc.UnpackagedFiles { - unpackagedFileKeys = append(unpackagedFileKeys, string(k)) - } - sort.Strings(unpackagedFileKeys) - for _, fiID := range unpackagedFileKeys { - fi := doc.UnpackagedFiles[spdx.ElementID(fiID)] + sort.Slice(doc.Files, func(i, j int) bool { + return doc.Files[i].FileSPDXIdentifier < doc.Files[j].FileSPDXIdentifier + }) + for _, fi := range doc.Files { renderFile2_1(fi, w) } } - // get slice of Package identifiers so we can sort them - packageKeys := []string{} - for k := range doc.Packages { - packageKeys = append(packageKeys, string(k)) - } - sort.Strings(packageKeys) - for _, pkgID := range packageKeys { - pkg := doc.Packages[spdx.ElementID(pkgID)] + // sort Packages by identifier + sort.Slice(doc.Packages, func(i, j int) bool { + return doc.Packages[i].PackageSPDXIdentifier < doc.Packages[j].PackageSPDXIdentifier + }) + for _, pkg := range doc.Packages { fmt.Fprintf(w, "##### Package: %s\n\n", pkg.PackageName) renderPackage2_1(pkg, w) } diff --git a/tvsaver/saver2v1/save_document_test.go b/tvsaver/saver2v1/save_document_test.go index 708eabf9..b1865647 100644 --- a/tvsaver/saver2v1/save_document_test.go +++ b/tvsaver/saver2v1/save_document_test.go @@ -14,13 +14,8 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { // Creation Info section ci := &spdx.CreationInfo2_1{ - SPDXVersion: "SPDX-2.1", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - CreatorPersons: []string{ - "John Doe", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, }, Created: "2018-10-10T06:20:00Z", } @@ -29,41 +24,39 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { f1 := &spdx.File2_1{ FileName: "/tmp/whatever1.txt", FileSPDXIdentifier: spdx.ElementID("File1231"), - FileChecksumSHA1: 
"85ed0817af83a24ad8da68c2b5094de69833983c", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983c", Algorithm: spdx.SHA1}}, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, + LicenseInfoInFiles: []string{"Apache-2.0"}, FileCopyrightText: "Copyright (c) Jane Doe", } f2 := &spdx.File2_1{ FileName: "/tmp/whatever2.txt", FileSPDXIdentifier: spdx.ElementID("File1232"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983d", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983d", Algorithm: spdx.SHA1}}, LicenseConcluded: "MIT", - LicenseInfoInFile: []string{"MIT"}, + LicenseInfoInFiles: []string{"MIT"}, FileCopyrightText: "Copyright (c) John Doe", } - unFiles := map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File1231"): f1, - spdx.ElementID("File1232"): f2, + unFiles := []*spdx.File2_1{ + f1, + f2, } // Package 1: packaged files with snippets sn1 := &spdx.Snippet2_1{ SnippetSPDXIdentifier: "Snippet19", - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 17}, EndPointer: spdx.SnippetRangePointer{Offset: 209}}}, SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: "Copyright (c) John Doe 20x6", } sn2 := &spdx.Snippet2_1{ SnippetSPDXIdentifier: "Snippet20", - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets"), - SnippetByteRangeStart: 268, - SnippetByteRangeEnd: 309, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 268}, EndPointer: spdx.SnippetRangePointer{Offset: 309}}}, SnippetLicenseConcluded: "WTFPL", SnippetCopyrightText: "NOASSERTION", } @@ -71,9 +64,9 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { f3 := &spdx.File2_1{ FileName: "/tmp/file-with-snippets.txt", FileSPDXIdentifier: spdx.ElementID("FileHasSnippets"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983e", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983e", Algorithm: spdx.SHA1}}, LicenseConcluded: "GPL-2.0-or-later AND WTFPL", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", "GPL-2.0-or-later", "WTFPL", @@ -88,9 +81,9 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { f4 := &spdx.File2_1{ FileName: "/tmp/another-file.txt", FileSPDXIdentifier: spdx.ElementID("FileAnother"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983f", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983f", Algorithm: spdx.SHA1}}, LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{"BSD-3-Clause"}, + LicenseInfoInFiles: []string{"BSD-3-Clause"}, FileCopyrightText: "Copyright (c) Jane Doe LLC", } @@ -100,7 +93,7 @@ func TestSaver2_1DocumentSavesText(t *testing.T) { PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", PackageLicenseInfoFromFiles: []string{ "Apache-2.0", @@ -110,9 +103,9 @@ func 
TestSaver2_1DocumentSavesText(t *testing.T) { }, PackageLicenseDeclared: "Apache-2.0 OR GPL-2.0-or-later", PackageCopyrightText: "Copyright (c) John Doe, Inc.", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("FileHasSnippets"): f3, - spdx.ElementID("FileAnother"): f4, + Files: []*spdx.File2_1{ + f3, + f4, }, } @@ -152,8 +145,8 @@ blah blah blah blah`, // Annotations ann1 := &spdx.Annotation2_1{ - Annotator: "John Doe", - AnnotatorType: "Person", + Annotator: spdx.Annotator{Annotator: "John Doe", + AnnotatorType: "Person"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -161,8 +154,8 @@ blah blah blah blah`, } ann2 := &spdx.Annotation2_1{ - Annotator: "John Doe, Inc.", - AnnotatorType: "Organization", + Annotator: spdx.Annotator{Annotator: "John Doe, Inc.", + AnnotatorType: "Organization"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "p1"), @@ -184,11 +177,16 @@ blah blah blah blah`, // now, build the document doc := &spdx.Document2_1{ - CreationInfo: ci, - Packages: map[spdx.ElementID]*spdx.Package2_1{ - spdx.ElementID("p1"): pkgWith, + SPDXVersion: "SPDX-2.1", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + DocumentName: "spdx-go-0.0.1.abcdef", + DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", + CreationInfo: ci, + Packages: []*spdx.Package2_1{ + pkgWith, }, - UnpackagedFiles: unFiles, + Files: unFiles, OtherLicenses: []*spdx.OtherLicense2_1{ ol1, ol2, diff --git a/tvsaver/saver2v1/save_file.go b/tvsaver/saver2v1/save_file.go index d22a012a..c1311220 100644 --- a/tvsaver/saver2v1/save_file.go +++ b/tvsaver/saver2v1/save_file.go @@ -17,22 +17,17 @@ func renderFile2_1(f *spdx.File2_1, w io.Writer) error { if f.FileSPDXIdentifier != "" { fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) } - for _, s := range f.FileType { + for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) } - if f.FileChecksumSHA1 != "" { - fmt.Fprintf(w, "FileChecksum: SHA1: %s\n", f.FileChecksumSHA1) - } - if f.FileChecksumSHA256 != "" { - fmt.Fprintf(w, "FileChecksum: SHA256: %s\n", f.FileChecksumSHA256) - } - if f.FileChecksumMD5 != "" { - fmt.Fprintf(w, "FileChecksum: MD5: %s\n", f.FileChecksumMD5) + + for _, checksum := range f.Checksums { + fmt.Fprintf(w, "FileChecksum: %s: %s\n", checksum.Algorithm, checksum.Value) } if f.LicenseConcluded != "" { fmt.Fprintf(w, "LicenseConcluded: %s\n", f.LicenseConcluded) } - for _, s := range f.LicenseInfoInFile { + for _, s := range f.LicenseInfoInFiles { fmt.Fprintf(w, "LicenseInfoInFile: %s\n", s) } if f.LicenseComments != "" { @@ -56,7 +51,7 @@ func renderFile2_1(f *spdx.File2_1, w io.Writer) error { if f.FileNotice != "" { fmt.Fprintf(w, "FileNotice: %s\n", textify(f.FileNotice)) } - for _, s := range f.FileContributor { + for _, s := range f.FileContributors { fmt.Fprintf(w, "FileContributor: %s\n", s) } for _, s := range f.FileDependencies { diff --git a/tvsaver/saver2v1/save_file_test.go b/tvsaver/saver2v1/save_file_test.go index cdd6d252..97084304 100644 --- a/tvsaver/saver2v1/save_file_test.go +++ b/tvsaver/saver2v1/save_file_test.go @@ -14,15 +14,17 @@ func TestSaver2_1FileSavesText(t *testing.T) { f := &spdx.File2_1{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileType: []string{ + FileTypes: []string{ "TEXT", "DOCUMENTATION", }, - 
FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - FileChecksumSHA256: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - FileChecksumMD5: "624c1abb3664f4b35547e7c73864ad24", - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, + {Algorithm: spdx.SHA256, Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd"}, + {Algorithm: spdx.MD5, Value: "624c1abb3664f4b35547e7c73864ad24"}, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ "Apache-2.0", "Apache-1.1", }, @@ -48,7 +50,7 @@ func TestSaver2_1FileSavesText(t *testing.T) { }, FileComment: "this is a file comment", FileNotice: "This file may be used under either Apache-2.0 or Apache-1.1.", - FileContributor: []string{ + FileContributors: []string{ "John Doe jdoe@example.com", "EvilCorp", }, @@ -105,18 +107,16 @@ FileDependency: g.txt func TestSaver2_1FileSavesSnippetsAlso(t *testing.T) { sn1 := &spdx.Snippet2_1{ SnippetSPDXIdentifier: spdx.ElementID("Snippet19"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 17}, EndPointer: spdx.SnippetRangePointer{Offset: 209}}}, SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: "Copyright (c) John Doe 20x6", } sn2 := &spdx.Snippet2_1{ SnippetSPDXIdentifier: spdx.ElementID("Snippet20"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123"), - SnippetByteRangeStart: 268, - SnippetByteRangeEnd: 309, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 268}, EndPointer: spdx.SnippetRangePointer{Offset: 309}}}, SnippetLicenseConcluded: "WTFPL", SnippetCopyrightText: "NOASSERTION", } @@ -129,9 +129,11 @@ func TestSaver2_1FileSavesSnippetsAlso(t *testing.T) { f := &spdx.File2_1{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", @@ -178,9 +180,11 @@ func TestSaver2_1FileOmitsOptionalFieldsIfEmpty(t *testing.T) { f := &spdx.File2_1{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", @@ -214,9 +218,11 @@ func TestSaver2_1FileWrapsCopyrightMultiLine(t *testing.T) { f := &spdx.File2_1{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: 
"85ed0817af83a24ad8da68c2b5094de69833983c"}, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: `Copyright (c) Jane Doe @@ -252,11 +258,13 @@ func TestSaver2_1FileWrapsCommentsAndNoticesMultiLine(t *testing.T) { f := &spdx.File2_1{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, + }, LicenseComments: `this is a multi-line license comment`, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", diff --git a/tvsaver/saver2v1/save_package.go b/tvsaver/saver2v1/save_package.go index 9b19cfa8..24a468c0 100644 --- a/tvsaver/saver2v1/save_package.go +++ b/tvsaver/saver2v1/save_package.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "sort" + "strings" "github.com/spdx/tools-golang/spdx" ) @@ -23,23 +24,19 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { if pkg.PackageFileName != "" { fmt.Fprintf(w, "PackageFileName: %s\n", pkg.PackageFileName) } - if pkg.PackageSupplierPerson != "" { - fmt.Fprintf(w, "PackageSupplier: Person: %s\n", pkg.PackageSupplierPerson) - } - if pkg.PackageSupplierOrganization != "" { - fmt.Fprintf(w, "PackageSupplier: Organization: %s\n", pkg.PackageSupplierOrganization) - } - if pkg.PackageSupplierNOASSERTION == true { - fmt.Fprintf(w, "PackageSupplier: NOASSERTION\n") - } - if pkg.PackageOriginatorPerson != "" { - fmt.Fprintf(w, "PackageOriginator: Person: %s\n", pkg.PackageOriginatorPerson) - } - if pkg.PackageOriginatorOrganization != "" { - fmt.Fprintf(w, "PackageOriginator: Organization: %s\n", pkg.PackageOriginatorOrganization) + if pkg.PackageSupplier != nil && pkg.PackageSupplier.Supplier != "" { + if pkg.PackageSupplier.SupplierType == "" { + fmt.Fprintf(w, "PackageSupplier: %s\n", pkg.PackageSupplier.Supplier) + } else { + fmt.Fprintf(w, "PackageSupplier: %s: %s\n", pkg.PackageSupplier.SupplierType, pkg.PackageSupplier.Supplier) + } } - if pkg.PackageOriginatorNOASSERTION == true { - fmt.Fprintf(w, "PackageOriginator: NOASSERTION\n") + if pkg.PackageOriginator != nil && pkg.PackageOriginator.Originator != "" { + if pkg.PackageOriginator.OriginatorType == "" { + fmt.Fprintf(w, "PackageOriginator: %s\n", pkg.PackageOriginator.Originator) + } else { + fmt.Fprintf(w, "PackageOriginator: %s: %s\n", pkg.PackageOriginator.OriginatorType, pkg.PackageOriginator.Originator) + } } if pkg.PackageDownloadLocation != "" { fmt.Fprintf(w, "PackageDownloadLocation: %s\n", pkg.PackageDownloadLocation) @@ -51,22 +48,18 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { } else { fmt.Fprintf(w, "FilesAnalyzed: false\n") } - if pkg.PackageVerificationCode != "" && pkg.FilesAnalyzed == true { - if pkg.PackageVerificationCodeExcludedFile == "" { - fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode) + if pkg.PackageVerificationCode.Value != "" && pkg.FilesAnalyzed == true { + if len(pkg.PackageVerificationCode.ExcludedFiles) == 0 { + fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode.Value) } else { - fmt.Fprintf(w, "PackageVerificationCode: %s (excludes: %s)\n", pkg.PackageVerificationCode, pkg.PackageVerificationCodeExcludedFile) + fmt.Fprintf(w, "PackageVerificationCode: %s (excludes: %s)\n", pkg.PackageVerificationCode.Value, 
strings.Join(pkg.PackageVerificationCode.ExcludedFiles, ", ")) } } - if pkg.PackageChecksumSHA1 != "" { - fmt.Fprintf(w, "PackageChecksum: SHA1: %s\n", pkg.PackageChecksumSHA1) - } - if pkg.PackageChecksumSHA256 != "" { - fmt.Fprintf(w, "PackageChecksum: SHA256: %s\n", pkg.PackageChecksumSHA256) - } - if pkg.PackageChecksumMD5 != "" { - fmt.Fprintf(w, "PackageChecksum: MD5: %s\n", pkg.PackageChecksumMD5) + + for _, checksum := range pkg.PackageChecksums { + fmt.Fprintf(w, "PackageChecksum: %s: %s\n", checksum.Algorithm, checksum.Value) } + if pkg.PackageHomePage != "" { fmt.Fprintf(w, "PackageHomePage: %s\n", pkg.PackageHomePage) } @@ -109,14 +102,10 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { fmt.Fprintf(w, "\n") // also render any files for this package - // get slice of File identifiers so we can sort them - fileKeys := []string{} - for k := range pkg.Files { - fileKeys = append(fileKeys, string(k)) - } - sort.Strings(fileKeys) - for _, fiID := range fileKeys { - fi := pkg.Files[spdx.ElementID(fiID)] + sort.Slice(pkg.Files, func(i, j int) bool { + return pkg.Files[i].FileSPDXIdentifier < pkg.Files[j].FileSPDXIdentifier + }) + for _, fi := range pkg.Files { renderFile2_1(fi, w) } diff --git a/tvsaver/saver2v1/save_package_test.go b/tvsaver/saver2v1/save_package_test.go index fc6b2a67..0f1541ca 100644 --- a/tvsaver/saver2v1/save_package_test.go +++ b/tvsaver/saver2v1/save_package_test.go @@ -41,23 +41,36 @@ multi-line external ref comment`, } pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierOrganization: "John Doe, Inc.", - PackageOriginatorPerson: "John Doe", - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", - PackageVerificationCodeExcludedFile: "p1-0.1.0.spdx", - PackageChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - PackageChecksumSHA256: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - PackageChecksumMD5: "624c1abb3664f4b35547e7c73864ad24", - PackageHomePage: "http://example.com/p1", - PackageSourceInfo: "this is a source comment", - PackageLicenseConcluded: "GPL-2.0-or-later", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, + PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: true, + IsFilesAnalyzedTagPresent: true, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "0123456789abcdef0123456789abcdef01234567", + ExcludedFiles: []string{"p1-0.1.0.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: spdx.SHA256, + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + { + Algorithm: spdx.MD5, + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + PackageHomePage: "http://example.com/p1", + PackageSourceInfo: "this is a source comment", + PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -131,22 +144,33 @@ func 
TestSaver2_1PackageSavesTextCombo2(t *testing.T) { // PackageVerificationCodeExcludedFile is empty pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierNOASSERTION: true, - PackageOriginatorOrganization: "John Doe, Inc.", - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", - PackageChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - PackageChecksumSHA256: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - PackageChecksumMD5: "624c1abb3664f4b35547e7c73864ad24", - PackageHomePage: "http://example.com/p1", - PackageSourceInfo: "this is a source comment", - PackageLicenseConcluded: "GPL-2.0-or-later", + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, + PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: true, + IsFilesAnalyzedTagPresent: false, + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: spdx.SHA256, + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + { + Algorithm: spdx.MD5, + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + PackageHomePage: "http://example.com/p1", + PackageSourceInfo: "this is a source comment", + PackageLicenseConcluded: "GPL-2.0-or-later", PackageLicenseInfoFromFiles: []string{ "Apache-1.1", "Apache-2.0", @@ -208,21 +232,32 @@ func TestSaver2_1PackageSavesTextCombo3(t *testing.T) { // PackageVerificationCodeExcludedFile is empty pkg := &spdx.Package2_1{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierPerson: "John Doe", - PackageOriginatorNOASSERTION: true, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, + PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: false, + IsFilesAnalyzedTagPresent: true, // NOTE that verification code MUST be omitted from output // since FilesAnalyzed is false - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", - PackageChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", - PackageChecksumSHA256: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - PackageChecksumMD5: "624c1abb3664f4b35547e7c73864ad24", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: spdx.SHA256, + Value: 
"11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + { + Algorithm: spdx.MD5, + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, PackageHomePage: "http://example.com/p1", PackageSourceInfo: "this is a source comment", PackageLicenseConcluded: "GPL-2.0-or-later", @@ -329,18 +364,28 @@ func TestSaver2_1PackageSavesFilesIfPresent(t *testing.T) { f1 := &spdx.File2_1{ FileName: "/tmp/whatever1.txt", FileSPDXIdentifier: spdx.ElementID("File1231"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983c", + Checksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, + LicenseInfoInFiles: []string{"Apache-2.0"}, FileCopyrightText: "Copyright (c) Jane Doe", } f2 := &spdx.File2_1{ FileName: "/tmp/whatever2.txt", FileSPDXIdentifier: spdx.ElementID("File1232"), - FileChecksumSHA1: "85ed0817af83a24ad8da68c2b5094de69833983d", + Checksums: []spdx.Checksum{ + { + Algorithm: spdx.SHA1, + Value: "85ed0817af83a24ad8da68c2b5094de69833983d", + }, + }, LicenseConcluded: "MIT", - LicenseInfoInFile: []string{"MIT"}, + LicenseInfoInFiles: []string{"MIT"}, FileCopyrightText: "Copyright (c) John Doe", } @@ -362,9 +407,9 @@ func TestSaver2_1PackageSavesFilesIfPresent(t *testing.T) { }, PackageLicenseDeclared: "Apache-2.0 OR GPL-2.0-or-later", PackageCopyrightText: "Copyright (c) John Doe, Inc.", - Files: map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File1231"): f1, - spdx.ElementID("File1232"): f2, + Files: []*spdx.File2_1{ + f1, + f2, }, } diff --git a/tvsaver/saver2v1/save_snippet.go b/tvsaver/saver2v1/save_snippet.go index e9e1a077..13995489 100644 --- a/tvsaver/saver2v1/save_snippet.go +++ b/tvsaver/saver2v1/save_snippet.go @@ -13,15 +13,18 @@ func renderSnippet2_1(sn *spdx.Snippet2_1, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) } - snFromFileIDStr := spdx.RenderDocElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } - if sn.SnippetByteRangeStart != 0 && sn.SnippetByteRangeEnd != 0 { - fmt.Fprintf(w, "SnippetByteRange: %d:%d\n", sn.SnippetByteRangeStart, sn.SnippetByteRangeEnd) - } - if sn.SnippetLineRangeStart != 0 && sn.SnippetLineRangeEnd != 0 { - fmt.Fprintf(w, "SnippetLineRange: %d:%d\n", sn.SnippetLineRangeStart, sn.SnippetLineRangeEnd) + + for _, snippetRange := range sn.Ranges { + if snippetRange.StartPointer.Offset != 0 && snippetRange.EndPointer.Offset != 0 { + fmt.Fprintf(w, "SnippetByteRange: %d:%d\n", snippetRange.StartPointer.Offset, snippetRange.EndPointer.Offset) + } + if snippetRange.StartPointer.LineNumber != 0 && snippetRange.EndPointer.LineNumber != 0 { + fmt.Fprintf(w, "SnippetLineRange: %d:%d\n", snippetRange.StartPointer.LineNumber, snippetRange.EndPointer.LineNumber) + } } if sn.SnippetLicenseConcluded != "" { fmt.Fprintf(w, "SnippetLicenseConcluded: %s\n", sn.SnippetLicenseConcluded) diff --git a/tvsaver/saver2v1/save_snippet_test.go b/tvsaver/saver2v1/save_snippet_test.go index 07c9e2a3..fd6357ec 100644 --- a/tvsaver/saver2v1/save_snippet_test.go +++ b/tvsaver/saver2v1/save_snippet_test.go @@ -13,12 +13,18 @@ import ( func TestSaver2_1SnippetSavesText(t *testing.T) { sn := &spdx.Snippet2_1{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - 
SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLineRangeStart: 3, - SnippetLineRangeEnd: 8, - SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{LineNumber: 3}, + EndPointer: spdx.SnippetRangePointer{LineNumber: 8}, + }, + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", LicenseInfoInSnippet: []string{ "GPL-2.0-or-later", "MIT", @@ -32,8 +38,8 @@ func TestSaver2_1SnippetSavesText(t *testing.T) { // what we want to get, as a buffer of bytes want := bytes.NewBufferString(`SnippetSPDXID: SPDXRef-Snippet17 SnippetFromFileSPDXID: SPDXRef-File292 -SnippetByteRange: 17:209 SnippetLineRange: 3:8 +SnippetByteRange: 17:209 SnippetLicenseConcluded: GPL-2.0-or-later LicenseInfoInSnippet: GPL-2.0-or-later LicenseInfoInSnippet: MIT @@ -61,11 +67,15 @@ SnippetName: from John's program func TestSaver2_1SnippetOmitsOptionalFieldsIfEmpty(t *testing.T) { sn := &spdx.Snippet2_1{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLicenseConcluded: "GPL-2.0-or-later", - SnippetCopyrightText: "Copyright (c) John Doe 20x6", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetCopyrightText: "Copyright (c) John Doe 20x6", } // what we want to get, as a buffer of bytes @@ -94,10 +104,14 @@ SnippetCopyrightText: Copyright (c) John Doe 20x6 func TestSaver2_1SnippetWrapsCopyrightMultiline(t *testing.T) { sn := &spdx.Snippet2_1{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: `Copyright (c) John Doe 20x6 Copyright (c) John Doe 20x6`, } diff --git a/tvsaver/saver2v2/save_annotation.go b/tvsaver/saver2v2/save_annotation.go index c0f1449b..ddfe483a 100644 --- a/tvsaver/saver2v2/save_annotation.go +++ b/tvsaver/saver2v2/save_annotation.go @@ -10,8 +10,8 @@ import ( ) func renderAnnotation2_2(ann *spdx.Annotation2_2, w io.Writer) error { - if ann.Annotator != "" && ann.AnnotatorType != "" { - fmt.Fprintf(w, "Annotator: %s: %s\n", ann.AnnotatorType, ann.Annotator) + if ann.Annotator.Annotator != "" && ann.Annotator.AnnotatorType != "" { + fmt.Fprintf(w, "Annotator: %s: %s\n", ann.Annotator.AnnotatorType, ann.Annotator.Annotator) } if ann.AnnotationDate != "" { fmt.Fprintf(w, "AnnotationDate: %s\n", ann.AnnotationDate) diff --git a/tvsaver/saver2v2/save_annotation_test.go b/tvsaver/saver2v2/save_annotation_test.go index d938646d..46d8546b 100644 --- a/tvsaver/saver2v2/save_annotation_test.go +++ 
b/tvsaver/saver2v2/save_annotation_test.go @@ -12,8 +12,7 @@ import ( // ===== Annotation section Saver tests ===== func TestSaver2_2AnnotationSavesTextForPerson(t *testing.T) { ann := &spdx.Annotation2_2{ - Annotator: "John Doe", - AnnotatorType: "Person", + Annotator: spdx.Annotator{AnnotatorType: "Person", Annotator: "John Doe"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -45,8 +44,7 @@ AnnotationComment: This is an annotation about the SPDX document func TestSaver2_2AnnotationSavesTextForOrganization(t *testing.T) { ann := &spdx.Annotation2_2{ - Annotator: "John Doe, Inc.", - AnnotatorType: "Organization", + Annotator: spdx.Annotator{AnnotatorType: "Organization", Annotator: "John Doe, Inc."}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -78,8 +76,7 @@ AnnotationComment: This is an annotation about the SPDX document func TestSaver2_2AnnotationSavesTextForTool(t *testing.T) { ann := &spdx.Annotation2_2{ - Annotator: "magictool-1.1", - AnnotatorType: "Tool", + Annotator: spdx.Annotator{AnnotatorType: "Tool", Annotator: "magictool-1.1"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), diff --git a/tvsaver/saver2v2/save_creation_info.go b/tvsaver/saver2v2/save_creation_info.go index 1492fb45..df2b0b0f 100644 --- a/tvsaver/saver2v2/save_creation_info.go +++ b/tvsaver/saver2v2/save_creation_info.go @@ -4,50 +4,16 @@ package saver2v2 import ( "fmt" - "io" - "sort" - "github.com/spdx/tools-golang/spdx" + "io" ) func renderCreationInfo2_2(ci *spdx.CreationInfo2_2, w io.Writer) error { - if ci.SPDXVersion != "" { - fmt.Fprintf(w, "SPDXVersion: %s\n", ci.SPDXVersion) - } - if ci.DataLicense != "" { - fmt.Fprintf(w, "DataLicense: %s\n", ci.DataLicense) - } - if ci.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(ci.SPDXIdentifier)) - } - if ci.DocumentName != "" { - fmt.Fprintf(w, "DocumentName: %s\n", ci.DocumentName) - } - if ci.DocumentNamespace != "" { - fmt.Fprintf(w, "DocumentNamespace: %s\n", ci.DocumentNamespace) - } - // print EDRs in order sorted by identifier - edrIDs := []string{} - for docRefID := range ci.ExternalDocumentReferences { - edrIDs = append(edrIDs, docRefID) - } - sort.Strings(edrIDs) - for _, edrID := range edrIDs { - edr := ci.ExternalDocumentReferences[edrID] - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", - edr.DocumentRefID, edr.URI, edr.Alg, edr.Checksum) - } if ci.LicenseListVersion != "" { fmt.Fprintf(w, "LicenseListVersion: %s\n", ci.LicenseListVersion) } - for _, s := range ci.CreatorPersons { - fmt.Fprintf(w, "Creator: Person: %s\n", s) - } - for _, s := range ci.CreatorOrganizations { - fmt.Fprintf(w, "Creator: Organization: %s\n", s) - } - for _, s := range ci.CreatorTools { - fmt.Fprintf(w, "Creator: Tool: %s\n", s) + for _, creator := range ci.Creators { + fmt.Fprintf(w, "Creator: %s: %s\n", creator.CreatorType, creator.Creator) } if ci.Created != "" { fmt.Fprintf(w, "Created: %s\n", ci.Created) @@ -55,9 +21,6 @@ func renderCreationInfo2_2(ci *spdx.CreationInfo2_2, w io.Writer) error { if ci.CreatorComment != "" { fmt.Fprintf(w, "CreatorComment: %s\n", textify(ci.CreatorComment)) } - if ci.DocumentComment != "" { - fmt.Fprintf(w, "DocumentComment: %s\n", textify(ci.DocumentComment)) - } // add blank newline b/c end of a main section fmt.Fprintf(w, "\n") 
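
Note for reviewers (not part of the patch): a minimal sketch of how callers migrate to the consolidated Creators slice introduced above. It assumes only the spdx.Creator fields shown in this diff (Creator, CreatorType) and reproduces the saver's per-entry rendering loop inline rather than calling the unexported renderCreationInfo2_2.

    package main

    import (
    	"fmt"

    	"github.com/spdx/tools-golang/spdx"
    )

    func main() {
    	// Before this patch: CreatorPersons, CreatorOrganizations and CreatorTools
    	// were three separate string slices. After it, a single ordered slice of
    	// spdx.Creator values carries the same information.
    	ci := &spdx.CreationInfo2_2{
    		Creators: []spdx.Creator{
    			{CreatorType: "Person", Creator: "Jane Doe (janedoe@example.com)"},
    			{CreatorType: "Organization", Creator: "Jane Doe LLC"},
    			{CreatorType: "Tool", Creator: "magictool1-1.0"},
    		},
    		Created: "2018-10-10T06:20:00Z",
    	}

    	// The refactored saver emits one "Creator:" tag per entry, in slice order,
    	// so callers control ordering directly instead of relying on three fields.
    	for _, c := range ci.Creators {
    		fmt.Printf("Creator: %s: %s\n", c.CreatorType, c.Creator)
    	}
    	fmt.Printf("Created: %s\n", ci.Created)
    }

One consequence of this design, visible in the test expectations elsewhere in the patch, is that document-level fields (SPDXVersion, DataLicense, DocumentName, DocumentNamespace, ExternalDocumentRef, DocumentComment) are now rendered from the Document struct rather than from CreationInfo, so the creation-info section only emits LicenseListVersion, Creator, Created and CreatorComment.
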
diff --git a/tvsaver/saver2v2/save_creation_info_test.go b/tvsaver/saver2v2/save_creation_info_test.go index 404abfe0..ba3c18d8 100644 --- a/tvsaver/saver2v2/save_creation_info_test.go +++ b/tvsaver/saver2v2/save_creation_info_test.go @@ -12,53 +12,22 @@ import ( // ===== Creation Info section Saver tests ===== func TestSaver2_2CISavesText(t *testing.T) { ci := &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - ExternalDocumentReferences: map[string]spdx.ExternalDocumentRef2_2{ - "spdx-go-0.0.1a": spdx.ExternalDocumentRef2_2{ - DocumentRefID: "spdx-go-0.0.1a", - URI: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1a.cdefab.whatever", - Alg: "SHA1", - Checksum: "0123456701234567012345670123456701234567", - }, - "time-1.2.3": spdx.ExternalDocumentRef2_2{ - DocumentRefID: "time-1.2.3", - URI: "https://github.com/swinslow/spdx-docs/time/time-1.2.3.cdefab.whatever", - Alg: "SHA1", - Checksum: "0123456701234567012345670123456701234568", - }, - }, LicenseListVersion: "3.9", - CreatorPersons: []string{ - "John Doe", - "Jane Doe (janedoe@example.com)", - }, - CreatorOrganizations: []string{ - "John Doe, Inc.", - "Jane Doe LLC", - }, - CreatorTools: []string{ - "magictool1-1.0", - "magictool2-1.0", - "magictool3-1.0", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, + {Creator: "Jane Doe (janedoe@example.com)", CreatorType: "Person"}, + {Creator: "John Doe, Inc.", CreatorType: "Organization"}, + {Creator: "Jane Doe LLC", CreatorType: "Organization"}, + {Creator: "magictool1-1.0", CreatorType: "Tool"}, + {Creator: "magictool2-1.0", CreatorType: "Tool"}, + {Creator: "magictool3-1.0", CreatorType: "Tool"}, }, - Created: "2018-10-10T06:20:00Z", - CreatorComment: "this is a creator comment", - DocumentComment: "this is a document comment", + Created: "2018-10-10T06:20:00Z", + CreatorComment: "this is a creator comment", } // what we want to get, as a buffer of bytes - want := bytes.NewBufferString(`SPDXVersion: SPDX-2.2 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -ExternalDocumentRef: DocumentRef-spdx-go-0.0.1a https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1a.cdefab.whatever SHA1:0123456701234567012345670123456701234567 -ExternalDocumentRef: DocumentRef-time-1.2.3 https://github.com/swinslow/spdx-docs/time/time-1.2.3.cdefab.whatever SHA1:0123456701234567012345670123456701234568 -LicenseListVersion: 3.9 + want := bytes.NewBufferString(`LicenseListVersion: 3.9 Creator: Person: John Doe Creator: Person: Jane Doe (janedoe@example.com) Creator: Organization: John Doe, Inc. 
@@ -68,7 +37,6 @@ Creator: Tool: magictool2-1.0 Creator: Tool: magictool3-1.0 Created: 2018-10-10T06:20:00Z CreatorComment: this is a creator comment -DocumentComment: this is a document comment `) @@ -89,24 +57,14 @@ DocumentComment: this is a document comment func TestSaver2_2CIOmitsOptionalFieldsIfEmpty(t *testing.T) { // --- need at least one creator; do first for Persons --- ci1 := &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - CreatorPersons: []string{ - "John Doe", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, }, Created: "2018-10-10T06:20:00Z", } // what we want to get, as a buffer of bytes - want1 := bytes.NewBufferString(`SPDXVersion: SPDX-2.2 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -Creator: Person: John Doe + want1 := bytes.NewBufferString(`Creator: Person: John Doe Created: 2018-10-10T06:20:00Z `) @@ -126,24 +84,14 @@ Created: 2018-10-10T06:20:00Z // --- need at least one creator; now switch to organization --- ci2 := &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "spdx-go-0.0.1.abcdef", - DocumentNamespace: "https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever", - CreatorOrganizations: []string{ - "John Doe, Inc.", + Creators: []spdx.Creator{ + {Creator: "John Doe, Inc.", CreatorType: "Organization"}, }, Created: "2018-10-10T06:20:00Z", } // what we want to get, as a buffer of bytes - want2 := bytes.NewBufferString(`SPDXVersion: SPDX-2.2 -DataLicense: CC0-1.0 -SPDXID: SPDXRef-DOCUMENT -DocumentName: spdx-go-0.0.1.abcdef -DocumentNamespace: https://github.com/swinslow/spdx-docs/spdx-go/spdx-go-0.0.1.abcdef.whatever -Creator: Organization: John Doe, Inc. + want2 := bytes.NewBufferString(`Creator: Organization: John Doe, Inc. 
Created: 2018-10-10T06:20:00Z `) diff --git a/tvsaver/saver2v2/save_document.go b/tvsaver/saver2v2/save_document.go index 8db23630..04b482da 100644 --- a/tvsaver/saver2v2/save_document.go +++ b/tvsaver/saver2v2/save_document.go @@ -21,30 +21,50 @@ func RenderDocument2_2(doc *spdx.Document2_2, w io.Writer) error { return fmt.Errorf("Document had nil CreationInfo section") } + if doc.SPDXVersion != "" { + fmt.Fprintf(w, "SPDXVersion: %s\n", doc.SPDXVersion) + } + if doc.DataLicense != "" { + fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) + } + if doc.SPDXIdentifier != "" { + fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + } + if doc.DocumentName != "" { + fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) + } + if doc.DocumentNamespace != "" { + fmt.Fprintf(w, "DocumentNamespace: %s\n", doc.DocumentNamespace) + } + // print EDRs in order sorted by identifier + sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { + return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + }) + for _, edr := range doc.ExternalDocumentReferences { + fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) + } + if doc.DocumentComment != "" { + fmt.Fprintf(w, "DocumentComment: %s\n", textify(doc.DocumentComment)) + } + renderCreationInfo2_2(doc.CreationInfo, w) - if len(doc.UnpackagedFiles) > 0 { + if len(doc.Files) > 0 { fmt.Fprintf(w, "##### Unpackaged files\n\n") - // get slice of identifiers so we can sort them - unpackagedFileKeys := []string{} - for k := range doc.UnpackagedFiles { - unpackagedFileKeys = append(unpackagedFileKeys, string(k)) - } - sort.Strings(unpackagedFileKeys) - for _, fiID := range unpackagedFileKeys { - fi := doc.UnpackagedFiles[spdx.ElementID(fiID)] + sort.Slice(doc.Files, func(i, j int) bool { + return doc.Files[i].FileSPDXIdentifier < doc.Files[j].FileSPDXIdentifier + }) + for _, fi := range doc.Files { renderFile2_2(fi, w) } } - // get slice of Package identifiers so we can sort them - packageKeys := []string{} - for k := range doc.Packages { - packageKeys = append(packageKeys, string(k)) - } - sort.Strings(packageKeys) - for _, pkgID := range packageKeys { - pkg := doc.Packages[spdx.ElementID(pkgID)] + // sort Packages by identifier + sort.Slice(doc.Packages, func(i, j int) bool { + return doc.Packages[i].PackageSPDXIdentifier < doc.Packages[j].PackageSPDXIdentifier + }) + for _, pkg := range doc.Packages { fmt.Fprintf(w, "##### Package: %s\n\n", pkg.PackageName) renderPackage2_2(pkg, w) } diff --git a/tvsaver/saver2v2/save_document_test.go b/tvsaver/saver2v2/save_document_test.go index 19705805..552cdab5 100644 --- a/tvsaver/saver2v2/save_document_test.go +++ b/tvsaver/saver2v2/save_document_test.go @@ -14,13 +14,8 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { // Creation Info section ci := &spdx.CreationInfo2_2{ - SPDXVersion: "SPDX-2.2", - DataLicense: "CC0-1.0", - SPDXIdentifier: spdx.ElementID("DOCUMENT"), - DocumentName: "tools-golang-0.0.1.abcdef", - DocumentNamespace: "https://github.com/spdx/spdx-docs/tools-golang/tools-golang-0.0.1.abcdef.whatever", - CreatorPersons: []string{ - "John Doe", + Creators: []spdx.Creator{ + {Creator: "John Doe", CreatorType: "Person"}, }, Created: "2018-10-10T06:20:00Z", } @@ -29,51 +24,39 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { f1 := &spdx.File2_2{ FileName: "/tmp/whatever1.txt", FileSPDXIdentifier: spdx.ElementID("File1231"), - 
FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - }, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - FileCopyrightText: "Copyright (c) Jane Doe", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983c", Algorithm: spdx.SHA1}}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileCopyrightText: "Copyright (c) Jane Doe", } f2 := &spdx.File2_2{ FileName: "/tmp/whatever2.txt", FileSPDXIdentifier: spdx.ElementID("File1232"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983d", - }, - }, - LicenseConcluded: "MIT", - LicenseInfoInFile: []string{"MIT"}, - FileCopyrightText: "Copyright (c) John Doe", + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983d", Algorithm: spdx.SHA1}}, + LicenseConcluded: "MIT", + LicenseInfoInFiles: []string{"MIT"}, + FileCopyrightText: "Copyright (c) John Doe", } - unFiles := map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File1231"): f1, - spdx.ElementID("File1232"): f2, + unFiles := []*spdx.File2_2{ + f1, + f2, } // Package 1: packaged files with snippets sn1 := &spdx.Snippet2_2{ SnippetSPDXIdentifier: "Snippet19", - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 17}, EndPointer: spdx.SnippetRangePointer{Offset: 209}}}, SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: "Copyright (c) John Doe 20x6", } sn2 := &spdx.Snippet2_2{ SnippetSPDXIdentifier: "Snippet20", - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets"), - SnippetByteRangeStart: 268, - SnippetByteRangeEnd: 309, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "FileHasSnippets").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 268}, EndPointer: spdx.SnippetRangePointer{Offset: 309}}}, SnippetLicenseConcluded: "WTFPL", SnippetCopyrightText: "NOASSERTION", } @@ -81,14 +64,9 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { f3 := &spdx.File2_2{ FileName: "/tmp/file-with-snippets.txt", FileSPDXIdentifier: spdx.ElementID("FileHasSnippets"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983e", - }, - }, - LicenseConcluded: "GPL-2.0-or-later AND WTFPL", - LicenseInfoInFile: []string{ + Checksums: []spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983e", Algorithm: spdx.SHA1}}, + LicenseConcluded: "GPL-2.0-or-later AND WTFPL", + LicenseInfoInFiles: []string{ "Apache-2.0", "GPL-2.0-or-later", "WTFPL", @@ -103,15 +81,10 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { f4 := &spdx.File2_2{ FileName: "/tmp/another-file.txt", FileSPDXIdentifier: spdx.ElementID("FileAnother"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983f", - }, - }, - LicenseConcluded: "BSD-3-Clause", - LicenseInfoInFile: []string{"BSD-3-Clause"}, - FileCopyrightText: "Copyright (c) Jane Doe LLC", + Checksums: 
[]spdx.Checksum{{Value: "85ed0817af83a24ad8da68c2b5094de69833983f", Algorithm: spdx.SHA1}}, + LicenseConcluded: "BSD-3-Clause", + LicenseInfoInFiles: []string{"BSD-3-Clause"}, + FileCopyrightText: "Copyright (c) Jane Doe LLC", } pkgWith := &spdx.Package2_2{ @@ -120,7 +93,7 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", FilesAnalyzed: true, IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, PackageLicenseConcluded: "GPL-2.0-or-later AND BSD-3-Clause AND WTFPL", PackageLicenseInfoFromFiles: []string{ "Apache-2.0", @@ -130,9 +103,9 @@ func TestSaver2_2DocumentSavesText(t *testing.T) { }, PackageLicenseDeclared: "Apache-2.0 OR GPL-2.0-or-later", PackageCopyrightText: "Copyright (c) John Doe, Inc.", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("FileHasSnippets"): f3, - spdx.ElementID("FileAnother"): f4, + Files: []*spdx.File2_2{ + f3, + f4, }, } @@ -172,8 +145,8 @@ blah blah blah blah`, // Annotations ann1 := &spdx.Annotation2_2{ - Annotator: "John Doe", - AnnotatorType: "Person", + Annotator: spdx.Annotator{Annotator: "John Doe", + AnnotatorType: "Person"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), @@ -181,8 +154,8 @@ blah blah blah blah`, } ann2 := &spdx.Annotation2_2{ - Annotator: "John Doe, Inc.", - AnnotatorType: "Organization", + Annotator: spdx.Annotator{Annotator: "John Doe, Inc.", + AnnotatorType: "Organization"}, AnnotationDate: "2018-10-10T17:52:00Z", AnnotationType: "REVIEW", AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "p1"), @@ -204,11 +177,16 @@ blah blah blah blah`, // now, build the document doc := &spdx.Document2_2{ - CreationInfo: ci, - Packages: map[spdx.ElementID]*spdx.Package2_2{ - spdx.ElementID("p1"): pkgWith, + SPDXVersion: "SPDX-2.2", + DataLicense: "CC0-1.0", + SPDXIdentifier: spdx.ElementID("DOCUMENT"), + DocumentName: "tools-golang-0.0.1.abcdef", + DocumentNamespace: "https://github.com/spdx/spdx-docs/tools-golang/tools-golang-0.0.1.abcdef.whatever", + CreationInfo: ci, + Packages: []*spdx.Package2_2{ + pkgWith, }, - UnpackagedFiles: unFiles, + Files: unFiles, OtherLicenses: []*spdx.OtherLicense2_2{ ol1, ol2, diff --git a/tvsaver/saver2v2/save_file.go b/tvsaver/saver2v2/save_file.go index ce75f313..f1684efb 100644 --- a/tvsaver/saver2v2/save_file.go +++ b/tvsaver/saver2v2/save_file.go @@ -17,22 +17,18 @@ func renderFile2_2(f *spdx.File2_2, w io.Writer) error { if f.FileSPDXIdentifier != "" { fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) } - for _, s := range f.FileType { + for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) } - if f.FileChecksums[spdx.SHA1].Value != "" { - fmt.Fprintf(w, "FileChecksum: SHA1: %s\n", f.FileChecksums[spdx.SHA1].Value) - } - if f.FileChecksums[spdx.SHA256].Value != "" { - fmt.Fprintf(w, "FileChecksum: SHA256: %s\n", f.FileChecksums[spdx.SHA256].Value) - } - if f.FileChecksums[spdx.MD5].Value != "" { - fmt.Fprintf(w, "FileChecksum: MD5: %s\n", f.FileChecksums[spdx.MD5].Value) + + for _, checksum := range f.Checksums { + fmt.Fprintf(w, "FileChecksum: %s: %s\n", checksum.Algorithm, checksum.Value) } + if f.LicenseConcluded != "" { fmt.Fprintf(w, "LicenseConcluded: %s\n", f.LicenseConcluded) } - for _, s := range f.LicenseInfoInFile { + for _, s := 
range f.LicenseInfoInFiles { fmt.Fprintf(w, "LicenseInfoInFile: %s\n", s) } if f.LicenseComments != "" { @@ -56,7 +52,7 @@ func renderFile2_2(f *spdx.File2_2, w io.Writer) error { if f.FileNotice != "" { fmt.Fprintf(w, "FileNotice: %s\n", textify(f.FileNotice)) } - for _, s := range f.FileContributor { + for _, s := range f.FileContributors { fmt.Fprintf(w, "FileContributor: %s\n", s) } for _, s := range f.FileAttributionTexts { diff --git a/tvsaver/saver2v2/save_file_test.go b/tvsaver/saver2v2/save_file_test.go index f90a1bb9..c49c978b 100644 --- a/tvsaver/saver2v2/save_file_test.go +++ b/tvsaver/saver2v2/save_file_test.go @@ -14,26 +14,17 @@ func TestSaver2_2FileSavesText(t *testing.T) { f := &spdx.File2_2{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileType: []string{ + FileTypes: []string{ "TEXT", "DOCUMENTATION", }, - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, - spdx.SHA256: spdx.Checksum{ - Algorithm: spdx.SHA256, - Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", - }, - spdx.MD5: spdx.Checksum{ - Algorithm: spdx.MD5, - Value: "624c1abb3664f4b35547e7c73864ad24", - }, + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, + {Algorithm: spdx.SHA256, Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd"}, + {Algorithm: spdx.MD5, Value: "624c1abb3664f4b35547e7c73864ad24"}, }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", "Apache-1.1", }, @@ -59,7 +50,7 @@ func TestSaver2_2FileSavesText(t *testing.T) { }, FileComment: "this is a file comment", FileNotice: "This file may be used under either Apache-2.0 or Apache-1.1.", - FileContributor: []string{ + FileContributors: []string{ "John Doe jdoe@example.com", "EvilCorp", }, @@ -124,18 +115,16 @@ FileDependency: g.txt func TestSaver2_2FileSavesSnippetsAlso(t *testing.T) { sn1 := &spdx.Snippet2_2{ SnippetSPDXIdentifier: spdx.ElementID("Snippet19"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 17}, EndPointer: spdx.SnippetRangePointer{Offset: 209}}}, SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: "Copyright (c) John Doe 20x6", } sn2 := &spdx.Snippet2_2{ SnippetSPDXIdentifier: spdx.ElementID("Snippet20"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123"), - SnippetByteRangeStart: 268, - SnippetByteRangeEnd: 309, + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File123").ElementRefID, + Ranges: []spdx.SnippetRange{{StartPointer: spdx.SnippetRangePointer{Offset: 268}, EndPointer: spdx.SnippetRangePointer{Offset: 309}}}, SnippetLicenseConcluded: "WTFPL", SnippetCopyrightText: "NOASSERTION", } @@ -148,14 +137,11 @@ func TestSaver2_2FileSavesSnippetsAlso(t *testing.T) { f := &spdx.File2_2{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, }, 
LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", @@ -202,14 +188,11 @@ func TestSaver2_2FileOmitsOptionalFieldsIfEmpty(t *testing.T) { f := &spdx.File2_2{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", @@ -243,14 +226,11 @@ func TestSaver2_2FileWrapsCopyrightMultiLine(t *testing.T) { f := &spdx.File2_2{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, }, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: `Copyright (c) Jane Doe @@ -286,16 +266,13 @@ func TestSaver2_2FileWrapsCommentsAndNoticesMultiLine(t *testing.T) { f := &spdx.File2_2{ FileName: "/tmp/whatever.txt", FileSPDXIdentifier: spdx.ElementID("File123"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ - Algorithm: spdx.SHA1, - Value: "85ed0817af83a24ad8da68c2b5094de69833983c", - }, + Checksums: []spdx.Checksum{ + {Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c"}, }, LicenseComments: `this is a multi-line license comment`, LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{ + LicenseInfoInFiles: []string{ "Apache-2.0", }, FileCopyrightText: "Copyright (c) Jane Doe", diff --git a/tvsaver/saver2v2/save_package.go b/tvsaver/saver2v2/save_package.go index 4929775e..6d21a6d2 100644 --- a/tvsaver/saver2v2/save_package.go +++ b/tvsaver/saver2v2/save_package.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "sort" + "strings" "github.com/spdx/tools-golang/spdx" ) @@ -23,23 +24,19 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { if pkg.PackageFileName != "" { fmt.Fprintf(w, "PackageFileName: %s\n", pkg.PackageFileName) } - if pkg.PackageSupplierPerson != "" { - fmt.Fprintf(w, "PackageSupplier: Person: %s\n", pkg.PackageSupplierPerson) - } - if pkg.PackageSupplierOrganization != "" { - fmt.Fprintf(w, "PackageSupplier: Organization: %s\n", pkg.PackageSupplierOrganization) - } - if pkg.PackageSupplierNOASSERTION == true { - fmt.Fprintf(w, "PackageSupplier: NOASSERTION\n") - } - if pkg.PackageOriginatorPerson != "" { - fmt.Fprintf(w, "PackageOriginator: Person: %s\n", pkg.PackageOriginatorPerson) - } - if pkg.PackageOriginatorOrganization != "" { - fmt.Fprintf(w, "PackageOriginator: Organization: %s\n", pkg.PackageOriginatorOrganization) + if pkg.PackageSupplier != nil && pkg.PackageSupplier.Supplier != "" { + if pkg.PackageSupplier.SupplierType == "" { + fmt.Fprintf(w, "PackageSupplier: %s\n", pkg.PackageSupplier.Supplier) + } else { + fmt.Fprintf(w, "PackageSupplier: %s: %s\n", pkg.PackageSupplier.SupplierType, pkg.PackageSupplier.Supplier) + } } - if pkg.PackageOriginatorNOASSERTION == true { - fmt.Fprintf(w, 
"PackageOriginator: NOASSERTION\n") + if pkg.PackageOriginator != nil && pkg.PackageOriginator.Originator != "" { + if pkg.PackageOriginator.OriginatorType == "" { + fmt.Fprintf(w, "PackageOriginator: %s\n", pkg.PackageOriginator.Originator) + } else { + fmt.Fprintf(w, "PackageOriginator: %s: %s\n", pkg.PackageOriginator.OriginatorType, pkg.PackageOriginator.Originator) + } } if pkg.PackageDownloadLocation != "" { fmt.Fprintf(w, "PackageDownloadLocation: %s\n", pkg.PackageDownloadLocation) @@ -51,22 +48,18 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { } else { fmt.Fprintf(w, "FilesAnalyzed: false\n") } - if pkg.PackageVerificationCode != "" && pkg.FilesAnalyzed == true { - if pkg.PackageVerificationCodeExcludedFile == "" { - fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode) + if pkg.PackageVerificationCode.Value != "" && pkg.FilesAnalyzed == true { + if len(pkg.PackageVerificationCode.ExcludedFiles) == 0 { + fmt.Fprintf(w, "PackageVerificationCode: %s\n", pkg.PackageVerificationCode.Value) } else { - fmt.Fprintf(w, "PackageVerificationCode: %s (excludes: %s)\n", pkg.PackageVerificationCode, pkg.PackageVerificationCodeExcludedFile) + fmt.Fprintf(w, "PackageVerificationCode: %s (excludes: %s)\n", pkg.PackageVerificationCode.Value, strings.Join(pkg.PackageVerificationCode.ExcludedFiles, ", ")) } } - if pkg.PackageChecksums[spdx.SHA1].Value != "" { - fmt.Fprintf(w, "PackageChecksum: SHA1: %s\n", pkg.PackageChecksums[spdx.SHA1].Value) - } - if pkg.PackageChecksums[spdx.SHA256].Value != "" { - fmt.Fprintf(w, "PackageChecksum: SHA256: %s\n", pkg.PackageChecksums[spdx.SHA256].Value) - } - if pkg.PackageChecksums[spdx.MD5].Value != "" { - fmt.Fprintf(w, "PackageChecksum: MD5: %s\n", pkg.PackageChecksums[spdx.MD5].Value) + + for _, checksum := range pkg.PackageChecksums { + fmt.Fprintf(w, "PackageChecksum: %s: %s\n", checksum.Algorithm, checksum.Value) } + if pkg.PackageHomePage != "" { fmt.Fprintf(w, "PackageHomePage: %s\n", pkg.PackageHomePage) } @@ -112,14 +105,10 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { fmt.Fprintf(w, "\n") // also render any files for this package - // get slice of File identifiers so we can sort them - fileKeys := []string{} - for k := range pkg.Files { - fileKeys = append(fileKeys, string(k)) - } - sort.Strings(fileKeys) - for _, fiID := range fileKeys { - fi := pkg.Files[spdx.ElementID(fiID)] + sort.Slice(pkg.Files, func(i, j int) bool { + return pkg.Files[i].FileSPDXIdentifier < pkg.Files[j].FileSPDXIdentifier + }) + for _, fi := range pkg.Files { renderFile2_2(fi, w) } diff --git a/tvsaver/saver2v2/save_package_test.go b/tvsaver/saver2v2/save_package_test.go index 72d1de2e..f9960f0a 100644 --- a/tvsaver/saver2v2/save_package_test.go +++ b/tvsaver/saver2v2/save_package_test.go @@ -49,27 +49,29 @@ multi-line external ref comment`, } pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierOrganization: "John Doe, Inc.", - PackageOriginatorPerson: "John Doe", - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: true, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", - PackageVerificationCodeExcludedFile: "p1-0.1.0.spdx", - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + 
PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{SupplierType: "Organization", Supplier: "John Doe, Inc."}, + PackageOriginator: &spdx.Originator{Originator: "John Doe", OriginatorType: "Person"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: true, + IsFilesAnalyzedTagPresent: true, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "0123456789abcdef0123456789abcdef01234567", + ExcludedFiles: []string{"p1-0.1.0.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c", }, - spdx.SHA256: spdx.Checksum{ + { Algorithm: spdx.SHA256, Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", }, - spdx.MD5: spdx.Checksum{ + { Algorithm: spdx.MD5, Value: "624c1abb3664f4b35547e7c73864ad24", }, @@ -154,26 +156,26 @@ func TestSaver2_2PackageSavesTextCombo2(t *testing.T) { // PackageVerificationCodeExcludedFile is empty pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierNOASSERTION: true, - PackageOriginatorOrganization: "John Doe, Inc.", - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: true, - IsFilesAnalyzedTagPresent: false, - PackageVerificationCode: "0123456789abcdef0123456789abcdef01234567", - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "NOASSERTION"}, + PackageOriginator: &spdx.Originator{OriginatorType: "Organization", Originator: "John Doe, Inc."}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: true, + IsFilesAnalyzedTagPresent: false, + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageChecksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c", }, - spdx.SHA256: spdx.Checksum{ + { Algorithm: spdx.SHA256, Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", }, - spdx.MD5: spdx.Checksum{ + { Algorithm: spdx.MD5, Value: "624c1abb3664f4b35547e7c73864ad24", }, @@ -245,28 +247,28 @@ func TestSaver2_2PackageSavesTextCombo3(t *testing.T) { // three PackageAttributionTexts, one with multi-line text pkg := &spdx.Package2_2{ - PackageName: "p1", - PackageSPDXIdentifier: spdx.ElementID("p1"), - PackageVersion: "0.1.0", - PackageFileName: "p1-0.1.0-master.tar.gz", - PackageSupplierPerson: "John Doe", - PackageOriginatorNOASSERTION: true, - PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", - FilesAnalyzed: false, - IsFilesAnalyzedTagPresent: true, + PackageName: "p1", + PackageSPDXIdentifier: spdx.ElementID("p1"), + PackageVersion: "0.1.0", + PackageFileName: "p1-0.1.0-master.tar.gz", + PackageSupplier: &spdx.Supplier{Supplier: "John Doe", SupplierType: "Person"}, + PackageOriginator: &spdx.Originator{Originator: "NOASSERTION"}, + PackageDownloadLocation: "http://example.com/p1/p1-0.1.0-master.tar.gz", + FilesAnalyzed: false, + IsFilesAnalyzedTagPresent: true, // NOTE that verification code MUST be omitted from output // since FilesAnalyzed is false - PackageVerificationCode: 
"0123456789abcdef0123456789abcdef01234567", - PackageChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + PackageVerificationCode: spdx.PackageVerificationCode{Value: "0123456789abcdef0123456789abcdef01234567"}, + PackageChecksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c", }, - spdx.SHA256: spdx.Checksum{ + { Algorithm: spdx.SHA256, Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", }, - spdx.MD5: spdx.Checksum{ + { Algorithm: spdx.MD5, Value: "624c1abb3664f4b35547e7c73864ad24", }, @@ -387,29 +389,29 @@ func TestSaver2_2PackageSavesFilesIfPresent(t *testing.T) { f1 := &spdx.File2_2{ FileName: "/tmp/whatever1.txt", FileSPDXIdentifier: spdx.ElementID("File1231"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983c", }, }, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFile: []string{"Apache-2.0"}, - FileCopyrightText: "Copyright (c) Jane Doe", + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileCopyrightText: "Copyright (c) Jane Doe", } f2 := &spdx.File2_2{ FileName: "/tmp/whatever2.txt", FileSPDXIdentifier: spdx.ElementID("File1232"), - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "85ed0817af83a24ad8da68c2b5094de69833983d", }, }, - LicenseConcluded: "MIT", - LicenseInfoInFile: []string{"MIT"}, - FileCopyrightText: "Copyright (c) John Doe", + LicenseConcluded: "MIT", + LicenseInfoInFiles: []string{"MIT"}, + FileCopyrightText: "Copyright (c) John Doe", } pkg := &spdx.Package2_2{ @@ -430,9 +432,9 @@ func TestSaver2_2PackageSavesFilesIfPresent(t *testing.T) { }, PackageLicenseDeclared: "Apache-2.0 OR GPL-2.0-or-later", PackageCopyrightText: "Copyright (c) John Doe, Inc.", - Files: map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File1231"): f1, - spdx.ElementID("File1232"): f2, + Files: []*spdx.File2_2{ + f1, + f2, }, } diff --git a/tvsaver/saver2v2/save_snippet.go b/tvsaver/saver2v2/save_snippet.go index 8423e531..4f740982 100644 --- a/tvsaver/saver2v2/save_snippet.go +++ b/tvsaver/saver2v2/save_snippet.go @@ -13,15 +13,18 @@ func renderSnippet2_2(sn *spdx.Snippet2_2, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) } - snFromFileIDStr := spdx.RenderDocElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } - if sn.SnippetByteRangeStart != 0 && sn.SnippetByteRangeEnd != 0 { - fmt.Fprintf(w, "SnippetByteRange: %d:%d\n", sn.SnippetByteRangeStart, sn.SnippetByteRangeEnd) - } - if sn.SnippetLineRangeStart != 0 && sn.SnippetLineRangeEnd != 0 { - fmt.Fprintf(w, "SnippetLineRange: %d:%d\n", sn.SnippetLineRangeStart, sn.SnippetLineRangeEnd) + + for _, snippetRange := range sn.Ranges { + if snippetRange.StartPointer.Offset != 0 && snippetRange.EndPointer.Offset != 0 { + fmt.Fprintf(w, "SnippetByteRange: %d:%d\n", snippetRange.StartPointer.Offset, snippetRange.EndPointer.Offset) + } + if snippetRange.StartPointer.LineNumber != 0 && snippetRange.EndPointer.LineNumber != 0 { + fmt.Fprintf(w, "SnippetLineRange: %d:%d\n", snippetRange.StartPointer.LineNumber, 
snippetRange.EndPointer.LineNumber) + } } if sn.SnippetLicenseConcluded != "" { fmt.Fprintf(w, "SnippetLicenseConcluded: %s\n", sn.SnippetLicenseConcluded) diff --git a/tvsaver/saver2v2/save_snippet_test.go b/tvsaver/saver2v2/save_snippet_test.go index b749c7d3..da91b991 100644 --- a/tvsaver/saver2v2/save_snippet_test.go +++ b/tvsaver/saver2v2/save_snippet_test.go @@ -13,12 +13,18 @@ import ( func TestSaver2_2SnippetSavesText(t *testing.T) { sn := &spdx.Snippet2_2{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLineRangeStart: 3, - SnippetLineRangeEnd: 8, - SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{LineNumber: 3}, + EndPointer: spdx.SnippetRangePointer{LineNumber: 8}, + }, + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", LicenseInfoInSnippet: []string{ "GPL-2.0-or-later", "MIT", @@ -33,8 +39,8 @@ func TestSaver2_2SnippetSavesText(t *testing.T) { // what we want to get, as a buffer of bytes want := bytes.NewBufferString(`SnippetSPDXID: SPDXRef-Snippet17 SnippetFromFileSPDXID: SPDXRef-File292 -SnippetByteRange: 17:209 SnippetLineRange: 3:8 +SnippetByteRange: 17:209 SnippetLicenseConcluded: GPL-2.0-or-later LicenseInfoInSnippet: GPL-2.0-or-later LicenseInfoInSnippet: MIT @@ -63,11 +69,15 @@ SnippetAttributionText: some attributions func TestSaver2_2SnippetOmitsOptionalFieldsIfEmpty(t *testing.T) { sn := &spdx.Snippet2_2{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLicenseConcluded: "GPL-2.0-or-later", - SnippetCopyrightText: "Copyright (c) John Doe 20x6", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetCopyrightText: "Copyright (c) John Doe 20x6", } // what we want to get, as a buffer of bytes @@ -96,10 +106,14 @@ SnippetCopyrightText: Copyright (c) John Doe 20x6 func TestSaver2_2SnippetWrapsCopyrightMultiline(t *testing.T) { sn := &spdx.Snippet2_2{ SnippetSPDXIdentifier: spdx.ElementID("Snippet17"), - SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292"), - SnippetByteRangeStart: 17, - SnippetByteRangeEnd: 209, - SnippetLicenseConcluded: "GPL-2.0-or-later", + SnippetFromFileSPDXIdentifier: spdx.MakeDocElementID("", "File292").ElementRefID, + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{Offset: 17}, + EndPointer: spdx.SnippetRangePointer{Offset: 209}, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-or-later", SnippetCopyrightText: `Copyright (c) John Doe 20x6 Copyright (c) John Doe 20x6`, } diff --git a/utils/verification.go b/utils/verification.go index 7c538417..94e8b7ef 100644 --- a/utils/verification.go +++ b/utils/verification.go @@ -15,15 +15,20 @@ import ( // GetVerificationCode2_1 takes a slice of files and an optional filename // for an "excludes" file, and returns a Package Verification Code calculated // according to SPDX spec version 2.1, 
section 3.9.4. -func GetVerificationCode2_1(files map[spdx.ElementID]*spdx.File2_1, excludeFile string) (string, error) { +func GetVerificationCode2_1(files []*spdx.File2_1, excludeFile string) (spdx.PackageVerificationCode, error) { // create slice of strings - unsorted SHA1s for all files shas := []string{} for i, f := range files { if f == nil { - return "", fmt.Errorf("got nil file for identifier %v", i) + return spdx.PackageVerificationCode{}, fmt.Errorf("got nil file for identifier %v", i) } if f.FileName != excludeFile { - shas = append(shas, f.FileChecksumSHA1) + // find the SHA1 hash, if present + for _, checksum := range f.Checksums { + if checksum.Algorithm == spdx.SHA1 { + shas = append(shas, checksum.Value) + } + } } } @@ -37,7 +42,11 @@ func GetVerificationCode2_1(files map[spdx.ElementID]*spdx.File2_1, excludeFile hsha1 := sha1.New() hsha1.Write([]byte(shasConcat)) bs := hsha1.Sum(nil) - code := fmt.Sprintf("%x", bs) + + code := spdx.PackageVerificationCode{ + Value: fmt.Sprintf("%x", bs), + ExcludedFiles: []string{excludeFile}, + } return code, nil } @@ -45,15 +54,20 @@ func GetVerificationCode2_1(files map[spdx.ElementID]*spdx.File2_1, excludeFile // GetVerificationCode2_2 takes a slice of files and an optional filename // for an "excludes" file, and returns a Package Verification Code calculated // according to SPDX spec version 2.2, section 3.9.4. -func GetVerificationCode2_2(files map[spdx.ElementID]*spdx.File2_2, excludeFile string) (string, error) { +func GetVerificationCode2_2(files []*spdx.File2_2, excludeFile string) (spdx.PackageVerificationCode, error) { // create slice of strings - unsorted SHA1s for all files shas := []string{} for i, f := range files { if f == nil { - return "", fmt.Errorf("got nil file for identifier %v", i) + return spdx.PackageVerificationCode{}, fmt.Errorf("got nil file for identifier %v", i) } if f.FileName != excludeFile { - shas = append(shas, f.FileChecksums[spdx.SHA1].Value) + // find the SHA1 hash, if present + for _, checksum := range f.Checksums { + if checksum.Algorithm == spdx.SHA1 { + shas = append(shas, checksum.Value) + } + } } } @@ -67,7 +81,11 @@ func GetVerificationCode2_2(files map[spdx.ElementID]*spdx.File2_2, excludeFile hsha1 := sha1.New() hsha1.Write([]byte(shasConcat)) bs := hsha1.Sum(nil) - code := fmt.Sprintf("%x", bs) + + code := spdx.PackageVerificationCode{ + Value: fmt.Sprintf("%x", bs), + ExcludedFiles: []string{excludeFile}, + } return code, nil } diff --git a/utils/verification_test.go b/utils/verification_test.go index c6fa3f9f..d31614af 100644 --- a/utils/verification_test.go +++ b/utils/verification_test.go @@ -11,99 +11,98 @@ import ( // ===== 2.1 Verification code functionality tests ===== func TestPackage2_1CanGetVerificationCode(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_1{ - "File0": &spdx.File2_1{ + files := []*spdx.File2_1{ + { FileName: "file2.txt", FileSPDXIdentifier: "File0", - FileChecksumSHA1: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - "File1": &spdx.File2_1{ + { FileName: "file1.txt", FileSPDXIdentifier: "File1", - FileChecksumSHA1: "3333333333bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "3333333333bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - "File2": &spdx.File2_1{ + { FileName: "file3.txt", FileSPDXIdentifier: "File2", - FileChecksumSHA1: "8888888888bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: 
"8888888888bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - "File3": &spdx.File2_1{ + { FileName: "file5.txt", FileSPDXIdentifier: "File3", - FileChecksumSHA1: "2222222222bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "2222222222bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - "File4": &spdx.File2_1{ + { FileName: "file4.txt", FileSPDXIdentifier: "File4", - FileChecksumSHA1: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", + Checksums: []spdx.Checksum{{Value: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", Algorithm: spdx.SHA1}}, }, } - wantCode := "ac924b375119c81c1f08c3e2722044bfbbdcd3dc" + wantCode := spdx.PackageVerificationCode{Value: "ac924b375119c81c1f08c3e2722044bfbbdcd3dc"} gotCode, err := GetVerificationCode2_1(files, "") if err != nil { t.Fatalf("expected nil error, got %v", err) } - if wantCode != gotCode { + if wantCode.Value != gotCode.Value { t.Errorf("expected %v, got %v", wantCode, gotCode) } } func TestPackage2_1CanGetVerificationCodeIgnoringExcludesFile(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File0"): &spdx.File2_1{ + files := []*spdx.File2_1{ + { FileName: "file1.txt", FileSPDXIdentifier: "File0", - FileChecksumSHA1: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - spdx.ElementID("File1"): &spdx.File2_1{ + { FileName: "file2.txt", FileSPDXIdentifier: "File1", - FileChecksumSHA1: "3333333333bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "3333333333bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - spdx.ElementID("File2"): &spdx.File2_1{ + { FileName: "thisfile.spdx", FileSPDXIdentifier: "File2", - FileChecksumSHA1: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", + Checksums: []spdx.Checksum{{Value: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", Algorithm: spdx.SHA1}}, }, - spdx.ElementID("File3"): &spdx.File2_1{ + { FileName: "file3.txt", FileSPDXIdentifier: "File3", - FileChecksumSHA1: "8888888888bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "8888888888bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - spdx.ElementID("File4"): &spdx.File2_1{ + { FileName: "file4.txt", FileSPDXIdentifier: "File4", - FileChecksumSHA1: "2222222222bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "2222222222bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, } - wantCode := "17fab1bd18fe5c13b5d3983f1c17e5f88b8ff266" + wantCode := spdx.PackageVerificationCode{Value: "17fab1bd18fe5c13b5d3983f1c17e5f88b8ff266"} gotCode, err := GetVerificationCode2_1(files, "thisfile.spdx") if err != nil { t.Fatalf("expected nil error, got %v", err) } - if wantCode != gotCode { + if wantCode.Value != gotCode.Value { t.Errorf("expected %v, got %v", wantCode, gotCode) } - } func TestPackage2_1GetVerificationCodeFailsIfNilFileInSlice(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_1{ - spdx.ElementID("File0"): &spdx.File2_1{ + files := []*spdx.File2_1{ + { FileName: "file2.txt", FileSPDXIdentifier: "File0", - FileChecksumSHA1: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, - spdx.ElementID("File1"): nil, - spdx.ElementID("File2"): &spdx.File2_1{ + nil, + { FileName: "file3.txt", FileSPDXIdentifier: "File2", - FileChecksumSHA1: "8888888888bbbbbbbbbbccccccccccdddddddddd", + Checksums: []spdx.Checksum{{Value: 
"8888888888bbbbbbbbbbccccccccccdddddddddd", Algorithm: spdx.SHA1}}, }, } @@ -116,52 +115,52 @@ func TestPackage2_1GetVerificationCodeFailsIfNilFileInSlice(t *testing.T) { // ===== 2.2 Verification code functionality tests ===== func TestPackage2_2CanGetVerificationCode(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_2{ - "File0": &spdx.File2_2{ + files := []*spdx.File2_2{ + { FileName: "file2.txt", FileSPDXIdentifier: "File0", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", }, }, }, - "File1": &spdx.File2_2{ + { FileName: "file1.txt", FileSPDXIdentifier: "File1", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "3333333333bbbbbbbbbbccccccccccdddddddddd", }, }, }, - "File2": &spdx.File2_2{ + { FileName: "file3.txt", FileSPDXIdentifier: "File2", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "8888888888bbbbbbbbbbccccccccccdddddddddd", }, }, }, - "File3": &spdx.File2_2{ + { FileName: "file5.txt", FileSPDXIdentifier: "File3", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "2222222222bbbbbbbbbbccccccccccdddddddddd", }, }, }, - "File4": &spdx.File2_2{ + { FileName: "file4.txt", FileSPDXIdentifier: "File4", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", }, @@ -169,65 +168,65 @@ func TestPackage2_2CanGetVerificationCode(t *testing.T) { }, } - wantCode := "ac924b375119c81c1f08c3e2722044bfbbdcd3dc" + wantCode := spdx.PackageVerificationCode{Value: "ac924b375119c81c1f08c3e2722044bfbbdcd3dc"} gotCode, err := GetVerificationCode2_2(files, "") if err != nil { t.Fatalf("expected nil error, got %v", err) } - if wantCode != gotCode { + if wantCode.Value != gotCode.Value { t.Errorf("expected %v, got %v", wantCode, gotCode) } } func TestPackage2_2CanGetVerificationCodeIgnoringExcludesFile(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File0"): &spdx.File2_2{ + files := []*spdx.File2_2{ + { FileName: "file1.txt", FileSPDXIdentifier: "File0", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", }, }, }, - spdx.ElementID("File1"): &spdx.File2_2{ + { FileName: "file2.txt", FileSPDXIdentifier: "File1", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "3333333333bbbbbbbbbbccccccccccdddddddddd", }, }, }, - spdx.ElementID("File2"): &spdx.File2_2{ + { FileName: "thisfile.spdx", FileSPDXIdentifier: "File2", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "bbbbbbbbbbccccccccccddddddddddaaaaaaaaaa", }, }, }, - spdx.ElementID("File3"): &spdx.File2_2{ + { FileName: "file3.txt", FileSPDXIdentifier: "File3", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: 
"8888888888bbbbbbbbbbccccccccccdddddddddd", }, }, }, - spdx.ElementID("File4"): &spdx.File2_2{ + { FileName: "file4.txt", FileSPDXIdentifier: "File4", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "2222222222bbbbbbbbbbccccccccccdddddddddd", }, @@ -235,36 +234,35 @@ func TestPackage2_2CanGetVerificationCodeIgnoringExcludesFile(t *testing.T) { }, } - wantCode := "17fab1bd18fe5c13b5d3983f1c17e5f88b8ff266" + wantCode := spdx.PackageVerificationCode{Value: "17fab1bd18fe5c13b5d3983f1c17e5f88b8ff266"} gotCode, err := GetVerificationCode2_2(files, "thisfile.spdx") if err != nil { t.Fatalf("expected nil error, got %v", err) } - if wantCode != gotCode { + if wantCode.Value != gotCode.Value { t.Errorf("expected %v, got %v", wantCode, gotCode) } - } func TestPackage2_2GetVerificationCodeFailsIfNilFileInSlice(t *testing.T) { - files := map[spdx.ElementID]*spdx.File2_2{ - spdx.ElementID("File0"): &spdx.File2_2{ + files := []*spdx.File2_2{ + { FileName: "file2.txt", FileSPDXIdentifier: "File0", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd", }, }, }, - spdx.ElementID("File1"): nil, - spdx.ElementID("File2"): &spdx.File2_2{ + nil, + { FileName: "file3.txt", FileSPDXIdentifier: "File2", - FileChecksums: map[spdx.ChecksumAlgorithm]spdx.Checksum{ - spdx.SHA1: spdx.Checksum{ + Checksums: []spdx.Checksum{ + { Algorithm: spdx.SHA1, Value: "8888888888bbbbbbbbbbccccccccccdddddddddd", }, From 04feebfab5e1abcd073a9240221f99a687807d7f Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Mon, 25 Apr 2022 14:58:39 -0700 Subject: [PATCH 2/6] Add YAML support Signed-off-by: Ian Ling --- README.md | 1 + go.mod | 1 + go.sum | 8 + yaml/parser.go | 29 +++ yaml/writer.go | 25 +++ yaml/yaml_test.go | 449 ++++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 513 insertions(+) create mode 100644 yaml/parser.go create mode 100644 yaml/writer.go create mode 100644 yaml/yaml_test.go diff --git a/README.md b/README.md index 3dd0e732..89a47296 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,7 @@ tools-golang provides the following packages: * *tvsaver* - tag-value document saver * *rdfloader* - RDF document loader * *json* - JSON document parser and writer +* *yaml* - YAML document parser and writer * *builder* - builds "empty" SPDX document (with hashes) for directory contents * *idsearcher* - searches for [SPDX short-form IDs](https://spdx.org/ids/) and builds SPDX document * *licensediff* - compares concluded licenses between files in two packages diff --git a/go.mod b/go.mod index 213c1297..1017b95c 100644 --- a/go.mod +++ b/go.mod @@ -5,4 +5,5 @@ go 1.13 require ( github.com/google/go-cmp v0.5.7 github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb + sigs.k8s.io/yaml v1.3.0 ) diff --git a/go.sum b/go.sum index 4355c263..85ffe1cd 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,14 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb h1:bLo8hvc8XFm9J47r690TUKBzcjSWdJDxmjXJZ+/f92U= github.com/spdx/gordf 
v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/yaml/parser.go b/yaml/parser.go new file mode 100644 index 00000000..ca852ddf --- /dev/null +++ b/yaml/parser.go @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_yaml + +import ( + "bytes" + "io" + "sigs.k8s.io/yaml" + + "github.com/spdx/tools-golang/spdx" +) + +// Load2_2 takes in an io.Reader and returns an SPDX document. +func Load2_2(content io.Reader) (*spdx.Document2_2, error) { + // convert io.Reader to a slice of bytes and call the parser + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(content) + if err != nil { + return nil, err + } + + var doc spdx.Document2_2 + err = yaml.Unmarshal(buf.Bytes(), &doc) + if err != nil { + return nil, err + } + + return &doc, nil +} diff --git a/yaml/writer.go b/yaml/writer.go new file mode 100644 index 00000000..edd47936 --- /dev/null +++ b/yaml/writer.go @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_yaml + +import ( + "io" + "sigs.k8s.io/yaml" + + "github.com/spdx/tools-golang/spdx" +) + +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer in YAML format. 
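+//
+// A minimal usage sketch from a consuming package (assuming a populated
+// *spdx.Document2_2 named doc and a hypothetical output path) might look like:
+//
+//    f, err := os.Create("out.spdx.yaml")
+//    if err != nil {
+//        return err
+//    }
+//    defer f.Close()
+//    if err := spdx_yaml.Save2_2(doc, f); err != nil {
+//        return err
+//    }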
+func Save2_2(doc *spdx.Document2_2, w io.Writer) error {
+    buf, err := yaml.Marshal(doc)
+    if err != nil {
+        return err
+    }
+
+    _, err = w.Write(buf)
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
diff --git a/yaml/yaml_test.go b/yaml/yaml_test.go
new file mode 100644
index 00000000..49f8ebfe
--- /dev/null
+++ b/yaml/yaml_test.go
@@ -0,0 +1,449 @@
+// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
+
+package spdx_yaml
+
+import (
+    "bytes"
+    "fmt"
+    "github.com/google/go-cmp/cmp"
+    "os"
+    "testing"
+
+    "github.com/spdx/tools-golang/spdx"
+)
+
+func TestLoad2_2(t *testing.T) {
+    file, err := os.Open("../examples/sample-docs/yaml/SPDXYAMLExample-2.2.spdx.yaml")
+    if err != nil {
+        panic(fmt.Errorf("error opening File: %s", err))
+    }
+
+    got, err := Load2_2(file)
+    if err != nil {
+        t.Errorf("yaml.parser.Load2_2() error = %v", err)
+        return
+    }
+
+    // get a copy of the handwritten struct so we don't mutate it by accident
+    handwrittenExample := want
+
+    if !cmp.Equal(handwrittenExample, got) {
+        t.Errorf("got incorrect struct after parsing YAML example")
+        return
+    }
+}
+
+func TestWrite2_2(t *testing.T) {
+    w := &bytes.Buffer{}
+    // get a copy of the handwritten struct so we don't mutate it by accident
+    handwrittenExample := want
+    if err := Save2_2(&handwrittenExample, w); err != nil {
+        t.Errorf("Save2_2() error = %v", err.Error())
+        return
+    }
+
+    // we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct
+    parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes()))
+    if err != nil {
+        t.Errorf("failed to parse written document: %v", err.Error())
+        return
+    }
+
+    if !cmp.Equal(handwrittenExample, parsedDoc) {
+        t.Errorf("got incorrect struct after writing and re-parsing YAML example")
+        return
+    }
+}
+
+// want is a handwritten translation of the official example YAML SPDX v2.2 document into a Go struct.
+// We expect that the result of parsing the official document should be this value.
+// We expect that the result of writing this struct should match the official example document.
+var want = spdx.Document2_2{ + DataLicense: "CC0-1.0", + SPDXVersion: "SPDX-2.2", + SPDXIdentifier: "SPDXRef-DOCUMENT", + DocumentName: "SPDX-Tools-v2.0", + DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", + CreationInfo: &spdx.CreationInfo2_2{ + LicenseListVersion: "3.9", + Creators: []spdx.Creator{ + {CreatorType: "Tool", Creator: "LicenseFind-1.0"}, + {CreatorType: "Organization", Creator: "ExampleCodeInspect ()"}, + {CreatorType: "Person", Creator: "Jane Doe ()"}, + }, + Created: "2010-01-29T18:30:22Z", + CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", + }, + DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ + { + DocumentRefID: "DocumentRef-spdx-tool-1.2", + URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", + Checksum: spdx.Checksum{ + Algorithm: spdx.SHA1, + Value: "d6a770ba38583ed4bb4525bd96e50461655d2759", + }, + }, + }, + OtherLicenses: []*spdx.OtherLicense2_2{ + { + LicenseIdentifier: "LicenseRef-1", + ExtractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-2", + ExtractedText: "This package includes the GRDDL parser developed by Hewlett Packard under the following license:\n� Copyright 2007 Hewlett-Packard Development Company, LP\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: \n\nRedistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. \nRedistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
\nThe name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. \nTHIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + }, + { + LicenseIdentifier: "LicenseRef-4", + ExtractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-Beerware-4.2", + ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp", + LicenseComment: "The beerware license has a couple of other standard variants.", + LicenseName: "Beer-Ware License (Version 42)", + LicenseCrossReferences: []string{"http://people.freebsd.org/~phk/"}, + }, + { + LicenseIdentifier: "LicenseRef-3", + ExtractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer. \n\n2. 
Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n3. The end-user documentation included with the redistribution,\n if any, must include the following acknowledgment: \n \"This product includes software developed by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software itself,\n if and wherever such third-party acknowledgments normally appear.\n\n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n or promote products derived from this software without prior \n written permission. For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\" appear in their name, without prior written\n permission of the author.\n\nTHIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + LicenseName: "CyberNeko License", + LicenseCrossReferences: []string{ + "http://people.apache.org/~andyc/neko/LICENSE", + "http://justasample.url.com", + }, + LicenseComment: "This is tye CyperNeko License", + }, + }, + Annotations: []*spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Jane Doe ()", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Document level annotation", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Joe Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-02-10T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", + }, + { + Annotator: spdx.Annotator{ + Annotator: "Suzanne Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-03-13T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "Another example reviewer.", + }, + }, + Packages: []*spdx.Package2_2{ + { + PackageName: "glibc", + PackageSPDXIdentifier: "SPDXRef-Package", + PackageVersion: "2.11.1", + PackageFileName: "glibc-2.11.1.tar.gz", + PackageSupplier: &spdx.Supplier{ + Supplier: "Jane Doe (jane.doe@example.com)", + SupplierType: "Person", + }, + PackageOriginator: &spdx.Originator{ + Originator: "ExampleCodeInspect (contact@example.com)", + OriginatorType: "Organization", + }, + PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", + FilesAnalyzed: true, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + ExcludedFiles: []string{"./package.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: "SHA256", + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + }, + PackageHomePage: "http://ftp.gnu.org/gnu/glibc", + PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", + PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", + PackageLicenseInfoFromFiles: []string{ + "GPL-2.0-only", + "LicenseRef-2", + "LicenseRef-1", + }, + PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", + PackageLicenseComments: "The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.", + PackageCopyrightText: "Copyright 2008-2010 John Smith", + PackageSummary: "GNU C library.", + PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", + PackageComment: "", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "SECURITY", + RefType: "cpe23Type", + Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", + }, + { + Category: "OTHER", + RefType: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge", + Locator: "acmecorp/acmenator/4.1.3-alpha", + ExternalRefComment: "This is the external ref for Acme", + }, + }, + PackageAttributionTexts: []string{ + "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. 
License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.", + }, + Files: nil, + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Package Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Package level annotation", + }, + }, + }, + { + PackageSPDXIdentifier: "SPDXRef-fromDoap-1", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: false, + PackageHomePage: "http://commons.apache.org/proper/commons-lang/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageName: "Apache Commons Lang", + }, + { + PackageName: "Jena", + PackageSPDXIdentifier: "SPDXRef-fromDoap-0", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "PACKAGE_MANAGER", + RefType: "purl", + Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", + }, + }, + FilesAnalyzed: false, + PackageHomePage: "http://www.openjena.org/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageVersion: "3.12.0", + }, + { + PackageSPDXIdentifier: "SPDXRef-Saxon", + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, + PackageCopyrightText: "Copyright Saxonica Ltd", + PackageDescription: "The Saxon package is a collection of tools for processing XML documents.", + PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", + FilesAnalyzed: false, + PackageHomePage: "http://saxon.sourceforge.net/", + PackageLicenseComments: "Other versions available for a commercial license", + PackageLicenseConcluded: "MPL-1.0", + PackageLicenseDeclared: "MPL-1.0", + PackageName: "Saxon", + PackageFileName: "saxonB-8.8.zip", + PackageVersion: "8.8", + }, + }, + Files: []*spdx.File2_2{ + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "SPDXRef-DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + { + FileSPDXIdentifier: "SPDXRef-CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation 
(http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "SPDXRef-JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileSPDXIdentifier: "SPDXRef-File", + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "File Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "File level annotation", + }, + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + }, + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", + FileCopyrightText: "Copyright 2008-2010 John Smith", + FileContributors: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, + FileName: "./package/foo.c", + FileTypes: []string{"SOURCE"}, + LicenseComments: "The concluded license was taken from the package level that the file was included in.", + LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", + LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, + FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + }, + }, + Snippets: []spdx.Snippet2_2{ + { + SnippetSPDXIdentifier: "SPDXRef-Snippet", + SnippetFromFileSPDXIdentifier: "SPDXRef-DoapSource", + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{ + Offset: 310, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + Offset: 420, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + }, + { + StartPointer: spdx.SnippetRangePointer{ + LineNumber: 5, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + EndPointer: spdx.SnippetRangePointer{ + LineNumber: 23, + FileSPDXIdentifier: "SPDXRef-DoapSource", + }, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-only", + LicenseInfoInSnippet: []string{"GPL-2.0-only"}, + SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.", + SnippetCopyrightText: "Copyright 2008-2010 John Smith", + SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", + SnippetName: "from linux kernel", + }, + }, + Relationships: []*spdx.Relationship2_2{ + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("spdx-tool-1.2", "ToolsElement"), + Relationship: "COPY_OF", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "File"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "JenaLib"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "Saxon"), + Relationship: "DYNAMIC_LINK", + }, + { + RefA: spdx.MakeDocElementID("", "CommonsLangSrc"), + RefB: spdx.MakeDocElementSpecial("NOASSERTION"), + Relationship: "GENERATED_FROM", + }, + { + RefA: spdx.MakeDocElementID("", "JenaLib"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "File"), + RefB: spdx.MakeDocElementID("", "fromDoap-0"), + Relationship: "GENERATED_FROM", + }, + }, +} From 325205e23e686792b7128abb8c7bd117b2ad5ac4 Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Mon, 25 Apr 2022 15:09:35 -0700 Subject: [PATCH 3/6] Add String and FromString methods to structs Signed-off-by: Ian Ling --- json/json_test.go | 32 ++-- rdfloader/parser2v2/parse_spdx_document.go | 2 +- spdx/annotation.go | 42 +++-- spdx/checksum.go | 36 +++++ spdx/creation_info.go | 40 +++-- spdx/document.go | 95 ++++++++++- spdx/identifier.go | 148 ++++++++++++------ spdx/identifier_test.go | 38 +++++ spdx/package.go | 114 ++++++++++---- spdx/snippet.go | 51 ++++++ tvloader/parser2v1/parse_creation_info.go | 25 +-- .../parser2v1/parse_creation_info_test.go | 9 +- tvloader/parser2v2/parse_creation_info.go | 25 +-- .../parser2v2/parse_creation_info_test.go | 9 +- 
tvsaver/saver2v1/save_annotation.go | 2 +- tvsaver/saver2v1/save_document.go | 6 +- tvsaver/saver2v1/save_file.go | 2 +- tvsaver/saver2v1/save_package.go | 2 +- tvsaver/saver2v1/save_relationship.go | 4 +- tvsaver/saver2v1/save_snippet.go | 4 +- tvsaver/saver2v2/save_annotation.go | 2 +- tvsaver/saver2v2/save_document.go | 6 +- tvsaver/saver2v2/save_file.go | 2 +- tvsaver/saver2v2/save_package.go | 2 +- tvsaver/saver2v2/save_relationship.go | 4 +- tvsaver/saver2v2/save_snippet.go | 4 +- yaml/yaml_test.go | 32 ++-- 27 files changed, 540 insertions(+), 198 deletions(-) create mode 100644 spdx/identifier_test.go diff --git a/json/json_test.go b/json/json_test.go index c78013cb..845986c6 100644 --- a/json/json_test.go +++ b/json/json_test.go @@ -61,7 +61,7 @@ func TestWrite2_2(t *testing.T) { var want = spdx.Document2_2{ DataLicense: "CC0-1.0", SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "SPDXRef-DOCUMENT", + SPDXIdentifier: "DOCUMENT", DocumentName: "SPDX-Tools-v2.0", DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", CreationInfo: &spdx.CreationInfo2_2{ @@ -77,7 +77,7 @@ var want = spdx.Document2_2{ DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ { - DocumentRefID: "DocumentRef-spdx-tool-1.2", + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", Checksum: spdx.Checksum{ Algorithm: spdx.SHA1, @@ -148,7 +148,7 @@ var want = spdx.Document2_2{ Packages: []*spdx.Package2_2{ { PackageName: "glibc", - PackageSPDXIdentifier: "SPDXRef-Package", + PackageSPDXIdentifier: "Package", PackageVersion: "2.11.1", PackageFileName: "glibc-2.11.1.tar.gz", PackageSupplier: &spdx.Supplier{ @@ -223,7 +223,7 @@ var want = spdx.Document2_2{ }, }, { - PackageSPDXIdentifier: "SPDXRef-fromDoap-1", + PackageSPDXIdentifier: "fromDoap-1", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: "NOASSERTION", FilesAnalyzed: false, @@ -234,7 +234,7 @@ var want = spdx.Document2_2{ }, { PackageName: "Jena", - PackageSPDXIdentifier: "SPDXRef-fromDoap-0", + PackageSPDXIdentifier: "fromDoap-0", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", PackageExternalReferences: []*spdx.PackageExternalReference2_2{ @@ -251,7 +251,7 @@ var want = spdx.Document2_2{ PackageVersion: "3.12.0", }, { - PackageSPDXIdentifier: "SPDXRef-Saxon", + PackageSPDXIdentifier: "Saxon", PackageChecksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -274,7 +274,7 @@ var want = spdx.Document2_2{ Files: []*spdx.File2_2{ { FileName: "./src/org/spdx/parser/DOAPProject.java", - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", FileTypes: []string{ "SOURCE", }, @@ -298,7 +298,7 @@ var want = spdx.Document2_2{ }, }, { - FileSPDXIdentifier: "SPDXRef-CommonsLangSrc", + FileSPDXIdentifier: "CommonsLangSrc", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -315,7 +315,7 @@ var want = spdx.Document2_2{ FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", }, { - FileSPDXIdentifier: "SPDXRef-JenaLib", + 
FileSPDXIdentifier: "JenaLib", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -332,7 +332,7 @@ var want = spdx.Document2_2{ LicenseInfoInFiles: []string{"LicenseRef-1"}, }, { - FileSPDXIdentifier: "SPDXRef-File", + FileSPDXIdentifier: "File", Annotations: []spdx.Annotation2_2{ { Annotator: spdx.Annotator{ @@ -367,27 +367,27 @@ var want = spdx.Document2_2{ }, Snippets: []spdx.Snippet2_2{ { - SnippetSPDXIdentifier: "SPDXRef-Snippet", - SnippetFromFileSPDXIdentifier: "SPDXRef-DoapSource", + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", Ranges: []spdx.SnippetRange{ { StartPointer: spdx.SnippetRangePointer{ Offset: 310, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ Offset: 420, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, { StartPointer: spdx.SnippetRangePointer{ LineNumber: 5, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ LineNumber: 23, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, }, diff --git a/rdfloader/parser2v2/parse_spdx_document.go b/rdfloader/parser2v2/parse_spdx_document.go index 61593172..17f16696 100644 --- a/rdfloader/parser2v2/parse_spdx_document.go +++ b/rdfloader/parser2v2/parse_spdx_document.go @@ -94,7 +94,7 @@ func (parser *rdfParser2_2) getExternalDocumentRefFromNode(node *gordfParser.Nod switch triple.Predicate.ID { case SPDX_EXTERNAL_DOCUMENT_ID: // cardinality: exactly 1 - edr.DocumentRefID = triple.Object.ID + edr.DocumentRefID = spdx.MakeDocElementID(triple.Object.ID, "") case SPDX_SPDX_DOCUMENT: // cardinality: exactly 1 // assumption: "spdxDocument" property of an external document diff --git a/spdx/annotation.go b/spdx/annotation.go index 560b6f00..37958efd 100644 --- a/spdx/annotation.go +++ b/spdx/annotation.go @@ -14,17 +14,22 @@ type Annotator struct { AnnotatorType string } -// UnmarshalJSON takes an annotator in the typical one-line format and parses it into an Annotator struct. -// This function is also used when unmarshalling YAML -func (a *Annotator) UnmarshalJSON(data []byte) error { - // annotator will simply be a string - annotatorStr := string(data) - annotatorStr = strings.Trim(annotatorStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (a Annotator) Validate() error { + if a.Annotator == "" || a.AnnotatorType == "" { + return fmt.Errorf("invalid Annotator, missing fields. %+v", a) + } - annotatorFields := strings.SplitN(annotatorStr, ": ", 2) + return nil +} + +// FromString parses an Annotator string into an Annotator struct. +func (a *Annotator) FromString(value string) error { + annotatorFields := strings.SplitN(value, ": ", 2) if len(annotatorFields) != 2 { - return fmt.Errorf("failed to parse Annotator '%s'", annotatorStr) + return fmt.Errorf("failed to parse Annotator '%s'", value) } a.AnnotatorType = annotatorFields[0] @@ -33,14 +38,29 @@ func (a *Annotator) UnmarshalJSON(data []byte) error { return nil } +// String converts the receiver into a string. +func (a Annotator) String() string { + return fmt.Sprintf("%s: %s", a.AnnotatorType, a.Annotator) +} + +// UnmarshalJSON takes an annotator in the typical one-line format and parses it into an Annotator struct. 
+// This function is also used when unmarshalling YAML +func (a *Annotator) UnmarshalJSON(data []byte) error { + // annotator will simply be a string + annotatorStr := string(data) + annotatorStr = strings.Trim(annotatorStr, "\"") + + return a.FromString(annotatorStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing an Annotator in string form. // This function is also used when marshalling to YAML func (a Annotator) MarshalJSON() ([]byte, error) { - if a.Annotator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", a.AnnotatorType, a.Annotator)) + if err := a.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(a.String()) } // Annotation2_1 is an Annotation section of an SPDX Document for version 2.1 of the spec. diff --git a/spdx/checksum.go b/spdx/checksum.go index 3295969a..c8aca876 100644 --- a/spdx/checksum.go +++ b/spdx/checksum.go @@ -2,6 +2,11 @@ package spdx +import ( + "fmt" + "strings" +) + // ChecksumAlgorithm represents the algorithm used to generate the file checksum in the Checksum struct. type ChecksumAlgorithm string @@ -24,3 +29,34 @@ type Checksum struct { Algorithm ChecksumAlgorithm `json:"algorithm"` Value string `json:"checksumValue"` } + +// FromString parses a Checksum string into a spdx.Checksum. +// These strings take the following form: +// SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759 +func (c *Checksum) FromString(value string) error { + fields := strings.Split(value, ": ") + if len(fields) != 2 { + return fmt.Errorf("invalid checksum: %s", value) + } + + c.Algorithm = ChecksumAlgorithm(fields[0]) + c.Value = fields[1] + + return nil +} + +// String converts the Checksum to its string form. +// e.g. "SHA1: d6a770ba38583ed4bb4525bd96e50461655d2759" +func (c Checksum) String() string { + return fmt.Sprintf("%s: %s", c.Algorithm, c.Value) +} + +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (c Checksum) Validate() error { + if c.Algorithm == "" || c.Value == "" { + return fmt.Errorf("invalid checksum, missing field(s). %+v", c) + } + + return nil +} diff --git a/spdx/creation_info.go b/spdx/creation_info.go index c0b6f636..3f121173 100644 --- a/spdx/creation_info.go +++ b/spdx/creation_info.go @@ -16,11 +16,18 @@ type Creator struct { CreatorType string } -// UnmarshalJSON takes an annotator in the typical one-line format and parses it into a Creator struct. -// This function is also used when unmarshalling YAML -func (c *Creator) UnmarshalJSON(data []byte) error { - str := string(data) - str = strings.Trim(str, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (c Creator) Validate() error { + if c.CreatorType == "" || c.Creator == "" { + return fmt.Errorf("invalid Creator, missing fields. %+v", c) + } + + return nil +} + +// FromString takes a Creator in the typical one-line format and parses it into a Creator struct. +func (c *Creator) FromString(str string) error { fields := strings.SplitN(str, ": ", 2) if len(fields) != 2 { @@ -33,14 +40,27 @@ func (c *Creator) UnmarshalJSON(data []byte) error { return nil } +// String converts the Creator into a string. +func (c Creator) String() string { + return fmt.Sprintf("%s: %s", c.CreatorType, c.Creator) +} + +// UnmarshalJSON takes a Creator in the typical one-line string format and parses it into a Creator struct. 
+func (c *Creator) UnmarshalJSON(data []byte) error { + str := string(data) + str = strings.Trim(str, "\"") + + return c.FromString(str) +} + // MarshalJSON converts the receiver into a slice of bytes representing a Creator in string form. // This function is also used with marshalling to YAML func (c Creator) MarshalJSON() ([]byte, error) { - if c.Creator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", c.CreatorType, c.Creator)) + if err := c.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(c.String()) } // CreationInfo2_1 is a Document Creation Information section of an @@ -48,7 +68,7 @@ func (c Creator) MarshalJSON() ([]byte, error) { type CreationInfo2_1 struct { // 2.7: License List Version // Cardinality: optional, one - LicenseListVersion string `json:"licenseListVersion"` + LicenseListVersion string `json:"licenseListVersion,omitempty"` // 2.8: Creators: may have multiple keys for Person, Organization // and/or Tool @@ -61,7 +81,7 @@ type CreationInfo2_1 struct { // 2.10: Creator Comment // Cardinality: optional, one - CreatorComment string `json:"comment"` + CreatorComment string `json:"comment,omitempty"` } // CreationInfo2_2 is a Document Creation Information section of an diff --git a/spdx/document.go b/spdx/document.go index a3117cb7..69d6c315 100644 --- a/spdx/document.go +++ b/spdx/document.go @@ -3,13 +3,18 @@ // SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later package spdx +import ( + "fmt" + "strings" +) + // ExternalDocumentRef2_1 is a reference to an external SPDX document // as defined in section 2.6 for version 2.1 of the spec. type ExternalDocumentRef2_1 struct { // DocumentRefID is the ID string defined in the start of the // reference. It should _not_ contain the "DocumentRef-" part // of the mandatory ID string. - DocumentRefID string `json:"externalDocumentId"` + DocumentRefID DocElementID `json:"externalDocumentId"` // URI is the URI defined for the external document URI string `json:"spdxDocument"` @@ -24,7 +29,7 @@ type ExternalDocumentRef2_2 struct { // DocumentRefID is the ID string defined in the start of the // reference. It should _not_ contain the "DocumentRef-" part // of the mandatory ID string. - DocumentRefID string `json:"externalDocumentId"` + DocumentRefID DocElementID `json:"externalDocumentId"` // URI is the URI defined for the external document URI string `json:"spdxDocument"` @@ -33,6 +38,92 @@ type ExternalDocumentRef2_2 struct { Checksum Checksum `json:"checksum"` } +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ExternalDocumentRef2_1) Validate() error { + if err := e.Checksum.Validate(); err != nil { + return fmt.Errorf("invalid Checksum in DocElementID: %w", err) + } + + if e.DocumentRefID.Validate() != nil || e.URI == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", e) + } + + return nil +} + +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ExternalDocumentRef2_2) Validate() error { + if err := e.Checksum.Validate(); err != nil { + return fmt.Errorf("invalid Checksum in DocElementID: %w", err) + } + + if e.DocumentRefID.Validate() != nil || e.URI == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", e) + } + + return nil +} + +// String converts an ExternalDocumentRef2_1 object to a string.
+// These strings take the form: "<DocumentRef-ID> <URI> <Checksum>" +func (e ExternalDocumentRef2_1) String() string { + return fmt.Sprintf("%s %s %s", e.DocumentRefID, e.URI, e.Checksum) +} + +// String converts an ExternalDocumentRef2_2 object to a string. +// These strings take the form: "<DocumentRef-ID> <URI> <Checksum>" +func (e ExternalDocumentRef2_2) String() string { + return fmt.Sprintf("%s %s %s", e.DocumentRefID, e.URI, e.Checksum) +} + +// FromString parses a string into a spdx.ExternalDocumentRef2_1. +// These strings take the following form: "<DocumentRef-ID> <URI> <Checksum>" +func (e *ExternalDocumentRef2_1) FromString(value string) error { + fields := strings.SplitN(value, " ", 3) + if len(fields) != 3 { + return fmt.Errorf("invalid external document reference: %s", value) + } + + e.DocumentRefID = MakeDocElementID(fields[0], "") + e.URI = fields[1] + + // the checksum is special and needs further processing + var checksum Checksum + err := checksum.FromString(fields[2]) + if err != nil { + return err + } + + e.Checksum = checksum + + return nil +} + +// FromString parses a string into a spdx.ExternalDocumentRef2_2. +// These strings take the following form: "<DocumentRef-ID> <URI> <Checksum>" +func (e *ExternalDocumentRef2_2) FromString(value string) error { + fields := strings.SplitN(value, " ", 3) + if len(fields) != 3 { + return fmt.Errorf("invalid external document reference: %s", value) + } + + e.DocumentRefID = MakeDocElementID(fields[0], "") + e.URI = fields[1] + + // the checksum is special and needs further processing + var checksum Checksum + err := checksum.FromString(fields[2]) + if err != nil { + return err + } + + e.Checksum = checksum + + return nil +} + // Document2_1 is an SPDX Document for version 2.1 of the spec. // See https://spdx.org/sites/cpstandard/files/pages/files/spdxversion2.1.pdf type Document2_1 struct { diff --git a/spdx/identifier.go b/spdx/identifier.go index 56f8ffc8..64cbf339 100644 --- a/spdx/identifier.go +++ b/spdx/identifier.go @@ -14,13 +14,63 @@ import ( // ElementIDs should NOT contain the mandatory 'SPDXRef-' portion. type ElementID string +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (e ElementID) Validate() error { + if e == "" { + return fmt.Errorf("invalid ElementID, must not be blank") + } + + return nil +} + +func (e ElementID) String() string { + return fmt.Sprintf("SPDXRef-%s", string(e)) +} + +// FromString parses an SPDX Identifier string into an ElementID. +// These strings take the form: "SPDXRef-some-identifier" +func (e *ElementID) FromString(idStr string) error { + idFields := strings.SplitN(idStr, "SPDXRef-", 2) + switch len(idFields) { + case 2: + // "SPDXRef-" prefix was present + *e = ElementID(idFields[1]) + case 1: + // prefix was not present + *e = ElementID(idFields[0]) + } + + return nil +} + +// UnmarshalJSON takes a SPDX Identifier string and parses it into an ElementID. +// This function is also used when unmarshalling YAML +func (e *ElementID) UnmarshalJSON(data []byte) error { + // SPDX identifier will simply be a string + idStr := string(data) + idStr = strings.Trim(idStr, "\"") + + return e.FromString(idStr) +} + +// MarshalJSON converts the receiver into a slice of bytes representing an ElementID in string form.
+// This function is also used when marshalling to YAML +func (e ElementID) MarshalJSON() ([]byte, error) { + if err := e.Validate(); err != nil { + return nil, err + } + + return json.Marshal(e.String()) +} + // DocElementID represents an SPDX element identifier that could be defined // in a different SPDX document, and therefore could have a "DocumentRef-" // portion, such as Relationships and Annotations. // ElementID is used for attributes in which a "DocumentRef-" portion cannot // appear, such as a Package or File definition (since it is necessarily // being defined in the present document). -// DocumentRefID will be the empty string for elements defined in the +// DocumentRefID will be an empty string for elements defined in the // present document. // DocElementIDs should NOT contain the mandatory 'DocumentRef-' or // 'SPDXRef-' portions. @@ -34,13 +84,22 @@ type DocElementID struct { SpecialID string } -// UnmarshalJSON takes a SPDX Identifier string parses it into a DocElementID struct. -// This function is also used when unmarshalling YAML -func (d *DocElementID) UnmarshalJSON(data []byte) error { - // SPDX identifier will simply be a string - idStr := string(data) - idStr = strings.Trim(idStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (d DocElementID) Validate() error { + if d.DocumentRefID == "" && d.ElementRefID.Validate() != nil && d.SpecialID == "" { + return fmt.Errorf("invalid DocElementID, missing fields. %+v", d) + } + return nil +} + +// FromString parses an SPDX Identifier string into a DocElementID struct. +// These strings take one of the following forms: +// - "DocumentRef-other-document:SPDXRef-some-identifier" +// - "SPDXRef-some-identifier" +// - "NOASSERTION" or "NONE" +func (d *DocElementID) FromString(idStr string) error { // handle special cases if idStr == "NONE" || idStr == "NOASSERTION" { d.SpecialID = idStr @@ -66,37 +125,57 @@ func (d *DocElementID) UnmarshalJSON(data []byte) error { } // handle SPDXRef- - idFields = strings.SplitN(idStr, "SPDXRef-", 2) - if len(idFields) != 2 { - return fmt.Errorf("failed to parse SPDX Identifier '%s'", idStr) + err := d.ElementRefID.FromString(idStr) + if err != nil { + return err } - d.ElementRefID = ElementID(idFields[1]) - return nil } -// MarshalJSON converts the receiver into a slice of bytes representing a DocElementID in string form. -// This function is also used when marshalling to YAML -func (d DocElementID) MarshalJSON() ([]byte, error) { +// String converts the receiver into a string representing a DocElementID. +// This is used when writing a spreadsheet SPDX file, for example. +func (d DocElementID) String() string { if d.DocumentRefID != "" && d.ElementRefID != "" { - return json.Marshal(fmt.Sprintf("DocumentRef-%s:SPDXRef-%s", d.DocumentRefID, d.ElementRefID)) + return fmt.Sprintf("DocumentRef-%s:%s", d.DocumentRefID, d.ElementRefID) + } else if d.DocumentRefID != "" { + return fmt.Sprintf("DocumentRef-%s", d.DocumentRefID) } else if d.ElementRefID != "" { - return json.Marshal(fmt.Sprintf("SPDXRef-%s", d.ElementRefID)) + return d.ElementRefID.String() } else if d.SpecialID != "" { - return json.Marshal(d.SpecialID) + return d.SpecialID } - return []byte{}, fmt.Errorf("failed to marshal empty DocElementID") + return "" } -// TODO: add equivalents for LicenseRef- identifiers +// UnmarshalJSON takes a SPDX Identifier string and parses it into a DocElementID struct.
+// This function is also used when unmarshalling YAML +func (d *DocElementID) UnmarshalJSON(data []byte) error { + // SPDX identifier will simply be a string + idStr := string(data) + idStr = strings.Trim(idStr, "\"") -// MakeDocElementID takes strings (without prefixes) for the DocumentRef- -// and SPDXRef- identifiers, and returns a DocElementID. An empty string -// should be used for the DocumentRef- portion if it is referring to the -// present document. + return d.FromString(idStr) +} + +// MarshalJSON converts the receiver into a slice of bytes representing a DocElementID in string form. +// This function is also used when marshalling to YAML +func (d DocElementID) MarshalJSON() ([]byte, error) { + if err := d.Validate(); err != nil { + return nil, err + } + + return json.Marshal(d.String()) +} + +// MakeDocElementID takes strings for the DocumentRef- and SPDXRef- identifiers (these prefixes will be stripped if present), +// and returns a DocElementID. +// An empty string should be used for the DocumentRef- portion if it is referring to the present document. func MakeDocElementID(docRef string, eltRef string) DocElementID { + docRef = strings.Replace(docRef, "DocumentRef-", "", 1) + eltRef = strings.Replace(eltRef, "SPDXRef-", "", 1) + return DocElementID{ DocumentRefID: docRef, ElementRefID: ElementID(eltRef), @@ -110,24 +189,3 @@ func MakeDocElementID(docRef string, eltRef string) DocElementID { func MakeDocElementSpecial(specialID string) DocElementID { return DocElementID{SpecialID: specialID} } - -// RenderElementID takes an ElementID and returns the string equivalent, -// with the SPDXRef- prefix reinserted. -func RenderElementID(eID ElementID) string { - return "SPDXRef-" + string(eID) -} - -// RenderDocElementID takes a DocElementID and returns the string equivalent, -// with the SPDXRef- prefix (and, if applicable, the DocumentRef- prefix) -// reinserted. If a SpecialID is present, it will be rendered verbatim and -// DocumentRefID and ElementRefID will be ignored. 
-func RenderDocElementID(deID DocElementID) string { - if deID.SpecialID != "" { - return deID.SpecialID - } - prefix := "" - if deID.DocumentRefID != "" { - prefix = "DocumentRef-" + deID.DocumentRefID + ":" - } - return prefix + "SPDXRef-" + string(deID.ElementRefID) -} diff --git a/spdx/identifier_test.go b/spdx/identifier_test.go new file mode 100644 index 00000000..1c9d0059 --- /dev/null +++ b/spdx/identifier_test.go @@ -0,0 +1,38 @@ +package spdx + +import ( + "encoding/json" + "testing" +) + +func TestMakeDocElementID(t *testing.T) { + // without DocRef + docElementID := MakeDocElementID("", "Package") + if docElementID.String() != "SPDXRef-Package" { + t.Errorf("expected 'SPDXRef-Package', got %s", docElementID) + return + } + + // with DocRef + docElementID = MakeDocElementID("OtherDoc", "Package") + if docElementID.String() != "DocumentRef-OtherDoc:SPDXRef-Package" { + t.Errorf("expected 'DocumentRef-OtherDoc:SPDXRef-Package', got %s", docElementID) + return + } +} + +func TestDocElementID_UnmarshalJSON(t *testing.T) { + rawJSON := json.RawMessage("\"DocumentRef-some-doc\"") + docElementID := DocElementID{} + + err := json.Unmarshal(rawJSON, &docElementID) + if err != nil { + t.Errorf(err.Error()) + return + } + + if docElementID.DocumentRefID != "some-doc" { + t.Errorf("expected DocumentRefID 'some-doc', got %s", docElementID.DocumentRefID) + return + } +} \ No newline at end of file diff --git a/spdx/package.go b/spdx/package.go index e6c45223..20ba57e1 100644 --- a/spdx/package.go +++ b/spdx/package.go @@ -15,22 +15,29 @@ type Supplier struct { SupplierType string } -// UnmarshalJSON takes a supplier in the typical one-line format and parses it into a Supplier struct. -// This function is also used when unmarshalling YAML -func (s *Supplier) UnmarshalJSON(data []byte) error { - // the value is just a string presented as a slice of bytes - supplierStr := string(data) - supplierStr = strings.Trim(supplierStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (s Supplier) Validate() error { + // SupplierType is allowed to be empty if Supplier is "NOASSERTION" + if s.Supplier == "" || (s.SupplierType == "" && s.Supplier != "NOASSERTION") { + return fmt.Errorf("invalid Supplier, missing fields. %+v", s) + } - if supplierStr == "NOASSERTION" { - s.Supplier = supplierStr + return nil +} + +// FromString parses a string into a Supplier. +// These strings take the form: "<SupplierType>: <Supplier>" +func (s *Supplier) FromString(value string) error { + if value == "NOASSERTION" { + s.Supplier = value return nil } - supplierFields := strings.SplitN(supplierStr, ": ", 2) + supplierFields := strings.SplitN(value, ": ", 2) if len(supplierFields) != 2 { - return fmt.Errorf("failed to parse Supplier '%s'", supplierStr) + return fmt.Errorf("failed to parse Supplier '%s'", value) } s.SupplierType = supplierFields[0] @@ -39,16 +46,33 @@ func (s *Supplier) UnmarshalJSON(data []byte) error { return nil } +// String converts the Supplier to a string in the form "<SupplierType>: <Supplier>" +func (s Supplier) String() string { + if s.Supplier == "NOASSERTION" { + return s.Supplier + } + + return fmt.Sprintf("%s: %s", s.SupplierType, s.Supplier) +} + +// UnmarshalJSON takes a supplier in the typical one-line format and parses it into a Supplier struct.
+// This function is also used when unmarshalling YAML +func (s *Supplier) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + supplierStr := string(data) + supplierStr = strings.Trim(supplierStr, "\"") + + return s.FromString(supplierStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing a Supplier in string form. // This function is also used when marshalling to YAML func (s Supplier) MarshalJSON() ([]byte, error) { - if s.Supplier == "NOASSERTION" { - return json.Marshal(s.Supplier) - } else if s.SupplierType != "" && s.Supplier != "" { - return json.Marshal(fmt.Sprintf("%s: %s", s.SupplierType, s.Supplier)) + if err := s.Validate(); err != nil { + return nil, err } - return []byte{}, fmt.Errorf("failed to marshal invalid Supplier: %+v", s) + return json.Marshal(s.String()) } type Originator struct { @@ -58,40 +82,64 @@ type Originator struct { OriginatorType string } -// UnmarshalJSON takes an originator in the typical one-line format and parses it into an Originator struct. -// This function is also used when unmarshalling YAML -func (o *Originator) UnmarshalJSON(data []byte) error { - // the value is just a string presented as a slice of bytes - originatorStr := string(data) - originatorStr = strings.Trim(originatorStr, "\"") +// Validate verifies that all the required fields are present. +// Returns an error if the object is invalid. +func (o Originator) Validate() error { + // OriginatorType is allowed to be empty if Originator is "NOASSERTION" + if o.Originator == "" || (o.OriginatorType == "" && o.Originator != "NOASSERTION") { + return fmt.Errorf("invalid Originator, missing fields. %+v", o) + } - if originatorStr == "NOASSERTION" { - o.Originator = originatorStr + return nil +} + +// FromString parses a string into an Originator. +// These strings take the form: "<OriginatorType>: <Originator>" +func (o *Originator) FromString(value string) error { + if value == "NOASSERTION" { + o.Originator = value return nil } - originatorFields := strings.SplitN(originatorStr, ": ", 2) + fields := strings.SplitN(value, ": ", 2) - if len(originatorFields) != 2 { - return fmt.Errorf("failed to parse Originator '%s'", originatorStr) + if len(fields) != 2 { + return fmt.Errorf("failed to parse Originator '%s'", value) } - o.OriginatorType = originatorFields[0] - o.Originator = originatorFields[1] + o.OriginatorType = fields[0] + o.Originator = fields[1] return nil } +// String converts the Originator to a string in the form "<OriginatorType>: <Originator>" +func (o Originator) String() string { + if o.Originator == "NOASSERTION" { + return o.Originator + } + + return fmt.Sprintf("%s: %s", o.OriginatorType, o.Originator) +} + +// UnmarshalJSON takes an originator in the typical one-line format and parses it into an Originator struct. +// This function is also used when unmarshalling YAML +func (o *Originator) UnmarshalJSON(data []byte) error { + // the value is just a string presented as a slice of bytes + originatorStr := string(data) + originatorStr = strings.Trim(originatorStr, "\"") + + return o.FromString(originatorStr) +} + // MarshalJSON converts the receiver into a slice of bytes representing an Originator in string form.
// This function is also used when marshalling to YAML func (o Originator) MarshalJSON() ([]byte, error) { - if o.Originator == "NOASSERTION" { - return json.Marshal(o.Originator) - } else if o.Originator != "" { - return json.Marshal(fmt.Sprintf("%s: %s", o.OriginatorType, o.Originator)) + if err := o.Validate(); err != nil { + return nil, err } - return []byte{}, nil + return json.Marshal(o.String()) } type PackageVerificationCode struct { diff --git a/spdx/snippet.go b/spdx/snippet.go index 6bffb8c8..8bd2a4d9 100644 --- a/spdx/snippet.go +++ b/spdx/snippet.go @@ -2,6 +2,13 @@ package spdx +import ( + "errors" + "fmt" + "strconv" + "strings" +) + type SnippetRangePointer struct { // 5.3: Snippet Byte Range: [start byte]:[end byte] // Cardinality: mandatory, one @@ -19,6 +26,50 @@ type SnippetRange struct { EndPointer SnippetRangePointer `json:"endPointer"` } +func (s SnippetRange) Validate() error { + if s.StartPointer.Offset == 0 && s.StartPointer.LineNumber == 0 && + s.EndPointer.Offset == 0 && s.EndPointer.LineNumber == 0 { + return errors.New("no range info present in SnippetRange") + } + + return nil +} + +func (s SnippetRange) String() string { + if s.EndPointer.Offset != 0 { + return fmt.Sprintf("%d:%d", s.StartPointer.Offset, s.EndPointer.Offset) + } + + return fmt.Sprintf("%d:%d", s.StartPointer.LineNumber, s.EndPointer.LineNumber) +} + +func (s *SnippetRange) FromString(value string, isByteRange bool) error { + strValues := strings.Split(value, ":") + if len(strValues) != 2 { + return fmt.Errorf("invalid SnippetRange: %s", value) + } + + values := make([]int, 2) + for ii, value := range strValues { + valueInt, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return fmt.Errorf("couldn't parse integer from SnippetRange value '%s': %v", value, err.Error()) + } + + values[ii] = int(valueInt) + } + + if isByteRange { + s.StartPointer.Offset = values[0] + s.EndPointer.Offset = values[1] + } else { + s.StartPointer.LineNumber = values[0] + s.EndPointer.LineNumber = values[1] + } + + return nil +} + // Snippet2_1 is a Snippet section of an SPDX Document for version 2.1 of the spec. 
type Snippet2_1 struct { diff --git a/tvloader/parser2v1/parse_creation_info.go b/tvloader/parser2v1/parse_creation_info.go index df16008b..1378aafb 100644 --- a/tvloader/parser2v1/parse_creation_info.go +++ b/tvloader/parser2v1/parse_creation_info.go @@ -102,7 +102,7 @@ func (parser *tvParser2_1) parsePairFromCreationInfo2_1(tag string, value string // ===== Helper functions ===== -func extractExternalDocumentReference(value string) (string, string, string, string, error) { +func extractExternalDocumentReference(value string) (spdx.DocElementID, string, string, string, error) { sp := strings.Split(value, " ") // remove any that are just whitespace keepSp := []string{} @@ -113,42 +113,33 @@ func extractExternalDocumentReference(value string) (string, string, string, str } } - var documentRefID, uri, alg, checksum string + var documentRefID spdx.DocElementID + var uri, alg, checksum string // now, should have 4 items (or 3, if Alg and Checksum were joined) // and should be able to map them if len(keepSp) == 4 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] alg = keepSp[2] // check that colon is present for alg, and remove it if !strings.HasSuffix(alg, ":") { - return "", "", "", "", fmt.Errorf("algorithm does not end with colon") + return documentRefID, "", "", "", fmt.Errorf("algorithm does not end with colon") } alg = strings.TrimSuffix(alg, ":") checksum = keepSp[3] } else if len(keepSp) == 3 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] // split on colon into alg and checksum parts := strings.SplitN(keepSp[2], ":", 2) if len(parts) != 2 { - return "", "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") + return documentRefID, "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") } alg = parts[0] checksum = parts[1] } else { - return "", "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) - } - - // additionally, we should be able to parse the first element as a - // DocumentRef- ID string, and we should remove that prefix - if !strings.HasPrefix(documentRefID, "DocumentRef-") { - return "", "", "", "", fmt.Errorf("expected first element to have DocumentRef- prefix") - } - documentRefID = strings.TrimPrefix(documentRefID, "DocumentRef-") - if documentRefID == "" { - return "", "", "", "", fmt.Errorf("document identifier has nothing after prefix") + return documentRefID, "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) } return documentRefID, uri, alg, checksum, nil diff --git a/tvloader/parser2v1/parse_creation_info_test.go b/tvloader/parser2v1/parse_creation_info_test.go index 83058dd8..92c08cc4 100644 --- a/tvloader/parser2v1/parse_creation_info_test.go +++ b/tvloader/parser2v1/parse_creation_info_test.go @@ -360,7 +360,7 @@ func TestParser2_1CICreatesAnnotation(t *testing.T) { func TestCanExtractExternalDocumentReference(t *testing.T) { refstring := "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -369,7 +369,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if 
wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -385,7 +385,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { refstring := " DocumentRef-spdx-tool-1.2 \t http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 \t SHA1: \t d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -394,7 +394,7 @@ func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -416,7 +416,6 @@ func TestFailsExternalDocumentReferenceWithInvalidFormats(t *testing.T) { "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 d6a770ba38583ed4bb4525bd96e50461655d2759", "DocumentRef-spdx-tool-1.2", - "spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759", } for _, refstring := range invalidRefs { _, _, _, _, err := extractExternalDocumentReference(refstring) diff --git a/tvloader/parser2v2/parse_creation_info.go b/tvloader/parser2v2/parse_creation_info.go index f8406fc5..bf037d25 100644 --- a/tvloader/parser2v2/parse_creation_info.go +++ b/tvloader/parser2v2/parse_creation_info.go @@ -102,7 +102,7 @@ func (parser *tvParser2_2) parsePairFromCreationInfo2_2(tag string, value string // ===== Helper functions ===== -func extractExternalDocumentReference(value string) (string, string, string, string, error) { +func extractExternalDocumentReference(value string) (spdx.DocElementID, string, string, string, error) { sp := strings.Split(value, " ") // remove any that are just whitespace keepSp := []string{} @@ -113,42 +113,33 @@ func extractExternalDocumentReference(value string) (string, string, string, str } } - var documentRefID, uri, alg, checksum string + var documentRefID spdx.DocElementID + var uri, alg, checksum string // now, should have 4 items (or 3, if Alg and Checksum were joined) // and should be able to map them if len(keepSp) == 4 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] alg = keepSp[2] // check that colon is present for alg, and remove it if !strings.HasSuffix(alg, ":") { - return "", "", "", "", fmt.Errorf("algorithm does not end with colon") + return documentRefID, "", "", "", fmt.Errorf("algorithm does not end with colon") } alg = strings.TrimSuffix(alg, ":") checksum = keepSp[3] } else if len(keepSp) == 3 { - documentRefID = keepSp[0] + documentRefID = spdx.MakeDocElementID(keepSp[0], "") uri = keepSp[1] // split on colon into alg and checksum parts := strings.SplitN(keepSp[2], ":", 2) if len(parts) != 2 { - return "", "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") + return 
documentRefID, "", "", "", fmt.Errorf("missing colon separator between algorithm and checksum") } alg = parts[0] checksum = parts[1] } else { - return "", "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) - } - - // additionally, we should be able to parse the first element as a - // DocumentRef- ID string, and we should remove that prefix - if !strings.HasPrefix(documentRefID, "DocumentRef-") { - return "", "", "", "", fmt.Errorf("expected first element to have DocumentRef- prefix") - } - documentRefID = strings.TrimPrefix(documentRefID, "DocumentRef-") - if documentRefID == "" { - return "", "", "", "", fmt.Errorf("document identifier has nothing after prefix") + return documentRefID, "", "", "", fmt.Errorf("expected 4 elements, got %d", len(keepSp)) } return documentRefID, uri, alg, checksum, nil diff --git a/tvloader/parser2v2/parse_creation_info_test.go b/tvloader/parser2v2/parse_creation_info_test.go index 71213460..23453911 100644 --- a/tvloader/parser2v2/parse_creation_info_test.go +++ b/tvloader/parser2v2/parse_creation_info_test.go @@ -360,7 +360,7 @@ func TestParser2_2CICreatesAnnotation(t *testing.T) { func TestCanExtractExternalDocumentReference(t *testing.T) { refstring := "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -369,7 +369,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -385,7 +385,7 @@ func TestCanExtractExternalDocumentReference(t *testing.T) { func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { refstring := " DocumentRef-spdx-tool-1.2 \t http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 \t SHA1: \t d6a770ba38583ed4bb4525bd96e50461655d2759" - wantDocumentRefID := "spdx-tool-1.2" + wantDocumentRefID := "DocumentRef-spdx-tool-1.2" wantURI := "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301" wantAlg := "SHA1" wantChecksum := "d6a770ba38583ed4bb4525bd96e50461655d2759" @@ -394,7 +394,7 @@ func TestCanExtractExternalDocumentReferenceWithExtraWhitespace(t *testing.T) { if err != nil { t.Errorf("got non-nil error: %v", err) } - if wantDocumentRefID != gotDocumentRefID { + if wantDocumentRefID != gotDocumentRefID.String() { t.Errorf("wanted document ref ID %s, got %s", wantDocumentRefID, gotDocumentRefID) } if wantURI != gotURI { @@ -416,7 +416,6 @@ func TestFailsExternalDocumentReferenceWithInvalidFormats(t *testing.T) { "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", "DocumentRef-spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 d6a770ba38583ed4bb4525bd96e50461655d2759", "DocumentRef-spdx-tool-1.2", - "spdx-tool-1.2 http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301 SHA1:d6a770ba38583ed4bb4525bd96e50461655d2759", } for _, refstring := range invalidRefs { _, _, _, _, err := 
extractExternalDocumentReference(refstring) diff --git a/tvsaver/saver2v1/save_annotation.go b/tvsaver/saver2v1/save_annotation.go index f7d79538..36ab47f6 100644 --- a/tvsaver/saver2v1/save_annotation.go +++ b/tvsaver/saver2v1/save_annotation.go @@ -19,7 +19,7 @@ func renderAnnotation2_1(ann *spdx.Annotation2_1, w io.Writer) error { if ann.AnnotationType != "" { fmt.Fprintf(w, "AnnotationType: %s\n", ann.AnnotationType) } - annIDStr := spdx.RenderDocElementID(ann.AnnotationSPDXIdentifier) + annIDStr := ann.AnnotationSPDXIdentifier.String() if annIDStr != "SPDXRef-" { fmt.Fprintf(w, "SPDXREF: %s\n", annIDStr) } diff --git a/tvsaver/saver2v1/save_document.go b/tvsaver/saver2v1/save_document.go index ea17db25..d23aeea2 100644 --- a/tvsaver/saver2v1/save_document.go +++ b/tvsaver/saver2v1/save_document.go @@ -28,7 +28,7 @@ func RenderDocument2_1(doc *spdx.Document2_1, w io.Writer) error { fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) } if doc.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", doc.SPDXIdentifier) } if doc.DocumentName != "" { fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) @@ -38,10 +38,10 @@ func RenderDocument2_1(doc *spdx.Document2_1, w io.Writer) error { } // print EDRs in order sorted by identifier sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { - return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + return doc.ExternalDocumentReferences[i].DocumentRefID.DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID.DocumentRefID }) for _, edr := range doc.ExternalDocumentReferences { - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + fmt.Fprintf(w, "ExternalDocumentRef: %s %s %s:%s\n", edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) } if doc.DocumentComment != "" { diff --git a/tvsaver/saver2v1/save_file.go b/tvsaver/saver2v1/save_file.go index c1311220..7ed9fa44 100644 --- a/tvsaver/saver2v1/save_file.go +++ b/tvsaver/saver2v1/save_file.go @@ -15,7 +15,7 @@ func renderFile2_1(f *spdx.File2_1, w io.Writer) error { fmt.Fprintf(w, "FileName: %s\n", f.FileName) } if f.FileSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", f.FileSPDXIdentifier) } for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) diff --git a/tvsaver/saver2v1/save_package.go b/tvsaver/saver2v1/save_package.go index 24a468c0..139fd522 100644 --- a/tvsaver/saver2v1/save_package.go +++ b/tvsaver/saver2v1/save_package.go @@ -16,7 +16,7 @@ func renderPackage2_1(pkg *spdx.Package2_1, w io.Writer) error { fmt.Fprintf(w, "PackageName: %s\n", pkg.PackageName) } if pkg.PackageSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(pkg.PackageSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", pkg.PackageSPDXIdentifier) } if pkg.PackageVersion != "" { fmt.Fprintf(w, "PackageVersion: %s\n", pkg.PackageVersion) diff --git a/tvsaver/saver2v1/save_relationship.go b/tvsaver/saver2v1/save_relationship.go index aea48bc3..356820a0 100644 --- a/tvsaver/saver2v1/save_relationship.go +++ b/tvsaver/saver2v1/save_relationship.go @@ -10,8 +10,8 @@ import ( ) func renderRelationship2_1(rln *spdx.Relationship2_1, w io.Writer) error { - rlnAStr := spdx.RenderDocElementID(rln.RefA) - rlnBStr := spdx.RenderDocElementID(rln.RefB) + rlnAStr := rln.RefA.String() + rlnBStr := rln.RefB.String() if rlnAStr != "SPDXRef-" && 
rlnBStr != "SPDXRef-" && rln.Relationship != "" { fmt.Fprintf(w, "Relationship: %s %s %s\n", rlnAStr, rln.Relationship, rlnBStr) } diff --git a/tvsaver/saver2v1/save_snippet.go b/tvsaver/saver2v1/save_snippet.go index 13995489..60684797 100644 --- a/tvsaver/saver2v1/save_snippet.go +++ b/tvsaver/saver2v1/save_snippet.go @@ -11,9 +11,9 @@ import ( func renderSnippet2_1(sn *spdx.Snippet2_1, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { - fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) + fmt.Fprintf(w, "SnippetSPDXID: %s\n", sn.SnippetSPDXIdentifier) } - snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := sn.SnippetFromFileSPDXIdentifier.String() if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } diff --git a/tvsaver/saver2v2/save_annotation.go b/tvsaver/saver2v2/save_annotation.go index ddfe483a..281d77f1 100644 --- a/tvsaver/saver2v2/save_annotation.go +++ b/tvsaver/saver2v2/save_annotation.go @@ -19,7 +19,7 @@ func renderAnnotation2_2(ann *spdx.Annotation2_2, w io.Writer) error { if ann.AnnotationType != "" { fmt.Fprintf(w, "AnnotationType: %s\n", ann.AnnotationType) } - annIDStr := spdx.RenderDocElementID(ann.AnnotationSPDXIdentifier) + annIDStr := ann.AnnotationSPDXIdentifier.String() if annIDStr != "SPDXRef-" { fmt.Fprintf(w, "SPDXREF: %s\n", annIDStr) } diff --git a/tvsaver/saver2v2/save_document.go b/tvsaver/saver2v2/save_document.go index 04b482da..2dd55166 100644 --- a/tvsaver/saver2v2/save_document.go +++ b/tvsaver/saver2v2/save_document.go @@ -28,7 +28,7 @@ func RenderDocument2_2(doc *spdx.Document2_2, w io.Writer) error { fmt.Fprintf(w, "DataLicense: %s\n", doc.DataLicense) } if doc.SPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(doc.SPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", doc.SPDXIdentifier) } if doc.DocumentName != "" { fmt.Fprintf(w, "DocumentName: %s\n", doc.DocumentName) @@ -38,10 +38,10 @@ func RenderDocument2_2(doc *spdx.Document2_2, w io.Writer) error { } // print EDRs in order sorted by identifier sort.Slice(doc.ExternalDocumentReferences, func(i, j int) bool { - return doc.ExternalDocumentReferences[i].DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID + return doc.ExternalDocumentReferences[i].DocumentRefID.DocumentRefID < doc.ExternalDocumentReferences[j].DocumentRefID.DocumentRefID }) for _, edr := range doc.ExternalDocumentReferences { - fmt.Fprintf(w, "ExternalDocumentRef: DocumentRef-%s %s %s:%s\n", + fmt.Fprintf(w, "ExternalDocumentRef: %s %s %s:%s\n", edr.DocumentRefID, edr.URI, edr.Checksum.Algorithm, edr.Checksum.Value) } if doc.DocumentComment != "" { diff --git a/tvsaver/saver2v2/save_file.go b/tvsaver/saver2v2/save_file.go index f1684efb..b3bd8e1f 100644 --- a/tvsaver/saver2v2/save_file.go +++ b/tvsaver/saver2v2/save_file.go @@ -15,7 +15,7 @@ func renderFile2_2(f *spdx.File2_2, w io.Writer) error { fmt.Fprintf(w, "FileName: %s\n", f.FileName) } if f.FileSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(f.FileSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", f.FileSPDXIdentifier) } for _, s := range f.FileTypes { fmt.Fprintf(w, "FileType: %s\n", s) diff --git a/tvsaver/saver2v2/save_package.go b/tvsaver/saver2v2/save_package.go index 6d21a6d2..23e2bc57 100644 --- a/tvsaver/saver2v2/save_package.go +++ b/tvsaver/saver2v2/save_package.go @@ -16,7 +16,7 @@ func renderPackage2_2(pkg *spdx.Package2_2, w io.Writer) error { fmt.Fprintf(w, "PackageName: 
%s\n", pkg.PackageName) } if pkg.PackageSPDXIdentifier != "" { - fmt.Fprintf(w, "SPDXID: %s\n", spdx.RenderElementID(pkg.PackageSPDXIdentifier)) + fmt.Fprintf(w, "SPDXID: %s\n", pkg.PackageSPDXIdentifier) } if pkg.PackageVersion != "" { fmt.Fprintf(w, "PackageVersion: %s\n", pkg.PackageVersion) diff --git a/tvsaver/saver2v2/save_relationship.go b/tvsaver/saver2v2/save_relationship.go index 4bd12ddb..26a11ba1 100644 --- a/tvsaver/saver2v2/save_relationship.go +++ b/tvsaver/saver2v2/save_relationship.go @@ -10,8 +10,8 @@ import ( ) func renderRelationship2_2(rln *spdx.Relationship2_2, w io.Writer) error { - rlnAStr := spdx.RenderDocElementID(rln.RefA) - rlnBStr := spdx.RenderDocElementID(rln.RefB) + rlnAStr := rln.RefA.String() + rlnBStr := rln.RefB.String() if rlnAStr != "SPDXRef-" && rlnBStr != "SPDXRef-" && rln.Relationship != "" { fmt.Fprintf(w, "Relationship: %s %s %s\n", rlnAStr, rln.Relationship, rlnBStr) } diff --git a/tvsaver/saver2v2/save_snippet.go b/tvsaver/saver2v2/save_snippet.go index 4f740982..ae26c576 100644 --- a/tvsaver/saver2v2/save_snippet.go +++ b/tvsaver/saver2v2/save_snippet.go @@ -11,9 +11,9 @@ import ( func renderSnippet2_2(sn *spdx.Snippet2_2, w io.Writer) error { if sn.SnippetSPDXIdentifier != "" { - fmt.Fprintf(w, "SnippetSPDXID: %s\n", spdx.RenderElementID(sn.SnippetSPDXIdentifier)) + fmt.Fprintf(w, "SnippetSPDXID: %s\n", sn.SnippetSPDXIdentifier) } - snFromFileIDStr := spdx.RenderElementID(sn.SnippetFromFileSPDXIdentifier) + snFromFileIDStr := sn.SnippetFromFileSPDXIdentifier.String() if snFromFileIDStr != "" { fmt.Fprintf(w, "SnippetFromFileSPDXID: %s\n", snFromFileIDStr) } diff --git a/yaml/yaml_test.go b/yaml/yaml_test.go index 49f8ebfe..c93ce815 100644 --- a/yaml/yaml_test.go +++ b/yaml/yaml_test.go @@ -61,7 +61,7 @@ func TestWrite2_2(t *testing.T) { var want = spdx.Document2_2{ DataLicense: "CC0-1.0", SPDXVersion: "SPDX-2.2", - SPDXIdentifier: "SPDXRef-DOCUMENT", + SPDXIdentifier: "DOCUMENT", DocumentName: "SPDX-Tools-v2.0", DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", CreationInfo: &spdx.CreationInfo2_2{ @@ -77,7 +77,7 @@ var want = spdx.Document2_2{ DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ { - DocumentRefID: "DocumentRef-spdx-tool-1.2", + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", Checksum: spdx.Checksum{ Algorithm: spdx.SHA1, @@ -148,7 +148,7 @@ var want = spdx.Document2_2{ Packages: []*spdx.Package2_2{ { PackageName: "glibc", - PackageSPDXIdentifier: "SPDXRef-Package", + PackageSPDXIdentifier: "Package", PackageVersion: "2.11.1", PackageFileName: "glibc-2.11.1.tar.gz", PackageSupplier: &spdx.Supplier{ @@ -223,7 +223,7 @@ var want = spdx.Document2_2{ }, }, { - PackageSPDXIdentifier: "SPDXRef-fromDoap-1", + PackageSPDXIdentifier: "fromDoap-1", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: "NOASSERTION", FilesAnalyzed: false, @@ -234,7 +234,7 @@ var want = spdx.Document2_2{ }, { PackageName: "Jena", - PackageSPDXIdentifier: "SPDXRef-fromDoap-0", + PackageSPDXIdentifier: "fromDoap-0", PackageCopyrightText: "NOASSERTION", PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", PackageExternalReferences: []*spdx.PackageExternalReference2_2{ @@ -251,7 +251,7 @@ var want = spdx.Document2_2{ 
PackageVersion: "3.12.0", }, { - PackageSPDXIdentifier: "SPDXRef-Saxon", + PackageSPDXIdentifier: "Saxon", PackageChecksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -274,7 +274,7 @@ var want = spdx.Document2_2{ Files: []*spdx.File2_2{ { FileName: "./src/org/spdx/parser/DOAPProject.java", - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", FileTypes: []string{ "SOURCE", }, @@ -298,7 +298,7 @@ var want = spdx.Document2_2{ }, }, { - FileSPDXIdentifier: "SPDXRef-CommonsLangSrc", + FileSPDXIdentifier: "CommonsLangSrc", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -315,7 +315,7 @@ var want = spdx.Document2_2{ FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", }, { - FileSPDXIdentifier: "SPDXRef-JenaLib", + FileSPDXIdentifier: "JenaLib", Checksums: []spdx.Checksum{ { Algorithm: "SHA1", @@ -332,7 +332,7 @@ var want = spdx.Document2_2{ LicenseInfoInFiles: []string{"LicenseRef-1"}, }, { - FileSPDXIdentifier: "SPDXRef-File", + FileSPDXIdentifier: "File", Annotations: []spdx.Annotation2_2{ { Annotator: spdx.Annotator{ @@ -367,27 +367,27 @@ var want = spdx.Document2_2{ }, Snippets: []spdx.Snippet2_2{ { - SnippetSPDXIdentifier: "SPDXRef-Snippet", - SnippetFromFileSPDXIdentifier: "SPDXRef-DoapSource", + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", Ranges: []spdx.SnippetRange{ { StartPointer: spdx.SnippetRangePointer{ Offset: 310, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ Offset: 420, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, { StartPointer: spdx.SnippetRangePointer{ LineNumber: 5, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, EndPointer: spdx.SnippetRangePointer{ LineNumber: 23, - FileSPDXIdentifier: "SPDXRef-DoapSource", + FileSPDXIdentifier: "DoapSource", }, }, }, From 5ebe4615aa23c562ee7fea7c5a1be9c44534bc57 Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Mon, 25 Apr 2022 15:18:05 -0700 Subject: [PATCH 4/6] Add spreadsheet parser Signed-off-by: Ian Ling --- README.md | 1 + go.mod | 1 + go.sum | 37 ++ spreadsheet/common/annotations_columns.go | 9 + spreadsheet/common/document_info_columns.go | 15 + spreadsheet/common/external_refs_columns.go | 9 + .../common/extracted_license_info_columns.go | 9 + spreadsheet/common/package_info_columns.go | 26 + spreadsheet/common/per_file_info_columns.go | 21 + spreadsheet/common/relationships_columns.go | 8 + spreadsheet/common/sheet_names.go | 13 + spreadsheet/common/snippets_columns.go | 14 + spreadsheet/parse/annotations.go | 88 +++ spreadsheet/parse/document_info.go | 70 +++ spreadsheet/parse/extracted_license_info.go | 43 ++ spreadsheet/parse/package_external_refs.go | 68 +++ spreadsheet/parse/package_info.go | 119 ++++ spreadsheet/parse/per_file_info.go | 109 ++++ spreadsheet/parse/relationships.go | 55 ++ spreadsheet/parse/snippets.go | 80 +++ spreadsheet/parser.go | 130 +++++ spreadsheet/spreadsheet_test.go | 518 ++++++++++++++++++ spreadsheet/writer.go | 25 + 23 files changed, 1468 insertions(+) create mode 100644 spreadsheet/common/annotations_columns.go create mode 100644 spreadsheet/common/document_info_columns.go create mode 100644 
spreadsheet/common/external_refs_columns.go create mode 100644 spreadsheet/common/extracted_license_info_columns.go create mode 100644 spreadsheet/common/package_info_columns.go create mode 100644 spreadsheet/common/per_file_info_columns.go create mode 100644 spreadsheet/common/relationships_columns.go create mode 100644 spreadsheet/common/sheet_names.go create mode 100644 spreadsheet/common/snippets_columns.go create mode 100644 spreadsheet/parse/annotations.go create mode 100644 spreadsheet/parse/document_info.go create mode 100644 spreadsheet/parse/extracted_license_info.go create mode 100644 spreadsheet/parse/package_external_refs.go create mode 100644 spreadsheet/parse/package_info.go create mode 100644 spreadsheet/parse/per_file_info.go create mode 100644 spreadsheet/parse/relationships.go create mode 100644 spreadsheet/parse/snippets.go create mode 100644 spreadsheet/parser.go create mode 100644 spreadsheet/spreadsheet_test.go create mode 100644 spreadsheet/writer.go diff --git a/README.md b/README.md index 89a47296..f4a2200d 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,7 @@ tools-golang provides the following packages: * *tvsaver* - tag-value document saver * *rdfloader* - RDF document loader * *json* - JSON document parser and writer +* *spreadsheet* - Spreadsheet (XLS/XLSX) parser and writer * *yaml* - YAML document parser and writer * *builder* - builds "empty" SPDX document (with hashes) for directory contents * *idsearcher* - searches for [SPDX short-form IDs](https://spdx.org/ids/) and builds SPDX document diff --git a/go.mod b/go.mod index 1017b95c..1a546af4 100644 --- a/go.mod +++ b/go.mod @@ -5,5 +5,6 @@ go 1.13 require ( github.com/google/go-cmp v0.5.7 github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb + github.com/xuri/excelize/v2 v2.6.0 sigs.k8s.io/yaml v1.3.0 ) diff --git a/go.sum b/go.sum index 85ffe1cd..83c095a6 100644 --- a/go.sum +++ b/go.sum @@ -1,14 +1,51 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM= +github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk= +github.com/richardlehane/msoleps v1.0.1 h1:RfrALnSNXzmXLbGct/P2b4xkFz4e8Gmj/0Vj9M9xC1o= +github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb h1:bLo8hvc8XFm9J47r690TUKBzcjSWdJDxmjXJZ+/f92U= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify 
v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/xuri/efp v0.0.0-20220407160117-ad0f7a785be8 h1:3X7aE0iLKJ5j+tz58BpvIZkXNV7Yq4jC93Z/rbN2Fxk= +github.com/xuri/efp v0.0.0-20220407160117-ad0f7a785be8/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI= +github.com/xuri/excelize/v2 v2.6.0 h1:m/aXAzSAqxgt74Nfd+sNzpzVKhTGl7+S9nbG4A57mF4= +github.com/xuri/excelize/v2 v2.6.0/go.mod h1:Q1YetlHesXEKwGFfeJn7PfEZz2IvHb6wdOeYjBxVcVs= +github.com/xuri/nfp v0.0.0-20220409054826-5e722a1d9e22 h1:OAmKAfT06//esDdpi/DZ8Qsdt4+M5+ltca05dA5bG2M= +github.com/xuri/nfp v0.0.0-20220409054826-5e722a1d9e22/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ= +golang.org/x/crypto v0.0.0-20220408190544-5352b0902921 h1:iU7T1X1J6yxDr0rda54sWGkHgOp5XJrqm79gcNlC2VM= +golang.org/x/crypto v0.0.0-20220408190544-5352b0902921/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410 h1:hTftEOvwiOq2+O8k2D5/Q7COC7k5Qcrgc2TFURJYnvQ= +golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 h1:EN5+DfgmRMvRUrMGERW2gQl3Vc+Z7ZMnI/xdEpPSf0c= +golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/spreadsheet/common/annotations_columns.go b/spreadsheet/common/annotations_columns.go new file mode 100644 index 00000000..eed2b880 --- /dev/null +++ b/spreadsheet/common/annotations_columns.go @@ 
-0,0 +1,9 @@ +package common + +const ( + AnnotationsSPDXIdentifier = "SPDX Identifier being Annotated" + AnnotationsComment = "Annotation Comment" + AnnotationsDate = "Annotation Date" + AnnotationsAnnotator = "Annotator" + AnnotationsType = "Annotation Type" +) diff --git a/spreadsheet/common/document_info_columns.go b/spreadsheet/common/document_info_columns.go new file mode 100644 index 00000000..813b3a9e --- /dev/null +++ b/spreadsheet/common/document_info_columns.go @@ -0,0 +1,15 @@ +package common + +const ( + SPDXVersion = "SPDX Version" + DataLicense = "Data License" + SPDXIdentifier = "SPDX Identifier" + LicenseListVersion = "License List Version" + DocumentName = "Document Name" + DocumentNamespace = "Document Namespace" + ExternalDocumentReferences = "External Document References" + DocumentComment = "Document Comment" + Creator = "Creator" + Created = "Created" + CreatorComment = "Creator Comment" +) diff --git a/spreadsheet/common/external_refs_columns.go b/spreadsheet/common/external_refs_columns.go new file mode 100644 index 00000000..4759faa7 --- /dev/null +++ b/spreadsheet/common/external_refs_columns.go @@ -0,0 +1,9 @@ +package common + +const ( + ExternalRefPackageID = "Package ID" + ExternalRefCategory = "Category" + ExternalRefType = "Type" + ExternalRefLocator = "Locator" + ExternalRefComment = "Comment" +) diff --git a/spreadsheet/common/extracted_license_info_columns.go b/spreadsheet/common/extracted_license_info_columns.go new file mode 100644 index 00000000..52b1b5f8 --- /dev/null +++ b/spreadsheet/common/extracted_license_info_columns.go @@ -0,0 +1,9 @@ +package common + +const ( + LicenseInfoIdentifier = "Identifier" + LicenseInfoExtractedText = "Extracted Text" + LicenseInfoLicenseName = "License Name" + LicenseInfoCrossReferenceURLs = "Cross Reference URLs" + LicenseInfoComment = "Comment" +) diff --git a/spreadsheet/common/package_info_columns.go b/spreadsheet/common/package_info_columns.go new file mode 100644 index 00000000..bce1b444 --- /dev/null +++ b/spreadsheet/common/package_info_columns.go @@ -0,0 +1,26 @@ +package common + +const ( + PackageName = "Package Name" + PackageSPDXIdentifier = "SPDX Identifier" + PackageVersion = "Package Version" + PackageFileName = "Package FileName" + PackageSupplier = "Package Supplier" + PackageOriginator = "Package Originator" + PackageHomePage = "Home Page" + PackageDownloadLocation = "Package Download Location" + PackageChecksum = "Package Checksum" + PackageVerificationCode = "Package Verification Code" + PackageVerificationCodeExcludedFiles = "Verification Code Excluded Files" + PackageSourceInfo = "Source Info" + PackageLicenseDeclared = "License Declared" + PackageLicenseConcluded = "License Concluded" + PackageLicenseInfoFromFiles = "License Info From Files" + PackageLicenseComments = "License Comments" + PackageCopyrightText = "Package Copyright Text" + PackageSummary = "Summary" + PackageDescription = "Description" + PackageAttributionText = "Attribution Text" + PackageFilesAnalyzed = "Files Analyzed" + PackageComments = "Comments" +) diff --git a/spreadsheet/common/per_file_info_columns.go b/spreadsheet/common/per_file_info_columns.go new file mode 100644 index 00000000..d624cebe --- /dev/null +++ b/spreadsheet/common/per_file_info_columns.go @@ -0,0 +1,21 @@ +package common + +const ( + FileInfoFileName = "File Name" + FileInfoSPDXIdentifier = "SPDX Identifier" + FileInfoPackageIdentifier = "Package Identifier" + FileInfoFileTypes = "File Type(s)" + FileInfoFileChecksums = "File Checksum(s)" + 
FileInfoLicenseConcluded = "License Concluded" + FileInfoLicenseInfoInFile = "License Info in File" + FileInfoLicenseComments = "License Comments" + FileInfoFileCopyrightText = "File Copyright Text" + FileInfoNoticeText = "Notice Text" + FileInfoArtifactOfProject = "Artifact of Project" + FileInfoArtifactOfHomepage = "Artifact of Homepage" + FileInfoArtifactOfURL = "Artifact of URL" + FileInfoContributors = "Contributors" + FileInfoFileComment = "File Comment" + FileInfoFileDependencies = "File Dependencies" + FileInfoAttributionText = "Attribution Text" +) diff --git a/spreadsheet/common/relationships_columns.go b/spreadsheet/common/relationships_columns.go new file mode 100644 index 00000000..f98db9dd --- /dev/null +++ b/spreadsheet/common/relationships_columns.go @@ -0,0 +1,8 @@ +package common + +const ( + RelationshipsRefA = "SPDX Identifier A" + RelationshipsRelationship = "Relationship" + RelationshipsRefB = "SPDX Identifier B" + RelationshipsComment = "Relationship Comment" +) diff --git a/spreadsheet/common/sheet_names.go b/spreadsheet/common/sheet_names.go new file mode 100644 index 00000000..6bb29118 --- /dev/null +++ b/spreadsheet/common/sheet_names.go @@ -0,0 +1,13 @@ +package common + +const ( + SheetNameDocumentInfo = "Document Info" + SheetNamePackageInfo = "Package Info" + SheetNameExtractedLicenseInfo = "Extracted License Info" + SheetNameFileInfo = "Per File Info" + SheetNameRelationships = "Relationships" + SheetNameAnnotations = "Annotations" + SheetNameReviewers = "Reviewers" + SheetNameSnippets = "Snippets" + SheetNameExternalRefs = "External Refs" +) diff --git a/spreadsheet/common/snippets_columns.go b/spreadsheet/common/snippets_columns.go new file mode 100644 index 00000000..df77a8fb --- /dev/null +++ b/spreadsheet/common/snippets_columns.go @@ -0,0 +1,14 @@ +package common + +const ( + SnippetsID = "ID" + SnippetsName = "Name" + SnippetsFromFileID = "From File ID" + SnippetsByteRange = "Byte Range" + SnippetsLineRange = "Line Range" + SnippetsLicenseConcluded = "License Concluded" + SnippetsLicenseInfoInSnippet = "License Info in Snippet" + SnippetsLicenseComments = "License Comments" + SnippetsCopyrightText = "Snippet Copyright Text" + SnippetsComment = "Comment" +) diff --git a/spreadsheet/parse/annotations.go b/spreadsheet/parse/annotations.go new file mode 100644 index 00000000..c81c240e --- /dev/null +++ b/spreadsheet/parse/annotations.go @@ -0,0 +1,88 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessAnnotationsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + + newAnnotation := spdx.Annotation2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.AnnotationsSPDXIdentifier: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.AnnotationsSPDXIdentifier, rowNum, err.Error()) + } + + newAnnotation.AnnotationSPDXIdentifier = id + case common.AnnotationsComment: + newAnnotation.AnnotationComment = value + case 
common.AnnotationsDate: + newAnnotation.AnnotationDate = value + case common.AnnotationsAnnotator: + annotator := spdx.Annotator{} + err := annotator.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.AnnotationsAnnotator, rowNum, err.Error()) + } + + newAnnotation.Annotator = annotator + case common.AnnotationsType: + newAnnotation.AnnotationType = value + } + } + + // TODO: validate? + + // an annotation can be at the Document level, File level, or Package level + if newAnnotation.AnnotationSPDXIdentifier.DocumentRefID == "" && newAnnotation.AnnotationSPDXIdentifier.ElementRefID != doc.SPDXIdentifier { + var found bool + for ii, pkg := range doc.Packages { + if newAnnotation.AnnotationSPDXIdentifier.ElementRefID == pkg.PackageSPDXIdentifier { + // package level + found = true + doc.Packages[ii].Annotations = append(doc.Packages[ii].Annotations, newAnnotation) + break + } + } + + if !found { + for ii, file := range doc.Files { + if newAnnotation.AnnotationSPDXIdentifier.ElementRefID == file.FileSPDXIdentifier { + // file level + found = true + doc.Files[ii].Annotations = append(doc.Files[ii].Annotations, newAnnotation) + break + } + } + } + + if !found { + return fmt.Errorf("annotation SPDX Identifier from row %d not found in document: %s", rowNum, newAnnotation.AnnotationSPDXIdentifier) + } + } else { + // document level + doc.Annotations = append(doc.Annotations, &newAnnotation) + } + } + + return nil +} diff --git a/spreadsheet/parse/document_info.go b/spreadsheet/parse/document_info.go new file mode 100644 index 00000000..c6b44593 --- /dev/null +++ b/spreadsheet/parse/document_info.go @@ -0,0 +1,70 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessDocumentInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.SPDXVersion: + doc.SPDXVersion = value + case common.DataLicense: + doc.DataLicense = value + case common.SPDXIdentifier: + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + doc.SPDXIdentifier = id.ElementRefID + case common.LicenseListVersion: + doc.CreationInfo.LicenseListVersion = value + case common.DocumentName: + doc.DocumentName = value + case common.DocumentNamespace: + doc.DocumentNamespace = value + case common.DocumentComment: + doc.DocumentComment = value + case common.ExternalDocumentReferences: + externalDocRef := spdx.ExternalDocumentRef2_2{} + err := externalDocRef.FromString(value) + if err != nil { + return fmt.Errorf("invalid External Document Ref in row %d: %v", rowNum, err.Error()) + } + + doc.ExternalDocumentReferences = append(doc.ExternalDocumentReferences, externalDocRef) + case common.Created: + doc.CreationInfo.Created = value + case common.CreatorComment: + doc.CreationInfo.CreatorComment = value + case common.Creator: + creator := spdx.Creator{} + err := creator.FromString(value) + if err != nil { + 
return fmt.Errorf("invalid Creator in row %d: %v", rowNum, err.Error()) + } + + doc.CreationInfo.Creators = append(doc.CreationInfo.Creators, creator) + } + } + } + + return nil +} diff --git a/spreadsheet/parse/extracted_license_info.go b/spreadsheet/parse/extracted_license_info.go new file mode 100644 index 00000000..18be90e4 --- /dev/null +++ b/spreadsheet/parse/extracted_license_info.go @@ -0,0 +1,43 @@ +package parse + +import ( + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessExtractedLicenseInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for _, row := range rows[1:] { + newLicense := spdx.OtherLicense2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.LicenseInfoIdentifier: + newLicense.LicenseIdentifier = value + case common.LicenseInfoExtractedText: + newLicense.ExtractedText = value + case common.LicenseInfoLicenseName: + newLicense.LicenseName = value + case common.LicenseInfoCrossReferenceURLs: + newLicense.LicenseCrossReferences = strings.Split(value, ", ") + case common.LicenseInfoComment: + newLicense.LicenseComment = value + } + } + + // TODO: validate? + doc.OtherLicenses = append(doc.OtherLicenses, &newLicense) + } + + return nil +} diff --git a/spreadsheet/parse/package_external_refs.go b/spreadsheet/parse/package_external_refs.go new file mode 100644 index 00000000..1cdb2385 --- /dev/null +++ b/spreadsheet/parse/package_external_refs.go @@ -0,0 +1,68 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessPackageExternalRefsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + // each external ref is related to a package, make sure we figure out which package + var packageSPDXID spdx.ElementID + newExternalRef := spdx.PackageExternalReference2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.ExternalRefPackageID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package SPDX Identifier for External Ref in row %d: %v", rowNum, err.Error()) + } + + packageSPDXID = id.ElementRefID + case common.ExternalRefCategory: + newExternalRef.Category = value + case common.ExternalRefType: + newExternalRef.RefType = value + case common.ExternalRefLocator: + newExternalRef.Locator = value + case common.ExternalRefComment: + newExternalRef.ExternalRefComment = value + } + } + + if packageSPDXID == "" { + return fmt.Errorf("no SPDX ID given for package external ref in row %d", rowNum) + } + + // find the package this external ref is related to + var packageFound bool + for ii, pkg := range doc.Packages { + if pkg.PackageSPDXIdentifier == packageSPDXID { + packageFound = true + doc.Packages[ii].PackageExternalReferences = 
append(doc.Packages[ii].PackageExternalReferences, &newExternalRef) + break + } + } + + if !packageFound { + return fmt.Errorf("package external ref assigned to non-existent package %s in row %d", packageSPDXID, rowNum) + } + } + + return nil +} diff --git a/spreadsheet/parse/package_info.go b/spreadsheet/parse/package_info.go new file mode 100644 index 00000000..99146f28 --- /dev/null +++ b/spreadsheet/parse/package_info.go @@ -0,0 +1,119 @@ +package parse + +import ( + "encoding/csv" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strconv" + "strings" +) + +func ProcessPackageInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newPackage := spdx.Package2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.PackageName: + newPackage.PackageName = value + case common.PackageSPDXIdentifier: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageSPDXIdentifier = id.ElementRefID + case common.PackageVersion: + newPackage.PackageVersion = value + case common.PackageFileName: + newPackage.PackageFileName = value + case common.PackageSupplier: + supplier := spdx.Supplier{} + err := supplier.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package Supplier in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageSupplier = &supplier + case common.PackageOriginator: + originator := spdx.Originator{} + err := originator.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package Originator in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageOriginator = &originator + case common.PackageHomePage: + newPackage.PackageHomePage = value + case common.PackageDownloadLocation: + newPackage.PackageDownloadLocation = value + case common.PackageChecksum: + checksums := strings.Split(value, "\n") + for _, checksumStr := range checksums { + checksum := spdx.Checksum{} + err := checksum.FromString(checksumStr) + if err != nil { + return fmt.Errorf("invalid Package Checksum in row %d: %v", rowNum, err.Error()) + } + + newPackage.PackageChecksums = append(newPackage.PackageChecksums, checksum) + } + case common.PackageVerificationCode: + newPackage.PackageVerificationCode.Value = value + case common.PackageVerificationCodeExcludedFiles: + newPackage.PackageVerificationCode.ExcludedFiles = append(newPackage.PackageVerificationCode.ExcludedFiles, value) + case common.PackageSourceInfo: + newPackage.PackageSourceInfo = value + case common.PackageLicenseDeclared: + newPackage.PackageLicenseDeclared = value + case common.PackageLicenseConcluded: + newPackage.PackageLicenseConcluded = value + case common.PackageLicenseInfoFromFiles: + files := strings.Split(value, ",") + newPackage.PackageLicenseInfoFromFiles = append(newPackage.PackageLicenseInfoFromFiles, files...) 
+ case common.PackageLicenseComments: + newPackage.PackageLicenseComments = value + case common.PackageCopyrightText: + newPackage.PackageCopyrightText = value + case common.PackageSummary: + newPackage.PackageSummary = value + case common.PackageDescription: + newPackage.PackageDescription = value + case common.PackageAttributionText: + attributionTexts, err := csv.NewReader(strings.NewReader(value)).Read() + if err != nil { + return fmt.Errorf("invalid Package Attribution Text in row %d: %s", rowNum, err.Error()) + } + newPackage.PackageAttributionTexts = attributionTexts + case common.PackageFilesAnalyzed: + filesAnalyzed, err := strconv.ParseBool(value) + if err != nil { + return fmt.Errorf("invalid boolean for Files Analyzed in row %d (should be 'true' or 'false')", rowNum) + } + + newPackage.FilesAnalyzed = filesAnalyzed + case common.PackageComments: + newPackage.PackageComment = value + } + } + + // TODO: validate? + doc.Packages = append(doc.Packages, &newPackage) + } + + return nil +} diff --git a/spreadsheet/parse/per_file_info.go b/spreadsheet/parse/per_file_info.go new file mode 100644 index 00000000..fcd56819 --- /dev/null +++ b/spreadsheet/parse/per_file_info.go @@ -0,0 +1,109 @@ +package parse + +import ( + "encoding/csv" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessPerFileInfoRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newFile := spdx.File2_2{} + var associatedPackageSPDXID spdx.ElementID + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.FileInfoFileName: + newFile.FileName = value + case common.FileInfoSPDXIdentifier: + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + newFile.FileSPDXIdentifier = id.ElementRefID + case common.FileInfoPackageIdentifier: + // in spreadsheet formats, file<->package relationships are dictated by this column. 
+ // if there is no value in this column, the file is not associated with a particular package + var id spdx.DocElementID + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid Package SPDX Identifier in row %d: %v", rowNum, err.Error()) + } + + associatedPackageSPDXID = id.ElementRefID + case common.FileInfoFileTypes: + newFile.FileTypes = strings.Split(value, ", ") + case common.FileInfoFileChecksums: + checksums := strings.Split(value, "\n") + for _, checksumStr := range checksums { + checksum := spdx.Checksum{} + err := checksum.FromString(checksumStr) + if err != nil { + return fmt.Errorf("invalid File Checksum in row %d: %v", rowNum, err.Error()) + } + + newFile.Checksums = append(newFile.Checksums, checksum) + } + case common.FileInfoLicenseConcluded: + newFile.LicenseConcluded = value + case common.FileInfoLicenseInfoInFile: + newFile.LicenseInfoInFiles = strings.Split(value, ", ") + case common.FileInfoLicenseComments: + newFile.LicenseComments = value + case common.FileInfoFileCopyrightText: + newFile.FileCopyrightText = value + case common.FileInfoNoticeText: + newFile.FileNotice = value + case common.FileInfoArtifactOfProject: + // ignored + case common.FileInfoArtifactOfHomepage: + // ignored + case common.FileInfoArtifactOfURL: + // ignored + case common.FileInfoContributors: + contributors, err := csv.NewReader(strings.NewReader(value)).Read() + if err != nil { + return fmt.Errorf("invalid File Contributors in row %d: %s", rowNum, err.Error()) + } + newFile.FileContributors = contributors + case common.FileInfoFileComment: + newFile.FileComment = value + case common.FileInfoFileDependencies: + newFile.FileDependencies = strings.Split(value, ", ") + case common.FileInfoAttributionText: + newFile.FileAttributionTexts = strings.Split(value, ", ") + } + } + + // TODO: validate? 
+ doc.Files = append(doc.Files, &newFile) + + // add this file to the associated package, if it is associated with a package + if associatedPackageSPDXID != "" { + for ii, pkg := range doc.Packages { + if pkg.PackageSPDXIdentifier == associatedPackageSPDXID { + doc.Packages[ii].Files = append(doc.Packages[ii].Files, &newFile) + break + } + } + } + } + + return nil +} diff --git a/spreadsheet/parse/relationships.go b/spreadsheet/parse/relationships.go new file mode 100644 index 00000000..7a2d3c02 --- /dev/null +++ b/spreadsheet/parse/relationships.go @@ -0,0 +1,55 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" +) + +func ProcessRelationshipsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newRelationship := spdx.Relationship2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.RelationshipsRefA: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.RelationshipsRefA, rowNum, err.Error()) + } + + newRelationship.RefA = id + case common.RelationshipsRelationship: + newRelationship.Relationship = value + case common.RelationshipsRefB: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.RelationshipsRefB, rowNum, err.Error()) + } + + newRelationship.RefB = id + case common.RelationshipsComment: + newRelationship.RelationshipComment = value + } + } + + // TODO: validate? 
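+ // relationships live only at the document level, so unlike annotations and external refs they do not need to be attached to a package or file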
+ doc.Relationships = append(doc.Relationships, &newRelationship) + } + + return nil +} diff --git a/spreadsheet/parse/snippets.go b/spreadsheet/parse/snippets.go new file mode 100644 index 00000000..5129a887 --- /dev/null +++ b/spreadsheet/parse/snippets.go @@ -0,0 +1,80 @@ +package parse + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "strings" +) + +func ProcessSnippetsRows(rows [][]string, doc *spdx.Document2_2) error { + // the first row is column headers, keep track of which order they appear in + columnsByIndex := make(map[int]string) + for index, header := range rows[0] { + columnsByIndex[index] = header + } + + for rowNum, row := range rows[1:] { + // set rowNum to the correct value, Go slices are zero-indexed (+1), and we started iterating on the second element (+1) + rowNum = rowNum + 2 + newSnippet := spdx.Snippet2_2{} + + for columnIndex, value := range row { + if value == "" { + continue + } + + switch columnsByIndex[columnIndex] { + case common.SnippetsID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsID, rowNum, err.Error()) + } + + newSnippet.SnippetSPDXIdentifier = id.ElementRefID + case common.SnippetsName: + newSnippet.SnippetName = value + case common.SnippetsFromFileID: + id := spdx.DocElementID{} + err := id.FromString(value) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsFromFileID, rowNum, err.Error()) + } + + newSnippet.SnippetFromFileSPDXIdentifier = id.ElementRefID + case common.SnippetsByteRange: + snippetRange := spdx.SnippetRange{} + err := snippetRange.FromString(value, true) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsByteRange, rowNum, err.Error()) + } + + newSnippet.Ranges = append(newSnippet.Ranges, snippetRange) + case common.SnippetsLineRange: + snippetRange := spdx.SnippetRange{} + err := snippetRange.FromString(value, false) + if err != nil { + return fmt.Errorf("invalid %s in row %d: %v", common.SnippetsLineRange, rowNum, err.Error()) + } + + newSnippet.Ranges = append(newSnippet.Ranges, snippetRange) + case common.SnippetsLicenseConcluded: + newSnippet.SnippetLicenseConcluded = value + case common.SnippetsLicenseInfoInSnippet: + newSnippet.LicenseInfoInSnippet = strings.Split(value, ", ") + case common.SnippetsLicenseComments: + newSnippet.SnippetLicenseComments = value + case common.SnippetsCopyrightText: + newSnippet.SnippetCopyrightText = value + case common.SnippetsComment: + newSnippet.SnippetComment = value + } + } + + // TODO: validate? + doc.Snippets = append(doc.Snippets, newSnippet) + } + + return nil +} diff --git a/spreadsheet/parser.go b/spreadsheet/parser.go new file mode 100644 index 00000000..ecc9d64d --- /dev/null +++ b/spreadsheet/parser.go @@ -0,0 +1,130 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "errors" + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/spdx/tools-golang/spreadsheet/parse" + "github.com/xuri/excelize/v2" + "io" +) + +// sheetHandlerFunc is a func that takes in the data from a sheet as a slice of rows and iterates through them to +// fill in information in the given spdx.Document2_2. +// Returns an error if any occurred. 
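+// Each sheet has a corresponding handler; the sheetHandlers slice below maps sheet names to the handler funcs that process them.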
+type sheetHandlerFunc func(rows [][]string, doc *spdx.Document2_2) error + +// sheetHandlingInformation defines info that is needed for parsing individual sheets in a workbook. +type sheetHandlingInformation struct { + // SheetName is the name of the sheet + SheetName string + // HandlerFunc is the function that should be used to parse a particular sheet + HandlerFunc sheetHandlerFunc + // SheetIsRequired denotes whether the sheet is required to be present in the workbook, or if it is optional (false) + SheetIsRequired bool +} + +// sheetHandlers contains handling information for each sheet in the workbook. +// The order of this slice determines the order in which the sheets are processed. +var sheetHandlers = []sheetHandlingInformation{ + { + SheetName: common.SheetNameDocumentInfo, + HandlerFunc: parse.ProcessDocumentInfoRows, + SheetIsRequired: true, + }, + { + SheetName: common.SheetNamePackageInfo, + HandlerFunc: parse.ProcessPackageInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExternalRefs, + HandlerFunc: parse.ProcessPackageExternalRefsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExtractedLicenseInfo, + HandlerFunc: parse.ProcessExtractedLicenseInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameFileInfo, + HandlerFunc: parse.ProcessPerFileInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameRelationships, + HandlerFunc: parse.ProcessRelationshipsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameAnnotations, + HandlerFunc: parse.ProcessAnnotationsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameSnippets, + HandlerFunc: parse.ProcessSnippetsRows, + SheetIsRequired: false, + }, +} + +// Load2_2 takes in an io.Reader and returns an SPDX document. +func Load2_2(content io.Reader) (*spdx.Document2_2, error) { + workbook, err := excelize.OpenReader(content) + if err != nil { + return nil, err + } + + doc, err := parseWorkbook(workbook) + if err != nil { + return nil, err + } + + return doc, nil +} + +func parseWorkbook(workbook *excelize.File) (*spdx.Document2_2, error) { + doc := spdx.Document2_2{ + // ensure this pointer is not nil + CreationInfo: &spdx.CreationInfo2_2{}, + } + + for _, sheetHandlingInfo := range sheetHandlers { + rows, err := workbook.GetRows(sheetHandlingInfo.SheetName) + if err != nil { + // if the sheet doesn't exist and is required, that's a problem + if errors.As(err, &excelize.ErrSheetNotExist{}) { + if sheetHandlingInfo.SheetIsRequired { + return nil, fmt.Errorf("sheet '%s' is required but is not present", sheetHandlingInfo.SheetName) + } else { + // if it is not required, skip it + continue + } + } else { + // some other error happened + return nil, err + } + } + + // the first row is column headers, and the next row would contain actual data. + // if there are less than 2 rows present, there is no actual data in the sheet. 
+ if len(rows) < 2 { + if sheetHandlingInfo.SheetIsRequired { + return nil, fmt.Errorf("sheet '%s' is required but contains no data", sheetHandlingInfo.SheetName) + } + + continue + } + + err = sheetHandlingInfo.HandlerFunc(rows, &doc) + if err != nil { + return nil, fmt.Errorf("failed to parse sheet '%s': %w", sheetHandlingInfo.SheetName, err) + } + } + + return &doc, nil +} diff --git a/spreadsheet/spreadsheet_test.go b/spreadsheet/spreadsheet_test.go new file mode 100644 index 00000000..8931d662 --- /dev/null +++ b/spreadsheet/spreadsheet_test.go @@ -0,0 +1,518 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "fmt" + "github.com/google/go-cmp/cmp" + "os" + "testing" + + "github.com/spdx/tools-golang/spdx" +) + +func TestLoad2_2(t *testing.T) { + file, err := os.Open("../examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx") + if err != nil { + panic(fmt.Errorf("error opening File: %s", err)) + } + + got, err := Load2_2(file) + if err != nil { + t.Errorf("xls.parser.Load2_2() error = %v", err) + return + } + + // get a copy of the handwritten struct so we don't mutate it by accident + handwrittenExample := want + + if !cmp.Equal(handwrittenExample, *got) { + t.Errorf("Got incorrect struct after parsing XLSX example") + return + } +} + +// func TestWrite2_2(t *testing.T) { +// w := &bytes.Buffer{} +// // get a copy of the handwritten struct so we don't mutate it by accident +// handwrittenExample := want +// if err := Save2_2(&handwrittenExample, w); err != nil { +// t.Errorf("Save2_2() error = %v", err.Error()) +// return +// } +// +// // we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct +// parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes())) +// if err != nil { +// t.Errorf("failed to parse written document: %v", err.Error()) +// return +// } +// +// changes, err := diff.Diff(parsedDoc, &handwrittenExample) +// if err != nil { +// t.Errorf("failed to diff written doc and original doc: %v", err.Error()) +// return +// } +// +// if len(changes) != 0 { +// for _, change := range changes { +// t.Errorf("(%+v) %+v should be %+v\n", change.Path, change.From, change.To) +// } +// +// return +// } +// } + +// want is a handwritten translation of the official example xls SPDX v2.2 document into a Go struct. +// We expect that the result of parsing the official document should be this value. +// We expect that the result of writing this struct should match the official example document. 
+var want = spdx.Document2_2{ + DataLicense: "CC0-1.0", + SPDXVersion: "SPDX-2.2", + SPDXIdentifier: "DOCUMENT", + DocumentName: "SPDX-Tools-v2.0", + DocumentNamespace: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301", + CreationInfo: &spdx.CreationInfo2_2{ + LicenseListVersion: "3.9", + Creators: []spdx.Creator{ + {CreatorType: "Tool", Creator: "LicenseFind-1.0"}, + {CreatorType: "Organization", Creator: "ExampleCodeInspect ()"}, + {CreatorType: "Person", Creator: "Jane Doe ()"}, + }, + Created: "1/29/10 18:30", + CreatorComment: "This package has been shipped in source and binary form.\nThe binaries were created with gcc 4.5.1 and expect to link to\ncompatible system run time libraries.", + }, + DocumentComment: "This document was created using SPDX 2.0 using licenses from the web site.", + ExternalDocumentReferences: []spdx.ExternalDocumentRef2_2{ + { + DocumentRefID: spdx.MakeDocElementID("spdx-tool-1.2", ""), + URI: "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301", + Checksum: spdx.Checksum{ + Algorithm: spdx.SHA1, + Value: "d6a770ba38583ed4bb4525bd96e50461655d2759", + }, + }, + }, + OtherLicenses: []*spdx.OtherLicense2_2{ + { + LicenseIdentifier: "LicenseRef-1", + ExtractedText: "/*\n * (c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-2", + ExtractedText: "This package includes the GRDDL parser developed by Hewlett Packard under the following license:\n� Copyright 2007 Hewlett-Packard Development Company, LP\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: \n\nRedistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. \nRedistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
\nThe name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. \nTHIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + }, + { + LicenseIdentifier: "LicenseRef-4", + ExtractedText: "/*\n * (c) Copyright 2009 University of Bristol\n * All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n * 3. The name of the author may not be used to endorse or promote products\n * derived from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n*/", + }, + { + LicenseIdentifier: "LicenseRef-Beerware-4.2", + ExtractedText: "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp", + LicenseComment: "The beerware license has a couple of other standard variants.", + LicenseName: "Beer-Ware License (Version 42)", + LicenseCrossReferences: []string{"http://people.freebsd.org/~phk/"}, + }, + { + LicenseIdentifier: "LicenseRef-3", + ExtractedText: "The CyberNeko Software License, Version 1.0\n\n \n(C) Copyright 2002-2005, Andy Clark. All rights reserved.\n \nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer. \n\n2. 
Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n3. The end-user documentation included with the redistribution,\n if any, must include the following acknowledgment: \n \"This product includes software developed by Andy Clark.\"\n Alternately, this acknowledgment may appear in the software itself,\n if and wherever such third-party acknowledgments normally appear.\n\n4. The names \"CyberNeko\" and \"NekoHTML\" must not be used to endorse\n or promote products derived from this software without prior \n written permission. For written permission, please contact \n andyc@cyberneko.net.\n\n5. Products derived from this software may not be called \"CyberNeko\",\n nor may \"CyberNeko\" appear in their name, without prior written\n permission of the author.\n\nTHIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED\nWARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS\nBE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, \nOR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT \nOF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \nOR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, \nEVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + LicenseName: "CyberNeko License", + LicenseCrossReferences: []string{ + "http://people.apache.org/~andyc/neko/LICENSE", + "http://justasample.url.com", + }, + LicenseComment: "This is tye CyperNeko License", + }, + }, + Annotations: []*spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Jane Doe ()", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Document level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + { + Annotator: spdx.Annotator{ + Annotator: "Joe Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2010-02-10T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "This is just an example. 
Some of the non-standard licenses look like they are actually BSD 3 clause licenses", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + { + Annotator: spdx.Annotator{ + Annotator: "Suzanne Reviewer", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-03-13T00:00:00Z", + AnnotationType: "REVIEW", + AnnotationComment: "Another example reviewer.", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "DOCUMENT"), + }, + }, + Packages: []*spdx.Package2_2{ + { + PackageSPDXIdentifier: "fromDoap-1", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "NOASSERTION", + FilesAnalyzed: false, + PackageHomePage: "http://commons.apache.org/proper/commons-lang/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageName: "Apache Commons Lang", + }, + { + PackageName: "glibc", + PackageSPDXIdentifier: "Package", + PackageVersion: "2.11.1", + PackageFileName: "glibc-2.11.1.tar.gz", + PackageSupplier: &spdx.Supplier{ + Supplier: "Jane Doe (jane.doe@example.com)", + SupplierType: "Person", + }, + PackageOriginator: &spdx.Originator{ + Originator: "ExampleCodeInspect (contact@example.com)", + OriginatorType: "Organization", + }, + PackageDownloadLocation: "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz", + FilesAnalyzed: true, + PackageVerificationCode: spdx.PackageVerificationCode{ + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + ExcludedFiles: []string{"./package.spdx"}, + }, + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + { + Algorithm: "SHA256", + Value: "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd", + }, + }, + PackageHomePage: "http://ftp.gnu.org/gnu/glibc", + PackageSourceInfo: "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git.", + PackageLicenseConcluded: "(LGPL-2.0-only OR LicenseRef-3)", + PackageLicenseInfoFromFiles: []string{ + "GPL-2.0-only", + "LicenseRef-2", + "LicenseRef-1", + }, + PackageLicenseDeclared: "(LGPL-2.0-only AND LicenseRef-3)", + PackageLicenseComments: "The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.", + PackageCopyrightText: "Copyright 2008-2010 John Smith", + PackageSummary: "GNU C library.", + PackageDescription: "The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.", + PackageComment: "", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "SECURITY", + RefType: "cpe23Type", + Locator: "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*", + }, + { + Category: "OTHER", + RefType: "LocationRef-acmeforge", + Locator: "acmecorp/acmenator/4.1.3-alpha", + ExternalRefComment: "This is the external ref for Acme", + }, + }, + PackageAttributionTexts: []string{ + "The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. 
License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.", + }, + Files: []*spdx.File2_2{ + { + FileSPDXIdentifier: "CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + }, + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "Package Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "Package level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "Package"), + }, + }, + }, + { + PackageName: "Jena", + PackageSPDXIdentifier: "fromDoap-0", + PackageCopyrightText: "NOASSERTION", + PackageDownloadLocation: "https://search.maven.org/remotecontent?filepath=org/apache/jena/apache-jena/3.12.0/apache-jena-3.12.0.tar.gz", + PackageExternalReferences: []*spdx.PackageExternalReference2_2{ + { + Category: "PACKAGE_MANAGER", + RefType: "purl", + Locator: "pkg:maven/org.apache.jena/apache-jena@3.12.0", + }, + }, + FilesAnalyzed: false, + PackageHomePage: "http://www.openjena.org/", + PackageLicenseConcluded: "NOASSERTION", + PackageLicenseDeclared: "NOASSERTION", + PackageVersion: "3.12.0", + }, + { + PackageSPDXIdentifier: "Saxon", + PackageChecksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "85ed0817af83a24ad8da68c2b5094de69833983c", + }, + }, + PackageCopyrightText: "Copyright Saxonica Ltd", + PackageDescription: "The Saxon package is a collection of tools for processing XML 
documents.", + PackageDownloadLocation: "https://sourceforge.net/projects/saxon/files/Saxon-B/8.8.0.7/saxonb8-8-0-7j.zip/download", + FilesAnalyzed: false, + PackageHomePage: "http://saxon.sourceforge.net/", + PackageLicenseComments: "Other versions available for a commercial license", + PackageLicenseConcluded: "MPL-1.0", + PackageLicenseDeclared: "MPL-1.0", + PackageName: "Saxon", + PackageFileName: "saxonB-8.8.zip", + PackageVersion: "8.8", + }, + }, + Files: []*spdx.File2_2{ + { + FileSPDXIdentifier: "CommonsLangSrc", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file is used by Jena", + FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", + FileContributors: []string{"Apache Software Foundation"}, + FileName: "./lib-source/commons-lang3-3.1-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{"Apache-2.0"}, + FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", + }, + { + FileSPDXIdentifier: "JenaLib", + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", + }, + }, + FileComment: "This file belongs to Jena", + FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", + FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, + FileName: "./lib-source/jena-2.6.3-sources.jar", + FileTypes: []string{"ARCHIVE"}, + LicenseComments: "This license is used by Jena", + LicenseConcluded: "LicenseRef-1", + LicenseInfoInFiles: []string{"LicenseRef-1"}, + }, + { + FileSPDXIdentifier: "File", + Annotations: []spdx.Annotation2_2{ + { + Annotator: spdx.Annotator{ + Annotator: "File Commenter", + AnnotatorType: "Person", + }, + AnnotationDate: "2011-01-29T18:30:22Z", + AnnotationType: "OTHER", + AnnotationComment: "File level annotation", + AnnotationSPDXIdentifier: spdx.MakeDocElementID("", "File"), + }, + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "d6a770ba38583ed4bb4525bd96e50461655d2758", + }, + { + Algorithm: "MD5", + Value: "624c1abb3664f4b35547e7c73864ad24", + }, + }, + FileComment: "The concluded license was taken from the package level that the file was included in.\nThis information was found in the COPYING.txt file in the xyz directory.", + FileCopyrightText: "Copyright 2008-2010 John Smith", + FileContributors: []string{"The Regents of the University of California", "Modified by Paul Mundt lethal@linux-sh.org", "IBM Corporation"}, + FileName: "./package/foo.c", + FileTypes: []string{"SOURCE"}, + LicenseComments: "The concluded license was taken from the package level that the file was included in.", + LicenseConcluded: "(LGPL-2.0-only OR LicenseRef-2)", + LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, + FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + }, + { + FileName: "./src/org/spdx/parser/DOAPProject.java", + FileSPDXIdentifier: "DoapSource", + FileTypes: []string{ + "SOURCE", + }, + Checksums: []spdx.Checksum{ + { + Algorithm: "SHA1", + Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + }, + LicenseConcluded: "Apache-2.0", + LicenseInfoInFiles: []string{ + "Apache-2.0", + }, + FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", + FileContributors: []string{ + "Protecode Inc.", + "SPDX Technical Team Members", + "Open Logic Inc.", + "Source Auditor Inc.", + "Black Duck Software In.c", + }, + }, + }, + Snippets: []spdx.Snippet2_2{ + { + SnippetSPDXIdentifier: "Snippet", + SnippetFromFileSPDXIdentifier: "DoapSource", + Ranges: []spdx.SnippetRange{ + { + StartPointer: spdx.SnippetRangePointer{ + Offset: 310, + }, + EndPointer: spdx.SnippetRangePointer{ + Offset: 420, + }, + }, + { + StartPointer: spdx.SnippetRangePointer{ + LineNumber: 5, + }, + EndPointer: spdx.SnippetRangePointer{ + LineNumber: 23, + }, + }, + }, + SnippetLicenseConcluded: "GPL-2.0-only", + LicenseInfoInSnippet: []string{"GPL-2.0-only"}, + SnippetLicenseComments: "The concluded license was taken from package xyz, from which the snippet was copied into the current file. 
The concluded license information was found in the COPYING.txt file in package xyz.", + SnippetCopyrightText: "Copyright 2008-2010 John Smith", + SnippetComment: "This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.", + SnippetName: "from linux kernel", + }, + }, + Relationships: []*spdx.Relationship2_2{ + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("spdx-tool-1.2", "ToolsElement"), + Relationship: "COPY_OF", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "File"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "DOCUMENT"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "DESCRIBES", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "JenaLib"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "Package"), + RefB: spdx.MakeDocElementID("", "Saxon"), + Relationship: "DYNAMIC_LINK", + }, + { + RefA: spdx.MakeDocElementID("", "CommonsLangSrc"), + RefB: spdx.MakeDocElementSpecial("NOASSERTION"), + Relationship: "GENERATED_FROM", + }, + { + RefA: spdx.MakeDocElementID("", "JenaLib"), + RefB: spdx.MakeDocElementID("", "Package"), + Relationship: "CONTAINS", + }, + { + RefA: spdx.MakeDocElementID("", "File"), + RefB: spdx.MakeDocElementID("", "fromDoap-0"), + Relationship: "GENERATED_FROM", + }, + }, +} diff --git a/spreadsheet/writer.go b/spreadsheet/writer.go new file mode 100644 index 00000000..07d8872d --- /dev/null +++ b/spreadsheet/writer.go @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later + +package spdx_xls + +import ( + "io" + "sigs.k8s.io/yaml" + + "github.com/spdx/tools-golang/spdx" +) + +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer in YAML format. 
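+// NOTE: this initial implementation serializes the document as YAML; a native XLSX writer is added later in this patch series ("Add spreadsheet writer"). +// +// A minimal usage sketch (assuming a populated *spdx.Document2_2 named doc and an io.Writer named w): +// +//	err := spdx_xls.Save2_2(doc, w)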
+func Save2_2(doc *spdx.Document2_2, w io.Writer) error { + buf, err := yaml.Marshal(doc) + if err != nil { + return err + } + + _, err = w.Write(buf) + if err != nil { + return err + } + + return nil +} From f149ab8aaf9af8a2547ca5406437987a5f5b7791 Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Mon, 25 Apr 2022 15:21:35 -0700 Subject: [PATCH 5/6] Add spreadsheet writer Signed-off-by: Ian Ling --- spdx/document.go | 4 +- spreadsheet/common/document_info_columns.go | 22 +-- spreadsheet/common/position_to_axis.go | 11 ++ spreadsheet/common/sheet_names.go | 1 - spreadsheet/parse/document_info.go | 22 +-- spreadsheet/parse/package_info.go | 3 +- spreadsheet/parse/per_file_info.go | 5 +- spreadsheet/parser.go | 64 +-------- spreadsheet/sheet_handling.go | 100 ++++++++++++++ spreadsheet/spreadsheet_test.go | 111 +++------------ spreadsheet/write/annotations.go | 94 +++++++++++++ spreadsheet/write/document_info.go | 110 +++++++++++++++ spreadsheet/write/extracted_license_info.go | 57 ++++++++ spreadsheet/write/package_external_refs.go | 59 ++++++++ spreadsheet/write/package_info.go | 132 ++++++++++++++++++ spreadsheet/write/per_file_info.go | 141 ++++++++++++++++++++ spreadsheet/write/relationships.go | 53 ++++++++ spreadsheet/write/snippets.go | 86 ++++++++++++ spreadsheet/writer.go | 36 +++-- 19 files changed, 923 insertions(+), 188 deletions(-) create mode 100644 spreadsheet/common/position_to_axis.go create mode 100644 spreadsheet/sheet_handling.go create mode 100644 spreadsheet/write/annotations.go create mode 100644 spreadsheet/write/document_info.go create mode 100644 spreadsheet/write/extracted_license_info.go create mode 100644 spreadsheet/write/package_external_refs.go create mode 100644 spreadsheet/write/package_info.go create mode 100644 spreadsheet/write/per_file_info.go create mode 100644 spreadsheet/write/relationships.go create mode 100644 spreadsheet/write/snippets.go diff --git a/spdx/document.go b/spdx/document.go index 69d6c315..a14104f0 100644 --- a/spdx/document.go +++ b/spdx/document.go @@ -42,7 +42,7 @@ type ExternalDocumentRef2_2 struct { // Returns true if the object is valid, returns false and an error if it is invalid. func (e ExternalDocumentRef2_1) Validate() error { if err := e.Checksum.Validate(); err != nil { - return fmt.Errorf("invalid Checksum in DocElementID: %w", err) + return fmt.Errorf("invalid Checksum in External Document Reference: %w", err) } if e.DocumentRefID.Validate() != nil || e.URI == "" { @@ -56,7 +56,7 @@ func (e ExternalDocumentRef2_1) Validate() error { // Returns true if the object is valid, returns false and an error if it is invalid. 
func (e ExternalDocumentRef2_2) Validate() error { if err := e.Checksum.Validate(); err != nil { - return fmt.Errorf("invalid Checksum in DocElementID: %w", err) + return fmt.Errorf("invalid Checksum in External Document Reference: %w", err) } if e.DocumentRefID.Validate() != nil || e.URI == "" { diff --git a/spreadsheet/common/document_info_columns.go b/spreadsheet/common/document_info_columns.go index 813b3a9e..dcae061d 100644 --- a/spreadsheet/common/document_info_columns.go +++ b/spreadsheet/common/document_info_columns.go @@ -1,15 +1,15 @@ package common const ( - SPDXVersion = "SPDX Version" - DataLicense = "Data License" - SPDXIdentifier = "SPDX Identifier" - LicenseListVersion = "License List Version" - DocumentName = "Document Name" - DocumentNamespace = "Document Namespace" - ExternalDocumentReferences = "External Document References" - DocumentComment = "Document Comment" - Creator = "Creator" - Created = "Created" - CreatorComment = "Creator Comment" + DocumentInfoSPDXVersion = "SPDX Version" + DocumentInfoDataLicense = "Data License" + DocumentInfoSPDXIdentifier = "SPDX Identifier" + DocumentInfoLicenseListVersion = "License List Version" + DocumentInfoDocumentName = "Document Name" + DocumentInfoDocumentNamespace = "Document Namespace" + DocumentInfoExternalDocumentReferences = "External Document References" + DocumentInfoDocumentComment = "Document Comment" + DocumentInfoCreator = "Creator" + DocumentInfoCreated = "Created" + DocumentInfoCreatorComment = "Creator Comment" ) diff --git a/spreadsheet/common/position_to_axis.go b/spreadsheet/common/position_to_axis.go new file mode 100644 index 00000000..55bd8e42 --- /dev/null +++ b/spreadsheet/common/position_to_axis.go @@ -0,0 +1,11 @@ +package common + +import "fmt" + +// PositionToAxis takes a column string and a row integer and combines them into an "axis" +// to be used with the Excelize module. +// An "axis" is the word Excelize uses to describe a coordinate position within a spreadsheet, +// e.g. "A1", "B14", etc.
+func PositionToAxis(column string, row int) string { + return fmt.Sprintf("%s%d", column, row) +} diff --git a/spreadsheet/common/sheet_names.go b/spreadsheet/common/sheet_names.go index 6bb29118..1f5c98cf 100644 --- a/spreadsheet/common/sheet_names.go +++ b/spreadsheet/common/sheet_names.go @@ -7,7 +7,6 @@ const ( SheetNameFileInfo = "Per File Info" SheetNameRelationships = "Relationships" SheetNameAnnotations = "Annotations" - SheetNameReviewers = "Reviewers" SheetNameSnippets = "Snippets" SheetNameExternalRefs = "External Refs" ) diff --git a/spreadsheet/parse/document_info.go b/spreadsheet/parse/document_info.go index c6b44593..646607a3 100644 --- a/spreadsheet/parse/document_info.go +++ b/spreadsheet/parse/document_info.go @@ -22,11 +22,11 @@ func ProcessDocumentInfoRows(rows [][]string, doc *spdx.Document2_2) error { } switch columnsByIndex[columnIndex] { - case common.SPDXVersion: + case common.DocumentInfoSPDXVersion: doc.SPDXVersion = value - case common.DataLicense: + case common.DocumentInfoDataLicense: doc.DataLicense = value - case common.SPDXIdentifier: + case common.DocumentInfoSPDXIdentifier: var id spdx.DocElementID err := id.FromString(value) if err != nil { @@ -34,15 +34,15 @@ func ProcessDocumentInfoRows(rows [][]string, doc *spdx.Document2_2) error { } doc.SPDXIdentifier = id.ElementRefID - case common.LicenseListVersion: + case common.DocumentInfoLicenseListVersion: doc.CreationInfo.LicenseListVersion = value - case common.DocumentName: + case common.DocumentInfoDocumentName: doc.DocumentName = value - case common.DocumentNamespace: + case common.DocumentInfoDocumentNamespace: doc.DocumentNamespace = value - case common.DocumentComment: + case common.DocumentInfoDocumentComment: doc.DocumentComment = value - case common.ExternalDocumentReferences: + case common.DocumentInfoExternalDocumentReferences: externalDocRef := spdx.ExternalDocumentRef2_2{} err := externalDocRef.FromString(value) if err != nil { @@ -50,11 +50,11 @@ func ProcessDocumentInfoRows(rows [][]string, doc *spdx.Document2_2) error { } doc.ExternalDocumentReferences = append(doc.ExternalDocumentReferences, externalDocRef) - case common.Created: + case common.DocumentInfoCreated: doc.CreationInfo.Created = value - case common.CreatorComment: + case common.DocumentInfoCreatorComment: doc.CreationInfo.CreatorComment = value - case common.Creator: + case common.DocumentInfoCreator: creator := spdx.Creator{} err := creator.FromString(value) if err != nil { diff --git a/spreadsheet/parse/package_info.go b/spreadsheet/parse/package_info.go index 99146f28..495864f4 100644 --- a/spreadsheet/parse/package_info.go +++ b/spreadsheet/parse/package_info.go @@ -75,7 +75,8 @@ func ProcessPackageInfoRows(rows [][]string, doc *spdx.Document2_2) error { case common.PackageVerificationCode: newPackage.PackageVerificationCode.Value = value case common.PackageVerificationCodeExcludedFiles: - newPackage.PackageVerificationCode.ExcludedFiles = append(newPackage.PackageVerificationCode.ExcludedFiles, value) + excludedFiles := strings.Split(value, "\n") + newPackage.PackageVerificationCode.ExcludedFiles = append(newPackage.PackageVerificationCode.ExcludedFiles, excludedFiles...) 
case common.PackageSourceInfo: newPackage.PackageSourceInfo = value case common.PackageLicenseDeclared: diff --git a/spreadsheet/parse/per_file_info.go b/spreadsheet/parse/per_file_info.go index fcd56819..a8b9161c 100644 --- a/spreadsheet/parse/per_file_info.go +++ b/spreadsheet/parse/per_file_info.go @@ -85,14 +85,13 @@ func ProcessPerFileInfoRows(rows [][]string, doc *spdx.Document2_2) error { case common.FileInfoFileComment: newFile.FileComment = value case common.FileInfoFileDependencies: - newFile.FileDependencies = strings.Split(value, ", ") + // ignored case common.FileInfoAttributionText: newFile.FileAttributionTexts = strings.Split(value, ", ") } } // TODO: validate? - doc.Files = append(doc.Files, &newFile) // add this file to the associated package, if it is associated with a package if associatedPackageSPDXID != "" { @@ -102,6 +101,8 @@ func ProcessPerFileInfoRows(rows [][]string, doc *spdx.Document2_2) error { break } } + } else { + doc.Files = append(doc.Files, &newFile) } } diff --git a/spreadsheet/parser.go b/spreadsheet/parser.go index ecc9d64d..55fd4c56 100644 --- a/spreadsheet/parser.go +++ b/spreadsheet/parser.go @@ -6,72 +6,10 @@ import ( "errors" "fmt" "github.com/spdx/tools-golang/spdx" - "github.com/spdx/tools-golang/spreadsheet/common" - "github.com/spdx/tools-golang/spreadsheet/parse" "github.com/xuri/excelize/v2" "io" ) -// sheetHandlerFunc is a func that takes in the data from a sheet as a slice of rows and iterates through them to -// fill in information in the given spdx.Document2_2. -// Returns an error if any occurred. -type sheetHandlerFunc func(rows [][]string, doc *spdx.Document2_2) error - -// sheetHandlingInformation defines info that is needed for parsing individual sheets in a workbook. -type sheetHandlingInformation struct { - // SheetName is the name of the sheet - SheetName string - // HandlerFunc is the function that should be used to parse a particular sheet - HandlerFunc sheetHandlerFunc - // SheetIsRequired denotes whether the sheet is required to be present in the workbook, or if it is optional (false) - SheetIsRequired bool -} - -// sheetHandlers contains handling information for each sheet in the workbook. -// The order of this slice determines the order in which the sheets are processed. -var sheetHandlers = []sheetHandlingInformation{ - { - SheetName: common.SheetNameDocumentInfo, - HandlerFunc: parse.ProcessDocumentInfoRows, - SheetIsRequired: true, - }, - { - SheetName: common.SheetNamePackageInfo, - HandlerFunc: parse.ProcessPackageInfoRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameExternalRefs, - HandlerFunc: parse.ProcessPackageExternalRefsRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameExtractedLicenseInfo, - HandlerFunc: parse.ProcessExtractedLicenseInfoRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameFileInfo, - HandlerFunc: parse.ProcessPerFileInfoRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameRelationships, - HandlerFunc: parse.ProcessRelationshipsRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameAnnotations, - HandlerFunc: parse.ProcessAnnotationsRows, - SheetIsRequired: false, - }, - { - SheetName: common.SheetNameSnippets, - HandlerFunc: parse.ProcessSnippetsRows, - SheetIsRequired: false, - }, -} - // Load2_2 takes in an io.Reader and returns an SPDX document. 
func Load2_2(content io.Reader) (*spdx.Document2_2, error) { workbook, err := excelize.OpenReader(content) @@ -120,7 +58,7 @@ func parseWorkbook(workbook *excelize.File) (*spdx.Document2_2, error) { continue } - err = sheetHandlingInfo.HandlerFunc(rows, &doc) + err = sheetHandlingInfo.ParserFunc(rows, &doc) if err != nil { return nil, fmt.Errorf("failed to parse sheet '%s': %w", sheetHandlingInfo.SheetName, err) } diff --git a/spreadsheet/sheet_handling.go b/spreadsheet/sheet_handling.go new file mode 100644 index 00000000..dbff80d3 --- /dev/null +++ b/spreadsheet/sheet_handling.go @@ -0,0 +1,100 @@ +package spdx_xls + +import ( + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/spdx/tools-golang/spreadsheet/parse" + "github.com/spdx/tools-golang/spreadsheet/write" + "github.com/xuri/excelize/v2" +) + +// sheetParserFunc is a function that takes in the data from a sheet as a slice of rows and iterates through them to +// fill in information in the given spdx.Document2_2. +// Returns an error if any occurred. +type sheetParserFunc func(rows [][]string, doc *spdx.Document2_2) error + +// sheetWriterFunc is a function that takes in a spdx.Document2_2 and a spreadsheet as a *excelize.File and iterates +// through particular section of the Document spdx.Document2_2 in order to write out data to the spreadsheet. +// Returns an error if any occurred. +type sheetWriterFunc func(doc *spdx.Document2_2, spreadsheet *excelize.File) error + +// sheetHandlingInformation defines info that is needed for parsing individual sheets in a workbook. +type sheetHandlingInformation struct { + // SheetName is the name of the sheet + SheetName string + + // HeadersByColumn is a map of header names to which column the header should go in. + // This is used only when writing/exporting a spreadsheet. + // During parsing/imports, the header positions are parsed dynamically. + HeadersByColumn map[string]string + + // ParserFunc is the function that should be used to parse a particular sheet + ParserFunc sheetParserFunc + + // WriterFunc is the function that should be used to write a particular sheet + WriterFunc sheetWriterFunc + + // SheetIsRequired denotes whether the sheet is required to be present in the workbook, or if it is optional (false) + SheetIsRequired bool +} + +// sheetHandlers contains handling information for each sheet in the workbook. +// The order of this slice determines the order in which the sheets are processed. 
+var sheetHandlers = []sheetHandlingInformation{ + { + SheetName: common.SheetNameDocumentInfo, + HeadersByColumn: write.DocumentInfoHeadersByColumn, + ParserFunc: parse.ProcessDocumentInfoRows, + WriterFunc: write.WriteDocumentInfoRows, + SheetIsRequired: true, + }, + { + SheetName: common.SheetNamePackageInfo, + HeadersByColumn: write.PackageInfoHeadersByColumn, + ParserFunc: parse.ProcessPackageInfoRows, + WriterFunc: write.WritePackageInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExternalRefs, + HeadersByColumn: write.ExternalRefsHeadersByColumn, + ParserFunc: parse.ProcessPackageExternalRefsRows, + WriterFunc: write.WriteExternalRefsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameExtractedLicenseInfo, + HeadersByColumn: write.ExtractedLicenseInfoHeadersByColumn, + ParserFunc: parse.ProcessExtractedLicenseInfoRows, + WriterFunc: write.WriteExtractedLicenseInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameFileInfo, + HeadersByColumn: write.FileInfoHeadersByColumn, + ParserFunc: parse.ProcessPerFileInfoRows, + WriterFunc: write.WriteFileInfoRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameRelationships, + HeadersByColumn: write.RelationshipsHeadersByColumn, + ParserFunc: parse.ProcessRelationshipsRows, + WriterFunc: write.WriteRelationshipsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameAnnotations, + HeadersByColumn: write.AnnotationsHeadersByColumn, + ParserFunc: parse.ProcessAnnotationsRows, + WriterFunc: write.WriteAnnotationsRows, + SheetIsRequired: false, + }, + { + SheetName: common.SheetNameSnippets, + HeadersByColumn: write.SnippetsHeadersByColumn, + ParserFunc: parse.ProcessSnippetsRows, + WriterFunc: write.WriteSnippetsRows, + SheetIsRequired: false, + }, +} diff --git a/spreadsheet/spreadsheet_test.go b/spreadsheet/spreadsheet_test.go index 8931d662..82f94b84 100644 --- a/spreadsheet/spreadsheet_test.go +++ b/spreadsheet/spreadsheet_test.go @@ -3,6 +3,7 @@ package spdx_xls import ( + "bytes" "fmt" "github.com/google/go-cmp/cmp" "os" @@ -32,36 +33,27 @@ func TestLoad2_2(t *testing.T) { } } -// func TestWrite2_2(t *testing.T) { -// w := &bytes.Buffer{} -// // get a copy of the handwritten struct so we don't mutate it on accident -// handwrittenExample := want -// if err := Save2_2(&handwrittenExample, w); err != nil { -// t.Errorf("Save2_2() error = %v", err.Error()) -// return -// } -// -// // we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct -// parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes())) -// if err != nil { -// t.Errorf("failed to parse written document: %v", err.Error()) -// return -// } -// -// changes, err := diff.Diff(parsedDoc, &handwrittenExample) -// if err != nil { -// t.Errorf("failed to diff written doc and original doc: %v", err.Error()) -// return -// } -// -// if len(changes) != 0 { -// for _, change := range changes { -// t.Errorf("(%+v) %+v should be %+v\n", change.Path, change.From, change.To) -// } -// -// return -// } -// } +func TestWrite2_2(t *testing.T) { + w := &bytes.Buffer{} + // get a copy of the handwritten struct so we don't mutate it on accident + handwrittenExample := want + if err := Save2_2(&handwrittenExample, w); err != nil { + t.Errorf("Save2_2() error = %v", err.Error()) + return + } + + // we should be able to parse what the writer wrote, and it should be identical to the original handwritten struct + parsedDoc, err := Load2_2(bytes.NewReader(w.Bytes())) 
+ if err != nil { + t.Errorf("failed to parse written document: %v", err.Error()) + return + } + + if cmp.Equal(handwrittenExample, parsedDoc) { + t.Errorf("Got incorrect struct after writing and re-parsing XLSX example") + return + } +} // want is handwritten translation of the official example xls SPDX v2.2 document into a Go struct. // We expect that the result of parsing the official document should be this value. @@ -344,40 +336,6 @@ var want = spdx.Document2_2{ }, }, Files: []*spdx.File2_2{ - { - FileSPDXIdentifier: "CommonsLangSrc", - Checksums: []spdx.Checksum{ - { - Algorithm: "SHA1", - Value: "c2b4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file is used by Jena", - FileCopyrightText: "Copyright 2001-2011 The Apache Software Foundation", - FileContributors: []string{"Apache Software Foundation"}, - FileName: "./lib-source/commons-lang3-3.1-sources.jar", - FileTypes: []string{"ARCHIVE"}, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFiles: []string{"Apache-2.0"}, - FileNotice: "Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())", - }, - { - FileSPDXIdentifier: "JenaLib", - Checksums: []spdx.Checksum{ - { - Algorithm: "SHA1", - Value: "3ab4e1c67a2d28fced849ee1bb76e7391b93f125", - }, - }, - FileComment: "This file belongs to Jena", - FileCopyrightText: "(c) Copyright 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP", - FileContributors: []string{"Apache Software Foundation", "Hewlett Packard Inc."}, - FileName: "./lib-source/jena-2.6.3-sources.jar", - FileTypes: []string{"ARCHIVE"}, - LicenseComments: "This license is used by Jena", - LicenseConcluded: "LicenseRef-1", - LicenseInfoInFiles: []string{"LicenseRef-1"}, - }, { FileSPDXIdentifier: "File", Annotations: []spdx.Annotation2_2{ @@ -412,31 +370,6 @@ var want = spdx.Document2_2{ LicenseInfoInFiles: []string{"GPL-2.0-only", "LicenseRef-2"}, FileNotice: "Copyright (c) 2001 Aaron Lehmann aaroni@vitelus.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the �Software�), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: \nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED �AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", }, - { - FileName: "./src/org/spdx/parser/DOAPProject.java", - FileSPDXIdentifier: "DoapSource", - FileTypes: []string{ - "SOURCE", - }, - Checksums: []spdx.Checksum{ - { - Algorithm: "SHA1", - Value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - }, - LicenseConcluded: "Apache-2.0", - LicenseInfoInFiles: []string{ - "Apache-2.0", - }, - FileCopyrightText: "Copyright 2010, 2011 Source Auditor Inc.", - FileContributors: []string{ - "Protecode Inc.", - "SPDX Technical Team Members", - "Open Logic Inc.", - "Source Auditor Inc.", - "Black Duck Software In.c", - }, - }, }, Snippets: []spdx.Snippet2_2{ { diff --git a/spreadsheet/write/annotations.go b/spreadsheet/write/annotations.go new file mode 100644 index 00000000..bb466baa --- /dev/null +++ b/spreadsheet/write/annotations.go @@ -0,0 +1,94 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var AnnotationsHeadersByColumn = map[string]string{ + "A": common.AnnotationsSPDXIdentifier, + "B": common.AnnotationsComment, + "C": common.AnnotationsDate, + "D": common.AnnotationsAnnotator, + "E": common.AnnotationsType, +} + +func WriteAnnotationsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + // annotations can be document-level, package-level, or file-level + + // document-level + for _, annotation := range doc.Annotations { + err := processAnnotation(annotation, spdx.MakeDocElementID("", string(doc.SPDXIdentifier)), spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process document-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + + // package-level + for _, pkg := range doc.Packages { + for _, annotation := range pkg.Annotations { + err := processAnnotation(&annotation, spdx.MakeDocElementID("", string(pkg.PackageSPDXIdentifier)), spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process package-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + } + + // file-level + for _, file := range doc.Files { + for _, annotation := range file.Annotations { + err := processAnnotation(&annotation, spdx.MakeDocElementID("", string(file.FileSPDXIdentifier)), spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process file-level annotation: %s", err.Error()) + } + + rowNum += 1 + } + } + + return nil +} + +func processAnnotation(annotation *spdx.Annotation2_2, spdxID spdx.DocElementID, spreadsheet *excelize.File, rowNum int) error { + for column, valueType := range AnnotationsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.AnnotationsSPDXIdentifier: + value = spdxID + case common.AnnotationsComment: + value = annotation.AnnotationComment + case common.AnnotationsDate: + value = annotation.AnnotationDate + case common.AnnotationsAnnotator: + err = annotation.Annotator.Validate() + value = annotation.Annotator.String() + case common.AnnotationsType: + value = annotation.AnnotationType + } + + if err != nil { 
+ return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameAnnotations, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + return nil +} diff --git a/spreadsheet/write/document_info.go b/spreadsheet/write/document_info.go new file mode 100644 index 00000000..c807ed0f --- /dev/null +++ b/spreadsheet/write/document_info.go @@ -0,0 +1,110 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var DocumentInfoHeadersByColumn = map[string]string{ + "A": common.DocumentInfoSPDXVersion, + "B": common.DocumentInfoDataLicense, + "C": common.DocumentInfoSPDXIdentifier, + "D": common.DocumentInfoLicenseListVersion, + "E": common.DocumentInfoDocumentName, + "F": common.DocumentInfoDocumentNamespace, + "G": common.DocumentInfoExternalDocumentReferences, + "H": common.DocumentInfoDocumentComment, + "I": common.DocumentInfoCreator, + "J": common.DocumentInfoCreated, + "K": common.DocumentInfoCreatorComment, +} + +func WriteDocumentInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + if doc.CreationInfo == nil { + return fmt.Errorf("document is missing CreationInfo") + } + + // some data in this sheet gets split across rows, instead of being split up by newlines or commas. + // the two columns where this happens are Creators and External Document Refs. + // figure out how many rows we're going to need + numCreators := len(doc.CreationInfo.Creators) + numExternalDocRefs := len(doc.ExternalDocumentReferences) + rowsNeeded := 1 + if numCreators > numExternalDocRefs { + rowsNeeded = numCreators + } else if numExternalDocRefs > 1 { + rowsNeeded = numExternalDocRefs + } + + for rowNum := 2; rowNum-2 < rowsNeeded; rowNum++ { + for column, valueType := range DocumentInfoHeadersByColumn { + // only certain columns are used past the first data row + if rowNum > 2 && valueType != common.DocumentInfoCreator && valueType != common.DocumentInfoExternalDocumentReferences { + continue + } + + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.DocumentInfoSPDXVersion: + value = doc.SPDXVersion + case common.DocumentInfoDataLicense: + value = doc.DataLicense + case common.DocumentInfoSPDXIdentifier: + value = doc.SPDXIdentifier + case common.DocumentInfoLicenseListVersion: + value = doc.CreationInfo.LicenseListVersion + case common.DocumentInfoDocumentName: + value = doc.DocumentName + case common.DocumentInfoDocumentNamespace: + value = doc.DocumentNamespace + case common.DocumentInfoExternalDocumentReferences: + if rowNum-2 > numExternalDocRefs-1 { + continue + } + + ref := doc.ExternalDocumentReferences[rowNum-2] + if err = ref.Validate(); err != nil { + break + } + + value = ref.String() + case common.DocumentInfoDocumentComment: + value = doc.DocumentComment + case common.DocumentInfoCreator: + if rowNum-2 > numCreators-1 { + continue + } + + creator := doc.CreationInfo.Creators[rowNum-2] + if err = creator.Validate(); err != nil { + break + } + + value = creator.String() + case common.DocumentInfoCreated: + value = doc.CreationInfo.Created + case common.DocumentInfoCreatorComment: + 
value = doc.CreationInfo.CreatorComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameDocumentInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/extracted_license_info.go b/spreadsheet/write/extracted_license_info.go new file mode 100644 index 00000000..c0202e45 --- /dev/null +++ b/spreadsheet/write/extracted_license_info.go @@ -0,0 +1,57 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var ExtractedLicenseInfoHeadersByColumn = map[string]string{ + "A": common.LicenseInfoIdentifier, + "B": common.LicenseInfoExtractedText, + "C": common.LicenseInfoLicenseName, + "D": common.LicenseInfoCrossReferenceURLs, + "E": common.LicenseInfoComment, +} + +func WriteExtractedLicenseInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, license := range doc.OtherLicenses { + // get correct row number. first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range ExtractedLicenseInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.LicenseInfoIdentifier: + value = license.LicenseIdentifier + case common.LicenseInfoExtractedText: + value = license.ExtractedText + case common.LicenseInfoLicenseName: + value = license.LicenseName + case common.LicenseInfoCrossReferenceURLs: + value = strings.Join(license.LicenseCrossReferences, ", ") + case common.LicenseInfoComment: + value = license.LicenseComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameExtractedLicenseInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/package_external_refs.go b/spreadsheet/write/package_external_refs.go new file mode 100644 index 00000000..0fe94835 --- /dev/null +++ b/spreadsheet/write/package_external_refs.go @@ -0,0 +1,59 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var ExternalRefsHeadersByColumn = map[string]string{ + "A": common.ExternalRefPackageID, + "B": common.ExternalRefCategory, + "C": common.ExternalRefType, + "D": common.ExternalRefLocator, + "E": common.ExternalRefComment, +} + +func WriteExternalRefsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + for _, pkg := range doc.Packages { + for _, externalRef := range pkg.PackageExternalReferences { + for column, valueType := range ExternalRefsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case 
common.ExternalRefPackageID: + value = pkg.PackageSPDXIdentifier + case common.ExternalRefCategory: + value = externalRef.Category + case common.ExternalRefType: + value = externalRef.RefType + case common.ExternalRefLocator: + value = externalRef.Locator + case common.ExternalRefComment: + value = externalRef.ExternalRefComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameExternalRefs, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + rowNum += 1 + } + } + + return nil +} diff --git a/spreadsheet/write/package_info.go b/spreadsheet/write/package_info.go new file mode 100644 index 00000000..a541ea50 --- /dev/null +++ b/spreadsheet/write/package_info.go @@ -0,0 +1,132 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var PackageInfoHeadersByColumn = map[string]string{ + "A": common.PackageName, + "B": common.PackageSPDXIdentifier, + "C": common.PackageVersion, + "D": common.PackageFileName, + "E": common.PackageSupplier, + "F": common.PackageOriginator, + "G": common.PackageHomePage, + "H": common.PackageDownloadLocation, + "I": common.PackageChecksum, + "J": common.PackageVerificationCode, + "K": common.PackageVerificationCodeExcludedFiles, + "L": common.PackageSourceInfo, + "M": common.PackageLicenseDeclared, + "N": common.PackageLicenseConcluded, + "O": common.PackageLicenseInfoFromFiles, + "P": common.PackageLicenseComments, + "Q": common.PackageCopyrightText, + "R": common.PackageSummary, + "S": common.PackageDescription, + "T": common.PackageAttributionText, + "U": common.PackageFilesAnalyzed, + "V": common.PackageComments, +} + +func WritePackageInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, pkg := range doc.Packages { + // get correct row number. 
first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range PackageInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.PackageName: + value = pkg.PackageName + case common.PackageSPDXIdentifier: + value = pkg.PackageSPDXIdentifier + case common.PackageVersion: + value = pkg.PackageVersion + case common.PackageFileName: + value = pkg.PackageFileName + case common.PackageSupplier: + if pkg.PackageSupplier == nil { + continue + } + + err = pkg.PackageSupplier.Validate() + value = pkg.PackageSupplier.String() + case common.PackageOriginator: + if pkg.PackageOriginator == nil { + continue + } + + err = pkg.PackageOriginator.Validate() + value = pkg.PackageOriginator.String() + case common.PackageHomePage: + value = pkg.PackageHomePage + case common.PackageDownloadLocation: + value = pkg.PackageDownloadLocation + case common.PackageChecksum: + checksums := make([]string, 0, len(pkg.PackageChecksums)) + for _, checksum := range pkg.PackageChecksums { + if err = checksum.Validate(); err != nil { + break + } + + checksums = append(checksums, checksum.String()) + } + + value = strings.Join(checksums, "\n") + case common.PackageVerificationCode: + value = pkg.PackageVerificationCode.Value + case common.PackageVerificationCodeExcludedFiles: + value = strings.Join(pkg.PackageVerificationCode.ExcludedFiles, "\n") + case common.PackageSourceInfo: + value = pkg.PackageSourceInfo + case common.PackageLicenseDeclared: + value = pkg.PackageLicenseDeclared + case common.PackageLicenseConcluded: + value = pkg.PackageLicenseConcluded + case common.PackageLicenseInfoFromFiles: + value = strings.Join(pkg.PackageLicenseInfoFromFiles, ",") + case common.PackageLicenseComments: + value = pkg.PackageLicenseComments + case common.PackageCopyrightText: + value = pkg.PackageCopyrightText + case common.PackageSummary: + value = pkg.PackageSummary + case common.PackageDescription: + value = pkg.PackageDescription + case common.PackageAttributionText: + texts := make([]string, 0, len(pkg.PackageAttributionTexts)) + for _, text := range pkg.PackageAttributionTexts { + // these get wrapped in quotes + texts = append(texts, fmt.Sprintf("\"%s\"", text)) + } + value = strings.Join(texts, "\n") + case common.PackageFilesAnalyzed: + value = pkg.FilesAnalyzed + case common.PackageComments: + value = pkg.PackageComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNamePackageInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/per_file_info.go b/spreadsheet/write/per_file_info.go new file mode 100644 index 00000000..3c5f2829 --- /dev/null +++ b/spreadsheet/write/per_file_info.go @@ -0,0 +1,141 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var FileInfoHeadersByColumn = map[string]string{ + "A": common.FileInfoFileName, + "B": common.FileInfoSPDXIdentifier, + "C": common.FileInfoPackageIdentifier, + "D": common.FileInfoFileTypes, + "E": 
common.FileInfoFileChecksums, + "F": common.FileInfoLicenseConcluded, + "G": common.FileInfoLicenseInfoInFile, + "H": common.FileInfoLicenseComments, + "I": common.FileInfoFileCopyrightText, + "J": common.FileInfoNoticeText, + "K": common.FileInfoArtifactOfProject, + "L": common.FileInfoArtifactOfHomepage, + "M": common.FileInfoArtifactOfURL, + "N": common.FileInfoContributors, + "O": common.FileInfoFileComment, + "P": common.FileInfoFileDependencies, + "Q": common.FileInfoAttributionText, +} + +func WriteFileInfoRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + rowNum := 2 + + // files can appear at the document level, or the package level + + // document-level + for _, file := range doc.Files { + err := processFileInfo(file, "", spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process document-level file info: %s", err.Error()) + } + + rowNum += 1 + } + + // package-level + for _, pkg := range doc.Packages { + for _, file := range pkg.Files { + err := processFileInfo(file, pkg.PackageSPDXIdentifier, spreadsheet, rowNum) + if err != nil { + return fmt.Errorf("failed to process package-level file info: %s", err.Error()) + } + + rowNum += 1 + } + } + + return nil +} + +func processFileInfo(file *spdx.File2_2, packageIdentifier spdx.ElementID, spreadsheet *excelize.File, rowNum int) error { + for column, valueType := range FileInfoHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.FileInfoFileName: + value = file.FileName + case common.FileInfoSPDXIdentifier: + value = file.FileSPDXIdentifier + case common.FileInfoPackageIdentifier: + // a file can optionally be associated with a package + value = "" + if packageIdentifier != "" { + value = packageIdentifier + } + case common.FileInfoFileTypes: + value = strings.Join(file.FileTypes, "\n") + case common.FileInfoFileChecksums: + checksums := make([]string, 0, len(file.Checksums)) + for _, checksum := range file.Checksums { + if err = checksum.Validate(); err != nil { + break + } + + checksums = append(checksums, checksum.String()) + } + + value = strings.Join(checksums, "\n") + case common.FileInfoLicenseConcluded: + value = file.LicenseConcluded + case common.FileInfoLicenseInfoInFile: + value = strings.Join(file.LicenseInfoInFiles, ", ") + case common.FileInfoLicenseComments: + value = file.LicenseComments + case common.FileInfoFileCopyrightText: + value = file.FileCopyrightText + case common.FileInfoNoticeText: + value = file.FileNotice + case common.FileInfoArtifactOfProject: + // ignored + case common.FileInfoArtifactOfHomepage: + // ignored + case common.FileInfoArtifactOfURL: + // ignored + case common.FileInfoContributors: + contributors := make([]string, 0, len(file.FileContributors)) + for _, contributor := range file.FileContributors { + // these get wrapped in quotes + contributors = append(contributors, fmt.Sprintf("\"%s\"", contributor)) + } + value = strings.Join(contributors, ",") + case common.FileInfoFileComment: + value = file.FileComment + case common.FileInfoFileDependencies: + // ignored + case common.FileInfoAttributionText: + texts := make([]string, 0, len(file.FileAttributionTexts)) + for _, text := range file.FileAttributionTexts { + // these get wrapped in quotes + texts = append(texts, fmt.Sprintf("\"%s\"", text)) 
+ } + value = strings.Join(texts, "\n") + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameFileInfo, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + + return nil +} diff --git a/spreadsheet/write/relationships.go b/spreadsheet/write/relationships.go new file mode 100644 index 00000000..66792465 --- /dev/null +++ b/spreadsheet/write/relationships.go @@ -0,0 +1,53 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" +) + +var RelationshipsHeadersByColumn = map[string]string{ + "A": common.RelationshipsRefA, + "B": common.RelationshipsRelationship, + "C": common.RelationshipsRefB, + "D": common.RelationshipsComment, +} + +func WriteRelationshipsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, relationship := range doc.Relationships { + // get correct row number. first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range RelationshipsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.RelationshipsRefA: + value = relationship.RefA + case common.RelationshipsRelationship: + value = relationship.Relationship + case common.RelationshipsRefB: + value = relationship.RefB + case common.RelationshipsComment: + value = relationship.RelationshipComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameRelationships, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/write/snippets.go b/spreadsheet/write/snippets.go new file mode 100644 index 00000000..f280a89c --- /dev/null +++ b/spreadsheet/write/snippets.go @@ -0,0 +1,86 @@ +package write + +import ( + "fmt" + "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "strings" +) + +var SnippetsHeadersByColumn = map[string]string{ + "A": common.SnippetsID, + "B": common.SnippetsName, + "C": common.SnippetsFromFileID, + "D": common.SnippetsByteRange, + "E": common.SnippetsLineRange, + "F": common.SnippetsLicenseConcluded, + "G": common.SnippetsLicenseInfoInSnippet, + "H": common.SnippetsLicenseComments, + "I": common.SnippetsCopyrightText, + "J": common.SnippetsComment, +} + +func WriteSnippetsRows(doc *spdx.Document2_2, spreadsheet *excelize.File) error { + for ii, snippet := range doc.Snippets { + // get correct row number. 
first row is headers (+1) and Go slices are zero-indexed (+1) + rowNum := ii + 2 + + for column, valueType := range SnippetsHeadersByColumn { + axis := common.PositionToAxis(column, rowNum) + + // set `value` to the value to be written to the spreadsheet cell + var value interface{} + // if there was a problem determining `value`, set err to something non-nil and processing will be aborted + var err error + + switch valueType { + case common.SnippetsID: + value = snippet.SnippetSPDXIdentifier + case common.SnippetsName: + value = snippet.SnippetName + case common.SnippetsFromFileID: + value = snippet.SnippetFromFileSPDXIdentifier + case common.SnippetsByteRange: + // find a byte range, if there is one + value = "" + for _, snippetRange := range snippet.Ranges { + if snippetRange.EndPointer.Offset != 0 { + value = snippetRange.String() + break + } + } + case common.SnippetsLineRange: + // find a line range, if there is one + value = "" + for _, snippetRange := range snippet.Ranges { + if snippetRange.EndPointer.LineNumber != 0 { + value = snippetRange.String() + break + } + } + case common.SnippetsLicenseConcluded: + value = snippet.SnippetLicenseConcluded + case common.SnippetsLicenseInfoInSnippet: + value = strings.Join(snippet.LicenseInfoInSnippet, ", ") + case common.SnippetsLicenseComments: + value = snippet.SnippetLicenseComments + case common.SnippetsCopyrightText: + value = snippet.SnippetCopyrightText + case common.SnippetsComment: + value = snippet.SnippetComment + } + + if err != nil { + return fmt.Errorf("failed to translate %s for row %d: %s", valueType, rowNum, err.Error()) + } + + err = spreadsheet.SetCellValue(common.SheetNameSnippets, axis, value) + if err != nil { + return fmt.Errorf("failed to set cell %s to %+v: %s", axis, value, err.Error()) + } + } + } + + return nil +} diff --git a/spreadsheet/writer.go b/spreadsheet/writer.go index 07d8872d..91263d58 100644 --- a/spreadsheet/writer.go +++ b/spreadsheet/writer.go @@ -3,23 +3,43 @@ package spdx_xls import ( - "io" - "sigs.k8s.io/yaml" - + "fmt" "github.com/spdx/tools-golang/spdx" + "github.com/spdx/tools-golang/spreadsheet/common" + "github.com/xuri/excelize/v2" + "io" ) -// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer in YAML format. +// Save2_2 takes an SPDX Document (version 2.2) and an io.Writer, and writes the document to the writer as an XLSX file. 
func Save2_2(doc *spdx.Document2_2, w io.Writer) error { - buf, err := yaml.Marshal(doc) - if err != nil { - return err + spreadsheet := excelize.NewFile() + + for _, sheetHandlingInfo := range sheetHandlers { + spreadsheet.NewSheet(sheetHandlingInfo.SheetName) + + err := writeHeaders(spreadsheet, sheetHandlingInfo.SheetName, sheetHandlingInfo.HeadersByColumn) + if err != nil { + return fmt.Errorf("failed to write headers for sheet %s: %s", sheetHandlingInfo.SheetName, err.Error()) + } + + err = sheetHandlingInfo.WriterFunc(doc, spreadsheet) + if err != nil { + return fmt.Errorf("failed to write data for sheet %s: %s", sheetHandlingInfo.SheetName, err.Error()) + } } - _, err = w.Write(buf) + err := spreadsheet.Write(w) if err != nil { return err } return nil } + +func writeHeaders(spreadsheet *excelize.File, sheetName string, headersByColumn map[string]string) error { + for column, header := range headersByColumn { + err := spreadsheet.SetCellValue(sheetName, common.PositionToAxis(column, 1), header) + if err != nil { + return err + } + } + + return nil +} From 1f383332136e8b559f3661cf3540b0ceb43ebcbf Mon Sep 17 00:00:00 2001 From: Ian Ling Date: Mon, 25 Apr 2022 15:22:12 -0700 Subject: [PATCH 6/6] Use go 1.15 Signed-off-by: Ian Ling --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 82d1f53e..d0720a3f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -9,7 +9,7 @@ jobs: - uses: actions/checkout@main - uses: actions/setup-go@v2 with: - go-version: '1.14' + go-version: '1.15' - name: Run tests run: go test -v -covermode=count -coverprofile=profile.cov ./... - name: Send coverage report to coveralls
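As a usage sketch for the spreadsheet support added in this series: the round-trip below relies only on the Load2_2 and Save2_2 signatures introduced above and assumes the import path github.com/spdx/tools-golang/spreadsheet (package spdx_xls); the input file path and the main wrapper are illustrative, not part of these patches.

package main

import (
	"bytes"
	"fmt"
	"os"

	spdx_xls "github.com/spdx/tools-golang/spreadsheet"
)

func main() {
	// Open an SPDX 2.2 workbook (path is illustrative).
	in, err := os.Open("examples/sample-docs/xls/SPDXSpreadsheetExample-v2.2.xlsx")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer in.Close()

	// Parse the workbook into an in-memory SPDX document.
	doc, err := spdx_xls.Load2_2(in)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Write the same document back out as an XLSX workbook.
	out := &bytes.Buffer{}
	if err := spdx_xls.Save2_2(doc, out); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	fmt.Printf("round-tripped %q: %d bytes of XLSX\n", doc.DocumentName, out.Len())
}

Since both entry points take plain io.Reader and io.Writer values, the same calls work against files, buffers, or network streams.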