From 43ec2b62fa03f06398249542549794ddc693870b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=F0=9F=8C=B2=20Harry=20=F0=9F=8C=8A=20John=20=F0=9F=8F=94?= Date: Tue, 29 Jul 2025 20:40:14 -0700 Subject: [PATCH] *: Update prometheus/thanos/promql-engine MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 🌲 Harry 🌊 John 🏔 --- .github/workflows/test-build-deploy.yml | 2 +- .golangci.yml | 1 + Makefile | 20 +- go.mod | 71 +- go.sum | 214 +- integration/e2e/images/images.go | 2 +- integration/parquet_querier_test.go | 15 +- integration/query_fuzz_test.go | 61 +- integration/ruler_test.go | 10 +- pkg/api/handlers.go | 3 + pkg/api/handlers_test.go | 5 +- pkg/chunk/fixtures.go | 38 +- pkg/chunk/json_helpers.go | 32 +- pkg/compactor/compactor_metrics_test.go | 1 + pkg/compactor/compactor_paritioning_test.go | 5 +- pkg/compactor/compactor_test.go | 2 +- ...rded_compaction_lifecycle_callback_test.go | 3 +- pkg/compactor/sharded_posting.go | 8 +- pkg/compactor/sharded_posting_test.go | 16 +- pkg/configs/userconfig/config.go | 2 +- pkg/configs/userconfig/config_test.go | 10 +- pkg/cortex/modules.go | 1 + pkg/cortexpb/compat.go | 16 +- pkg/cortexpb/compat_test.go | 16 +- pkg/cortexpb/signature.go | 2 +- pkg/distributor/distributor.go | 2 +- pkg/distributor/distributor_test.go | 361 ++- pkg/ingester/active_series_test.go | 16 +- pkg/ingester/errors.go | 2 +- pkg/ingester/ingester.go | 34 +- pkg/ingester/ingester_test.go | 152 +- pkg/ingester/user_state.go | 6 +- pkg/ingester/user_state_test.go | 4 +- pkg/parquetconverter/converter_test.go | 19 +- pkg/querier/blocks_store_queryable_test.go | 170 +- pkg/querier/codec/protobuf_codec.go | 41 +- pkg/querier/codec/protobuf_codec_test.go | 7 +- pkg/querier/distributor_queryable_test.go | 4 +- pkg/querier/error_translate_queryable_test.go | 2 + pkg/querier/parquet_queryable_test.go | 10 +- pkg/querier/querier_test.go | 16 +- pkg/querier/series/series_set.go | 13 +- pkg/querier/series/series_set_test.go | 8 +- pkg/querier/stats_renderer_test.go | 2 + .../exemplar_merge_queryable.go | 5 +- .../tenantfederation/merge_queryable.go | 29 +- .../tenantfederation/merge_queryable_test.go | 168 +- pkg/querier/testutils.go | 2 +- pkg/querier/tripperware/distributed_query.go | 5 +- .../tripperware/queryrange/results_cache.go | 7 +- .../tripperware/queryrange/test_utils.go | 7 +- .../tripperware/queryrange/test_utils_test.go | 49 +- pkg/querier/tripperware/queryrange/value.go | 6 +- .../tripperware/queryrange/value_test.go | 28 +- pkg/ruler/external_labels.go | 6 +- pkg/ruler/external_labels_test.go | 2 +- pkg/ruler/frontend_decoder.go | 13 +- pkg/ruler/notifier_test.go | 8 +- pkg/ruler/ruler_test.go | 28 +- pkg/storage/bucket/client_mock.go | 55 +- pkg/storage/bucket/prefixed_bucket_client.go | 17 +- pkg/storage/bucket/s3/bucket_client.go | 10 +- pkg/storage/bucket/s3/bucket_client_test.go | 6 +- pkg/storage/bucket/sse_bucket_client.go | 8 +- .../tsdb/bucketindex/block_ids_fetcher.go | 8 +- .../bucketindex/block_ids_fetcher_test.go | 9 +- .../tsdb/bucketindex/markers_bucket_client.go | 12 +- pkg/storage/tsdb/cached_chunks_querier.go | 2 +- pkg/storage/tsdb/testutil/objstore.go | 4 +- .../bucket_index_metadata_fetcher_test.go | 8 + pkg/storegateway/bucket_stores_test.go | 3 +- pkg/storegateway/gateway_test.go | 2 +- pkg/util/labels.go | 6 +- pkg/util/metrics_helper.go | 6 +- pkg/util/push/otlp.go | 11 +- pkg/util/validation/limits.go | 11 +- pkg/util/validation/limits_test.go | 4 +- vendor/cloud.google.com/go/auth/CHANGES.md | 29 + 
.../externalaccount/externalaccount.go | 5 +- .../internal/externalaccount/x509_provider.go | 173 +- .../go/auth/grpctransport/directpath.go | 50 +- .../go/auth/grpctransport/grpctransport.go | 12 +- .../go/auth/internal/credsfile/filetype.go | 1 + .../go/auth/internal/transport/cba.go | 24 - .../internal/transport/cert/workload_cert.go | 38 +- vendor/cloud.google.com/go/iam/CHANGES.md | 44 + .../go/iam/apiv1/iampb/iam_policy.pb.go | 2 +- .../go/iam/apiv1/iampb/options.pb.go | 2 +- .../go/iam/apiv1/iampb/policy.pb.go | 2 +- .../apiv1/iampb/resource_policy_member.pb.go | 2 +- .../go/internal/.repo-metadata-full.json | 40 +- .../apiv3/v2/monitoringpb/alert.pb.go | 2 +- .../apiv3/v2/monitoringpb/alert_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/common.pb.go | 2 +- .../v2/monitoringpb/dropped_labels.pb.go | 2 +- .../apiv3/v2/monitoringpb/group.pb.go | 2 +- .../apiv3/v2/monitoringpb/group_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/metric.pb.go | 2 +- .../v2/monitoringpb/metric_service.pb.go | 2 +- .../v2/monitoringpb/mutation_record.pb.go | 2 +- .../apiv3/v2/monitoringpb/notification.pb.go | 2 +- .../monitoringpb/notification_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/query_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/service.pb.go | 2 +- .../v2/monitoringpb/service_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/snooze.pb.go | 2 +- .../v2/monitoringpb/snooze_service.pb.go | 2 +- .../apiv3/v2/monitoringpb/span_context.pb.go | 2 +- .../apiv3/v2/monitoringpb/uptime.pb.go | 2 +- .../v2/monitoringpb/uptime_service.pb.go | 2 +- .../go/monitoring/internal/version.go | 2 +- .../azure-sdk-for-go/sdk/azcore/CHANGELOG.md | 8 +- .../internal/resource/resource_identifier.go | 34 +- .../Azure/azure-sdk-for-go/sdk/azcore/ci.yml | 2 + .../sdk/azcore/internal/exported/request.go | 6 +- .../sdk/azcore/internal/shared/constants.go | 2 +- .../sdk/azcore/policy/policy.go | 2 +- .../sdk/azidentity/CHANGELOG.md | 5 + .../sdk/azidentity/TOKEN_CACHING.MD | 1 + .../sdk/azidentity/TROUBLESHOOTING.md | 2 +- .../sdk/azidentity/azure_cli_credential.go | 10 +- .../azure_developer_cli_credential.go | 11 +- .../sdk/azidentity/version.go | 2 +- .../gax-go/v2/.release-please-manifest.json | 2 +- .../googleapis/gax-go/v2/CHANGES.md | 7 + .../googleapis/gax-go/v2/call_option.go | 11 +- .../googleapis/gax-go/v2/internal/version.go | 2 +- .../consul/api/config_entry_jwt_provider.go | 6 + .../github.com/hashicorp/consul/api/health.go | 2 + vendor/github.com/minio/crc64nvme/LICENSE | 202 ++ vendor/github.com/minio/crc64nvme/README.md | 20 + vendor/github.com/minio/crc64nvme/crc64.go | 180 ++ .../github.com/minio/crc64nvme/crc64_amd64.go | 15 + .../github.com/minio/crc64nvme/crc64_amd64.s | 157 ++ .../github.com/minio/crc64nvme/crc64_arm64.go | 15 + .../github.com/minio/crc64nvme/crc64_arm64.s | 157 ++ .../github.com/minio/crc64nvme/crc64_other.go | 11 + .../minio/minio-go/v7/.golangci.yml | 85 +- .../minio/minio-go/v7/api-append-object.go | 226 ++ .../minio/minio-go/v7/api-bucket-cors.go | 2 +- .../minio-go/v7/api-bucket-notification.go | 12 +- .../minio/minio-go/v7/api-bucket-policy.go | 2 +- .../minio-go/v7/api-bucket-replication.go | 38 +- .../minio-go/v7/api-bucket-versioning.go | 1 + .../minio/minio-go/v7/api-compose-object.go | 37 +- .../minio/minio-go/v7/api-copy-object.go | 2 +- .../minio/minio-go/v7/api-datatypes.go | 24 +- .../minio/minio-go/v7/api-error-response.go | 37 +- .../minio/minio-go/v7/api-get-object-acl.go | 12 +- .../minio/minio-go/v7/api-get-object.go | 12 +- 
.../github.com/minio/minio-go/v7/api-list.go | 418 +-- .../minio/minio-go/v7/api-presigned.go | 2 +- .../minio/minio-go/v7/api-prompt-object.go | 78 + .../minio/minio-go/v7/api-prompt-options.go | 84 + .../minio/minio-go/v7/api-put-bucket.go | 35 +- .../minio-go/v7/api-put-object-fan-out.go | 7 +- .../minio-go/v7/api-put-object-multipart.go | 66 +- .../minio-go/v7/api-put-object-streaming.go | 103 +- .../minio/minio-go/v7/api-put-object.go | 35 +- .../minio-go/v7/api-putobject-snowball.go | 4 +- .../minio/minio-go/v7/api-remove.go | 200 +- .../minio/minio-go/v7/api-s3-datatypes.go | 88 +- .../minio/minio-go/v7/api-select.go | 2 - .../github.com/minio/minio-go/v7/api-stat.go | 12 +- vendor/github.com/minio/minio-go/v7/api.go | 210 +- .../minio/minio-go/v7/bucket-cache.go | 52 +- .../github.com/minio/minio-go/v7/checksum.go | 249 +- .../minio/minio-go/v7/create-session.go | 182 ++ .../v7/{s3-endpoints.go => endpoints.go} | 97 + .../minio/minio-go/v7/functional_tests.go | 2444 ++++++----------- .../minio/minio-go/v7/hook-reader.go | 10 +- .../minio-go/v7/internal/json/json_goccy.go | 49 + .../minio-go/v7/internal/json/json_stdlib.go | 49 + .../v7/pkg/credentials/assume_role.go | 48 +- .../minio-go/v7/pkg/credentials/chain.go | 18 + .../v7/pkg/credentials/credentials.go | 48 +- .../minio-go/v7/pkg/credentials/env_aws.go | 13 +- .../minio-go/v7/pkg/credentials/env_minio.go | 13 +- .../pkg/credentials/file_aws_credentials.go | 17 +- .../v7/pkg/credentials/file_minio_client.go | 17 +- .../minio-go/v7/pkg/credentials/iam_aws.go | 46 +- .../minio-go/v7/pkg/credentials/static.go | 5 + .../v7/pkg/credentials/sts_client_grants.go | 42 +- .../v7/pkg/credentials/sts_custom_identity.go | 42 +- .../v7/pkg/credentials/sts_ldap_identity.go | 46 +- .../v7/pkg/credentials/sts_tls_identity.go | 106 +- .../v7/pkg/credentials/sts_web_identity.go | 61 +- .../minio-go/v7/pkg/encrypt/server-side.go | 2 +- .../minio/minio-go/v7/pkg/kvcache/cache.go | 54 + .../minio-go/v7/pkg/lifecycle/lifecycle.go | 9 +- .../v7/pkg/notification/notification.go | 9 +- .../v7/pkg/replication/replication.go | 83 +- .../minio/minio-go/v7/pkg/s3utils/utils.go | 158 +- .../minio/minio-go/v7/pkg/set/msgp.go | 149 + .../minio/minio-go/v7/pkg/set/stringset.go | 30 +- ...st-signature-streaming-unsigned-trailer.go | 1 - .../pkg/signer/request-signature-streaming.go | 55 +- .../v7/pkg/signer/request-signature-v2.go | 2 +- .../v7/pkg/signer/request-signature-v4.go | 58 +- .../v7/pkg/singleflight/singleflight.go | 217 ++ .../v7/pkg/utils/peek-reader-closer.go | 73 + .../minio/minio-go/v7/post-policy.go | 73 +- .../minio/minio-go/v7/retry-continous.go | 34 +- vendor/github.com/minio/minio-go/v7/retry.go | 38 +- .../github.com/minio/minio-go/v7/s3-error.go | 130 +- vendor/github.com/minio/minio-go/v7/utils.go | 184 +- vendor/github.com/oklog/run/LICENSE | 2 +- vendor/github.com/oklog/run/README.md | 32 +- vendor/github.com/oklog/run/actors.go | 74 +- vendor/github.com/philhofer/fwd/LICENSE.md | 7 + vendor/github.com/philhofer/fwd/README.md | 368 +++ vendor/github.com/philhofer/fwd/reader.go | 445 +++ vendor/github.com/philhofer/fwd/writer.go | 236 ++ .../philhofer/fwd/writer_appengine.go | 6 + .../github.com/philhofer/fwd/writer_tinygo.go | 13 + .../github.com/philhofer/fwd/writer_unsafe.go | 20 + .../prometheus/client_golang/api/client.go | 27 +- .../prometheus/internal/difflib.go | 4 +- .../client_golang/prometheus/metric.go | 25 +- .../prometheus/process_collector_darwin.go | 6 +- .../process_collector_mem_nocgo_darwin.go | 2 +- 
.../process_collector_procfsenabled.go | 8 +- .../prometheus/promhttp/instrument_server.go | 2 +- .../client_golang/prometheus/vec.go | 10 +- .../client_golang/prometheus/wrap.go | 36 +- .../prometheus/common/config/http_config.go | 16 +- .../prometheus/common/expfmt/text_parse.go | 4 +- .../prometheus/common/model/labels.go | 9 +- .../prometheus/common/model/metric.go | 59 +- .../prometheus/common/model/time.go | 25 +- .../prometheus/common/promslog/slog.go | 12 +- .../prometheus/otlptranslator/.gitignore | 25 + .../prometheus/otlptranslator/.golangci.yml | 106 + .../otlptranslator/CODE_OF_CONDUCT.md | 3 + .../prometheus/otlptranslator/LICENSE | 201 ++ .../prometheus/otlptranslator/MAINTAINERS.md | 4 + .../prometheus/otlptranslator/README.md | 2 + .../prometheus/otlptranslator/SECURITY.md | 6 + .../prometheus/otlptranslator/constants.go | 38 + .../metric_namer.go} | 180 +- .../prometheus/otlptranslator/metric_type.go | 36 + .../normalize_label.go | 25 +- .../prometheus/otlptranslator/strconv.go | 42 + .../prometheus/otlptranslator/unit_namer.go | 110 + .../prometheus/prometheus/config/config.go | 291 +- .../prometheus/prometheus/config/reload.go | 5 +- .../prometheus/discovery/manager.go | 51 +- .../prometheus/discovery/registry.go | 7 +- .../model/histogram/float_histogram.go | 16 +- .../prometheus/model/histogram/histogram.go | 23 +- .../prometheus/model/labels/labels_common.go | 24 +- .../model/labels/labels_dedupelabels.go | 28 +- .../{labels.go => labels_slicelabels.go} | 54 +- .../model/labels/labels_stringlabels.go | 116 +- .../prometheus/model/labels/regexp.go | 22 +- .../prometheus/model/labels/sharding.go | 2 +- .../model/labels/sharding_stringlabels.go | 2 +- .../prometheus/model/relabel/relabel.go | 6 - .../prometheus/model/textparse/interface.go | 11 +- .../prometheus/model/textparse/nhcbparse.go | 47 +- .../model/textparse/openmetricsparse.go | 79 +- .../prometheus/model/textparse/promparse.go | 36 +- .../model/textparse/protobufparse.go | 37 +- .../prometheus/prometheus/notifier/alert.go | 91 + .../prometheus/notifier/alertmanager.go | 90 + .../prometheus/notifier/alertmanagerset.go | 128 + .../notifier/{notifier.go => manager.go} | 363 +-- .../prometheus/prometheus/notifier/metric.go | 94 + .../prometheus/prometheus/notifier/util.go | 49 + .../prometheus/prometheus/prompb/buf.gen.yaml | 5 + .../prometheus/prometheus/prompb/buf.lock | 6 +- .../prometheus/prometheus/prompb/codec.go | 2 + .../prompb/io/prometheus/client/decoder.go | 72 +- .../prompb/io/prometheus/write/v2/codec.go | 3 + .../prompb/io/prometheus/write/v2/types.pb.go | 5 +- .../prometheus/prometheus/prompb/types.pb.go | 284 +- .../prometheus/prometheus/prompb/types.proto | 4 + .../prometheus/prometheus/promql/durations.go | 160 ++ .../prometheus/prometheus/promql/engine.go | 361 ++- .../prometheus/prometheus/promql/functions.go | 543 ++-- .../prometheus/prometheus/promql/fuzz.go | 2 +- .../promql/histogram_stats_iterator.go | 68 +- .../prometheus/promql/parser/ast.go | 58 +- .../prometheus/promql/parser/functions.go | 18 + .../promql/parser/generated_parser.y | 342 ++- .../promql/parser/generated_parser.y.go | 1301 +++++---- .../prometheus/promql/parser/lex.go | 126 +- .../prometheus/promql/parser/parse.go | 83 +- .../prometheus/promql/parser/prettier.go | 16 + .../prometheus/promql/parser/printer.go | 70 +- .../prometheus/promql/promqltest/README.md | 103 +- .../prometheus/promql/promqltest/test.go | 230 +- .../promql/promqltest/test_migrate.go | 200 ++ .../promqltest/testdata/aggregators.test | 240 +- 
.../promqltest/testdata/at_modifier.test | 3 +- .../promql/promqltest/testdata/collision.test | 3 +- .../testdata/duration_expression.test | 228 ++ .../promql/promqltest/testdata/functions.test | 444 ++- .../promqltest/testdata/histograms.test | 182 +- .../promql/promqltest/testdata/limit.test | 30 +- .../testdata/name_label_dropping.test | 3 +- .../testdata/native_histograms.test | 310 ++- .../promql/promqltest/testdata/operators.test | 231 +- .../promql/promqltest/testdata/subquery.test | 9 + .../promqltest/testdata/type_and_unit.test | 280 ++ .../prometheus/prometheus/promql/quantile.go | 182 +- .../prometheus/prometheus/promql/value.go | 74 +- .../prometheus/prometheus/rules/group.go | 23 +- .../prometheus/prometheus/rules/manager.go | 32 +- .../prometheus/prometheus/schema/labels.go | 157 ++ .../prometheus/scrape/clientprotobuf.go | 1 - .../prometheus/prometheus/scrape/manager.go | 78 +- .../prometheus/prometheus/scrape/scrape.go | 90 +- .../prometheus/prometheus/scrape/target.go | 2 +- .../prometheus/storage/interface.go | 14 +- .../prometheus/prometheus/storage/merge.go | 12 +- .../storage/remote/azuread/azuread.go | 24 +- .../prometheus/storage/remote/client.go | 29 +- .../prometheus/storage/remote/codec.go | 12 +- .../prometheus/storage/remote/intern.go | 6 +- .../otlptranslator/prometheus/unit_to_ucum.go | 102 - .../prometheusremotewrite/helper.go | 160 +- .../prometheusremotewrite/histograms.go | 199 +- .../prometheusremotewrite/metrics_to_prw.go | 173 +- .../number_data_points.go | 8 +- .../otlp_to_openmetrics_metadata.go | 15 + .../storage/remote/queue_manager.go | 107 +- .../prometheus/storage/remote/read.go | 2 +- .../prometheus/storage/remote/write.go | 8 + .../storage/remote/write_handler.go | 82 +- .../prometheus/template/template.go | 16 +- .../prometheus/prometheus/tsdb/block.go | 27 +- .../tsdb/chunkenc/float_histogram.go | 4 +- .../prometheus/tsdb/chunkenc/histogram.go | 64 +- .../tsdb/chunks/chunk_write_queue.go | 5 +- .../prometheus/tsdb/chunks/chunks.go | 89 +- .../prometheus/prometheus/tsdb/compact.go | 36 +- .../prometheus/prometheus/tsdb/db.go | 40 +- .../prometheus/tsdb/errors/errors.go | 5 + .../prometheus/prometheus/tsdb/exemplar.go | 5 +- .../prometheus/tsdb/fileutil/direct_io.go | 39 + .../tsdb/fileutil/direct_io_force.go | 28 + .../tsdb/fileutil/direct_io_linux.go | 29 + .../tsdb/fileutil/direct_io_unsupported.go | 29 + .../tsdb/fileutil/direct_io_writer.go | 409 +++ .../prometheus/prometheus/tsdb/head.go | 67 +- .../prometheus/prometheus/tsdb/head_append.go | 63 +- .../prometheus/prometheus/tsdb/head_read.go | 16 +- .../prometheus/prometheus/tsdb/head_wal.go | 178 +- .../prometheus/prometheus/tsdb/index/index.go | 18 +- .../prometheus/tsdb/index/postings.go | 12 +- .../prometheus/prometheus/tsdb/ooo_head.go | 2 +- .../prometheus/tsdb/ooo_head_read.go | 10 +- .../prometheus/prometheus/tsdb/querier.go | 17 +- .../prometheus/prometheus/tsdb/testutil.go | 6 +- .../prometheus/tsdb/tombstones/tombstones.go | 5 +- .../prometheus/tsdb/wlog/live_reader.go | 75 +- .../prometheus/prometheus/tsdb/wlog/reader.go | 57 +- .../prometheus/tsdb/wlog/watcher.go | 15 + .../prometheus/prometheus/tsdb/wlog/wlog.go | 251 +- .../util/annotations/annotations.go | 37 +- .../prometheus/util/compression/buffers.go | 142 + .../util/compression/compression.go | 122 + .../prometheus/util/httputil/cors.go | 4 +- .../prometheus/util/stats/query_stats.go | 16 +- .../prometheus/util/strutil/strconv.go | 6 +- .../prometheus/prometheus/web/api/v1/api.go | 9 +- 
.../prometheus/web/api/v1/json_codec.go | 4 +- .../github.com/prometheus/sigv4/.golangci.yml | 85 +- .../prometheus/sigv4/Makefile.common | 13 +- vendor/github.com/prometheus/sigv4/sigv4.go | 142 +- .../prometheus/sigv4/sigv4_config.go | 3 +- .../github.com/thanos-io/objstore/.go-version | 2 +- .../thanos-io/objstore/.golangci.yml | 23 +- .../thanos-io/objstore/CHANGELOG.md | 12 +- .../github.com/thanos-io/objstore/README.md | 19 +- vendor/github.com/thanos-io/objstore/inmem.go | 41 +- .../github.com/thanos-io/objstore/objstore.go | 48 +- .../thanos-io/objstore/prefixed_bucket.go | 8 +- .../objstore/providers/azure/azure.go | 20 +- .../objstore/providers/azure/helpers.go | 26 +- .../providers/filesystem/filesystem.go | 4 +- .../thanos-io/objstore/providers/gcs/gcs.go | 6 +- .../thanos-io/objstore/providers/s3/s3.go | 29 +- .../objstore/providers/s3/s3_aws_sdk_auth.go | 8 +- .../objstore/providers/swift/swift.go | 10 +- .../github.com/thanos-io/objstore/testing.go | 6 +- .../tracing/opentracing/opentracing.go | 6 +- .../thanos-io/promql-engine/api/remote.go | 9 + .../promql-engine/engine/distributed.go | 4 + .../thanos-io/promql-engine/engine/engine.go | 36 +- .../execution/aggregate/accumulator.go | 8 +- .../execution/aggregate/khashaggregate.go | 10 +- .../promql-engine/execution/binary/utils.go | 6 +- .../promql-engine/execution/binary/vector.go | 8 +- .../promql-engine/execution/execution.go | 2 +- .../execution/function/functions.go | 92 +- .../execution/function/histogram.go | 182 +- .../execution/function/operator.go | 4 +- .../execution/function/quantile.go | 253 -- .../execution/telemetry/telemetry.go | 37 +- .../promql-engine/logicalplan/distribute.go | 4 +- .../promql-engine/logicalplan/plan.go | 25 +- .../promql-engine/ringbuffer/functions.go | 167 +- .../storage/prometheus/vector_selector.go | 2 + .../thanos-io/thanos/pkg/block/block.go | 2 +- .../thanos-io/thanos/pkg/block/fetcher.go | 174 +- .../thanos-io/thanos/pkg/block/index.go | 2 +- .../pkg/block/indexheader/reader_pool.go | 22 +- .../thanos/pkg/block/metadata/meta.go | 5 + .../thanos-io/thanos/pkg/extpromql/parser.go | 6 +- .../thanos/pkg/query/remote_engine.go | 7 + .../thanos/pkg/store/labelpb/label.go | 22 +- .../thanos-io/thanos/pkg/store/proxy.go | 61 +- vendor/github.com/tinylib/msgp/LICENSE | 8 + .../tinylib/msgp/msgp/advise_linux.go | 25 + .../tinylib/msgp/msgp/advise_other.go | 18 + .../github.com/tinylib/msgp/msgp/circular.go | 45 + vendor/github.com/tinylib/msgp/msgp/defs.go | 151 + vendor/github.com/tinylib/msgp/msgp/edit.go | 242 ++ vendor/github.com/tinylib/msgp/msgp/elsize.go | 128 + .../tinylib/msgp/msgp/elsize_default.go | 21 + .../tinylib/msgp/msgp/elsize_tinygo.go | 13 + vendor/github.com/tinylib/msgp/msgp/errors.go | 393 +++ .../tinylib/msgp/msgp/errors_default.go | 25 + .../tinylib/msgp/msgp/errors_tinygo.go | 42 + .../github.com/tinylib/msgp/msgp/extension.go | 561 ++++ vendor/github.com/tinylib/msgp/msgp/file.go | 93 + .../github.com/tinylib/msgp/msgp/file_port.go | 48 + .../github.com/tinylib/msgp/msgp/integers.go | 199 ++ vendor/github.com/tinylib/msgp/msgp/json.go | 580 ++++ .../tinylib/msgp/msgp/json_bytes.go | 347 +++ vendor/github.com/tinylib/msgp/msgp/number.go | 266 ++ vendor/github.com/tinylib/msgp/msgp/purego.go | 16 + vendor/github.com/tinylib/msgp/msgp/read.go | 1494 ++++++++++ .../tinylib/msgp/msgp/read_bytes.go | 1393 ++++++++++ vendor/github.com/tinylib/msgp/msgp/size.go | 40 + vendor/github.com/tinylib/msgp/msgp/unsafe.go | 37 + vendor/github.com/tinylib/msgp/msgp/write.go | 886 
++++++ .../tinylib/msgp/msgp/write_bytes.go | 520 ++++ .../collector/confmap/confmap.go | 15 +- .../confmap/internal/mapstructure/encoder.go | 7 +- .../componentattribute/logger_zap.go | 65 +- .../internal/generated_wrapper_byteslice.go | 5 + .../generated_wrapper_float64slice.go | 5 + .../generated_wrapper_instrumentationscope.go | 7 + .../internal/generated_wrapper_int32slice.go | 5 + .../internal/generated_wrapper_int64slice.go | 5 + .../internal/generated_wrapper_resource.go | 5 + .../internal/generated_wrapper_stringslice.go | 5 + .../internal/generated_wrapper_uint64slice.go | 5 + .../collector/pdata/internal/wrapper_map.go | 12 + .../collector/pdata/internal/wrapper_slice.go | 11 + .../pdata/internal/wrapper_tracestate.go | 4 + .../collector/pdata/internal/wrapper_value.go | 38 + .../pdata/pcommon/generated_byteslice.go | 11 +- .../pdata/pcommon/generated_float64slice.go | 11 +- .../pcommon/generated_instrumentationscope.go | 5 +- .../pdata/pcommon/generated_int32slice.go | 11 +- .../pdata/pcommon/generated_int64slice.go | 11 +- .../pdata/pcommon/generated_resource.go | 3 +- .../pdata/pcommon/generated_stringslice.go | 11 +- .../pdata/pcommon/generated_uint64slice.go | 11 +- .../collector/pdata/pcommon/map.go | 23 +- .../collector/pdata/pcommon/slice.go | 12 +- .../collector/pdata/pcommon/value.go | 39 +- .../pdata/plog/generated_logrecord.go | 26 +- .../pdata/plog/generated_logrecordslice.go | 32 +- .../pdata/plog/generated_resourcelogs.go | 10 +- .../pdata/plog/generated_resourcelogsslice.go | 32 +- .../pdata/plog/generated_scopelogs.go | 10 +- .../pdata/plog/generated_scopelogsslice.go | 32 +- .../pdata/pmetric/generated_exemplar.go | 23 +- .../pdata/pmetric/generated_exemplarslice.go | 18 +- .../pmetric/generated_exponentialhistogram.go | 8 +- ...generated_exponentialhistogramdatapoint.go | 47 +- ...ed_exponentialhistogramdatapointbuckets.go | 8 +- ...ated_exponentialhistogramdatapointslice.go | 32 +- .../pdata/pmetric/generated_gauge.go | 6 +- .../pdata/pmetric/generated_histogram.go | 8 +- .../pmetric/generated_histogramdatapoint.go | 41 +- .../generated_histogramdatapointslice.go | 32 +- .../pdata/pmetric/generated_metric.go | 55 +- .../pdata/pmetric/generated_metricslice.go | 32 +- .../pmetric/generated_numberdatapoint.go | 25 +- .../pmetric/generated_numberdatapointslice.go | 32 +- .../pmetric/generated_resourcemetrics.go | 10 +- .../pmetric/generated_resourcemetricsslice.go | 32 +- .../pdata/pmetric/generated_scopemetrics.go | 10 +- .../pmetric/generated_scopemetricsslice.go | 32 +- .../collector/pdata/pmetric/generated_sum.go | 10 +- .../pdata/pmetric/generated_summary.go | 6 +- .../pmetric/generated_summarydatapoint.go | 18 +- .../generated_summarydatapointslice.go | 32 +- ...nerated_summarydatapointvalueatquantile.go | 8 +- ...ed_summarydatapointvalueatquantileslice.go | 32 +- .../generated_exportpartialsuccess.go | 8 +- .../pdata/ptrace/generated_resourcespans.go | 10 +- .../ptrace/generated_resourcespansslice.go | 32 +- .../pdata/ptrace/generated_scopespans.go | 10 +- .../pdata/ptrace/generated_scopespansslice.go | 32 +- .../collector/pdata/ptrace/generated_span.go | 36 +- .../pdata/ptrace/generated_spanevent.go | 12 +- .../pdata/ptrace/generated_spaneventslice.go | 32 +- .../pdata/ptrace/generated_spanlink.go | 16 +- .../pdata/ptrace/generated_spanlinkslice.go | 32 +- .../pdata/ptrace/generated_spanslice.go | 32 +- .../pdata/ptrace/generated_status.go | 8 +- .../iamcredentials/v1/iamcredentials-api.json | 115 +- .../iamcredentials/v1/iamcredentials-gen.go | 348 ++- 
vendor/google.golang.org/api/internal/cba.go | 28 +- .../google.golang.org/api/internal/creds.go | 6 + .../api/internal/settings.go | 3 + .../google.golang.org/api/internal/version.go | 2 +- .../option/internaloption/internaloption.go | 21 +- .../api/storage/v1/storage-api.json | 76 +- .../api/storage/v1/storage-gen.go | 33 +- .../type/calendarperiod/calendar_period.pb.go | 2 +- .../genproto/googleapis/type/date/date.pb.go | 2 +- .../genproto/googleapis/type/expr/expr.pb.go | 2 +- .../googleapis/type/timeofday/timeofday.pb.go | 2 +- vendor/modules.txt | 105 +- 522 files changed, 26678 insertions(+), 8245 deletions(-) create mode 100644 vendor/github.com/minio/crc64nvme/LICENSE create mode 100644 vendor/github.com/minio/crc64nvme/README.md create mode 100644 vendor/github.com/minio/crc64nvme/crc64.go create mode 100644 vendor/github.com/minio/crc64nvme/crc64_amd64.go create mode 100644 vendor/github.com/minio/crc64nvme/crc64_amd64.s create mode 100644 vendor/github.com/minio/crc64nvme/crc64_arm64.go create mode 100644 vendor/github.com/minio/crc64nvme/crc64_arm64.s create mode 100644 vendor/github.com/minio/crc64nvme/crc64_other.go create mode 100644 vendor/github.com/minio/minio-go/v7/api-append-object.go create mode 100644 vendor/github.com/minio/minio-go/v7/api-prompt-object.go create mode 100644 vendor/github.com/minio/minio-go/v7/api-prompt-options.go create mode 100644 vendor/github.com/minio/minio-go/v7/create-session.go rename vendor/github.com/minio/minio-go/v7/{s3-endpoints.go => endpoints.go} (63%) create mode 100644 vendor/github.com/minio/minio-go/v7/internal/json/json_goccy.go create mode 100644 vendor/github.com/minio/minio-go/v7/internal/json/json_stdlib.go create mode 100644 vendor/github.com/minio/minio-go/v7/pkg/kvcache/cache.go create mode 100644 vendor/github.com/minio/minio-go/v7/pkg/set/msgp.go create mode 100644 vendor/github.com/minio/minio-go/v7/pkg/singleflight/singleflight.go create mode 100644 vendor/github.com/minio/minio-go/v7/pkg/utils/peek-reader-closer.go create mode 100644 vendor/github.com/philhofer/fwd/LICENSE.md create mode 100644 vendor/github.com/philhofer/fwd/README.md create mode 100644 vendor/github.com/philhofer/fwd/reader.go create mode 100644 vendor/github.com/philhofer/fwd/writer.go create mode 100644 vendor/github.com/philhofer/fwd/writer_appengine.go create mode 100644 vendor/github.com/philhofer/fwd/writer_tinygo.go create mode 100644 vendor/github.com/philhofer/fwd/writer_unsafe.go create mode 100644 vendor/github.com/prometheus/otlptranslator/.gitignore create mode 100644 vendor/github.com/prometheus/otlptranslator/.golangci.yml create mode 100644 vendor/github.com/prometheus/otlptranslator/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/prometheus/otlptranslator/LICENSE create mode 100644 vendor/github.com/prometheus/otlptranslator/MAINTAINERS.md create mode 100644 vendor/github.com/prometheus/otlptranslator/README.md create mode 100644 vendor/github.com/prometheus/otlptranslator/SECURITY.md create mode 100644 vendor/github.com/prometheus/otlptranslator/constants.go rename vendor/github.com/prometheus/{prometheus/storage/remote/otlptranslator/prometheus/metric_name_builder.go => otlptranslator/metric_namer.go} (56%) create mode 100644 vendor/github.com/prometheus/otlptranslator/metric_type.go rename vendor/github.com/prometheus/{prometheus/storage/remote/otlptranslator/prometheus => otlptranslator}/normalize_label.go (63%) create mode 100644 vendor/github.com/prometheus/otlptranslator/strconv.go create mode 100644 
vendor/github.com/prometheus/otlptranslator/unit_namer.go rename vendor/github.com/prometheus/prometheus/model/labels/{labels.go => labels_slicelabels.go} (89%) create mode 100644 vendor/github.com/prometheus/prometheus/notifier/alert.go create mode 100644 vendor/github.com/prometheus/prometheus/notifier/alertmanager.go create mode 100644 vendor/github.com/prometheus/prometheus/notifier/alertmanagerset.go rename vendor/github.com/prometheus/prometheus/notifier/{notifier.go => manager.go} (57%) create mode 100644 vendor/github.com/prometheus/prometheus/notifier/metric.go create mode 100644 vendor/github.com/prometheus/prometheus/notifier/util.go create mode 100644 vendor/github.com/prometheus/prometheus/prompb/buf.gen.yaml create mode 100644 vendor/github.com/prometheus/prometheus/promql/durations.go create mode 100644 vendor/github.com/prometheus/prometheus/promql/promqltest/test_migrate.go create mode 100644 vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/duration_expression.test create mode 100644 vendor/github.com/prometheus/prometheus/promql/promqltest/testdata/type_and_unit.test create mode 100644 vendor/github.com/prometheus/prometheus/schema/labels.go delete mode 100644 vendor/github.com/prometheus/prometheus/storage/remote/otlptranslator/prometheus/unit_to_ucum.go create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/fileutil/direct_io.go create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/fileutil/direct_io_force.go create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/fileutil/direct_io_linux.go create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/fileutil/direct_io_unsupported.go create mode 100644 vendor/github.com/prometheus/prometheus/tsdb/fileutil/direct_io_writer.go create mode 100644 vendor/github.com/prometheus/prometheus/util/compression/buffers.go create mode 100644 vendor/github.com/prometheus/prometheus/util/compression/compression.go delete mode 100644 vendor/github.com/thanos-io/promql-engine/execution/function/quantile.go create mode 100644 vendor/github.com/tinylib/msgp/LICENSE create mode 100644 vendor/github.com/tinylib/msgp/msgp/advise_linux.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/advise_other.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/circular.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/defs.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/edit.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/elsize.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/elsize_default.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/elsize_tinygo.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/errors.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/errors_default.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/errors_tinygo.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/extension.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/file.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/file_port.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/integers.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/json.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/json_bytes.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/number.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/purego.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/read.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/read_bytes.go create mode 100644 
vendor/github.com/tinylib/msgp/msgp/size.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/unsafe.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/write.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/write_bytes.go diff --git a/.github/workflows/test-build-deploy.yml b/.github/workflows/test-build-deploy.yml index 082ca9620ed..a15cd009776 100644 --- a/.github/workflows/test-build-deploy.yml +++ b/.github/workflows/test-build-deploy.yml @@ -224,7 +224,7 @@ jobs: export CORTEX_IMAGE="${CORTEX_IMAGE_PREFIX}cortex:$IMAGE_TAG-amd64" export CORTEX_CHECKOUT_DIR="/go/src/github.com/cortexproject/cortex" echo "Running integration tests with image: $CORTEX_IMAGE" - go test -tags=integration,${{ matrix.tags }} -timeout 2400s -v -count=1 ./integration/... + go test -tags=slicelabels,integration,${{ matrix.tags }} -timeout 2400s -v -count=1 ./integration/... env: IMAGE_PREFIX: ${{ secrets.IMAGE_PREFIX }} diff --git a/.golangci.yml b/.golangci.yml index 2812394d35b..a5621806d68 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -12,6 +12,7 @@ run: - integration_querier - integration_ruler - integration_query_fuzz + - slicelabels output: formats: text: diff --git a/Makefile b/Makefile index 14d9b7b4deb..c47bc2b2ce0 100644 --- a/Makefile +++ b/Makefile @@ -118,7 +118,7 @@ LATEST_BUILD_IMAGE_TAG ?= master-7ce1d1b12 # as it currently disallows TTY devices. This value needs to be overridden # in any custom cloudbuild.yaml files TTY := --tty -GO_FLAGS := -ldflags "-X main.Branch=$(GIT_BRANCH) -X main.Revision=$(GIT_REVISION) -X main.Version=$(VERSION) -extldflags \"-static\" -s -w" -tags netgo +GO_FLAGS := -ldflags "-X main.Branch=$(GIT_BRANCH) -X main.Revision=$(GIT_REVISION) -X main.Version=$(VERSION) -extldflags \"-static\" -s -w" -tags "netgo slicelabels" ifeq ($(BUILD_IN_CONTAINER),true) @@ -213,15 +213,15 @@ lint: ./pkg/ruler/... test: - go test -tags netgo -timeout 30m -race -count 1 ./... + go test -tags "netgo slicelabels" -timeout 30m -race -count 1 ./... test-no-race: - go test -tags netgo -timeout 30m -count 1 ./... + go test -tags "netgo slicelabels" -timeout 30m -count 1 ./... cover: $(eval COVERDIR := $(shell mktemp -d coverage.XXXXXXXXXX)) $(eval COVERFILE := $(shell mktemp $(COVERDIR)/unit.XXXXXXXXXX)) - go test -tags netgo -timeout 30m -race -count 1 -coverprofile=$(COVERFILE) ./... + go test -tags netgo,slicelabels -timeout 30m -race -count 1 -coverprofile=$(COVERFILE) ./... go tool cover -html=$(COVERFILE) -o cover.html go tool cover -func=cover.html | tail -n1 @@ -229,7 +229,7 @@ shell: bash configs-integration-test: - /bin/bash -c "go test -v -tags 'netgo integration' -timeout 10m ./pkg/configs/... ./pkg/ruler/..." + /bin/bash -c "go test -v -tags 'netgo integration slicelabels' -timeout 10m ./pkg/configs/... ./pkg/ruler/..." mod-check: GO111MODULE=on go mod download @@ -253,11 +253,11 @@ web-deploy: # Generates the config file documentation. 
doc: clean-doc - go run ./tools/doc-generator ./docs/configuration/config-file-reference.template > ./docs/configuration/config-file-reference.md - go run ./tools/doc-generator ./docs/blocks-storage/compactor.template > ./docs/blocks-storage/compactor.md - go run ./tools/doc-generator ./docs/blocks-storage/store-gateway.template > ./docs/blocks-storage/store-gateway.md - go run ./tools/doc-generator ./docs/blocks-storage/querier.template > ./docs/blocks-storage/querier.md - go run ./tools/doc-generator ./docs/guides/encryption-at-rest.template > ./docs/guides/encryption-at-rest.md + go run -tags slicelabels ./tools/doc-generator ./docs/configuration/config-file-reference.template > ./docs/configuration/config-file-reference.md + go run -tags slicelabels ./tools/doc-generator ./docs/blocks-storage/compactor.template > ./docs/blocks-storage/compactor.md + go run -tags slicelabels ./tools/doc-generator ./docs/blocks-storage/store-gateway.template > ./docs/blocks-storage/store-gateway.md + go run -tags slicelabels ./tools/doc-generator ./docs/blocks-storage/querier.template > ./docs/blocks-storage/querier.md + go run -tags slicelabels ./tools/doc-generator ./docs/guides/encryption-at-rest.template > ./docs/guides/encryption-at-rest.md embedmd -w docs/operations/requests-mirroring-to-secondary-cluster.md embedmd -w docs/guides/overrides-exporter.md diff --git a/go.mod b/go.mod index ea2dbcc0670..acc4123dabf 100644 --- a/go.mod +++ b/go.mod @@ -26,14 +26,14 @@ require ( github.com/gorilla/mux v1.8.1 github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 - github.com/hashicorp/consul/api v1.31.2 + github.com/hashicorp/consul/api v1.32.0 github.com/hashicorp/go-cleanhttp v0.5.2 github.com/hashicorp/go-sockaddr v1.0.7 github.com/hashicorp/memberlist v0.5.1 github.com/json-iterator/go v1.1.12 github.com/klauspost/compress v1.18.0 github.com/lib/pq v1.10.9 - github.com/minio/minio-go/v7 v7.0.80 + github.com/minio/minio-go/v7 v7.0.93 github.com/mitchellh/go-wordwrap v1.0.1 github.com/oklog/ulid v1.3.1 // indirect github.com/opentracing-contrib/go-grpc v0.1.2 @@ -41,18 +41,18 @@ require ( github.com/opentracing/opentracing-go v1.2.0 github.com/pkg/errors v0.9.1 github.com/prometheus/alertmanager v0.28.1 - github.com/prometheus/client_golang v1.22.0 + github.com/prometheus/client_golang v1.23.0-rc.1 github.com/prometheus/client_model v0.6.2 - github.com/prometheus/common v0.63.0 + github.com/prometheus/common v0.65.1-0.20250703115700-7f8b2a0d32d3 // Prometheus maps version 2.x.y to tags v0.x.y. 
- github.com/prometheus/prometheus v0.303.1 + github.com/prometheus/prometheus v0.305.1-0.20250721065454-b09cf6be8d56 github.com/segmentio/fasthash v1.0.3 github.com/sony/gobreaker v1.0.0 github.com/spf13/afero v1.11.0 github.com/stretchr/testify v1.10.0 - github.com/thanos-io/objstore v0.0.0-20250317105316-a0136a6f898d - github.com/thanos-io/promql-engine v0.0.0-20250611170940-015ebeb7b5ff - github.com/thanos-io/thanos v0.39.2 + github.com/thanos-io/objstore v0.0.0-20250722142242-922b22272ee3 + github.com/thanos-io/promql-engine v0.0.0-20250726034445-91e6e32a36a7 + github.com/thanos-io/thanos v0.39.3-0.20250729120336-88d0ae8071cb github.com/uber/jaeger-client-go v2.30.0+incompatible github.com/weaveworks/common v0.0.0-20230728070032-dd9e68f319d5 go.etcd.io/etcd/api/v3 v3.5.17 @@ -87,22 +87,22 @@ require ( github.com/prometheus/procfs v0.16.1 github.com/sercand/kuberesolver/v5 v5.1.1 github.com/tjhop/slog-gokit v0.1.4 - go.opentelemetry.io/collector/pdata v1.34.0 + go.opentelemetry.io/collector/pdata v1.35.0 go.uber.org/automaxprocs v1.6.0 google.golang.org/protobuf v1.36.6 ) require ( cel.dev/expr v0.23.1 // indirect - cloud.google.com/go v0.118.1 // indirect - cloud.google.com/go/auth v0.15.1-0.20250317171031-671eed979bfd // indirect + cloud.google.com/go v0.120.0 // indirect + cloud.google.com/go/auth v0.16.2 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.7.0 // indirect - cloud.google.com/go/iam v1.3.1 // indirect - cloud.google.com/go/monitoring v1.24.0 // indirect + cloud.google.com/go/iam v1.5.2 // indirect + cloud.google.com/go/monitoring v1.24.2 // indirect cloud.google.com/go/storage v1.50.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect @@ -172,7 +172,7 @@ require ( github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.1 // indirect + github.com/googleapis/gax-go/v2 v2.14.2 // indirect github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect @@ -204,6 +204,7 @@ require ( github.com/mdlayher/vsock v1.2.1 // indirect github.com/metalmatze/signal v0.0.0-20210307161603-1c9aa721a97a // indirect github.com/miekg/dns v1.1.66 // indirect + github.com/minio/crc64nvme v1.0.1 // indirect github.com/minio/md5-simd v1.1.2 // indirect github.com/minio/sha256-simd v1.0.1 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect @@ -214,17 +215,19 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect github.com/ncw/swift v1.0.53 // indirect - github.com/oklog/run v1.1.0 // indirect - github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect - github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect - 
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect + github.com/oklog/run v1.2.0 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.129.0 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.129.0 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.129.0 // indirect + github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c // indirect github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus-community/prom-label-proxy v0.11.1 // indirect github.com/prometheus/exporter-toolkit v0.14.0 // indirect - github.com/prometheus/sigv4 v0.1.2 // indirect + github.com/prometheus/otlptranslator v0.0.0-20250620074007-94f535e0c588 // indirect + github.com/prometheus/sigv4 v0.2.0 // indirect github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect github.com/redis/rueidis v1.0.61 // indirect github.com/rs/cors v1.11.1 // indirect @@ -237,6 +240,7 @@ require ( github.com/soheilhy/cmux v0.1.5 // indirect github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect github.com/stretchr/objx v0.5.2 // indirect + github.com/tinylib/msgp v1.3.0 // indirect github.com/trivago/tgo v1.0.7 // indirect github.com/uber/jaeger-lib v2.4.1+incompatible // indirect github.com/vimeo/galaxycache v1.3.1 // indirect @@ -247,14 +251,14 @@ require ( go.mongodb.org/mongo-driver v1.17.4 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/collector/component v1.34.0 // indirect - go.opentelemetry.io/collector/confmap v1.34.0 // indirect - go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect - go.opentelemetry.io/collector/consumer v1.34.0 // indirect - go.opentelemetry.io/collector/featuregate v1.34.0 // indirect - go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect - go.opentelemetry.io/collector/pipeline v0.128.0 // indirect - go.opentelemetry.io/collector/processor v1.34.0 // indirect + go.opentelemetry.io/collector/component v1.35.0 // indirect + go.opentelemetry.io/collector/confmap v1.35.0 // indirect + go.opentelemetry.io/collector/confmap/xconfmap v0.129.0 // indirect + go.opentelemetry.io/collector/consumer v1.35.0 // indirect + go.opentelemetry.io/collector/featuregate v1.35.0 // indirect + go.opentelemetry.io/collector/internal/telemetry v0.129.0 // indirect + go.opentelemetry.io/collector/pipeline v0.129.0 // indirect + go.opentelemetry.io/collector/processor v1.35.0 // indirect go.opentelemetry.io/collector/semconv v0.128.0 // indirect go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect @@ -282,8 +286,8 @@ require ( golang.org/x/text v0.26.0 // indirect golang.org/x/tools v0.34.0 // indirect gonum.org/v1/gonum v0.16.0 // indirect - google.golang.org/api v0.228.0 // indirect - google.golang.org/genproto v0.0.0-20250204164813-702378808489 // indirect + google.golang.org/api v0.239.0 // indirect + google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect google.golang.org/genproto/googleapis/rpc 
v0.0.0-20250603155806-513f23925822 // indirect gopkg.in/telebot.v3 v3.3.8 // indirect @@ -320,8 +324,3 @@ replace github.com/google/gnostic => github.com/googleapis/gnostic v0.6.9 // Same replace used by thanos: (may be removed in the future) // https://github.com/thanos-io/thanos/blob/fdeea3917591fc363a329cbe23af37c6fff0b5f0/go.mod#L265 replace gopkg.in/alecthomas/kingpin.v2 => github.com/alecthomas/kingpin v1.3.8-0.20210301060133-17f40c25f497 - -replace github.com/thanos-io/objstore => github.com/thanos-io/objstore v0.0.0-20241111205755-d1dd89d41f97 - -// v3.3.1 with https://github.com/prometheus/prometheus/pull/16252. (same as thanos) -replace github.com/prometheus/prometheus => github.com/thanos-io/thanos-prometheus v0.0.0-20250610133519-082594458a88 diff --git a/go.sum b/go.sum index 6985e6f181f..461f204f903 100644 --- a/go.sum +++ b/go.sum @@ -31,10 +31,10 @@ cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= -cloud.google.com/go v0.118.1 h1:b8RATMcrK9A4BH0rj8yQupPXp+aP+cJ0l6H7V9osV1E= -cloud.google.com/go v0.118.1/go.mod h1:CFO4UPEPi8oV21xoezZCrd3d81K4fFkDTEJu4R8K+9M= -cloud.google.com/go/auth v0.15.1-0.20250317171031-671eed979bfd h1:0y6Ls7Yg2PYIjBiiY4COpxqhv+hRtoDQfY/u/eXNZuw= -cloud.google.com/go/auth v0.15.1-0.20250317171031-671eed979bfd/go.mod h1:uJW0Bahg/VuSfsCxYjfpcKMblBoti/JuY8OQfnmW4Vk= +cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= +cloud.google.com/go v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= +cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= +cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= @@ -53,14 +53,14 @@ cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQ cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= -cloud.google.com/go/iam v1.3.1 h1:KFf8SaT71yYq+sQtRISn90Gyhyf4X8RGgeAVC8XGf3E= -cloud.google.com/go/iam v1.3.1/go.mod h1:3wMtuyT4NcbnYNPLMBzYRFiEfjKfJlLVLrisE7bwm34= +cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= +cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= -cloud.google.com/go/longrunning v0.6.4 h1:3tyw9rO3E2XVXzSApn1gyEEnH2K9SynNQjMlBi3uHLg= -cloud.google.com/go/longrunning v0.6.4/go.mod h1:ttZpLCe6e7EXvn9OxpBRx7kZEB0efv8yBO6YnVMfhJs= -cloud.google.com/go/monitoring v1.24.0 h1:csSKiCJ+WVRgNkRzzz3BPoGjFhjPY23ZTcaenToJxMM= -cloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc= +cloud.google.com/go/longrunning v0.6.7 
h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= +cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY= +cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= +cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -73,13 +73,13 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= -cloud.google.com/go/trace v1.11.4 h1:LKlhVyX6I4+heP31sWvERSKZZ9cPPEZumt7b4SKVK18= -cloud.google.com/go/trace v1.11.4/go.mod h1:lCSHzSPZC1TPwto7zhaRt3KtGYsXFyaErPQ18AUUeUE= +cloud.google.com/go/trace v1.11.6 h1:2O2zjPzqPYAHrn3OKl029qlqG6W8ZdYaOWRyr8NgMT4= +cloud.google.com/go/trace v1.11.6/go.mod h1:GA855OeDEBiBMzcckLPE2kDunIpC72N+Pq8WFieFjnI= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 h1:j8BorDEigD8UFOSZQiSqAMOOleyQOOQPnUAwV+Ls1gA= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1 h1:Wc1ml6QlJs2BHQ/9Bqu1jiyggbsSjramq2oUmp5WeIo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4= @@ -232,6 +232,10 @@ github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/coder/quartz v0.1.2 h1:PVhc9sJimTdKd3VbygXtS4826EOCpB1fXoRlLnCrE+s= github.com/coder/quartz v0.1.2/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= 
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= @@ -257,12 +261,12 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/r github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dhui/dktest v0.4.3 h1:wquqUxAFdcUgabAVLvSCOKOlag5cIZuaOjYIBOWdsR0= github.com/dhui/dktest v0.4.3/go.mod h1:zNK8IwktWzQRm6I/l2Wjp7MakiyaFWv4G1hjmodmMTs= -github.com/digitalocean/godo v1.136.0 h1:DTxugljFJSMBPfEGq4KeXpnKeAHicggNqogcrw/YdZw= -github.com/digitalocean/godo v1.136.0/go.mod h1:PU8JB6I1XYkQIdHFop8lLAY9ojp6M0XcU0TWaQSxbrc= +github.com/digitalocean/godo v1.157.0 h1:ReELaS6FxXNf8gryUiVH0wmyUmZN8/NCmBX4gXd3F0o= +github.com/digitalocean/godo v1.157.0/go.mod h1:tYeiWY5ZXVpU48YaFv0M5irUFHXGorZpDNm7zzdWMzM= github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/docker v28.0.1+incompatible h1:FCHjSRdXhNRFjlHMTv4jUNlIBbTeRjrWfeFuJp7jpo0= -github.com/docker/docker v28.0.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v28.3.0+incompatible h1:ffS62aKWupCWdvcee7nBU9fhnmknOqDPaJAMtfK0ImQ= +github.com/docker/docker v28.3.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= @@ -376,8 +380,8 @@ github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+ github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= -github.com/go-resty/resty/v2 v2.16.3 h1:zacNT7lt4b8M/io2Ahj6yPypL7bqx9n1iprfQuodV+E= -github.com/go-resty/resty/v2 v2.16.3/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA= +github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM= +github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk= github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= @@ -513,11 +517,11 @@ github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0 github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= -github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= -github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= +github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= +github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod 
h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gophercloud/gophercloud/v2 v2.6.0 h1:XJKQ0in3iHOZHVAFMXq/OhjCuvvG+BKR0unOqRfG1EI= -github.com/gophercloud/gophercloud/v2 v2.6.0/go.mod h1:Ki/ILhYZr/5EPebrPL9Ej+tUg4lqx71/YH2JWVeU+Qk= +github.com/gophercloud/gophercloud/v2 v2.7.0 h1:o0m4kgVcPgHlcXiWAjoVxGd8QCmvM5VU+YM71pFbn0E= +github.com/gophercloud/gophercloud/v2 v2.7.0/go.mod h1:Ki/ILhYZr/5EPebrPL9Ej+tUg4lqx71/YH2JWVeU+Qk= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo= @@ -535,8 +539,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo= github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI= github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= -github.com/hashicorp/consul/api v1.31.2 h1:NicObVJHcCmyOIl7Z9iHPvvFrocgTYo9cITSGg0/7pw= -github.com/hashicorp/consul/api v1.31.2/go.mod h1:Z8YgY0eVPukT/17ejW+l+C7zJmKwgPHtjU1q16v/Y40= +github.com/hashicorp/consul/api v1.32.0 h1:5wp5u780Gri7c4OedGEPzmlUEzi0g2KyiPphSr6zjVg= +github.com/hashicorp/consul/api v1.32.0/go.mod h1:Z8YgY0eVPukT/17ejW+l+C7zJmKwgPHtjU1q16v/Y40= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/consul/sdk v0.16.1 h1:V8TxTnImoPD5cj0U9Spl0TUxcytjcbbJeADFF07KdHg= github.com/hashicorp/consul/sdk v0.16.1/go.mod h1:fSXvwxB2hmh1FMZCNl6PwX0Q/1wdWtHJcZ7Ea5tns0s= @@ -594,8 +598,8 @@ github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpT github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY= github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4= -github.com/hetznercloud/hcloud-go/v2 v2.19.1 h1:UU/7h3uc/rdgspM8xkQF7wokmwZXePWDXcLqrQRRzzY= -github.com/hetznercloud/hcloud-go/v2 v2.19.1/go.mod h1:r5RTzv+qi8IbLcDIskTzxkFIji7Ovc8yNgepQR9M+UA= +github.com/hetznercloud/hcloud-go/v2 v2.21.1 h1:IH3liW8/cCRjfJ4cyqYvw3s1ek+KWP8dl1roa0lD8JM= +github.com/hetznercloud/hcloud-go/v2 v2.21.1/go.mod h1:XOaYycZJ3XKMVWzmqQ24/+1V7ormJHmPdck/kxrNnQA= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/huaweicloud/huaweicloud-sdk-go-obs v3.25.4+incompatible h1:yNjwdvn9fwuN6Ouxr0xHM0cVu03YMUWUyFmu2van/Yc= @@ -670,8 +674,8 @@ github.com/leesper/go_rng v0.0.0-20190531154944-a612b043e353/go.mod h1:N0SVk0uhy github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/linode/linodego v1.47.0 h1:6MFNCyzWbr8Rhl4r7d5DwZLwxvFIsM4ARH6W0KS/R0U= -github.com/linode/linodego v1.47.0/go.mod h1:vyklQRzZUWhFVBZdYx4dcYJU/gG9yKB9VUcUs6ub0Lk= +github.com/linode/linodego v1.52.2 h1:N9ozU27To1LMSrDd8WvJZ5STSz1eGYdyLnxhAR/dIZg= +github.com/linode/linodego v1.52.2/go.mod h1:bI949fZaVchjWyKIA08hNyvAcV6BAS+PM2op3p7PAWA= github.com/magiconair/properties v1.8.6/go.mod 
h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -708,10 +712,12 @@ github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKju github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/dns v1.1.66 h1:FeZXOS3VCVsKnEAd+wBkjMC3D2K+ww66Cq3VnCINuJE= github.com/miekg/dns v1.1.66/go.mod h1:jGFzBsSNbJw6z1HYut1RKBKHA9PBdxeHrZG8J+gC2WE= +github.com/minio/crc64nvme v1.0.1 h1:DHQPrYPdqK7jQG/Ls5CTBZWeex/2FMS3G5XGkycuFrY= +github.com/minio/crc64nvme v1.0.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= -github.com/minio/minio-go/v7 v7.0.80 h1:2mdUHXEykRdY/BigLt3Iuu1otL0JTogT0Nmltg0wujk= -github.com/minio/minio-go/v7 v7.0.80/go.mod h1:84gmIilaX4zcvAWWzJ5Z1WI5axN+hAbM5w25xf8xvC0= +github.com/minio/minio-go/v7 v7.0.93 h1:lAB4QJp8Nq3vDMOU0eKgMuyBiEGMNlXQ5Glc8qAxqSU= +github.com/minio/minio-go/v7 v7.0.93/go.mod h1:71t2CqDt3ThzESgZUlU1rBN54mksGGlkLcFgguDnnAc= github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= @@ -754,8 +760,8 @@ github.com/ncw/swift v1.0.53/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= -github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= +github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E= +github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s= @@ -764,14 +770,14 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8= github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 h1:hZa4FkI2JhYC0tkiwOepnHyyfWzezz3FfCmt88nWJa0= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0/go.mod h1:sLbOuJEFckPdw4li0RtWpoSsMeppcck3s/cmzPyKAgc= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.128.0 h1:+rUULr4xqOJjZK3SokFmRYzsiPq5onoWoSv3He4aaus= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.128.0/go.mod h1:Fh2SXPeFkr4J97w9CV/apFAib8TC9Hi0P08xtiT7Lng= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 
h1:8OWwRSdIhm3DY3PEYJ0PtSEz1a1OjL0fghLXSr14JMk= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0/go.mod h1:32OeaysZe4vkSmD1LJ18Q1DfooryYqpSzFNmz+5A5RU= -github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 h1:9wVFaWEhgV8WQD+nP662nHNaQIkmyF57KRhtsqlaWEI= -github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0/go.mod h1:Yak3vQIvwYQiAO83u+zD9ujdCmpcDL7JSfg2YK+Mwn4= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.129.0 h1:2pzb6bC/AAfciC9DN+8d7Y8Rsk8ZPCfp/ACTfZu87FQ= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.129.0/go.mod h1:tIE4dzdxuM7HnFeYA6sj5zfLuUA/JxzQ+UDl1YrHvQw= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.129.0 h1:ydkfqpZ5BWZfEJEs7OUhTHW59og5aZspbUYxoGcAEok= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.129.0/go.mod h1:oA+49dkzmhUx0YFC9JXGuPPSBL0TOTp6jkv7qSr2n0Q= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.129.0 h1:AOVxBvCZfTPj0GLGqBVHpAnlC9t9pl1JXUQXymHliiY= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.129.0/go.mod h1:0CAJ32V/bCUBhNTEvnN9wlOG5IsyZ+Bmhe9e3Eri7CU= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.129.0 h1:yDLSAoIi3jNt4R/5xN4IJ9YAg1rhOShgchlO/ESv8EY= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.129.0/go.mod h1:IXQHbTPxqNcuu44FvkyvpYJ6Qy4wh4YsCVkKsp0Flzo= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= @@ -786,8 +792,8 @@ github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYr github.com/oracle/oci-go-sdk/v65 v65.93.1 h1:lIvy/6aQOUenQI+cxXH1wDBJeXFPO9Du3CaomXeYFaY= github.com/oracle/oci-go-sdk/v65 v65.93.1/go.mod h1:u6XRPsw9tPziBh76K7GrrRXPa8P8W3BQeqJ6ZZt9VLA= github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= -github.com/ovh/go-ovh v1.7.0 h1:V14nF7FwDjQrZt9g7jzcvAAQ3HN6DNShRFRMC3jLoPw= -github.com/ovh/go-ovh v1.7.0/go.mod h1:cTVDnl94z4tl8pP1uZ/8jlVxntjSIf09bNcQ5TJSC7c= +github.com/ovh/go-ovh v1.9.0 h1:6K8VoL3BYjVV3In9tPJUdT7qMx9h0GExN9EXx1r2kKE= +github.com/ovh/go-ovh v1.9.0/go.mod h1:cTVDnl94z4tl8pP1uZ/8jlVxntjSIf09bNcQ5TJSC7c= github.com/parquet-go/parquet-go v0.25.1 h1:l7jJwNM0xrk0cnIIptWMtnSnuxRkwq53S+Po3KG8Xgo= github.com/parquet-go/parquet-go v0.25.1/go.mod h1:AXBuotO1XiBtcqJb/FKFyjBG4aqa3aQAAWF3ZPzCanY= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= @@ -796,6 +802,8 @@ github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144T github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= +github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c h1:dAMKvw0MlJT1GshSTtih8C2gDs04w8dReiOGXrGLNoY= +github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c/go.mod 
h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM= github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= @@ -826,8 +834,8 @@ github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3O github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= -github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= +github.com/prometheus/client_golang v1.23.0-rc.1 h1:Is/nGODd8OsJlNQSybeYBwY/B6aHrN7+QwVUYutHSgw= +github.com/prometheus/client_golang v1.23.0-rc.1/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -838,10 +846,12 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= -github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= +github.com/prometheus/common v0.65.1-0.20250703115700-7f8b2a0d32d3 h1:R/zO7ombSHCI8bjQusgCMSL+cE669w5/R2upq5WlPD0= +github.com/prometheus/common v0.65.1-0.20250703115700-7f8b2a0d32d3/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= github.com/prometheus/exporter-toolkit v0.14.0 h1:NMlswfibpcZZ+H0sZBiTjrA3/aBFHkNZqE+iCj5EmRg= github.com/prometheus/exporter-toolkit v0.14.0/go.mod h1:Gu5LnVvt7Nr/oqTBUC23WILZepW0nffNo10XdhQcwWA= +github.com/prometheus/otlptranslator v0.0.0-20250620074007-94f535e0c588 h1:QlySqDdSESgWDePeAYskbbcKKdowI26m9aU9zloHyYE= +github.com/prometheus/otlptranslator v0.0.0-20250620074007-94f535e0c588/go.mod h1:P8AwMgdD7XEr6QRUJ2QWLpiAZTgTE2UYgjlu3svompI= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= @@ -849,8 +859,10 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= -github.com/prometheus/sigv4 v0.1.2 h1:R7570f8AoM5YnTUPFm3mjZH5q2k4D+I/phCWvZ4PXG8= -github.com/prometheus/sigv4 v0.1.2/go.mod 
h1:GF9fwrvLgkQwDdQ5BXeV9XUSCH/IPNqzvAoaohfjqMU= +github.com/prometheus/prometheus v0.305.1-0.20250721065454-b09cf6be8d56 h1:F7rkXwWiujBbpql4Syxr1bbbaQf/ePB24BInELXpAQc= +github.com/prometheus/prometheus v0.305.1-0.20250721065454-b09cf6be8d56/go.mod h1:7hMSGyZHt0dcmZ5r4kFPJ/vxPQU99N5/BGwSPDxeZrQ= +github.com/prometheus/sigv4 v0.2.0 h1:qDFKnHYFswJxdzGeRP63c4HlH3Vbn1Yf/Ao2zabtVXk= +github.com/prometheus/sigv4 v0.2.0/go.mod h1:D04rqmAaPPEUkjRQxGqjoxdyJuyCh6E0M18fZr0zBiE= github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg= github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= github.com/redis/go-redis/v9 v9.8.0 h1:q3nRvjrlge/6UD7eTu/DSg2uYiU2mCL0G/uzBWqhicI= @@ -868,8 +880,8 @@ github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sagikazarmark/crypt v0.6.0/go.mod h1:U8+INwJo3nBv1m6A/8OBXAq7Jnpspk5AxSgDyEQcea8= -github.com/scaleway/scaleway-sdk-go v1.0.0-beta.32 h1:4+LP7qmsLSGbmc66m1s5dKRMBwztRppfxFKlYqYte/c= -github.com/scaleway/scaleway-sdk-go v1.0.0-beta.32/go.mod h1:kzh+BSAvpoyHHdHBCDhmSWtBc1NbLMZ2lWHqnBoxFks= +github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33 h1:KhF0WejiUTDbL5X55nXowP7zNopwpowa6qaMAWyIE+0= +github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33/go.mod h1:792k1RTU+5JeMXm35/e2Wgp71qPH/DmDoZrRc+EFZDk= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/segmentio/fasthash v1.0.3 h1:EI9+KE1EwvMLBWwjpRDc+fEM+prwxDYbslddQGtrmhM= @@ -902,6 +914,8 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= +github.com/stackitcloud/stackit-sdk-go/core v0.17.2 h1:jPyn+i8rkp2hM80+hOg0B/1EVRbMt778Tr5RWyK1m2E= +github.com/stackitcloud/stackit-sdk-go/core v0.17.2/go.mod h1:8KIw3czdNJ9sdil9QQimxjR6vHjeINFrRv0iZ67wfn0= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= @@ -928,14 +942,14 @@ github.com/tencentyun/cos-go-sdk-v5 v0.7.66 h1:O4O6EsozBoDjxWbltr3iULgkI7WPj/BFN github.com/tencentyun/cos-go-sdk-v5 v0.7.66/go.mod h1:8+hG+mQMuRP/OIS9d83syAvXvrMj9HhkND6Q1fLghw0= github.com/thanos-community/galaxycache v0.0.0-20211122094458-3a32041a1f1e h1:f1Zsv7OAU9iQhZwigp50Yl38W10g/vd5NC8Rdk1Jzng= github.com/thanos-community/galaxycache v0.0.0-20211122094458-3a32041a1f1e/go.mod h1:jXcofnrSln/cLI6/dhlBxPQZEEQHVPCcFaH75M+nSzM= -github.com/thanos-io/objstore v0.0.0-20241111205755-d1dd89d41f97 h1:VjG0mwhN1DkncwDHFvrpd12/2TLfgYNRmEQA48ikp+0= -github.com/thanos-io/objstore v0.0.0-20241111205755-d1dd89d41f97/go.mod h1:vyzFrBXgP+fGNG2FopEGWOO/zrIuoy7zt3LpLeezRsw= -github.com/thanos-io/promql-engine v0.0.0-20250611170940-015ebeb7b5ff h1:obQDLbgnae6rLPngWwQ6q/ifQZeDEmVvxHIJ6arJCDs= -github.com/thanos-io/promql-engine 
v0.0.0-20250611170940-015ebeb7b5ff/go.mod h1:IQjuIvDzOOVE2MGDs88Q65GYmmKrpmIsDkMVOqs5reo= -github.com/thanos-io/thanos v0.39.2 h1:edN03y7giEc6lD17HJhYcv8ELapXxElmhJnFIYJ2GqQ= -github.com/thanos-io/thanos v0.39.2/go.mod h1:bvUPJNIx2LBXme6yBinRiGqQinxlGikLlK7PGeFQPkQ= -github.com/thanos-io/thanos-prometheus v0.0.0-20250610133519-082594458a88 h1:5uf08MPb6xrVo4rxmBDh9/1SLthbZGY9zLeF3oMixh8= -github.com/thanos-io/thanos-prometheus v0.0.0-20250610133519-082594458a88/go.mod h1:WEq2ogBPZoLjj9x5K67VEk7ECR0nRD9XCjaOt1lsYck= +github.com/thanos-io/objstore v0.0.0-20250722142242-922b22272ee3 h1:P301Anc27aVL7Ls88el92j+qW3PJp8zmiDl+kOUZv3A= +github.com/thanos-io/objstore v0.0.0-20250722142242-922b22272ee3/go.mod h1:uDHLkMKOGDAnlN75EAz8VrRzob1+VbgYSuUleatWuF0= +github.com/thanos-io/promql-engine v0.0.0-20250726034445-91e6e32a36a7 h1:lFCGOWLDH50RB4ig/xRnUXX99ECD13xUHQdNOvcAYwc= +github.com/thanos-io/promql-engine v0.0.0-20250726034445-91e6e32a36a7/go.mod h1:MOFN0M1nDMcWZg1t4iF39sOard/K4SWgO/HHSODeDIc= +github.com/thanos-io/thanos v0.39.3-0.20250729120336-88d0ae8071cb h1:z/ePbn3lo/D4vdHGH8hpa2kgH9M6iLq0kOFtZwuelKM= +github.com/thanos-io/thanos v0.39.3-0.20250729120336-88d0ae8071cb/go.mod h1:gGUG3TDEoRSjTFVs/QO6QnQIILRgNF0P9l7BiiMfmHw= +github.com/tinylib/msgp v1.3.0 h1:ULuf7GPooDaIlbyvgAxBV/FI7ynli6LZ1/nVUNu+0ww= +github.com/tinylib/msgp v1.3.0/go.mod h1:ykjzy2wzgrlvpDCRc4LA8UXy6D8bzMSuAF3WD57Gok0= github.com/tjhop/slog-gokit v0.1.4 h1:uj/vbDt3HaF0Py8bHPV4ti/s0utnO0miRbO277FLBKM= github.com/tjhop/slog-gokit v0.1.4/go.mod h1:Bbu5v2748qpAWH7k6gse/kw3076IJf6owJmh7yArmJs= github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM= @@ -989,40 +1003,40 @@ go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/collector/component v1.34.0 h1:YONg7FaZ5zZbj5cLdARvwtMNuZHunuyxw2fWe5fcWqc= -go.opentelemetry.io/collector/component v1.34.0/go.mod h1:GvolsSVZskXuyfQdwYacqeBSZe/1tg4RJ0YK55KSvDA= -go.opentelemetry.io/collector/component/componentstatus v0.128.0 h1:0lEYHgUQEMMkl5FLtMgDH8lue4B3auElQINzGIWUya4= -go.opentelemetry.io/collector/component/componentstatus v0.128.0/go.mod h1:8vVO6JSV+edmiezJsQzW7aKQ7sFLIN6S3JawKBI646o= -go.opentelemetry.io/collector/component/componenttest v0.128.0 h1:MGNh5lQQ0Qmz2SmNwOqLJYaWMDkMLYj/51wjMzTBR34= -go.opentelemetry.io/collector/component/componenttest v0.128.0/go.mod h1:hALNxcacqOaX/Gm/dE7sNOxAEFj41SbRqtvF57Yd6gs= -go.opentelemetry.io/collector/confmap v1.34.0 h1:PG4sYlLxgCMnA5F7daKXZV+NKjU1IzXBzVQeyvcwyh0= -go.opentelemetry.io/collector/confmap v1.34.0/go.mod h1:BbAit8+hAJg5vyFBQoDh9vOXOH8UzCdNu91jCh+b72E= -go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 h1:hcVKU45pjC+PLz7xUc8kwSlR5wsN2w8hs9midZ3ez10= -go.opentelemetry.io/collector/confmap/xconfmap v0.128.0/go.mod h1:2928x4NAAu1CysfzLbEJE6MSSDB/gOYVq6YRGWY9LmM= -go.opentelemetry.io/collector/consumer v1.34.0 h1:oBhHH6mgViOGhVDPozE+sUdt7jFBo2Hh32lsSr2L3Tc= -go.opentelemetry.io/collector/consumer v1.34.0/go.mod h1:DVMCb56ZBlPNcmo0lSJKn3rp18oyZQCedRE4GKIMI+Q= -go.opentelemetry.io/collector/consumer/consumertest v0.128.0 h1:x50GB0I/QvU3sQuNCap5z/P2cnq2yHoRJ/8awkiT87w= -go.opentelemetry.io/collector/consumer/consumertest v0.128.0/go.mod h1:Wb3IAbMY/DOIwJPy81PuBiW2GnKoNIz4THE7wfJwovE= 
-go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 h1:4E+KTdCjkRS3SIw0bsv5kpv9XFXHf8x9YiPEuxBVEHY= -go.opentelemetry.io/collector/consumer/xconsumer v0.128.0/go.mod h1:OmzilL/qbjCzPMHay+WEA7/cPe5xuX7Jbj5WPIpqaMo= -go.opentelemetry.io/collector/featuregate v1.34.0 h1:zqDHpEYy1UeudrfUCvlcJL2t13dXywrC6lwpNZ5DrCU= -go.opentelemetry.io/collector/featuregate v1.34.0/go.mod h1:Y/KsHbvREENKvvN9RlpiWk/IGBK+CATBYzIIpU7nccc= -go.opentelemetry.io/collector/internal/telemetry v0.128.0 h1:ySEYWoY7J8DAYdlw2xlF0w+ODQi3AhYj7TRNflsCbx8= -go.opentelemetry.io/collector/internal/telemetry v0.128.0/go.mod h1:572B/iJqjauv3aT+zcwnlNWBPqM7+KqrYGSUuOAStrM= -go.opentelemetry.io/collector/pdata v1.34.0 h1:2vwYftckXe7pWxI9mfSo+tw3wqdGNrYpMbDx/5q6rw8= -go.opentelemetry.io/collector/pdata v1.34.0/go.mod h1:StPHMFkhLBellRWrULq0DNjv4znCDJZP6La4UuC+JHI= -go.opentelemetry.io/collector/pdata/pprofile v0.128.0 h1:6DEtzs/liqv/ukz2EHbC5OMaj2V6K2pzuj/LaRg2YmY= -go.opentelemetry.io/collector/pdata/pprofile v0.128.0/go.mod h1:bVVRpz+zKFf1UCCRUFqy8LvnO3tHlXKkdqW2d+Wi/iA= -go.opentelemetry.io/collector/pdata/testdata v0.128.0 h1:5xcsMtyzvb18AnS2skVtWreQP1nl6G3PiXaylKCZ6pA= -go.opentelemetry.io/collector/pdata/testdata v0.128.0/go.mod h1:9/VYVgzv3JMuIyo19KsT3FwkVyxbh3Eg5QlabQEUczA= -go.opentelemetry.io/collector/pipeline v0.128.0 h1:WgNXdFbyf/QRLy5XbO/jtPQosWrSWX/TEnSYpJq8bgI= -go.opentelemetry.io/collector/pipeline v0.128.0/go.mod h1:TO02zju/K6E+oFIOdi372Wk0MXd+Szy72zcTsFQwXl4= -go.opentelemetry.io/collector/processor v1.34.0 h1:5pwXIG12XXxdkJ8F68e2cBEjEnFlCIAZhqEYM7vjkqE= -go.opentelemetry.io/collector/processor v1.34.0/go.mod h1:VCl4vYj2tdO4APUcr0q6Eh796mqCCsH9Z/gqaPuzlUs= -go.opentelemetry.io/collector/processor/processortest v0.128.0 h1:xPhOSmGFDGqhC3/nu1BqPSE6EpDPAf1/F+BfaYjDn/8= -go.opentelemetry.io/collector/processor/processortest v0.128.0/go.mod h1:XXXom+mbAQtrkcvq4Ecd6n8RQoVgcfLe1vrUlr6U2gI= -go.opentelemetry.io/collector/processor/xprocessor v0.128.0 h1:ObbtdXab0is6bdt4XabsRJZ+SUTuwQjPVlHTbmScfNg= -go.opentelemetry.io/collector/processor/xprocessor v0.128.0/go.mod h1:/nHXW15nzwSRQ+25Cb+r17he/uMtCEvSOBGqpDbn3Uk= +go.opentelemetry.io/collector/component v1.35.0 h1:JpvBukEcEUvJ/TInF1KYpXtWEP+C7iYkxCHKjI0o7BQ= +go.opentelemetry.io/collector/component v1.35.0/go.mod h1:hU/ieWPxWbMAacODCSqem5ZaN6QH9W5GWiZ3MtXVuwc= +go.opentelemetry.io/collector/component/componentstatus v0.129.0 h1:ejpBAt7hXAAZiQKcSxLvcy8sj8SjY4HOLdoXIlW6ybw= +go.opentelemetry.io/collector/component/componentstatus v0.129.0/go.mod h1:/dLPIxn/tRMWmGi+DPtuFoBsffOLqPpSZ2IpEQzYtwI= +go.opentelemetry.io/collector/component/componenttest v0.129.0 h1:gpKkZGCRPu3Yn0U2co09bMvhs17yLFb59oV8Gl9mmRI= +go.opentelemetry.io/collector/component/componenttest v0.129.0/go.mod h1:JR9k34Qvd/pap6sYkPr5QqdHpTn66A5lYeYwhenKBAM= +go.opentelemetry.io/collector/confmap v1.35.0 h1:U4JDATAl4PrKWe9bGHbZkoQXmJXefWgR2DIkFvw8ULQ= +go.opentelemetry.io/collector/confmap v1.35.0/go.mod h1:qX37ExVBa+WU4jWWJCZc7IJ+uBjb58/9oL+/ctF1Bt0= +go.opentelemetry.io/collector/confmap/xconfmap v0.129.0 h1:Q/+pJKrkCaMPSoSAH2BpC3UZCh+5hTiFkh/bdy5yChk= +go.opentelemetry.io/collector/confmap/xconfmap v0.129.0/go.mod h1:RNMnlay2meJDXcKjxiLbST9/YAhKLJlj0kZCrJrLGgw= +go.opentelemetry.io/collector/consumer v1.35.0 h1:mgS42yh1maXBIE65IT4//iOA89BE+7xSUzV8czyevHg= +go.opentelemetry.io/collector/consumer v1.35.0/go.mod h1:9sSPX0hDHaHqzR2uSmfLOuFK9v3e9K3HRQ+fydAjOWs= +go.opentelemetry.io/collector/consumer/consumertest v0.129.0 h1:kRmrAgVvPxH5c/rTaOYAzyy0YrrYhQpBNkuqtDRrgeU= 
+go.opentelemetry.io/collector/consumer/consumertest v0.129.0/go.mod h1:JgJKms1+v/CuAjkPH+ceTnKeDgUUGTQV4snGu5wTEHY= +go.opentelemetry.io/collector/consumer/xconsumer v0.129.0 h1:bRyJ9TGWwnrUnB5oQGTjPhxpVRbkIVeugmvks22bJ4A= +go.opentelemetry.io/collector/consumer/xconsumer v0.129.0/go.mod h1:pbe5ZyPJrtzdt/RRI0LqfT1GVBiJLbtkDKx3SBRTiTY= +go.opentelemetry.io/collector/featuregate v1.35.0 h1:c/XRtA35odgxVc4VgOF/PTIk7ajw1wYdQ6QI562gzd4= +go.opentelemetry.io/collector/featuregate v1.35.0/go.mod h1:Y/KsHbvREENKvvN9RlpiWk/IGBK+CATBYzIIpU7nccc= +go.opentelemetry.io/collector/internal/telemetry v0.129.0 h1:jkzRpIyMxMGdAzVOcBe8aRNrbP7eUrMq6cxEHe0sbzA= +go.opentelemetry.io/collector/internal/telemetry v0.129.0/go.mod h1:riAPlR2LZBV7VEx4LicOKebg3N1Ja3izzkv5fl1Lhiw= +go.opentelemetry.io/collector/pdata v1.35.0 h1:ck6WO6hCNjepADY/p9sT9/rLECTLO5ukYTumKzsqB/E= +go.opentelemetry.io/collector/pdata v1.35.0/go.mod h1:pttpb089864qG1k0DMeXLgwwTFLk+o3fAW9I6MF9tzw= +go.opentelemetry.io/collector/pdata/pprofile v0.129.0 h1:DgZTvjOGmyZRx7Or80hz8XbEaGwHPkIh2SX1A5eXttQ= +go.opentelemetry.io/collector/pdata/pprofile v0.129.0/go.mod h1:uUBZxqJNOk6QIMvbx30qom//uD4hXJ1K/l3qysijMLE= +go.opentelemetry.io/collector/pdata/testdata v0.129.0 h1:n1QLnLOtrcAR57oMSVzmtPsQEpCc/nE5Avk1xfuAkjY= +go.opentelemetry.io/collector/pdata/testdata v0.129.0/go.mod h1:RfY5IKpmcvkS2IGVjl9jG9fcT7xpQEBWpg9sQOn/7mY= +go.opentelemetry.io/collector/pipeline v0.129.0 h1:Mp7RuKLizLQJ0381eJqKQ0zpgkFlhTE9cHidpJQIvMU= +go.opentelemetry.io/collector/pipeline v0.129.0/go.mod h1:TO02zju/K6E+oFIOdi372Wk0MXd+Szy72zcTsFQwXl4= +go.opentelemetry.io/collector/processor v1.35.0 h1:YOfHemhhodYn4BnPjN7kWYYDhzPVqRkyHCaQ8mAlavs= +go.opentelemetry.io/collector/processor v1.35.0/go.mod h1:cWHDOpmpAaVNCc9K9j2/okZoLIuP/EpGGRNhM4JGmFM= +go.opentelemetry.io/collector/processor/processortest v0.129.0 h1:r5iJHdS7Ffdb2zmMVYx4ahe92PLrce5cas/AJEXivkY= +go.opentelemetry.io/collector/processor/processortest v0.129.0/go.mod h1:gdf8GzyzjGoDTA11+CPwC4jfXphtC+B7MWbWn+LIWXc= +go.opentelemetry.io/collector/processor/xprocessor v0.129.0 h1:V3Zgd+YIeu3Ij3DPlGtzdcTwpqOQIqQVcL5jdHHS7sc= +go.opentelemetry.io/collector/processor/xprocessor v0.129.0/go.mod h1:78T+AP5NO137W/E+SibQhaqOyS67fR+IN697b4JFh00= go.opentelemetry.io/collector/semconv v0.128.0 h1:MzYOz7Vgb3Kf5D7b49pqqgeUhEmOCuT10bIXb/Cc+k4= go.opentelemetry.io/collector/semconv v0.128.0/go.mod h1:OPXer4l43X23cnjLXIZnRj/qQOjSuq4TgBLI76P9hns= go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 h1:u2E32P7j1a/gRgZDWhIXC+Shd4rLg70mnE7QLI/Ssnw= @@ -1439,8 +1453,8 @@ google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRR google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= google.golang.org/api v0.81.0/go.mod h1:FA6Mb/bZxj706H2j+j2d6mHEEaHBmbbWnkfvmorOCko= -google.golang.org/api v0.228.0 h1:X2DJ/uoWGnY5obVjewbp8icSL5U4FzuCfy9OjbLSnLs= -google.golang.org/api v0.228.0/go.mod h1:wNvRS1Pbe8r4+IfBIniV8fwCpGwTrYa+kMUDiC5z5a4= +google.golang.org/api v0.239.0 h1:2hZKUnFZEy81eugPs4e2XzIJ5SOwQg0G82bpXD65Puo= +google.golang.org/api v0.239.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1528,8 +1542,8 @@ google.golang.org/genproto 
v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20250204164813-702378808489 h1:nQcbCCOg2h2CQ0yA8SY3AHqriNKDvsetuq9mE/HFjtc= -google.golang.org/genproto v0.0.0-20250204164813-702378808489/go.mod h1:wkQ2Aj/xvshAUDtO/JHvu9y+AaN9cqs28QuSVSHtZSY= +google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78= +google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= diff --git a/integration/e2e/images/images.go b/integration/e2e/images/images.go index 7b744526676..1ef0e8bbdec 100644 --- a/integration/e2e/images/images.go +++ b/integration/e2e/images/images.go @@ -11,5 +11,5 @@ var ( Minio = "minio/minio:RELEASE.2024-05-28T17-19-04Z" Consul = "consul:1.8.4" ETCD = "gcr.io/etcd-development/etcd:v3.4.7" - Prometheus = "quay.io/prometheus/prometheus:v3.3.1" + Prometheus = "quay.io/prometheus/prometheus:v3.5.0" ) diff --git a/integration/parquet_querier_test.go b/integration/parquet_querier_test.go index 570b4c0c45a..e085cef99d1 100644 --- a/integration/parquet_querier_test.go +++ b/integration/parquet_querier_test.go @@ -99,19 +99,8 @@ func TestParquetFuzz(t *testing.T) { end := now.Add(-time.Hour) for i := 0; i < numSeries; i++ { - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_a"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 3)}, - {Name: "status_code", Value: statusCodes[i%5]}, - }) - - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_b"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa((i + 1) % 3)}, - {Name: "status_code", Value: statusCodes[(i+1)%5]}, - }) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_a", "job", "test", "series", strconv.Itoa(i%3), "status_code", statusCodes[i%5])) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_b", "job", "test", "series", strconv.Itoa((i+1)%3), "status_code", statusCodes[(i+1)%5])) } id, err := e2e.CreateBlock(ctx, rnd, dir, lbls, numSamples, start.UnixMilli(), end.UnixMilli(), scrapeInterval.Milliseconds(), 10) require.NoError(t, err) diff --git a/integration/query_fuzz_test.go b/integration/query_fuzz_test.go index cc8d272fd2f..d39c1726a4d 100644 --- a/integration/query_fuzz_test.go +++ b/integration/query_fuzz_test.go @@ -108,19 +108,8 @@ func TestNativeHistogramFuzz(t *testing.T) { scrapeInterval := time.Minute statusCodes := []string{"200", "400", "404", "500", "502"} for i := 0; i < numSeries; i++ { - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_a"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 3)}, - {Name: "status_code", Value: statusCodes[i%5]}, 
- }) - - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_b"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa((i + 1) % 3)}, - {Name: "status_code", Value: statusCodes[(i+1)%5]}, - }) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_a", "job", "test", "series", strconv.Itoa(i%3), "status_code", statusCodes[i%5])) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_b", "job", "test", "series", strconv.Itoa((i+1)%3), "status_code", statusCodes[(i+1)%5])) } ctx := context.Background() @@ -221,19 +210,8 @@ func TestExperimentalPromQLFuncsWithPrometheus(t *testing.T) { scrapeInterval := time.Minute statusCodes := []string{"200", "400", "404", "500", "502"} for i := 0; i < numSeries; i++ { - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_a"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 3)}, - {Name: "status_code", Value: statusCodes[i%5]}, - }) - - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_b"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa((i + 1) % 3)}, - {Name: "status_code", Value: statusCodes[(i+1)%5]}, - }) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_a", "job", "test", "series", strconv.Itoa(i%3), "status_code", statusCodes[i%5])) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_b", "job", "test", "series", strconv.Itoa((i+1)%3), "status_code", statusCodes[(i+1)%5])) } ctx := context.Background() @@ -1209,13 +1187,7 @@ func TestStoreGatewayLazyExpandedPostingsSeriesFuzz(t *testing.T) { metricName := "http_requests_total" statusCodes := []string{"200", "400", "404", "500", "502"} for i := 0; i < numSeries; i++ { - lbl := labels.Labels{ - {Name: labels.MetricName, Value: metricName}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 200)}, - {Name: "status_code", Value: statusCodes[i%5]}, - } - lbls = append(lbls, lbl) + lbls = append(lbls, labels.FromStrings(labels.MetricName, metricName, "job", "test", "series", strconv.Itoa(i%200), "status_code", statusCodes[i%5])) } ctx := context.Background() rnd := rand.New(rand.NewSource(time.Now().Unix())) @@ -1367,13 +1339,7 @@ func TestStoreGatewayLazyExpandedPostingsSeriesFuzzWithPrometheus(t *testing.T) metricName := "http_requests_total" statusCodes := []string{"200", "400", "404", "500", "502"} for i := 0; i < numSeries; i++ { - lbl := labels.Labels{ - {Name: labels.MetricName, Value: metricName}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 200)}, - {Name: "status_code", Value: statusCodes[i%5]}, - } - lbls = append(lbls, lbl) + lbls = append(lbls, labels.FromStrings(labels.MetricName, metricName, "job", "test", "series", strconv.Itoa(i%200), "status_code", statusCodes[i%5])) } ctx := context.Background() rnd := rand.New(rand.NewSource(time.Now().Unix())) @@ -1673,19 +1639,8 @@ func TestPrometheusCompatibilityQueryFuzz(t *testing.T) { scrapeInterval := time.Minute statusCodes := []string{"200", "400", "404", "500", "502"} for i := 0; i < numSeries; i++ { - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_a"}, - {Name: "job", Value: "test"}, - {Name: "series", Value: strconv.Itoa(i % 3)}, - {Name: "status_code", Value: statusCodes[i%5]}, - }) - - lbls = append(lbls, labels.Labels{ - {Name: labels.MetricName, Value: "test_series_b"}, - {Name: "job", Value: "test"}, - {Name: 
"series", Value: strconv.Itoa((i + 1) % 3)}, - {Name: "status_code", Value: statusCodes[(i+1)%5]}, - }) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_a", "job", "test", "series", strconv.Itoa(i%3), "status_code", statusCodes[i%5])) + lbls = append(lbls, labels.FromStrings(labels.MetricName, "test_series_b", "job", "test", "series", strconv.Itoa((i+1)%3), "status_code", statusCodes[(i+1)%5])) } ctx := context.Background() diff --git a/integration/ruler_test.go b/integration/ruler_test.go index 5a9a2d4261a..48bdaff5514 100644 --- a/integration/ruler_test.go +++ b/integration/ruler_test.go @@ -504,14 +504,14 @@ func testRulerAPIWithSharding(t *testing.T, enableRulesBackup bool) { assert.NoError(t, json.Unmarshal(responseJson, ar)) if !ar.LastEvaluation.IsZero() { // Labels will be merged only if groups are loaded to Prometheus rule manager - assert.Equal(t, 5, len(ar.Labels)) + assert.Equal(t, 5, ar.Labels.Len()) } - for _, label := range ar.Labels { - if label.Name == "duplicate_label" { + ar.Labels.Range(func(l labels.Label) { + if l.Name == "duplicate_label" { // rule label should override group label - assert.Equal(t, ruleLabels["duplicate_label"], label.Value) + assert.Equal(t, ruleLabels["duplicate_label"], l.Value) } - } + }) } }, }, diff --git a/pkg/api/handlers.go b/pkg/api/handlers.go index 9bcc6a6906e..5eb6733532f 100644 --- a/pkg/api/handlers.go +++ b/pkg/api/handlers.go @@ -161,6 +161,7 @@ func DefaultConfigHandler(actualCfg interface{}, defaultCfg interface{}) http.Ha // server to fulfill the Prometheus query API. func NewQuerierHandler( cfg Config, + querierCfg querier.Config, queryable storage.SampleAndChunkQueryable, exemplarQueryable storage.ExemplarQueryable, engine promql.QueryEngine, @@ -239,6 +240,8 @@ func NewQuerierHandler( false, false, false, + false, + querierCfg.LookbackDelta, ) // Let's clear all codecs to create the instrumented ones api.ClearCodecs() diff --git a/pkg/api/handlers_test.go b/pkg/api/handlers_test.go index 32e84d70a97..9b8b7930683 100644 --- a/pkg/api/handlers_test.go +++ b/pkg/api/handlers_test.go @@ -14,6 +14,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/weaveworks/common/user" + + "github.com/cortexproject/cortex/pkg/querier" ) func TestIndexHandlerPrefix(t *testing.T) { @@ -229,10 +231,11 @@ func TestBuildInfoAPI(t *testing.T) { } { t.Run(tc.name, func(t *testing.T) { cfg := Config{buildInfoEnabled: true} + querierConfig := querier.Config{} version.Version = tc.version version.Branch = tc.branch version.Revision = tc.revision - handler := NewQuerierHandler(cfg, nil, nil, nil, nil, nil, &FakeLogger{}) + handler := NewQuerierHandler(cfg, querierConfig, nil, nil, nil, nil, nil, &FakeLogger{}) writer := httptest.NewRecorder() req := httptest.NewRequest("GET", "/api/v1/status/buildinfo", nil) req = req.WithContext(user.InjectOrgID(req.Context(), "test")) diff --git a/pkg/chunk/fixtures.go b/pkg/chunk/fixtures.go index 9227415db08..433cd8c277a 100644 --- a/pkg/chunk/fixtures.go +++ b/pkg/chunk/fixtures.go @@ -8,22 +8,22 @@ import ( ) // BenchmarkLabels is a real example from Kubernetes' embedded cAdvisor metrics, lightly obfuscated -var BenchmarkLabels = labels.Labels{ - {Name: model.MetricNameLabel, Value: "container_cpu_usage_seconds_total"}, - {Name: "beta_kubernetes_io_arch", Value: "amd64"}, - {Name: "beta_kubernetes_io_instance_type", Value: "c3.somesize"}, - {Name: "beta_kubernetes_io_os", Value: "linux"}, - {Name: "container_name", Value: "some-name"}, - {Name: 
"cpu", Value: "cpu01"}, - {Name: "failure_domain_beta_kubernetes_io_region", Value: "somewhere-1"}, - {Name: "failure_domain_beta_kubernetes_io_zone", Value: "somewhere-1b"}, - {Name: "id", Value: "/kubepods/burstable/pod6e91c467-e4c5-11e7-ace3-0a97ed59c75e/a3c8498918bd6866349fed5a6f8c643b77c91836427fb6327913276ebc6bde28"}, - {Name: "image", Value: "registry/organisation/name@sha256:dca3d877a80008b45d71d7edc4fd2e44c0c8c8e7102ba5cbabec63a374d1d506"}, - {Name: "instance", Value: "ip-111-11-1-11.ec2.internal"}, - {Name: "job", Value: "kubernetes-cadvisor"}, - {Name: "kubernetes_io_hostname", Value: "ip-111-11-1-11"}, - {Name: "monitor", Value: "prod"}, - {Name: "name", Value: "k8s_some-name_some-other-name-5j8s8_kube-system_6e91c467-e4c5-11e7-ace3-0a97ed59c75e_0"}, - {Name: "namespace", Value: "kube-system"}, - {Name: "pod_name", Value: "some-other-name-5j8s8"}, -} +var BenchmarkLabels = labels.FromStrings( + model.MetricNameLabel, "container_cpu_usage_seconds_total", + "beta_kubernetes_io_arch", "amd64", + "beta_kubernetes_io_instance_type", "c3.somesize", + "beta_kubernetes_io_os", "linux", + "container_name", "some-name", + "cpu", "cpu01", + "failure_domain_beta_kubernetes_io_region", "somewhere-1", + "failure_domain_beta_kubernetes_io_zone", "somewhere-1b", + "id", "/kubepods/burstable/pod6e91c467-e4c5-11e7-ace3-0a97ed59c75e/a3c8498918bd6866349fed5a6f8c643b77c91836427fb6327913276ebc6bde28", + "image", "registry/organisation/name@sha256:dca3d877a80008b45d71d7edc4fd2e44c0c8c8e7102ba5cbabec63a374d1d506", + "instance", "ip-111-11-1-11.ec2.internal", + "job", "kubernetes-cadvisor", + "kubernetes_io_hostname", "ip-111-11-1-11", + "monitor", "prod", + "name", "k8s_some-name_some-other-name-5j8s8_kube-system_6e91c467-e4c5-11e7-ace3-0a97ed59c75e_0", + "namespace", "kube-system", + "pod_name", "some-other-name-5j8s8", +) diff --git a/pkg/chunk/json_helpers.go b/pkg/chunk/json_helpers.go index 9107f7d8c25..21711149380 100644 --- a/pkg/chunk/json_helpers.go +++ b/pkg/chunk/json_helpers.go @@ -1,7 +1,6 @@ package chunk import ( - "sort" "unsafe" jsoniter "github.com/json-iterator/go" @@ -19,35 +18,40 @@ func init() { // Override Prometheus' labels.Labels decoder which goes via a map func DecodeLabels(ptr unsafe.Pointer, iter *jsoniter.Iterator) { labelsPtr := (*labels.Labels)(ptr) - *labelsPtr = make(labels.Labels, 0, 10) + b := labels.NewBuilder(labels.EmptyLabels()) + iter.ReadMapCB(func(iter *jsoniter.Iterator, key string) bool { value := iter.ReadString() - *labelsPtr = append(*labelsPtr, labels.Label{Name: key, Value: value}) + b.Set(key, value) return true }) - // Labels are always sorted, but earlier Cortex using a map would - // output in any order so we have to sort on read in - sort.Sort(*labelsPtr) + *labelsPtr = b.Labels() } // Override Prometheus' labels.Labels encoder which goes via a map func EncodeLabels(ptr unsafe.Pointer, stream *jsoniter.Stream) { - labelsPtr := (*labels.Labels)(ptr) + lbls := *(*labels.Labels)(ptr) + stream.WriteObjectStart() - for i, v := range *labelsPtr { - if i != 0 { + first := true + + lbls.Range(func(l labels.Label) { + if !first { stream.WriteMore() } - stream.WriteString(v.Name) + first = false + + stream.WriteString(l.Name) stream.WriteRaw(`:`) - stream.WriteString(v.Value) - } + stream.WriteString(l.Value) + }) + stream.WriteObjectEnd() } func labelsIsEmpty(ptr unsafe.Pointer) bool { - labelsPtr := (*labels.Labels)(ptr) - return len(*labelsPtr) == 0 + labelsPtr := *(*labels.Labels)(ptr) + return labelsPtr.Len() == 0 } // Decode via jsoniter's float64 
routine is faster than getting the string data and decoding as two integers diff --git a/pkg/compactor/compactor_metrics_test.go b/pkg/compactor/compactor_metrics_test.go index 75879f2d96a..0288bbe909f 100644 --- a/pkg/compactor/compactor_metrics_test.go +++ b/pkg/compactor/compactor_metrics_test.go @@ -49,6 +49,7 @@ func TestCompactorMetrics(t *testing.T) { cortex_compactor_meta_synced{state="marked-for-deletion"} 0 cortex_compactor_meta_synced{state="marked-for-no-compact"} 0 cortex_compactor_meta_synced{state="no-meta-json"} 0 + cortex_compactor_meta_synced{state="parquet-migrated"} 0 cortex_compactor_meta_synced{state="time-excluded"} 0 cortex_compactor_meta_synced{state="too-fresh"} 0 # HELP cortex_compactor_meta_syncs_total Total blocks metadata synchronization attempts. diff --git a/pkg/compactor/compactor_paritioning_test.go b/pkg/compactor/compactor_paritioning_test.go index 1e5627590b6..bbb875dad37 100644 --- a/pkg/compactor/compactor_paritioning_test.go +++ b/pkg/compactor/compactor_paritioning_test.go @@ -18,6 +18,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" prom_testutil "github.com/prometheus/client_golang/prometheus/testutil" + "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/tsdb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -1041,7 +1042,9 @@ func TestPartitionCompactor_ShouldCompactAllUsersOnShardingEnabledButOnlyOneInst bucketClient.MockExists(cortex_tsdb.GetGlobalDeletionMarkPath("user-2"), false, nil) bucketClient.MockExists(cortex_tsdb.GetLocalDeletionMarkPath("user-2"), false, nil) bucketClient.MockIter("user-1/", []string{"user-1/01DTVP434PA9VFXSW2JKB3392D", "user-1/01DTVP434PA9VFXSW2JKB3392D/meta.json", "user-1/01FN6CDF3PNEWWRY5MPGJPE3EX/meta.json"}, nil) + //bucketClient.MockIterWithAttributes("user-1/", []string{"user-1/01DTVP434PA9VFXSW2JKB3392D", "user-1/01DTVP434PA9VFXSW2JKB3392D/meta.json", "user-1/01FN6CDF3PNEWWRY5MPGJPE3EX/meta.json"}, nil) bucketClient.MockIter("user-2/", []string{"user-2/01DTW0ZCPDDNV4BV83Q2SV4QAZ", "user-2/01DTW0ZCPDDNV4BV83Q2SV4QAZ/meta.json", "user-2/01FN3V83ABR9992RF8WRJZ76ZQ/meta.json"}, nil) + //bucketClient.MockIterWithAttributes("user-2/", []string{"user-2/01DTW0ZCPDDNV4BV83Q2SV4QAZ", "user-2/01DTW0ZCPDDNV4BV83Q2SV4QAZ/meta.json", "user-2/01FN3V83ABR9992RF8WRJZ76ZQ/meta.json"}, nil) bucketClient.MockIter("user-1/markers/", nil, nil) bucketClient.MockGet("user-1/markers/cleaner-visit-marker.json", "", nil) bucketClient.MockUpload("user-1/markers/cleaner-visit-marker.json", nil) @@ -1507,7 +1510,7 @@ func mockBlockGroup(userID string, ids []string, bkt *bucket.ClientMock) *compac log.NewNopLogger(), bkt, getPartitionedGroupID(userID), - nil, + labels.EmptyLabels(), 0, true, true, diff --git a/pkg/compactor/compactor_test.go b/pkg/compactor/compactor_test.go index a76afa4a206..19bb759f009 100644 --- a/pkg/compactor/compactor_test.go +++ b/pkg/compactor/compactor_test.go @@ -1362,7 +1362,7 @@ func createTSDBBlock(t *testing.T, bkt objstore.Bucket, userID string, minT, max // Append a sample at the beginning and one at the end of the time range. 
for i, ts := range []int64{minT, maxT - 1} { - lbls := labels.Labels{labels.Label{Name: "series_id", Value: strconv.Itoa(i)}} + lbls := labels.FromStrings("series_id", strconv.Itoa(i)) app := db.Appender(context.Background()) _, err := app.Append(0, lbls, ts, float64(i)) diff --git a/pkg/compactor/sharded_compaction_lifecycle_callback_test.go b/pkg/compactor/sharded_compaction_lifecycle_callback_test.go index 0c0b8f0f340..9e598a2edc5 100644 --- a/pkg/compactor/sharded_compaction_lifecycle_callback_test.go +++ b/pkg/compactor/sharded_compaction_lifecycle_callback_test.go @@ -9,6 +9,7 @@ import ( "github.com/go-kit/log" "github.com/oklog/ulid/v2" + "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/tsdb" "github.com/stretchr/testify/require" "github.com/thanos-io/thanos/pkg/block/metadata" @@ -46,7 +47,7 @@ func TestPreCompactionCallback(t *testing.T) { log.NewNopLogger(), nil, testGroupKey, - nil, + labels.EmptyLabels(), 0, true, true, diff --git a/pkg/compactor/sharded_posting.go b/pkg/compactor/sharded_posting.go index b0c29ca1c98..09115de6841 100644 --- a/pkg/compactor/sharded_posting.go +++ b/pkg/compactor/sharded_posting.go @@ -28,10 +28,10 @@ func NewShardedPosting(ctx context.Context, postings index.Postings, partitionCo if builder.Labels().Hash()%partitionCount == partitionID { posting := postings.At() series = append(series, posting) - for _, label := range builder.Labels() { - symbols[label.Name] = struct{}{} - symbols[label.Value] = struct{}{} - } + builder.Labels().Range(func(l labels.Label) { + symbols[l.Name] = struct{}{} + symbols[l.Value] = struct{}{} + }) } } return index.NewListPostings(series), symbols, nil diff --git a/pkg/compactor/sharded_posting_test.go b/pkg/compactor/sharded_posting_test.go index e65b9b52919..c277922fe0a 100644 --- a/pkg/compactor/sharded_posting_test.go +++ b/pkg/compactor/sharded_posting_test.go @@ -46,15 +46,11 @@ func TestShardPostingAndSymbolBasedOnPartitionID(t *testing.T) { expectedSeriesCount := 10 for i := 0; i < expectedSeriesCount; i++ { labelValue := strconv.Itoa(r.Int()) - series = append(series, labels.Labels{ - metricName, - {Name: ConstLabelName, Value: ConstLabelValue}, - {Name: TestLabelName, Value: labelValue}, - }) + series = append(series, labels.FromStrings(metricName.Name, metricName.Value, ConstLabelName, ConstLabelValue, TestLabelName, labelValue)) expectedSymbols[TestLabelName] = false expectedSymbols[labelValue] = false } - blockID, err := e2eutil.CreateBlock(context.Background(), tmpdir, series, 10, time.Now().Add(-10*time.Minute).UnixMilli(), time.Now().UnixMilli(), nil, 0, metadata.NoneFunc, nil) + blockID, err := e2eutil.CreateBlock(context.Background(), tmpdir, series, 10, time.Now().Add(-10*time.Minute).UnixMilli(), time.Now().UnixMilli(), labels.EmptyLabels(), 0, metadata.NoneFunc, nil) require.NoError(t, err) var closers []io.Closer @@ -82,10 +78,10 @@ func TestShardPostingAndSymbolBasedOnPartitionID(t *testing.T) { require.NoError(t, err) require.Equal(t, uint64(partitionID), builder.Labels().Hash()%uint64(partitionCount)) seriesCount++ - for _, label := range builder.Labels() { - expectedShardedSymbols[label.Name] = struct{}{} - expectedShardedSymbols[label.Value] = struct{}{} - } + builder.Labels().Range(func(l labels.Label) { + expectedShardedSymbols[l.Name] = struct{}{} + expectedShardedSymbols[l.Value] = struct{}{} + }) } err = ir.Close() if err == nil { diff --git a/pkg/configs/userconfig/config.go b/pkg/configs/userconfig/config.go index 25e7d39b38b..70b6ed70187 100644 --- 
a/pkg/configs/userconfig/config.go +++ b/pkg/configs/userconfig/config.go @@ -308,7 +308,7 @@ func (c RulesConfig) parseV2() (map[string][]rules.Rule, error) { time.Duration(rl.KeepFiringFor), labels.FromMap(rl.Labels), labels.FromMap(rl.Annotations), - nil, + labels.EmptyLabels(), "", true, util_log.GoKitLogToSlog(log.With(util_log.Logger, "alert", rl.Alert)), diff --git a/pkg/configs/userconfig/config_test.go b/pkg/configs/userconfig/config_test.go index 392ca911ca9..d17dae574d0 100644 --- a/pkg/configs/userconfig/config_test.go +++ b/pkg/configs/userconfig/config_test.go @@ -86,13 +86,9 @@ func TestParseLegacyAlerts(t *testing.T) { parsed, 5*time.Minute, 0, - labels.Labels{ - labels.Label{Name: "severity", Value: "critical"}, - }, - labels.Labels{ - labels.Label{Name: "message", Value: "I am a message"}, - }, - nil, + labels.FromStrings("severity", "critical"), + labels.FromStrings("message", "I am a message"), + labels.EmptyLabels(), "", true, util_log.GoKitLogToSlog(log.With(util_log.Logger, "alert", "TestAlert")), diff --git a/pkg/cortex/modules.go b/pkg/cortex/modules.go index 967f7aba1e3..dca0dc0b266 100644 --- a/pkg/cortex/modules.go +++ b/pkg/cortex/modules.go @@ -365,6 +365,7 @@ func (t *Cortex) initQuerier() (serv services.Service, err error) { // to a Prometheus API struct instantiated with the Cortex Queryable. internalQuerierRouter := api.NewQuerierHandler( t.Cfg.API, + t.Cfg.Querier, t.QuerierQueryable, t.ExemplarQueryable, t.QuerierEngine, diff --git a/pkg/cortexpb/compat.go b/pkg/cortexpb/compat.go index 6de2423d562..83bdbff33d1 100644 --- a/pkg/cortexpb/compat.go +++ b/pkg/cortexpb/compat.go @@ -67,13 +67,13 @@ func FromLabelAdaptersToLabels(ls []LabelAdapter) labels.Labels { // Do NOT use unsafe to convert between data types because this function may // get in input labels whose data structure is reused. func FromLabelAdaptersToLabelsWithCopy(input []LabelAdapter) labels.Labels { - return CopyLabels(FromLabelAdaptersToLabels(input)) + return CopyLabels(input) } // Efficiently copies labels input slice. To be used in cases where input slice // can be reused, but long-term copy is needed. -func CopyLabels(input []labels.Label) labels.Labels { - result := make(labels.Labels, len(input)) +func CopyLabels(input []LabelAdapter) labels.Labels { + builder := labels.NewBuilder(labels.EmptyLabels()) size := 0 for _, l := range input { @@ -84,12 +84,14 @@ func CopyLabels(input []labels.Label) labels.Labels { // Copy all strings into the buffer, and use 'yoloString' to convert buffer // slices to strings. 
buf := make([]byte, size) + var name, value string - for i, l := range input { - result[i].Name, buf = copyStringToBuffer(l.Name, buf) - result[i].Value, buf = copyStringToBuffer(l.Value, buf) + for _, l := range input { + name, buf = copyStringToBuffer(l.Name, buf) + value, buf = copyStringToBuffer(l.Value, buf) + builder.Set(name, value) } - return result + return builder.Labels() } // Copies string to buffer (which must be big enough), and converts buffer slice containing diff --git a/pkg/cortexpb/compat_test.go b/pkg/cortexpb/compat_test.go index 6fda91a84ee..843aa290d07 100644 --- a/pkg/cortexpb/compat_test.go +++ b/pkg/cortexpb/compat_test.go @@ -104,26 +104,28 @@ func TestMetricMetadataToMetricTypeToMetricType(t *testing.T) { func TestFromLabelAdaptersToLabels(t *testing.T) { input := []LabelAdapter{{Name: "hello", Value: "world"}} - expected := labels.Labels{labels.Label{Name: "hello", Value: "world"}} + expected := labels.FromStrings("hello", "world") actual := FromLabelAdaptersToLabels(input) assert.Equal(t, expected, actual) - // All strings must NOT be copied. - assert.Equal(t, uintptr(unsafe.Pointer(&input[0].Name)), uintptr(unsafe.Pointer(&actual[0].Name))) - assert.Equal(t, uintptr(unsafe.Pointer(&input[0].Value)), uintptr(unsafe.Pointer(&actual[0].Value))) + final := FromLabelsToLabelAdapters(actual) + // All strings must not be copied. + assert.Equal(t, uintptr(unsafe.Pointer(&input[0].Name)), uintptr(unsafe.Pointer(&final[0].Name))) + assert.Equal(t, uintptr(unsafe.Pointer(&input[0].Value)), uintptr(unsafe.Pointer(&final[0].Value))) } func TestFromLabelAdaptersToLabelsWithCopy(t *testing.T) { input := []LabelAdapter{{Name: "hello", Value: "world"}} - expected := labels.Labels{labels.Label{Name: "hello", Value: "world"}} + expected := labels.FromStrings("hello", "world") actual := FromLabelAdaptersToLabelsWithCopy(input) assert.Equal(t, expected, actual) + final := FromLabelsToLabelAdapters(actual) // All strings must be copied. - assert.NotEqual(t, uintptr(unsafe.Pointer(&input[0].Name)), uintptr(unsafe.Pointer(&actual[0].Name))) - assert.NotEqual(t, uintptr(unsafe.Pointer(&input[0].Value)), uintptr(unsafe.Pointer(&actual[0].Value))) + assert.NotEqual(t, uintptr(unsafe.Pointer(&input[0].Name)), uintptr(unsafe.Pointer(&final[0].Name))) + assert.NotEqual(t, uintptr(unsafe.Pointer(&input[0].Value)), uintptr(unsafe.Pointer(&final[0].Value))) } func BenchmarkFromLabelAdaptersToLabelsWithCopy(b *testing.B) { diff --git a/pkg/cortexpb/signature.go b/pkg/cortexpb/signature.go index 42343e6f4c1..a11c5bcd025 100644 --- a/pkg/cortexpb/signature.go +++ b/pkg/cortexpb/signature.go @@ -9,7 +9,7 @@ import ( // Ref: https://github.com/prometheus/common/blob/main/model/fnv.go func LabelsToFingerprint(lset labels.Labels) model.Fingerprint { - if len(lset) == 0 { + if lset.Len() == 0 { return model.Fingerprint(hashNew()) } diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 0fc11c19d19..f508852f1fd 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -1017,7 +1017,7 @@ func (d *Distributor) prepareSeriesKeys(ctx context.Context, req *cortexpb.Write if mrc := limits.MetricRelabelConfigs; len(mrc) > 0 { l, _ := relabel.Process(cortexpb.FromLabelAdaptersToLabels(ts.Labels), mrc...) 
- if len(l) == 0 { + if l.Len() == 0 { // all labels are gone, samples will be discarded d.validateMetrics.DiscardedSamples.WithLabelValues( validation.DroppedByRelabelConfiguration, diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index 5ad019c4bf9..c9f931199a2 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -1778,53 +1778,56 @@ func TestDistributor_Push_LabelRemoval(t *testing.T) { { removeReplica: true, removeLabels: []string{"cluster"}, - inputSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "cluster", Value: "one"}, - {Name: "__replica__", Value: "two"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - }, + inputSeries: labels.FromStrings( + "__name__", "some_metric", + "cluster", "one", + "__replica__", "two", + ), + expectedSeries: labels.FromStrings( + "__name__", "some_metric", + ), }, + // Remove multiple labels and replica. { removeReplica: true, removeLabels: []string{"foo", "some"}, - inputSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "cluster", Value: "one"}, - {Name: "__replica__", Value: "two"}, - {Name: "foo", Value: "bar"}, - {Name: "some", Value: "thing"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "cluster", Value: "one"}, - }, + inputSeries: labels.FromStrings( + "__name__", "some_metric", + "cluster", "one", + "__replica__", "two", + "foo", "bar", + "some", "thing", + ), + expectedSeries: labels.FromStrings( + "__name__", "some_metric", + "cluster", "one", + ), }, + // Don't remove any labels. { removeReplica: false, - inputSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "__replica__", Value: "two"}, - {Name: "cluster", Value: "one"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "__replica__", Value: "two"}, - {Name: "cluster", Value: "one"}, - }, + inputSeries: labels.FromStrings( + "__name__", "some_metric", + "__replica__", "two", + "cluster", "one", + ), + expectedSeries: labels.FromStrings( + "__name__", "some_metric", + "__replica__", "two", + "cluster", "one", + ), }, + // No labels left. 
{ removeReplica: true, removeLabels: []string{"cluster"}, - inputSeries: labels.Labels{ - {Name: "cluster", Value: "one"}, - {Name: "__replica__", Value: "two"}, - }, + inputSeries: labels.FromStrings( + "cluster", "one", + "__replica__", "two", + ), expectedSeries: labels.Labels{}, exemplars: []cortexpb.Exemplar{ {Labels: cortexpb.FromLabelsToLabelAdapters(labels.FromStrings("test", "a")), Value: 1, TimestampMs: 0}, @@ -1897,13 +1900,9 @@ func TestDistributor_Push_LabelRemoval_RemovingNameLabelWillError(t *testing.T) } tc := testcase{ - removeReplica: true, - removeLabels: []string{"__name__"}, - inputSeries: labels.Labels{ - {Name: "__name__", Value: "some_metric"}, - {Name: "cluster", Value: "one"}, - {Name: "__replica__", Value: "two"}, - }, + removeReplica: true, + removeLabels: []string{"__name__"}, + inputSeries: labels.FromStrings("__name__", "some_metric", "cluster", "one", "__replica__", "two"), expectedSeries: labels.Labels{}, } @@ -1937,66 +1936,70 @@ func TestDistributor_Push_ShouldGuaranteeShardingTokenConsistencyOverTheTime(t * expectedToken uint32 }{ "metric_1 with value_1": { - inputSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - }, + inputSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + ), + expectedSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + ), expectedToken: 0xec0a2e9d, }, + "metric_1 with value_1 and dropped label due to config": { - inputSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - {Name: "dropped", Value: "unused"}, // will be dropped, doesn't need to be in correct order - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - }, + inputSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + "dropped", "unused", + ), + expectedSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + ), expectedToken: 0xec0a2e9d, }, + "metric_1 with value_1 and dropped HA replica label": { - inputSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - {Name: "__replica__", Value: "replica_1"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "cluster", Value: "cluster_1"}, - {Name: "key", Value: "value_1"}, - }, + inputSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + "__replica__", "replica_1", + ), + expectedSeries: labels.FromStrings( + "__name__", "metric_1", + "cluster", "cluster_1", + "key", "value_1", + ), expectedToken: 0xec0a2e9d, }, + "metric_2 with value_1": { - inputSeries: labels.Labels{ - {Name: "__name__", Value: "metric_2"}, - {Name: "key", Value: "value_1"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "metric_2"}, - {Name: "key", Value: "value_1"}, - }, + inputSeries: labels.FromStrings( + "__name__", "metric_2", + "key", "value_1", + ), + expectedSeries: labels.FromStrings( + "__name__", "metric_2", + "key", "value_1", + ), 
expectedToken: 0xa60906f2, }, + "metric_1 with value_2": { - inputSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "key", Value: "value_2"}, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "metric_1"}, - {Name: "key", Value: "value_2"}, - }, + inputSeries: labels.FromStrings( + "__name__", "metric_1", + "key", "value_2", + ), + expectedSeries: labels.FromStrings( + "__name__", "metric_1", + "key", "value_2", + ), expectedToken: 0x18abc8a2, }, } @@ -2039,10 +2042,7 @@ func TestDistributor_Push_ShouldGuaranteeShardingTokenConsistencyOverTheTime(t * func TestDistributor_Push_LabelNameValidation(t *testing.T) { t.Parallel() - inputLabels := labels.Labels{ - {Name: model.MetricNameLabel, Value: "foo"}, - {Name: "999.illegal", Value: "baz"}, - } + inputLabels := labels.FromStrings(model.MetricNameLabel, "foo", "999.illegal", "baz") ctx := user.InjectOrgID(context.Background(), "user") tests := map[string]struct { @@ -2235,8 +2235,8 @@ func BenchmarkDistributor_Push(b *testing.B) { metrics := make([]labels.Labels, numSeriesPerRequest) samples := make([]cortexpb.Sample, numSeriesPerRequest) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2262,7 +2262,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2287,7 +2287,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 1; i < 31; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2312,7 +2312,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2340,7 +2340,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2368,7 +2368,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), 
fmt.Sprintf("value_%d", i)) } @@ -2397,7 +2397,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2422,7 +2422,7 @@ func BenchmarkDistributor_Push(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: "foo"}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, "foo")) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -2571,7 +2571,8 @@ func TestDistributor_MetricsForLabelMatchers_SingleSlowIngester(t *testing.T) { now := model.Now() for i := 0; i < 100; i++ { - req := mockWriteRequest([]labels.Labels{{{Name: labels.MetricName, Value: "test"}, {Name: "app", Value: "m"}, {Name: "uniq8", Value: strconv.Itoa(i)}}}, 1, now.Unix(), histogram) + + req := mockWriteRequest([]labels.Labels{labels.FromStrings(labels.MetricName, "test", "app", "m", "uniq8", strconv.Itoa(i))}, 1, now.Unix(), histogram) _, err := ds[0].Push(ctx, req) require.NoError(t, err) } @@ -2592,12 +2593,32 @@ func TestDistributor_MetricsForLabelMatchers(t *testing.T) { value int64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + { + lbls: labels.FromStrings(labels.MetricName, "test_1", "status", "200"), + value: 1, + timestamp: 100000, + }, + { + lbls: labels.FromStrings(labels.MetricName, "test_1", "status", "500"), + value: 1, + timestamp: 110000, + }, + { + lbls: labels.FromStrings(labels.MetricName, "test_2"), + value: 2, + timestamp: 200000, + }, // The two following series have the same FastFingerprint=e002a3a451262627 - {labels.Labels{{Name: labels.MetricName, Value: "fast_fingerprint_collision"}, {Name: "app", Value: "l"}, {Name: "uniq0", Value: "0"}, {Name: "uniq1", Value: "1"}}, 1, 300000}, - {labels.Labels{{Name: labels.MetricName, Value: "fast_fingerprint_collision"}, {Name: "app", Value: "m"}, {Name: "uniq0", Value: "1"}, {Name: "uniq1", Value: "1"}}, 1, 300000}, + { + lbls: labels.FromStrings(labels.MetricName, "fast_fingerprint_collision", "app", "l", "uniq0", "0", "uniq1", "1"), + value: 1, + timestamp: 300000, + }, + { + lbls: labels.FromStrings(labels.MetricName, "fast_fingerprint_collision", "app", "m", "uniq0", "1", "uniq1", "1"), + value: 1, + timestamp: 300000, + }, } tests := map[string]struct { @@ -2800,7 +2821,7 @@ func BenchmarkDistributor_MetricsForLabelMatchers(b *testing.B) { samples := make([]cortexpb.Sample, numSeriesPerRequest) for i := 0; i < numSeriesPerRequest; i++ { - lbls := labels.NewBuilder(labels.Labels{{Name: model.MetricNameLabel, Value: fmt.Sprintf("foo_%d", i)}}) + lbls := labels.NewBuilder(labels.FromStrings(model.MetricNameLabel, fmt.Sprintf("foo_%d", i))) for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d", i)) } @@ -3789,7 +3810,9 @@ func TestDistributorValidation(t *testing.T) { // Test validation passes. 
{ metadata: []*cortexpb.MetricMetadata{{MetricFamilyName: "testmetric", Help: "a test metric.", Unit: "", Type: cortexpb.COUNTER}}, - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, samples: []cortexpb.Sample{{ TimestampMs: int64(now), Value: 1, @@ -3800,7 +3823,9 @@ func TestDistributorValidation(t *testing.T) { }, // Test validation fails for very old samples. { - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, samples: []cortexpb.Sample{{ TimestampMs: int64(past), Value: 2, @@ -3809,7 +3834,9 @@ func TestDistributorValidation(t *testing.T) { }, // Test validation fails for samples from the future. { - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, samples: []cortexpb.Sample{{ TimestampMs: int64(future), Value: 4, @@ -3819,7 +3846,9 @@ func TestDistributorValidation(t *testing.T) { // Test maximum labels names per series. { - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}, {Name: "foo2", Value: "bar2"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar", "foo2", "bar2"), + }, samples: []cortexpb.Sample{{ TimestampMs: int64(now), Value: 2, @@ -3829,8 +3858,8 @@ func TestDistributorValidation(t *testing.T) { // Test multiple validation fails return the first one. { labels: []labels.Labels{ - {{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}, {Name: "foo2", Value: "bar2"}}, - {{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}, + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar", "foo2", "bar2"), + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), }, samples: []cortexpb.Sample{ {TimestampMs: int64(now), Value: 2}, @@ -3841,7 +3870,9 @@ func TestDistributorValidation(t *testing.T) { // Test metadata validation fails { metadata: []*cortexpb.MetricMetadata{{MetricFamilyName: "", Help: "a test metric.", Unit: "", Type: cortexpb.COUNTER}}, - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, samples: []cortexpb.Sample{{ TimestampMs: int64(now), Value: 1, @@ -3850,7 +3881,9 @@ func TestDistributorValidation(t *testing.T) { }, // Test maximum labels names per series for histogram samples. { - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}, {Name: "foo2", Value: "bar2"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar", "foo2", "bar2"), + }, histograms: []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(int64(now), testHistogram), }, @@ -3858,7 +3891,9 @@ func TestDistributorValidation(t *testing.T) { }, // Test validation fails for very old histogram samples. 
{ - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, histograms: []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(int64(past), testHistogram), }, @@ -3866,7 +3901,9 @@ func TestDistributorValidation(t *testing.T) { }, // Test validation fails for histogram samples from the future. { - labels: []labels.Labels{{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}}}, + labels: []labels.Labels{ + labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar"), + }, histograms: []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(int64(future), testFloatHistogram), }, @@ -4004,28 +4041,16 @@ func TestDistributor_Push_Relabel(t *testing.T) { { name: "with no relabel config", inputSeries: []labels.Labels{ - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, - }, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, + labels.FromStrings("__name__", "foo", "cluster", "one"), }, + expectedSeries: labels.FromStrings("__name__", "foo", "cluster", "one"), }, { name: "with hardcoded replace", inputSeries: []labels.Labels{ - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, - }, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "two"}, + labels.FromStrings("__name__", "foo", "cluster", "one"), }, + expectedSeries: labels.FromStrings("__name__", "foo", "cluster", "two"), metricRelabelConfigs: []*relabel.Config{ { SourceLabels: []model.LabelName{"cluster"}, @@ -4039,19 +4064,10 @@ func TestDistributor_Push_Relabel(t *testing.T) { { name: "with drop action", inputSeries: []labels.Labels{ - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, - }, - { - {Name: "__name__", Value: "bar"}, - {Name: "cluster", Value: "two"}, - }, - }, - expectedSeries: labels.Labels{ - {Name: "__name__", Value: "bar"}, - {Name: "cluster", Value: "two"}, + labels.FromStrings("__name__", "foo", "cluster", "one"), + labels.FromStrings("__name__", "bar", "cluster", "two"), }, + expectedSeries: labels.FromStrings("__name__", "bar", "cluster", "two"), metricRelabelConfigs: []*relabel.Config{ { SourceLabels: []model.LabelName{"__name__"}, @@ -4113,19 +4129,10 @@ func TestDistributor_Push_EmptyLabel(t *testing.T) { { name: "with empty label", inputSeries: []labels.Labels{ - { //Token 1106054332 without filtering - {Name: "__name__", Value: "foo"}, - {Name: "empty", Value: ""}, - }, - { //Token 3827924124 without filtering - {Name: "__name__", Value: "foo"}, - {Name: "changHash", Value: ""}, - }, - }, - expectedSeries: labels.Labels{ - //Token 1797290973 - {Name: "__name__", Value: "foo"}, + labels.FromStrings("__name__", "foo", "empty", ""), + labels.FromStrings("__name__", "foo", "changHash", ""), }, + expectedSeries: labels.FromStrings("__name__", "foo"), }, } @@ -4191,14 +4198,8 @@ func TestDistributor_Push_RelabelDropWillExportMetricOfDroppedSamples(t *testing } inputSeries := []labels.Labels{ - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, - }, - { - {Name: "__name__", Value: "bar"}, - {Name: "cluster", Value: "two"}, - }, + labels.FromStrings("__name__", "foo", "cluster", "one"), + labels.FromStrings("__name__", "bar", "cluster", "two"), } var err error @@ -4248,22 +4249,10 @@ func 
TestDistributor_Push_RelabelDropWillExportMetricOfDroppedSamples(t *testing func TestDistributor_PushLabelSetMetrics(t *testing.T) { t.Parallel() inputSeries := []labels.Labels{ - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "one"}, - }, - { - {Name: "__name__", Value: "bar"}, - {Name: "cluster", Value: "one"}, - }, - { - {Name: "__name__", Value: "bar"}, - {Name: "cluster", Value: "two"}, - }, - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "three"}, - }, + labels.FromStrings("__name__", "foo", "cluster", "one"), + labels.FromStrings("__name__", "bar", "cluster", "one"), + labels.FromStrings("__name__", "bar", "cluster", "two"), + labels.FromStrings("__name__", "foo", "cluster", "three"), } var err error @@ -4301,14 +4290,8 @@ func TestDistributor_PushLabelSetMetrics(t *testing.T) { // Push more series. inputSeries = []labels.Labels{ - { - {Name: "__name__", Value: "baz"}, - {Name: "cluster", Value: "two"}, - }, - { - {Name: "__name__", Value: "foo"}, - {Name: "cluster", Value: "four"}, - }, + labels.FromStrings("__name__", "baz", "cluster", "two"), + labels.FromStrings("__name__", "foo", "cluster", "four"), } // Write the same request twice for different users. req = mockWriteRequest(inputSeries, 1, 1, false) diff --git a/pkg/ingester/active_series_test.go b/pkg/ingester/active_series_test.go index 3d84d7570cc..fe7840f2576 100644 --- a/pkg/ingester/active_series_test.go +++ b/pkg/ingester/active_series_test.go @@ -29,15 +29,15 @@ func TestActiveSeries_UpdateSeries(t *testing.T) { assert.Equal(t, 0, c.ActiveNativeHistogram()) labels1Hash := fromLabelToLabels(ls1).Hash() labels2Hash := fromLabelToLabels(ls2).Hash() - c.UpdateSeries(ls1, labels1Hash, time.Now(), true, copyFn) + c.UpdateSeries(fromLabelToLabels(ls1), labels1Hash, time.Now(), true, copyFn) assert.Equal(t, 1, c.Active()) assert.Equal(t, 1, c.ActiveNativeHistogram()) - c.UpdateSeries(ls1, labels1Hash, time.Now(), true, copyFn) + c.UpdateSeries(fromLabelToLabels(ls1), labels1Hash, time.Now(), true, copyFn) assert.Equal(t, 1, c.Active()) assert.Equal(t, 1, c.ActiveNativeHistogram()) - c.UpdateSeries(ls2, labels2Hash, time.Now(), true, copyFn) + c.UpdateSeries(fromLabelToLabels(ls2), labels2Hash, time.Now(), true, copyFn) assert.Equal(t, 2, c.Active()) assert.Equal(t, 2, c.ActiveNativeHistogram()) } @@ -56,7 +56,7 @@ func TestActiveSeries_Purge(t *testing.T) { c := NewActiveSeries() for i := 0; i < len(series); i++ { - c.UpdateSeries(series[i], fromLabelToLabels(series[i]).Hash(), time.Unix(int64(i), 0), true, copyFn) + c.UpdateSeries(fromLabelToLabels(series[i]), fromLabelToLabels(series[i]).Hash(), time.Unix(int64(i), 0), true, copyFn) } c.Purge(time.Unix(int64(ttl+1), 0)) @@ -109,9 +109,7 @@ func BenchmarkActiveSeriesTest_single_series(b *testing.B) { } func benchmarkActiveSeriesConcurrencySingleSeries(b *testing.B, goroutines int) { - series := labels.Labels{ - {Name: "a", Value: "a"}, - } + series := labels.FromStrings("a", "a") c := NewActiveSeries() @@ -152,7 +150,7 @@ func BenchmarkActiveSeries_UpdateSeries(b *testing.B) { series := make([]labels.Labels, b.N) labelhash := make([]uint64, b.N) for s := 0; s < b.N; s++ { - series[s] = labels.Labels{{Name: name, Value: name + strconv.Itoa(s)}} + series[s] = labels.FromStrings(name, name+strconv.Itoa(s)) labelhash[s] = series[s].Hash() } @@ -182,7 +180,7 @@ func benchmarkPurge(b *testing.B, twice bool) { series := [numSeries]labels.Labels{} labelhash := [numSeries]uint64{} for s := 0; s < numSeries; s++ { - series[s] = labels.Labels{{Name: 
"a", Value: strconv.Itoa(s)}} + series[s] = labels.FromStrings("a", strconv.Itoa(s)) labelhash[s] = series[s].Hash() } diff --git a/pkg/ingester/errors.go b/pkg/ingester/errors.go index b982f6ce09d..7da2f51b73b 100644 --- a/pkg/ingester/errors.go +++ b/pkg/ingester/errors.go @@ -35,7 +35,7 @@ func (e *validationError) Error() string { if e.err == nil { return e.errorType } - if e.labels == nil { + if e.labels.IsEmpty() { return e.err.Error() } return fmt.Sprintf("%s for series %s", e.err.Error(), e.labels.String()) diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index dd2dc4f1666..c2dab4a54ec 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -33,7 +33,7 @@ import ( "github.com/prometheus/prometheus/tsdb" "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" - "github.com/prometheus/prometheus/tsdb/wlog" + "github.com/prometheus/prometheus/util/compression" "github.com/prometheus/prometheus/util/zeropool" "github.com/thanos-io/objstore" "github.com/thanos-io/thanos/pkg/block/metadata" @@ -1147,15 +1147,17 @@ type extendedAppender interface { storage.GetRef } -func (i *Ingester) isLabelSetOutOfOrder(labels labels.Labels) bool { +func (i *Ingester) isLabelSetOutOfOrder(lbls labels.Labels) bool { last := "" - for _, l := range labels { + ooo := false + lbls.Range(func(l labels.Label) { if strings.Compare(last, l.Name) > 0 { - return true + ooo = true } last = l.Name - } - return false + }) + + return ooo } // Push adds metrics to a block @@ -1312,9 +1314,6 @@ func (i *Ingester) Push(ctx context.Context, req *cortexpb.WriteRequest) (*corte case errors.Is(cause, histogram.ErrHistogramCountMismatch): updateFirstPartial(func() error { return wrappedTSDBIngestErr(err, model.Time(timestampMs), lbls) }) - case errors.Is(cause, storage.ErrOOONativeHistogramsDisabled): - updateFirstPartial(func() error { return wrappedTSDBIngestErr(err, model.Time(timestampMs), lbls) }) - default: rollback = true } @@ -1461,7 +1460,7 @@ func (i *Ingester) Push(ctx context.Context, req *cortexpb.WriteRequest) (*corte Labels: cortexpb.FromLabelAdaptersToLabelsWithCopy(ex.Labels), } - if _, err = app.AppendExemplar(ref, nil, e); err == nil { + if _, err = app.AppendExemplar(ref, labels.EmptyLabels(), e); err == nil { succeededExemplarsCount++ continue } @@ -2518,9 +2517,9 @@ func (i *Ingester) createTSDB(userID string) (*userTSDB, error) { } oooTimeWindow := i.limits.OutOfOrderTimeWindow(userID) - walCompressType := wlog.CompressionNone + walCompressType := compression.None if i.cfg.BlocksStorageConfig.TSDB.WALCompressionType != "" { - walCompressType = wlog.CompressionType(i.cfg.BlocksStorageConfig.TSDB.WALCompressionType) + walCompressType = i.cfg.BlocksStorageConfig.TSDB.WALCompressionType } // Create a new user database @@ -2542,7 +2541,6 @@ func (i *Ingester) createTSDB(userID string) (*userTSDB, error) { EnableMemorySnapshotOnShutdown: i.cfg.BlocksStorageConfig.TSDB.MemorySnapshotOnShutdown, OutOfOrderTimeWindow: time.Duration(oooTimeWindow).Milliseconds(), OutOfOrderCapMax: i.cfg.BlocksStorageConfig.TSDB.OutOfOrderCapMax, - EnableOOONativeHistograms: true, EnableOverlappingCompaction: false, // Always let compactors handle overlapped blocks, e.g. OOO blocks. EnableNativeHistograms: true, // Always enable Native Histograms. Gate keeping is done though a per-tenant limit at ingestion. 
BlockChunkQuerierFunc: i.blockChunkQuerierFunc(userID), @@ -2578,15 +2576,7 @@ func (i *Ingester) createTSDB(userID string) (*userTSDB, error) { // Thanos shipper requires at least 1 external label to be set. For this reason, // we set the tenant ID as external label and we'll filter it out when reading // the series from the storage. - l := labels.Labels{ - { - Name: cortex_tsdb.TenantIDExternalLabel, - Value: userID, - }, { - Name: cortex_tsdb.IngesterIDExternalLabel, - Value: i.TSDBState.shipperIngesterID, - }, - } + l := labels.FromStrings(cortex_tsdb.TenantIDExternalLabel, userID, cortex_tsdb.IngesterIDExternalLabel, i.TSDBState.shipperIngesterID) // Create a new shipper for this database if i.cfg.BlocksStorageConfig.TSDB.IsBlocksShippingEnabled() { diff --git a/pkg/ingester/ingester_test.go b/pkg/ingester/ingester_test.go index c9948f9ec66..c59879a1d84 100644 --- a/pkg/ingester/ingester_test.go +++ b/pkg/ingester/ingester_test.go @@ -305,9 +305,9 @@ func TestIngesterPerLabelsetLimitExceeded(t *testing.T) { // Create first series within the limits for _, set := range limits.LimitsPerLabelSet { lbls := []string{labels.MetricName, "metric_name"} - for _, lbl := range set.LabelSet { - lbls = append(lbls, lbl.Name, lbl.Value) - } + set.LabelSet.Range(func(l labels.Label) { + lbls = append(lbls, l.Name, l.Value) + }) for i := 0; i < set.Limits.MaxSeries; i++ { _, err = ing.Push(ctx, cortexpb.ToWriteRequest( []labels.Labels{labels.FromStrings(append(lbls, "extraLabel", fmt.Sprintf("extraValue%v", i))...)}, samples, nil, nil, cortexpb.API)) @@ -330,9 +330,9 @@ func TestIngesterPerLabelsetLimitExceeded(t *testing.T) { // Should impose limits for _, set := range limits.LimitsPerLabelSet { lbls := []string{labels.MetricName, "metric_name"} - for _, lbl := range set.LabelSet { - lbls = append(lbls, lbl.Name, lbl.Value) - } + set.LabelSet.Range(func(l labels.Label) { + lbls = append(lbls, l.Name, l.Value) + }) _, err = ing.Push(ctx, cortexpb.ToWriteRequest( []labels.Labels{labels.FromStrings(append(lbls, "newLabel", "newValue")...)}, samples, nil, nil, cortexpb.API)) httpResp, ok := httpgrpc.HTTPResponseFromError(err) @@ -759,7 +759,7 @@ func TestIngesterUserLimitExceeded(t *testing.T) { userID := "1" // Series - labels1 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}} + labels1 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar") sample1 := cortexpb.Sample{ TimestampMs: 0, Value: 1, @@ -768,7 +768,7 @@ func TestIngesterUserLimitExceeded(t *testing.T) { TimestampMs: 1, Value: 2, } - labels3 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "biz"}} + labels3 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "biz") sample3 := cortexpb.Sample{ TimestampMs: 1, Value: 3, @@ -878,8 +878,8 @@ func TestIngesterUserLimitExceededForNativeHistogram(t *testing.T) { userID := "1" // Series - labels1 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}} - labels3 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "biz"}} + labels1 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar") + labels3 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "biz") sampleNativeHistogram1 := cortexpb.HistogramToHistogramProto(0, tsdbutil.GenerateTestHistogram(1)) sampleNativeHistogram2 := cortexpb.HistogramToHistogramProto(1, tsdbutil.GenerateTestHistogram(2)) sampleNativeHistogram3 := 
cortexpb.HistogramToHistogramProto(0, tsdbutil.GenerateTestHistogram(3)) @@ -958,13 +958,19 @@ func TestIngesterUserLimitExceededForNativeHistogram(t *testing.T) { func benchmarkData(nSeries int) (allLabels []labels.Labels, allSamples []cortexpb.Sample) { for j := 0; j < nSeries; j++ { - labels := chunk.BenchmarkLabels.Copy() - for i := range labels { - if labels[i].Name == "cpu" { - labels[i].Value = fmt.Sprintf("cpu%02d", j) + lbls := chunk.BenchmarkLabels.Copy() + + builder := labels.NewBuilder(labels.EmptyLabels()) + lbls.Range(func(l labels.Label) { + val := l.Value + if l.Name == "cpu" { + val = fmt.Sprintf("cpu%02d", j) } - } - allLabels = append(allLabels, labels) + + builder.Set(l.Name, val) + }) + + allLabels = append(allLabels, builder.Labels()) allSamples = append(allSamples, cortexpb.Sample{TimestampMs: 0, Value: float64(j)}) } return @@ -978,7 +984,7 @@ func TestIngesterMetricLimitExceeded(t *testing.T) { limits.MaxLocalMetadataPerMetric = 1 userID := "1" - labels1 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "bar"}} + labels1 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "bar") sample1 := cortexpb.Sample{ TimestampMs: 0, Value: 1, @@ -987,7 +993,7 @@ func TestIngesterMetricLimitExceeded(t *testing.T) { TimestampMs: 1, Value: 2, } - labels3 := labels.Labels{{Name: labels.MetricName, Value: "testmetric"}, {Name: "foo", Value: "biz"}} + labels3 := labels.FromStrings(labels.MetricName, "testmetric", "foo", "biz") sample3 := cortexpb.Sample{ TimestampMs: 1, Value: 3, @@ -2472,13 +2478,13 @@ func TestIngester_Push_OutOfOrderLabels(t *testing.T) { ctx := user.InjectOrgID(context.Background(), "test-user") - outOfOrderLabels := labels.Labels{ + outOfOrderLabels := []cortexpb.LabelAdapter{ {Name: labels.MetricName, Value: "test_metric"}, {Name: "c", Value: "3"}, - {Name: "a", Value: "1"}, // Out of order (a comes before c) + {Name: "a", Value: "1"}, } - req, _ := mockWriteRequest(t, outOfOrderLabels, 1, 2) + req, _ := mockWriteRequest(t, cortexpb.FromLabelAdaptersToLabels(outOfOrderLabels), 1, 2) _, err = i.Push(ctx, req) require.Error(t, err) require.Contains(t, err.Error(), "out-of-order label set found") @@ -2599,7 +2605,7 @@ func Benchmark_Ingester_PushOnError(b *testing.B) { beforeBenchmark: func(b *testing.B, ingester *Ingester, numSeriesPerRequest int) { // Push a single time series to set the TSDB min time. currTimeReq := cortexpb.ToWriteRequest( - []labels.Labels{{{Name: labels.MetricName, Value: metricName}}}, + []labels.Labels{labels.FromStrings(labels.MetricName, metricName)}, []cortexpb.Sample{{Value: 1, TimestampMs: util.TimeToMillis(time.Now())}}, nil, nil, @@ -2624,7 +2630,7 @@ func Benchmark_Ingester_PushOnError(b *testing.B) { // For each series, push a single sample with a timestamp greater than next pushes. 
for i := 0; i < numSeriesPerRequest; i++ { currTimeReq := cortexpb.ToWriteRequest( - []labels.Labels{{{Name: labels.MetricName, Value: metricName}, {Name: "cardinality", Value: strconv.Itoa(i)}}}, + []labels.Labels{labels.FromStrings(labels.MetricName, metricName, "cardinality", strconv.Itoa(i))}, []cortexpb.Sample{{Value: 1, TimestampMs: sampleTimestamp + 1}}, nil, nil, @@ -2821,7 +2827,7 @@ func Benchmark_Ingester_PushOnError(b *testing.B) { metrics := make([]labels.Labels, 0, scenario.numSeriesPerRequest) samples := make([]cortexpb.Sample, 0, scenario.numSeriesPerRequest) for i := 0; i < scenario.numSeriesPerRequest; i++ { - metrics = append(metrics, labels.Labels{{Name: labels.MetricName, Value: metricName}, {Name: "cardinality", Value: strconv.Itoa(i)}}) + metrics = append(metrics, labels.FromStrings(labels.MetricName, metricName, "cardinality", strconv.Itoa(i))) samples = append(samples, cortexpb.Sample{Value: float64(i), TimestampMs: sampleTimestamp}) } @@ -2857,9 +2863,9 @@ func Test_Ingester_LabelNames(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "500"), 1, 110000}, + {labels.FromStrings("__name__", "test_2"), 2, 200000}, } expected := []string{"__name__", "route", "status"} @@ -2913,9 +2919,9 @@ func Test_Ingester_LabelValues(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "500"), 1, 110000}, + {labels.FromStrings("__name__", "test_2"), 2, 200000}, } expected := map[string][]string{ @@ -2991,7 +2997,7 @@ func Test_Ingester_LabelValue_MaxInflightQueryRequest(t *testing.T) { // Mock request ctx := user.InjectOrgID(context.Background(), "test") - wreq, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000) + wreq, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000) _, err = i.Push(ctx, wreq) require.NoError(t, err) @@ -3007,9 +3013,9 @@ func Test_Ingester_Query(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000}, + 
{labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "500"), 1, 110000}, + {labels.FromStrings("__name__", "test_2"), 2, 200000}, } tests := map[string]struct { @@ -3150,7 +3156,7 @@ func Test_Ingester_Query_MaxInflightQueryRequest(t *testing.T) { // Mock request ctx := user.InjectOrgID(context.Background(), "test") - wreq, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000) + wreq, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000) _, err = i.Push(ctx, wreq) require.NoError(t, err) @@ -3191,7 +3197,7 @@ func Test_Ingester_Query_ResourceThresholdBreached(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000}, } i, err := prepareIngesterWithBlocksStorage(t, defaultIngesterTestConfig(t), prometheus.NewRegistry()) @@ -3361,12 +3367,12 @@ func Test_Ingester_MetricsForLabelMatchers(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + {labels.FromStrings("__name__", "test_1", "status", "200"), 1, 100000}, + {labels.FromStrings("__name__", "test_1", "status", "500"), 1, 110000}, + {labels.FromStrings("__name__", "test_2"), 2, 200000}, // The two following series have the same FastFingerprint=e002a3a451262627 - {labels.Labels{{Name: labels.MetricName, Value: "collision"}, {Name: "app", Value: "l"}, {Name: "uniq0", Value: "0"}, {Name: "uniq1", Value: "1"}}, 1, 300000}, - {labels.Labels{{Name: labels.MetricName, Value: "collision"}, {Name: "app", Value: "m"}, {Name: "uniq0", Value: "1"}, {Name: "uniq1", Value: "1"}}, 1, 300000}, + {labels.FromStrings("__name__", "collision", "app", "l", "uniq0", "0", "uniq1", "1"), 1, 300000}, + {labels.FromStrings("__name__", "collision", "app", "m", "uniq0", "1", "uniq1", "1"), 1, 300000}, } tests := map[string]struct { @@ -3639,10 +3645,7 @@ func createIngesterWithSeries(t testing.TB, userID string, numSeries, numSamples samples := make([]cortexpb.Sample, 0, batchSize) for s := 0; s < batchSize; s++ { - metrics = append(metrics, labels.Labels{ - {Name: labels.MetricName, Value: fmt.Sprintf("test_%d", o+s)}, - }) - + metrics = append(metrics, labels.FromStrings("__name__", fmt.Sprintf("test_%d", o+s))) samples = append(samples, cortexpb.Sample{ TimestampMs: ts, Value: 1, @@ -3677,7 +3680,7 @@ func TestIngester_QueryStream(t *testing.T) { // Push series. 
ctx := user.InjectOrgID(context.Background(), userID) - lbls := labels.Labels{{Name: labels.MetricName, Value: "foo"}} + lbls := labels.FromStrings(labels.MetricName, "foo") var ( req *cortexpb.WriteRequest expectedResponseChunks *client.QueryStreamResponse @@ -3773,15 +3776,15 @@ func TestIngester_QueryStreamManySamplesChunks(t *testing.T) { } // 100k samples in chunks use about 154 KiB, - _, err = i.Push(ctx, writeRequestSingleSeries(labels.Labels{{Name: labels.MetricName, Value: "foo"}, {Name: "l", Value: "1"}}, samples[0:100000])) + _, err = i.Push(ctx, writeRequestSingleSeries(labels.FromStrings("__name__", "foo", "l", "1"), samples[0:100000])) require.NoError(t, err) // 1M samples in chunks use about 1.51 MiB, - _, err = i.Push(ctx, writeRequestSingleSeries(labels.Labels{{Name: labels.MetricName, Value: "foo"}, {Name: "l", Value: "2"}}, samples)) + _, err = i.Push(ctx, writeRequestSingleSeries(labels.FromStrings("__name__", "foo", "l", "2"), samples)) require.NoError(t, err) // 500k samples in chunks need 775 KiB, - _, err = i.Push(ctx, writeRequestSingleSeries(labels.Labels{{Name: labels.MetricName, Value: "foo"}, {Name: "l", Value: "3"}}, samples[0:500000])) + _, err = i.Push(ctx, writeRequestSingleSeries(labels.FromStrings("__name__", "foo", "l", "3"), samples[0:500000])) require.NoError(t, err) // Create a GRPC server used to query back the data. @@ -3969,7 +3972,7 @@ func benchmarkQueryStream(b *testing.B, samplesCount, seriesCount int) { } for s := 0; s < seriesCount; s++ { - _, err = i.Push(ctx, writeRequestSingleSeries(labels.Labels{{Name: labels.MetricName, Value: "foo"}, {Name: "l", Value: strconv.Itoa(s)}}, samples)) + _, err = i.Push(ctx, writeRequestSingleSeries(labels.FromStrings("__name__", "foo", "l", strconv.Itoa(s)), samples)) require.NoError(b, err) } @@ -4717,7 +4720,7 @@ func TestIngester_invalidSamplesDontChangeLastUpdateTime(t *testing.T) { sampleTimestamp := int64(model.Now()) { - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, sampleTimestamp) + req, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test"), 0, sampleTimestamp) _, err = i.Push(ctx, req) require.NoError(t, err) } @@ -4733,7 +4736,7 @@ func TestIngester_invalidSamplesDontChangeLastUpdateTime(t *testing.T) { // Push another sample to the same metric and timestamp, with different value. We expect to get error. 
{ - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 1, sampleTimestamp) + req, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test"), 1, sampleTimestamp) _, err = i.Push(ctx, req) require.Error(t, err) } @@ -5031,9 +5034,10 @@ func Test_Ingester_UserStats(t *testing.T) { value float64 timestamp int64 }{ - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {labels.Labels{{Name: labels.MetricName, Value: "test_2"}}, 2, 200000}, + + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000}, + {labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "500"), 1, 110000}, + {labels.FromStrings("__name__", "test_2"), 2, 200000}, } // Create ingester @@ -5077,11 +5081,11 @@ func Test_Ingester_AllUserStats(t *testing.T) { value float64 timestamp int64 }{ - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_2"}}, 2, 200000}, - {"user-2", labels.Labels{{Name: labels.MetricName, Value: "test_2_1"}}, 2, 200000}, - {"user-2", labels.Labels{{Name: labels.MetricName, Value: "test_2_2"}}, 2, 200000}, + {"user-1", labels.FromStrings("__name__", "test_1_1", "route", "get_user", "status", "200"), 1, 100000}, + {"user-1", labels.FromStrings("__name__", "test_1_1", "route", "get_user", "status", "500"), 1, 110000}, + {"user-1", labels.FromStrings("__name__", "test_1_2"), 2, 200000}, + {"user-2", labels.FromStrings("__name__", "test_2_1"), 2, 200000}, + {"user-2", labels.FromStrings("__name__", "test_2_2"), 2, 200000}, } // Create ingester @@ -5145,11 +5149,11 @@ func Test_Ingester_AllUserStatsHandler(t *testing.T) { value float64 timestamp int64 }{ - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000}, - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "500"}}, 1, 110000}, - {"user-1", labels.Labels{{Name: labels.MetricName, Value: "test_1_2"}}, 2, 200000}, - {"user-2", labels.Labels{{Name: labels.MetricName, Value: "test_2_1"}}, 2, 200000}, - {"user-2", labels.Labels{{Name: labels.MetricName, Value: "test_2_2"}}, 2, 200000}, + {"user-1", labels.FromStrings("__name__", "test_1_1", "route", "get_user", "status", "200"), 1, 100000}, + {"user-1", labels.FromStrings("__name__", "test_1_1", "route", "get_user", "status", "500"), 1, 110000}, + {"user-1", labels.FromStrings("__name__", "test_1_2"), 2, 200000}, + {"user-2", labels.FromStrings("__name__", "test_2_1"), 2, 200000}, + {"user-2", labels.FromStrings("__name__", "test_2_2"), 2, 200000}, } // Create ingester @@ -5424,7 +5428,7 @@ func verifyCompactedHead(t *testing.T, i *Ingester, expected bool) { func pushSingleSampleWithMetadata(t *testing.T, i *Ingester) { ctx := user.InjectOrgID(context.Background(), userID) - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, util.TimeToMillis(time.Now())) + req, _ 
:= mockWriteRequest(t, labels.FromStrings("__name__", "test"), 0, util.TimeToMillis(time.Now())) req.Metadata = append(req.Metadata, &cortexpb.MetricMetadata{MetricFamilyName: "test", Help: "a help for metric", Unit: "", Type: cortexpb.COUNTER}) _, err := i.Push(ctx, req) require.NoError(t, err) @@ -5432,7 +5436,7 @@ func pushSingleSampleWithMetadata(t *testing.T, i *Ingester) { func pushSingleSampleAtTime(t *testing.T, i *Ingester, ts int64) { ctx := user.InjectOrgID(context.Background(), userID) - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, ts) + req, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test"), 0, ts) _, err := i.Push(ctx, req) require.NoError(t, err) } @@ -5461,7 +5465,7 @@ func TestHeadCompactionOnStartup(t *testing.T) { db.DisableCompactions() head := db.Head() - l := labels.Labels{{Name: "n", Value: "v"}} + l := labels.FromStrings("n", "v") for i := 0; i < numFullChunks; i++ { // Not using db.Appender() as it checks for compaction. app := head.Appender(context.Background()) @@ -5571,7 +5575,7 @@ func TestIngesterNotDeleteUnshippedBlocks(t *testing.T) { // Push some data to create 3 blocks. ctx := user.InjectOrgID(context.Background(), userID) for j := int64(0); j < 5; j++ { - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, j*chunkRangeMilliSec) + req, _ := mockWriteRequest(t, labels.FromStrings(labels.MetricName, "test"), 0, j*chunkRangeMilliSec) _, err := i.Push(ctx, req) require.NoError(t, err) } @@ -5599,7 +5603,7 @@ func TestIngesterNotDeleteUnshippedBlocks(t *testing.T) { // Add more samples that could trigger another compaction and hence reload of blocks. for j := int64(5); j < 6; j++ { - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, j*chunkRangeMilliSec) + req, _ := mockWriteRequest(t, labels.FromStrings(labels.MetricName, "test"), 0, j*chunkRangeMilliSec) _, err := i.Push(ctx, req) require.NoError(t, err) } @@ -5627,7 +5631,7 @@ func TestIngesterNotDeleteUnshippedBlocks(t *testing.T) { // Add more samples that could trigger another compaction and hence reload of blocks. for j := int64(6); j < 7; j++ { - req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, j*chunkRangeMilliSec) + req, _ := mockWriteRequest(t, labels.FromStrings(labels.MetricName, "test"), 0, j*chunkRangeMilliSec) _, err := i.Push(ctx, req) require.NoError(t, err) } @@ -5674,7 +5678,7 @@ func TestIngesterPushErrorDuringForcedCompaction(t *testing.T) { require.True(t, db.casState(active, forceCompacting)) // Ingestion should fail with a 503. 
- req, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test"}}, 0, util.TimeToMillis(time.Now())) + req, _ := mockWriteRequest(t, labels.FromStrings(labels.MetricName, "test"), 0, util.TimeToMillis(time.Now())) ctx := user.InjectOrgID(context.Background(), userID) _, err = i.Push(ctx, req) require.Equal(t, httpgrpc.Errorf(http.StatusServiceUnavailable, "%s", wrapWithUser(errors.New("forced compaction in progress"), userID).Error()), err) @@ -6608,7 +6612,7 @@ func Test_Ingester_QueryExemplar_MaxInflightQueryRequest(t *testing.T) { // Mock request ctx := user.InjectOrgID(context.Background(), "test") - wreq, _ := mockWriteRequest(t, labels.Labels{{Name: labels.MetricName, Value: "test_1"}, {Name: "route", Value: "get_user"}, {Name: "status", Value: "200"}}, 1, 100000) + wreq, _ := mockWriteRequest(t, labels.FromStrings("__name__", "test_1", "route", "get_user", "status", "200"), 1, 100000) _, err = i.Push(ctx, wreq) require.NoError(t, err) @@ -7149,7 +7153,7 @@ func CreateBlock(t *testing.T, ctx context.Context, dir string, mint, maxt int64 var ref storage.SeriesRef start := (maxt-mint)/2 + mint - _, err = app.Append(ref, labels.Labels{labels.Label{Name: "test_label", Value: "test_value"}}, start, float64(1)) + _, err = app.Append(ref, labels.FromStrings("test_label", "test_value"), start, float64(1)) require.NoError(t, err) err = app.Commit() require.NoError(t, err) diff --git a/pkg/ingester/user_state.go b/pkg/ingester/user_state.go index 062f4d5e1bd..032c6907d8c 100644 --- a/pkg/ingester/user_state.go +++ b/pkg/ingester/user_state.go @@ -191,9 +191,9 @@ func getCardinalityForLimitsPerLabelSet(ctx context.Context, numSeries uint64, i } func getPostingForLabels(ctx context.Context, ir tsdb.IndexReader, lbls labels.Labels) (index.Postings, error) { - postings := make([]index.Postings, 0, len(lbls)) - for _, lbl := range lbls { - p, err := ir.Postings(ctx, lbl.Name, lbl.Value) + postings := make([]index.Postings, 0, lbls.Len()) + for name, value := range lbls.Map() { + p, err := ir.Postings(ctx, name, value) if err != nil { return nil, err } diff --git a/pkg/ingester/user_state_test.go b/pkg/ingester/user_state_test.go index a75b7e3e3e5..38be322854d 100644 --- a/pkg/ingester/user_state_test.go +++ b/pkg/ingester/user_state_test.go @@ -343,11 +343,11 @@ func (ir *mockIndexReader) Postings(ctx context.Context, name string, values ... 
func (ir *mockIndexReader) Symbols() index.StringIter { return nil } -func (ir *mockIndexReader) SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { +func (ir *mockIndexReader) SortedLabelValues(ctx context.Context, name string, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, error) { return nil, nil } -func (ir *mockIndexReader) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { +func (ir *mockIndexReader) LabelValues(ctx context.Context, name string, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, error) { return nil, nil } diff --git a/pkg/parquetconverter/converter_test.go b/pkg/parquetconverter/converter_test.go index fc8f6e99805..70b6469a7ba 100644 --- a/pkg/parquetconverter/converter_test.go +++ b/pkg/parquetconverter/converter_test.go @@ -63,10 +63,7 @@ func TestConverter(t *testing.T) { ctx := context.Background() - lbls := labels.Labels{labels.Label{ - Name: "__name__", - Value: "test", - }} + lbls := labels.FromStrings("__name__", "test") blocks := []ulid.ULID{} // Create blocks @@ -254,10 +251,7 @@ func TestConverter_BlockConversionFailure(t *testing.T) { require.NoError(t, err) // Create test labels - lbls := labels.Labels{labels.Label{ - Name: "__name__", - Value: "test", - }} + lbls := labels.FromStrings("__name__", "test") // Create a real TSDB block dir := t.TempDir() @@ -312,10 +306,7 @@ func TestConverter_ShouldNotFailOnAccessDenyError(t *testing.T) { require.NoError(t, err) // Create test labels - lbls := labels.Labels{labels.Label{ - Name: "__name__", - Value: "test", - }} + lbls := labels.FromStrings("__name__", "test") // Create a real TSDB block dir := t.TempDir() @@ -366,11 +357,11 @@ type mockBucket struct { getFailure error } -func (m *mockBucket) Upload(ctx context.Context, name string, r io.Reader) error { +func (m *mockBucket) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { if m.uploadFailure != nil { return m.uploadFailure } - return m.Bucket.Upload(ctx, name, r) + return m.Bucket.Upload(ctx, name, r, opts...) 
} func (m *mockBucket) Get(ctx context.Context, name string) (io.ReadCloser, error) { diff --git a/pkg/querier/blocks_store_queryable_test.go b/pkg/querier/blocks_store_queryable_test.go index da0a5df2679..9f890fc3902 100644 --- a/pkg/querier/blocks_store_queryable_test.go +++ b/pkg/querier/blocks_store_queryable_test.go @@ -129,7 +129,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -155,7 +155,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -187,7 +187,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -218,8 +218,8 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 3, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 3, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -250,7 +250,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -258,7 +258,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, nil, ), mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, []cortexpb.Histogram{ 
cortexpb.HistogramToHistogramProto(minT, testHistogram3), @@ -294,7 +294,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -302,7 +302,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, ), mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram3), @@ -337,11 +337,11 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block2), }}: {block2}, }, @@ -367,7 +367,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -377,7 +377,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), @@ -408,7 +408,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -418,7 +418,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), @@ -448,11 +448,11 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { 
storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1), }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block2), }}: {block2}, }, @@ -478,7 +478,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), @@ -488,7 +488,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -520,7 +520,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), @@ -530,7 +530,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -561,16 +561,16 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", 
mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block2), }}: {block2}, &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 3, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 3, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block3), }}: {block3}, }, @@ -631,16 +631,16 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block2), }}: {block2}, &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 3, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 3, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block3), }}: {block3}, }, @@ -666,14 +666,14 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), }, nil, ), mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, 
metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -683,7 +683,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -694,7 +694,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block2}, &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), @@ -732,14 +732,14 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), }, ), mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -749,7 +749,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series1Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -760,7 +760,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }}: {block2}, &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ mockSeriesResponse( - labels.Labels{metricNameLabel, series2Label}, + labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), @@ -798,7 +798,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { // First attempt returns a client whose response does not include all expected blocks. 
map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -820,11 +820,11 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { // First attempt returns a client whose response does not include all expected blocks. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1), }}: {block1}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block2), }}: {block2}, }, @@ -846,25 +846,25 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { // First attempt returns a client whose response does not include all expected blocks. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1, block3}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block2), }}: {block2, block4}, }, // Second attempt returns 1 missing block. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block3), }}: {block3, block4}, }, // Third attempt returns the last missing block. 
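For context on the pattern these TestBlocksStoreQuerier_Select hunks apply: labels.Labels composite literals are replaced with labels.FromStrings, which takes name/value pairs and returns an immutable, name-sorted label set; existing labels.Label variables are passed as their Name/Value fields. A minimal standalone sketch of that conversion, not code from this patch, assuming only the prometheus/prometheus model/labels package and illustrative label values:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// Old style: a composite literal the caller had to keep sorted by name.
	//   lset := labels.Labels{{Name: "__name__", Value: "foo"}, {Name: "series", Value: "1"}}
	// New style: FromStrings takes name/value pairs and returns a sorted label set.
	lset := labels.FromStrings("__name__", "foo", "series", "1")

	// Existing labels.Label values (like metricNameLabel and series1Label in the
	// tests above) are expanded into their Name and Value fields.
	metricNameLabel := labels.Label{Name: "__name__", Value: "foo"}
	seriesLabel := labels.Label{Name: "series", Value: "1"}
	fromVars := labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, seriesLabel.Name, seriesLabel.Value)

	fmt.Println(lset, fromVars) // both print {__name__="foo", series="1"}
}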
map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "4.4.4.4", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block4), }}: {block4}, }, @@ -924,7 +924,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -949,7 +949,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -966,7 +966,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), }, nil), @@ -986,7 +986,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), }), @@ -1006,7 +1006,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + 
mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -1023,7 +1023,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), }, nil), @@ -1043,7 +1043,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), }), @@ -1066,25 +1066,25 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { // First attempt returns a client whose response does not include all expected blocks. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1, block3}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block2), }}: {block2, block4}, }, // Second attempt returns 1 missing block. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block3), }}: {block3, block4}, }, // Third attempt returns the last missing block. 
map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "4.4.4.4", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block4), }}: {block4}, }, @@ -1104,25 +1104,25 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { // First attempt returns a client whose response does not include all expected blocks. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1, block3}, &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block2), }}: {block2, block4}, }, // Second attempt returns 1 missing block. map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "3.3.3.3", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block3), }}: {block3, block4}, }, // Third attempt returns the last missing block.
map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "4.4.4.4", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 3, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block4), }}: {block4}, }, @@ -1139,8 +1139,8 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -1157,12 +1157,12 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), }, nil, ), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, nil, + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT+1, testHistogram2), }, nil, @@ -1183,12 +1183,12 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, nil, + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), }, ), - mockSeriesResponse(labels.Labels{metricNameLabel, series2Label}, nil, nil, + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series2Label.Name, series2Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT+1, testFloatHistogram2), }, @@ -1209,7 +1209,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + 
mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -1226,7 +1226,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), }, nil), mockHintsResponse(block1, block2), @@ -1245,7 +1245,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), }), mockHintsResponse(block1, block2), @@ -1264,7 +1264,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }}: {block1, block2}, }, @@ -1281,7 +1281,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, []cortexpb.Histogram{ cortexpb.HistogramToHistogramProto(minT, testHistogram1), }, nil), mockHintsResponse(block1, block2), @@ -1300,7 +1300,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { storeSetResponses: []interface{}{ map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, nil, nil, []cortexpb.Histogram{ + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), nil, nil, []cortexpb.Histogram{ cortexpb.FloatHistogramToHistogramProto(minT, testFloatHistogram1), }), mockHintsResponse(block1, block2), @@ -1324,7 +1324,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - 
mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1353,7 +1353,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1382,7 +1382,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1408,7 +1408,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { &storeGatewayClientMock{ remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }, mockedSeriesStreamErr: status.Error(codes.PermissionDenied, "PermissionDenied"), @@ -1418,7 +1418,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { &storeGatewayClientMock{ remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }, mockedSeriesStreamErr: status.Error(codes.PermissionDenied, "PermissionDenied"), @@ -1446,13 +1446,13 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { remoteAddr: "1.1.1.1", mockedSeriesStreamErr: status.Error(codes.Unavailable, "unavailable"), mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + 
mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1481,7 +1481,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1510,7 +1510,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -1531,7 +1531,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) { }, map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "2.2.2.2", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{metricNameLabel, series1Label}, []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), + mockSeriesResponse(labels.FromStrings(metricNameLabel.Name, metricNameLabel.Value, series1Label.Name, series1Label.Value), []cortexpb.Sample{{Value: 2, TimestampMs: minT}}, nil, nil), mockHintsResponse(block1), }}: {block1}, }, @@ -2736,9 +2736,9 @@ func mockValuesHints(ids ...ulid.ULID) *types.Any { func namesFromSeries(series ...labels.Labels) []string { namesMap := map[string]struct{}{} for _, s := range series { - for _, l := range s { + s.Range(func(l labels.Label) { namesMap[l.Name] = struct{}{} - } + }) } names := []string{} @@ -2753,11 +2753,11 @@ func namesFromSeries(series ...labels.Labels) []string { func valuesFromSeries(name string, series ...labels.Labels) []string { valuesMap := map[string]struct{}{} for _, s := range series { - for _, l := range s { + s.Range(func(l labels.Label) { if l.Name == name { valuesMap[l.Value] = struct{}{} } - } + }) } values := []string{} diff --git a/pkg/querier/codec/protobuf_codec.go b/pkg/querier/codec/protobuf_codec.go index 64bfa2e3945..733e61c79bd 100644 --- a/pkg/querier/codec/protobuf_codec.go +++ b/pkg/querier/codec/protobuf_codec.go @@ -5,6 +5,7 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/histogram" + "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/util/stats" v1 "github.com/prometheus/prometheus/web/api/v1" @@ -101,16 +102,18 @@ func getMatrixSampleStreams(data *v1.QueryData) *[]tripperware.SampleStream { for i := 0; i < sampleStreamsLen; i++ { sampleStream := data.Result.(promql.Matrix)[i] - labelsLen := len(sampleStream.Metric) - var labels []cortexpb.LabelAdapter + labelsLen := sampleStream.Metric.Len() + var lbls []cortexpb.LabelAdapter if labelsLen > 0 { - labels = make([]cortexpb.LabelAdapter, labelsLen) - 
for j := 0; j < labelsLen; j++ { - labels[j] = cortexpb.LabelAdapter{ - Name: sampleStream.Metric[j].Name, - Value: sampleStream.Metric[j].Value, + lbls = make([]cortexpb.LabelAdapter, labelsLen) + j := 0 + sampleStream.Metric.Range(func(l labels.Label) { + lbls[j] = cortexpb.LabelAdapter{ + Name: l.Name, + Value: l.Value, } - } + j++ + }) } samplesLen := len(sampleStream.Floats) @@ -145,7 +148,7 @@ func getMatrixSampleStreams(data *v1.QueryData) *[]tripperware.SampleStream { } } } - sampleStreams[i] = tripperware.SampleStream{Labels: labels, Samples: samples, Histograms: histograms} + sampleStreams[i] = tripperware.SampleStream{Labels: lbls, Samples: samples, Histograms: histograms} } return &sampleStreams } @@ -156,18 +159,20 @@ func getVectorSamples(data *v1.QueryData, cortexInternal bool) *[]tripperware.Sa for i := 0; i < vectorSamplesLen; i++ { sample := data.Result.(promql.Vector)[i] - labelsLen := len(sample.Metric) - var labels []cortexpb.LabelAdapter + labelsLen := sample.Metric.Len() + var lbls []cortexpb.LabelAdapter if labelsLen > 0 { - labels = make([]cortexpb.LabelAdapter, labelsLen) - for j := 0; j < labelsLen; j++ { - labels[j] = cortexpb.LabelAdapter{ - Name: sample.Metric[j].Name, - Value: sample.Metric[j].Value, + lbls = make([]cortexpb.LabelAdapter, labelsLen) + j := 0 + sample.Metric.Range(func(l labels.Label) { + lbls[j] = cortexpb.LabelAdapter{ + Name: l.Name, + Value: l.Value, } - } + j++ + }) } - vectorSamples[i].Labels = labels + vectorSamples[i].Labels = lbls // Float samples only. if sample.H == nil { diff --git a/pkg/querier/codec/protobuf_codec_test.go b/pkg/querier/codec/protobuf_codec_test.go index c7fee0ecba5..44ebf6f1732 100644 --- a/pkg/querier/codec/protobuf_codec_test.go +++ b/pkg/querier/codec/protobuf_codec_test.go @@ -170,10 +170,7 @@ func TestProtobufCodec_Encode(t *testing.T) { ResultType: parser.ValueTypeMatrix, Result: promql.Matrix{ promql.Series{ - Metric: labels.Labels{ - {Name: "__name__", Value: "foo"}, - {Name: "__job__", Value: "bar"}, - }, + Metric: labels.FromStrings("__name__", "foo", "__job__", "bar"), Floats: []promql.FPoint{ {F: 0.14, T: 18555000}, {F: 2.9, T: 18556000}, @@ -192,8 +189,8 @@ func TestProtobufCodec_Encode(t *testing.T) { SampleStreams: []tripperware.SampleStream{ { Labels: []cortexpb.LabelAdapter{ - {Name: "__name__", Value: "foo"}, {Name: "__job__", Value: "bar"}, + {Name: "__name__", Value: "foo"}, }, Samples: []cortexpb.Sample{ {Value: 0.14, TimestampMs: 18555000}, diff --git a/pkg/querier/distributor_queryable_test.go b/pkg/querier/distributor_queryable_test.go index bb7e20b7ba9..d7313bdf396 100644 --- a/pkg/querier/distributor_queryable_test.go +++ b/pkg/querier/distributor_queryable_test.go @@ -191,13 +191,13 @@ func TestIngesterStreaming(t *testing.T) { require.True(t, seriesSet.Next()) series := seriesSet.At() - require.Equal(t, labels.Labels{{Name: "bar", Value: "baz"}}, series.Labels()) + require.Equal(t, labels.FromStrings("bar", "baz"), series.Labels()) chkIter := series.Iterator(nil) require.Equal(t, enc.ChunkValueType(), chkIter.Next()) require.True(t, seriesSet.Next()) series = seriesSet.At() - require.Equal(t, labels.Labels{{Name: "foo", Value: "bar"}}, series.Labels()) + require.Equal(t, labels.FromStrings("foo", "bar"), series.Labels()) chkIter = series.Iterator(chkIter) require.Equal(t, enc.ChunkValueType(), chkIter.Next()) diff --git a/pkg/querier/error_translate_queryable_test.go b/pkg/querier/error_translate_queryable_test.go index b1b34149096..03a22d52375 100644 --- 
a/pkg/querier/error_translate_queryable_test.go +++ b/pkg/querier/error_translate_queryable_test.go @@ -176,6 +176,8 @@ func createPrometheusAPI(q storage.SampleAndChunkQueryable, engine promql.QueryE false, false, false, + false, + 5*time.Minute, ) promRouter := route.New().WithPrefix("/api/v1") diff --git a/pkg/querier/parquet_queryable_test.go b/pkg/querier/parquet_queryable_test.go index 01a4bcd559c..6c52e97d143 100644 --- a/pkg/querier/parquet_queryable_test.go +++ b/pkg/querier/parquet_queryable_test.go @@ -5,6 +5,7 @@ import ( "fmt" "math/rand" "path/filepath" + "strconv" "sync" "testing" "time" @@ -53,7 +54,7 @@ func TestParquetQueryableFallbackLogic(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{{Name: labels.MetricName, Value: "fromSg"}}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(labels.MetricName, "fromSg"), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }, mockedLabelNamesResponse: &storepb.LabelNamesResponse{ @@ -415,10 +416,7 @@ func TestParquetQueryable_Limits(t *testing.T) { seriesCount := 100 lbls := make([]labels.Labels, seriesCount) for i := 0; i < seriesCount; i++ { - lbls[i] = labels.Labels{ - {Name: labels.MetricName, Value: metricName}, - {Name: "series", Value: fmt.Sprintf("%d", i)}, - } + lbls[i] = labels.FromStrings(labels.MetricName, metricName, "series", strconv.Itoa(i)) } rnd := rand.New(rand.NewSource(time.Now().UnixNano())) @@ -728,7 +726,7 @@ func TestParquetQueryableFallbackDisabled(t *testing.T) { map[BlocksStoreClient][]ulid.ULID{ &storeGatewayClientMock{remoteAddr: "1.1.1.1", mockedSeriesResponses: []*storepb.SeriesResponse{ - mockSeriesResponse(labels.Labels{{Name: labels.MetricName, Value: "fromSg"}}, []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), + mockSeriesResponse(labels.FromStrings(labels.MetricName, "fromSg"), []cortexpb.Sample{{Value: 1, TimestampMs: minT}, {Value: 2, TimestampMs: minT + 1}}, nil, nil), mockHintsResponse(block1, block2), }, mockedLabelNamesResponse: &storepb.LabelNamesResponse{ diff --git a/pkg/querier/querier_test.go b/pkg/querier/querier_test.go index 06f44039a11..3c48c0ab7d5 100644 --- a/pkg/querier/querier_test.go +++ b/pkg/querier/querier_test.go @@ -120,11 +120,9 @@ var ( // Very simple single-point gets, with low step. Performance should be // similar to above. { - query: "foo", - step: sampleRate * 4, - labels: labels.Labels{ - labels.Label{Name: model.MetricNameLabel, Value: "foo"}, - }, + query: "foo", + step: sampleRate * 4, + labels: labels.FromStrings(labels.MetricName, "foo"), samples: func(from, through time.Time, step time.Duration) int { return int(through.Sub(from)/step) + 1 }, @@ -182,11 +180,9 @@ var ( // Single points gets with large step; excersise Seek performance. 
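The codec and test-helper hunks above (getMatrixSampleStreams, getVectorSamples, namesFromSeries, valuesFromSeries) stop indexing into labels.Labels and instead iterate with its Len and Range accessors, since the type is no longer guaranteed to be a plain slice. A minimal standalone sketch of that iteration pattern, not code from this patch; collectNames is an illustrative helper and only the prometheus/prometheus model/labels package is assumed:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// collectNames walks a label set with Range instead of ranging over a slice,
// pre-sizing the result with Len, mirroring the conversion loops in the codec.
func collectNames(lset labels.Labels) []string {
	names := make([]string, 0, lset.Len())
	lset.Range(func(l labels.Label) {
		names = append(names, l.Name)
	})
	return names
}

func main() {
	lset := labels.FromStrings("__name__", "foo", "instance", "host1", "job", "bar")
	fmt.Println(collectNames(lset)) // labels are stored sorted: [__name__ instance job]
}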
{ - query: "foo", - step: sampleRate * 4 * 10, - labels: labels.Labels{ - labels.Label{Name: model.MetricNameLabel, Value: "foo"}, - }, + query: "foo", + step: sampleRate * 4 * 10, + labels: labels.FromStrings(labels.MetricName, "foo"), samples: func(from, through time.Time, step time.Duration) int { return int(through.Sub(from)/step) + 1 }, diff --git a/pkg/querier/series/series_set.go b/pkg/querier/series/series_set.go index 53a3ca4a1b1..4aaf6f89305 100644 --- a/pkg/querier/series/series_set.go +++ b/pkg/querier/series/series_set.go @@ -195,17 +195,12 @@ func MetricsToSeriesSet(ctx context.Context, sortSeries bool, ms []model.Metric) } func metricToLabels(m model.Metric) labels.Labels { - ls := make(labels.Labels, 0, len(m)) + builder := labels.NewBuilder(labels.EmptyLabels()) for k, v := range m { - ls = append(ls, labels.Label{ - Name: string(k), - Value: string(v), - }) + builder.Set(string(k), string(v)) + } - // PromQL expects all labels to be sorted! In general, anyone constructing - // a labels.Labels list is responsible for sorting it during construction time. - sort.Sort(ls) - return ls + return builder.Labels() } type byLabels []storage.Series diff --git a/pkg/querier/series/series_set_test.go b/pkg/querier/series/series_set_test.go index 7e243a14449..cf82cb61fec 100644 --- a/pkg/querier/series/series_set_test.go +++ b/pkg/querier/series/series_set_test.go @@ -46,11 +46,5 @@ func TestMatrixToSeriesSetSortsMetricLabels(t *testing.T) { require.NoError(t, ss.Err()) l := ss.At().Labels() - require.Equal(t, labels.Labels{ - {Name: string(model.MetricNameLabel), Value: "testmetric"}, - {Name: "a", Value: "b"}, - {Name: "c", Value: "d"}, - {Name: "e", Value: "f"}, - {Name: "g", Value: "h"}, - }, l) + require.Equal(t, labels.FromStrings(labels.MetricName, "testmetric", "a", "b", "c", "d", "e", "f", "g", "h"), l) } diff --git a/pkg/querier/stats_renderer_test.go b/pkg/querier/stats_renderer_test.go index 6f197b01657..9f033486127 100644 --- a/pkg/querier/stats_renderer_test.go +++ b/pkg/querier/stats_renderer_test.go @@ -90,6 +90,8 @@ func Test_StatsRenderer(t *testing.T) { false, false, false, + false, + 5*time.Minute, ) promRouter := route.New().WithPrefix("/api/v1") diff --git a/pkg/querier/tenantfederation/exemplar_merge_queryable.go b/pkg/querier/tenantfederation/exemplar_merge_queryable.go index a5f40ca59dc..c6b24caeb03 100644 --- a/pkg/querier/tenantfederation/exemplar_merge_queryable.go +++ b/pkg/querier/tenantfederation/exemplar_merge_queryable.go @@ -175,10 +175,7 @@ func (m mergeExemplarQuerier) Select(start, end int64, matchers ...[]*labels.Mat // append __tenant__ label to `seriesLabels` to identify each tenants for i, e := range res { - e.SeriesLabels = setLabelsRetainExisting(e.SeriesLabels, labels.Label{ - Name: m.idLabelName, - Value: job.id, - }) + e.SeriesLabels = setLabelsRetainExisting(e.SeriesLabels, labels.FromStrings(m.idLabelName, job.id)) res[i] = e } diff --git a/pkg/querier/tenantfederation/merge_queryable.go b/pkg/querier/tenantfederation/merge_queryable.go index 71bf0e2531e..58cdb7625f2 100644 --- a/pkg/querier/tenantfederation/merge_queryable.go +++ b/pkg/querier/tenantfederation/merge_queryable.go @@ -364,12 +364,7 @@ func (m *mergeQuerier) Select(ctx context.Context, sortSeries bool, hints *stora newCtx := user.InjectOrgID(parentCtx, job.id) seriesSets[job.pos] = &addLabelsSeriesSet{ upstream: job.querier.Select(newCtx, sortSeries, hints, filteredMatchers...), - labels: labels.Labels{ - { - Name: m.idLabelName, - Value: job.id, - }, - }, + labels: 
labels.FromStrings(m.idLabelName, job.id), } return nil } @@ -442,7 +437,7 @@ func (m *addLabelsSeriesSet) At() storage.Series { upstream := m.upstream.At() m.currSeries = &addLabelsSeries{ upstream: upstream, - labels: setLabelsRetainExisting(upstream.Labels(), m.labels...), + labels: setLabelsRetainExisting(upstream.Labels(), m.labels), } } return m.currSeries @@ -471,11 +466,11 @@ func rewriteLabelName(s string) string { } // this outputs a more readable error format -func labelsToString(labels labels.Labels) string { - parts := make([]string, len(labels)) - for pos, l := range labels { - parts[pos] = rewriteLabelName(l.Name) + " " + l.Value - } +func labelsToString(lbls labels.Labels) string { + parts := make([]string, 0, lbls.Len()) + lbls.Range(func(l labels.Label) { + parts = append(parts, rewriteLabelName(l.Name)+" "+l.Value) + }) return strings.Join(parts, ", ") } @@ -496,17 +491,17 @@ func (a *addLabelsSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator { // this sets a label and preserves an existing value a new label prefixed with // original_. It doesn't do this recursively. -func setLabelsRetainExisting(src labels.Labels, additionalLabels ...labels.Label) labels.Labels { +func setLabelsRetainExisting(src labels.Labels, additionalLabels labels.Labels) labels.Labels { lb := labels.NewBuilder(src) - for _, additionalL := range additionalLabels { - if oldValue := src.Get(additionalL.Name); oldValue != "" { + for name, value := range additionalLabels.Map() { + if oldValue := src.Get(name); oldValue != "" { lb.Set( - retainExistingPrefix+additionalL.Name, + retainExistingPrefix+name, oldValue, ) } - lb.Set(additionalL.Name, additionalL.Value) + lb.Set(name, value) } return lb.Labels() diff --git a/pkg/querier/tenantfederation/merge_queryable_test.go b/pkg/querier/tenantfederation/merge_queryable_test.go index 8015ca21951..5be2f70a764 100644 --- a/pkg/querier/tenantfederation/merge_queryable_test.go +++ b/pkg/querier/tenantfederation/merge_queryable_test.go @@ -492,24 +492,24 @@ func TestMergeQueryable_Select(t *testing.T) { matchers: []*labels.Matcher{{Name: defaultTenantLabel, Value: "team-b", Type: labels.MatchNotEqual}}, expectedSeriesCount: 4, expectedLabels: []labels.Labels{ - { - {Name: "__tenant_id__", Value: "team-a"}, - {Name: "instance", Value: "host1"}, - {Name: "tenant-team-a", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-a"}, - {Name: "instance", Value: "host2.team-a"}, - }, - { - {Name: "__tenant_id__", Value: "team-c"}, - {Name: "instance", Value: "host1"}, - {Name: "tenant-team-c", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-c"}, - {Name: "instance", Value: "host2.team-c"}, - }, + labels.FromStrings( + "__tenant_id__", "team-a", + "instance", "host1", + "tenant-team-a", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-a", + "instance", "host2.team-a", + ), + labels.FromStrings( + "__tenant_id__", "team-c", + "instance", "host1", + "tenant-team-c", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-c", + "instance", "host2.team-c", + ), }, expectedMetrics: expectedThreeTenantsMetrics, }, @@ -518,15 +518,15 @@ func TestMergeQueryable_Select(t *testing.T) { matchers: []*labels.Matcher{{Name: defaultTenantLabel, Value: "team-b", Type: labels.MatchEqual}}, expectedSeriesCount: 2, expectedLabels: []labels.Labels{ - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: "instance", Value: "host1"}, - {Name: "tenant-team-b", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: 
"instance", Value: "host2.team-b"}, - }, + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host1", + "tenant-team-b", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host2.team-b", + ), }, expectedMetrics: expectedThreeTenantsMetrics, }, @@ -545,39 +545,39 @@ func TestMergeQueryable_Select(t *testing.T) { name: "should return all series when no matchers are provided", expectedSeriesCount: 6, expectedLabels: []labels.Labels{ - { - {Name: "__tenant_id__", Value: "team-a"}, - {Name: "instance", Value: "host1"}, - {Name: "original___tenant_id__", Value: "original-value"}, - {Name: "tenant-team-a", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-a"}, - {Name: "instance", Value: "host2.team-a"}, - {Name: "original___tenant_id__", Value: "original-value"}, - }, - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: "instance", Value: "host1"}, - {Name: "original___tenant_id__", Value: "original-value"}, - {Name: "tenant-team-b", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: "instance", Value: "host2.team-b"}, - {Name: "original___tenant_id__", Value: "original-value"}, - }, - { - {Name: "__tenant_id__", Value: "team-c"}, - {Name: "instance", Value: "host1"}, - {Name: "original___tenant_id__", Value: "original-value"}, - {Name: "tenant-team-c", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-c"}, - {Name: "instance", Value: "host2.team-c"}, - {Name: "original___tenant_id__", Value: "original-value"}, - }, + labels.FromStrings( + "__tenant_id__", "team-a", + "instance", "host1", + "original___tenant_id__", "original-value", + "tenant-team-a", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-a", + "instance", "host2.team-a", + "original___tenant_id__", "original-value", + ), + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host1", + "original___tenant_id__", "original-value", + "tenant-team-b", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host2.team-b", + "original___tenant_id__", "original-value", + ), + labels.FromStrings( + "__tenant_id__", "team-c", + "instance", "host1", + "original___tenant_id__", "original-value", + "tenant-team-c", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-c", + "instance", "host2.team-c", + "original___tenant_id__", "original-value", + ), }, expectedMetrics: expectedThreeTenantsMetrics, }, @@ -599,17 +599,17 @@ func TestMergeQueryable_Select(t *testing.T) { matchers: []*labels.Matcher{{Name: defaultTenantLabel, Value: "team-b", Type: labels.MatchEqual}}, expectedSeriesCount: 2, expectedLabels: []labels.Labels{ - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: "instance", Value: "host1"}, - {Name: "original___tenant_id__", Value: "original-value"}, - {Name: "tenant-team-b", Value: "static"}, - }, - { - {Name: "__tenant_id__", Value: "team-b"}, - {Name: "instance", Value: "host2.team-b"}, - {Name: "original___tenant_id__", Value: "original-value"}, - }, + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host1", + "original___tenant_id__", "original-value", + "tenant-team-b", "static", + ), + labels.FromStrings( + "__tenant_id__", "team-b", + "instance", "host2.team-b", + "original___tenant_id__", "original-value", + ), }, expectedMetrics: expectedThreeTenantsMetrics, }, @@ -1178,33 +1178,33 @@ func TestSetLabelsRetainExisting(t *testing.T) { }{ // Test adding labels at the end. 
{ - labels: labels.Labels{{Name: "a", Value: "b"}}, - additionalLabels: labels.Labels{{Name: "c", Value: "d"}}, - expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + labels: labels.FromStrings("a", "b"), + additionalLabels: labels.FromStrings("c", "d"), + expected: labels.FromStrings("a", "b", "c", "d"), }, // Test adding labels at the beginning. { - labels: labels.Labels{{Name: "c", Value: "d"}}, - additionalLabels: labels.Labels{{Name: "a", Value: "b"}}, - expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, + labels: labels.FromStrings("c", "d"), + additionalLabels: labels.FromStrings("a", "b"), + expected: labels.FromStrings("a", "b", "c", "d"), }, // Test we do override existing labels and expose the original value. { - labels: labels.Labels{{Name: "a", Value: "b"}}, - additionalLabels: labels.Labels{{Name: "a", Value: "c"}}, - expected: labels.Labels{{Name: "a", Value: "c"}, {Name: "original_a", Value: "b"}}, + labels: labels.FromStrings("a", "b"), + additionalLabels: labels.FromStrings("a", "c"), + expected: labels.FromStrings("a", "c", "original_a", "b"), }, // Test we do override existing labels but don't do it recursively. { - labels: labels.Labels{{Name: "a", Value: "b"}, {Name: "original_a", Value: "i am lost"}}, - additionalLabels: labels.Labels{{Name: "a", Value: "d"}}, - expected: labels.Labels{{Name: "a", Value: "d"}, {Name: "original_a", Value: "b"}}, + labels: labels.FromStrings("a", "b", "original_a", "i am lost"), + additionalLabels: labels.FromStrings("a", "d"), + expected: labels.FromStrings("a", "d", "original_a", "b"), }, } { - assert.Equal(t, tc.expected, setLabelsRetainExisting(tc.labels, tc.additionalLabels...)) + assert.Equal(t, tc.expected, setLabelsRetainExisting(tc.labels, tc.additionalLabels)) } } diff --git a/pkg/querier/testutils.go b/pkg/querier/testutils.go index a032e545ddc..4ac69988bfa 100644 --- a/pkg/querier/testutils.go +++ b/pkg/querier/testutils.go @@ -142,7 +142,7 @@ func ConvertToChunks(t *testing.T, samples []cortexpb.Sample, histograms []*cort } } - c := chunk.NewChunk(nil, chk, model.Time(samples[0].TimestampMs), model.Time(samples[len(samples)-1].TimestampMs)) + c := chunk.NewChunk(labels.EmptyLabels(), chk, model.Time(samples[0].TimestampMs), model.Time(samples[len(samples)-1].TimestampMs)) clientChunks, err := chunkcompat.ToChunks([]chunk.Chunk{c}) require.NoError(t, err) diff --git a/pkg/querier/tripperware/distributed_query.go b/pkg/querier/tripperware/distributed_query.go index 02a0692153d..5439a3dc697 100644 --- a/pkg/querier/tripperware/distributed_query.go +++ b/pkg/querier/tripperware/distributed_query.go @@ -64,7 +64,10 @@ func (d distributedQueryMiddleware) newLogicalPlan(qs string, start time.Time, e DisableDuplicateLabelCheck: false, } - logicalPlan := logicalplan.NewFromAST(expr, &qOpts, planOpts) + logicalPlan, err := logicalplan.NewFromAST(expr, &qOpts, planOpts) + if err != nil { + return nil, err + } optimizedPlan, _ := logicalPlan.Optimize(logicalplan.DefaultOptimizers) return &optimizedPlan, nil diff --git a/pkg/querier/tripperware/queryrange/results_cache.go b/pkg/querier/tripperware/queryrange/results_cache.go index db6d2f284f5..6378a82fbef 100644 --- a/pkg/querier/tripperware/queryrange/results_cache.go +++ b/pkg/querier/tripperware/queryrange/results_cache.go @@ -335,7 +335,12 @@ func (s resultsCache) isAtModifierCachable(ctx context.Context, r tripperware.Re } // This resolves the start() and end() used with the @ modifier. 
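Several hunks above (metricToLabels in series_set.go, setLabelsRetainExisting in merge_queryable.go) and below (genLabels in test_utils.go, the ruler's JSON decoder) build label sets through labels.NewBuilder instead of appending to a slice and calling sort.Sort. A minimal standalone sketch of that construction pattern, not code from this patch; fromMap is an illustrative helper and only the prometheus/prometheus model/labels package is assumed:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// fromMap builds a label set from an unsorted map. The Builder takes care of
// ordering and de-duplication, so no explicit sort step is needed.
func fromMap(m map[string]string) labels.Labels {
	b := labels.NewBuilder(labels.EmptyLabels())
	for name, value := range m {
		b.Set(name, value)
	}
	return b.Labels()
}

func main() {
	lset := fromMap(map[string]string{"job": "bar", "__name__": "foo"})
	fmt.Println(lset) // {__name__="foo", job="bar"}
}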
- expr = promql.PreprocessExpr(expr, timestamp.Time(r.GetStart()), timestamp.Time(r.GetEnd())) + expr, err = promql.PreprocessExpr(expr, timestamp.Time(r.GetStart()), timestamp.Time(r.GetEnd()), time.Duration(r.GetStep())*time.Millisecond) + if err != nil { + // We are being pessimistic in such cases. + level.Warn(util_log.WithContext(ctx, s.logger)).Log("msg", "failed to preprocess expr", "query", query, "err", err) + return false + } end := r.GetEnd() atModCachable := true diff --git a/pkg/querier/tripperware/queryrange/test_utils.go b/pkg/querier/tripperware/queryrange/test_utils.go index 6e198baebbc..a48ae956131 100644 --- a/pkg/querier/tripperware/queryrange/test_utils.go +++ b/pkg/querier/tripperware/queryrange/test_utils.go @@ -24,13 +24,12 @@ func genLabels( Value: fmt.Sprintf("%d", i), } if len(rest) == 0 { - set := labels.Labels{x} - result = append(result, set) + result = append(result, labels.FromStrings(x.Name, x.Value)) continue } for _, others := range rest { - set := append(others, x) - result = append(result, set) + builder := labels.NewBuilder(others).Set(x.Name, x.Value) + result = append(result, builder.Labels()) } } return result diff --git a/pkg/querier/tripperware/queryrange/test_utils_test.go b/pkg/querier/tripperware/queryrange/test_utils_test.go index 7e0d8268ea5..8bdf75b3dd2 100644 --- a/pkg/querier/tripperware/queryrange/test_utils_test.go +++ b/pkg/querier/tripperware/queryrange/test_utils_test.go @@ -2,7 +2,6 @@ package queryrange import ( "math" - "sort" "testing" "github.com/prometheus/prometheus/model/labels" @@ -12,51 +11,13 @@ import ( func TestGenLabelsCorrectness(t *testing.T) { t.Parallel() ls := genLabels([]string{"a", "b"}, 2) - for _, set := range ls { - sort.Sort(set) - } expected := []labels.Labels{ - { - labels.Label{ - Name: "a", - Value: "0", - }, - labels.Label{ - Name: "b", - Value: "0", - }, - }, - { - labels.Label{ - Name: "a", - Value: "0", - }, - labels.Label{ - Name: "b", - Value: "1", - }, - }, - { - labels.Label{ - Name: "a", - Value: "1", - }, - labels.Label{ - Name: "b", - Value: "0", - }, - }, - { - labels.Label{ - Name: "a", - Value: "1", - }, - labels.Label{ - Name: "b", - Value: "1", - }, - }, + labels.FromStrings("a", "0", "b", "0"), + labels.FromStrings("a", "0", "b", "1"), + labels.FromStrings("a", "1", "b", "0"), + labels.FromStrings("a", "1", "b", "1"), } + require.Equal(t, expected, ls) } diff --git a/pkg/querier/tripperware/queryrange/value.go b/pkg/querier/tripperware/queryrange/value.go index efa8569a9d5..e13bb54fc65 100644 --- a/pkg/querier/tripperware/queryrange/value.go +++ b/pkg/querier/tripperware/queryrange/value.go @@ -58,10 +58,10 @@ func FromResult(res *promql.Result) ([]tripperware.SampleStream, error) { } func mapLabels(ls labels.Labels) []cortexpb.LabelAdapter { - result := make([]cortexpb.LabelAdapter, 0, len(ls)) - for _, l := range ls { + result := make([]cortexpb.LabelAdapter, 0, ls.Len()) + ls.Range(func(l labels.Label) { result = append(result, cortexpb.LabelAdapter(l)) - } + }) return result } diff --git a/pkg/querier/tripperware/queryrange/value_test.go b/pkg/querier/tripperware/queryrange/value_test.go index e82eadfa737..b31230b4ae5 100644 --- a/pkg/querier/tripperware/queryrange/value_test.go +++ b/pkg/querier/tripperware/queryrange/value_test.go @@ -48,20 +48,14 @@ func TestFromValue(t *testing.T) { input: &promql.Result{ Value: promql.Vector{ promql.Sample{ - T: 1, - F: 1, - Metric: labels.Labels{ - {Name: "a", Value: "a1"}, - {Name: "b", Value: "b1"}, - }, + T: 1, + F: 1, + Metric: 
labels.FromStrings("a", "a1", "b", "b1"), }, promql.Sample{ - T: 2, - F: 2, - Metric: labels.Labels{ - {Name: "a", Value: "a2"}, - {Name: "b", Value: "b2"}, - }, + T: 2, + F: 2, + Metric: labels.FromStrings("a", "a2", "b", "b2"), }, }, }, @@ -98,20 +92,14 @@ func TestFromValue(t *testing.T) { input: &promql.Result{ Value: promql.Matrix{ { - Metric: labels.Labels{ - {Name: "a", Value: "a1"}, - {Name: "b", Value: "b1"}, - }, + Metric: labels.FromStrings("a", "a1", "b", "b1"), Floats: []promql.FPoint{ {T: 1, F: 1}, {T: 2, F: 2}, }, }, { - Metric: labels.Labels{ - {Name: "a", Value: "a2"}, - {Name: "b", Value: "b2"}, - }, + Metric: labels.FromStrings("a", "a2", "b", "b2"), Floats: []promql.FPoint{ {T: 1, F: 8}, {T: 2, F: 9}, diff --git a/pkg/ruler/external_labels.go b/pkg/ruler/external_labels.go index 886fc4d0ed8..b0f2e4306b5 100644 --- a/pkg/ruler/external_labels.go +++ b/pkg/ruler/external_labels.go @@ -20,7 +20,7 @@ func newUserExternalLabels(global labels.Labels, limits RulesLimits) *userExtern return &userExternalLabels{ global: global, limits: limits, - builder: labels.NewBuilder(nil), + builder: labels.NewBuilder(labels.EmptyLabels()), mtx: sync.Mutex{}, users: map[string]labels.Labels{}, @@ -41,9 +41,9 @@ func (e *userExternalLabels) update(userID string) (labels.Labels, bool) { defer e.mtx.Unlock() e.builder.Reset(e.global) - for _, l := range lset { + lset.Range(func(l labels.Label) { e.builder.Set(l.Name, l.Value) - } + }) lset = e.builder.Labels() if !labels.Equal(e.users[userID], lset) { diff --git a/pkg/ruler/external_labels_test.go b/pkg/ruler/external_labels_test.go index 45ff1507c83..1bc13a65831 100644 --- a/pkg/ruler/external_labels_test.go +++ b/pkg/ruler/external_labels_test.go @@ -22,7 +22,7 @@ func TestUserExternalLabels(t *testing.T) { name: "global labels only", removeBeforeTest: false, exists: false, - userExternalLabels: nil, + userExternalLabels: labels.EmptyLabels(), expectedExternalLabels: labels.FromStrings("from", "cortex"), }, { diff --git a/pkg/ruler/frontend_decoder.go b/pkg/ruler/frontend_decoder.go index 92a6b1a3f6e..4086dceffb7 100644 --- a/pkg/ruler/frontend_decoder.go +++ b/pkg/ruler/frontend_decoder.go @@ -5,7 +5,6 @@ import ( "encoding/json" "errors" "fmt" - "sort" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" @@ -76,20 +75,14 @@ func (j JsonDecoder) Decode(body []byte) (promql.Vector, Warnings, error) { func (j JsonDecoder) vectorToPromQLVector(vector model.Vector) promql.Vector { v := make([]promql.Sample, 0, len(vector)) for _, sample := range vector { - metric := make([]labels.Label, 0, len(sample.Metric)) + builder := labels.NewBuilder(labels.EmptyLabels()) for k, v := range sample.Metric { - metric = append(metric, labels.Label{ - Name: string(k), - Value: string(v), - }) + builder.Set(string(k), string(v)) } - sort.Slice(metric, func(i, j int) bool { - return metric[i].Name < metric[j].Name - }) v = append(v, promql.Sample{ T: int64(sample.Timestamp), F: float64(sample.Value), - Metric: metric, + Metric: builder.Labels(), }) } return v diff --git a/pkg/ruler/notifier_test.go b/pkg/ruler/notifier_test.go index 8d3c6ba2af7..e27e3527ed7 100644 --- a/pkg/ruler/notifier_test.go +++ b/pkg/ruler/notifier_test.go @@ -225,9 +225,7 @@ func TestBuildNotifierConfig(t *testing.T) { name: "with external labels", cfg: &Config{ AlertmanagerURL: "http://alertmanager.default.svc.cluster.local/alertmanager", - ExternalLabels: []labels.Label{ - {Name: "region", Value: "us-east-1"}, - }, + ExternalLabels: 
labels.FromStrings("region", "us-east-1"), }, ncfg: &config.Config{ AlertingConfig: config.AlertingConfig{ @@ -247,9 +245,7 @@ func TestBuildNotifierConfig(t *testing.T) { }, }, GlobalConfig: config.GlobalConfig{ - ExternalLabels: []labels.Label{ - {Name: "region", Value: "us-east-1"}, - }, + ExternalLabels: labels.FromStrings("region", "us-east-1"), }, }, }, diff --git a/pkg/ruler/ruler_test.go b/pkg/ruler/ruler_test.go index 538d7a0ac2f..cd171eb5da9 100644 --- a/pkg/ruler/ruler_test.go +++ b/pkg/ruler/ruler_test.go @@ -414,7 +414,7 @@ func TestNotifierSendsUserIDHeader(t *testing.T) { time.Sleep(10 * time.Millisecond) } n.Send(¬ifier.Alert{ - Labels: labels.Labels{labels.Label{Name: "alertname", Value: "testalert"}}, + Labels: labels.FromStrings("alertname", "testalert"), }) wg.Wait() @@ -450,7 +450,7 @@ func TestNotifierSendExternalLabels(t *testing.T) { cfg := defaultRulerConfig(t) cfg.AlertmanagerURL = ts.URL cfg.AlertmanagerDiscovery = false - cfg.ExternalLabels = []labels.Label{{Name: "region", Value: "us-east-1"}} + cfg.ExternalLabels = labels.FromStrings("region", "us-east-1") limits := &ruleLimits{} engine, queryable, pusher, logger, _, reg := testSetup(t, nil) metrics := NewRuleEvalMetrics(cfg, nil) @@ -481,12 +481,12 @@ func TestNotifierSendExternalLabels(t *testing.T) { }, { name: "local labels without overriding", - userExternalLabels: labels.FromStrings("mylabel", "local"), + userExternalLabels: []labels.Label{{Name: "mylabel", Value: "local"}}, expectedExternalLabels: []labels.Label{{Name: "region", Value: "us-east-1"}, {Name: "mylabel", Value: "local"}}, }, { name: "local labels that override globals", - userExternalLabels: labels.FromStrings("region", "cloud", "mylabel", "local"), + userExternalLabels: []labels.Label{{Name: "region", Value: "cloud"}, {Name: "mylabel", Value: "local"}}, expectedExternalLabels: []labels.Label{{Name: "region", Value: "cloud"}, {Name: "mylabel", Value: "local"}}, }, } @@ -494,7 +494,7 @@ func TestNotifierSendExternalLabels(t *testing.T) { test := test t.Run(test.name, func(t *testing.T) { - limits.setRulerExternalLabels(test.userExternalLabels) + limits.setRulerExternalLabels(labels.New(test.userExternalLabels...)) manager.SyncRuleGroups(context.Background(), map[string]rulespb.RuleGroupList{ userID: {&rulespb.RuleGroupDesc{Name: "group", Namespace: "ns", Interval: time.Minute, User: userID}}, }) @@ -506,7 +506,7 @@ func TestNotifierSendExternalLabels(t *testing.T) { }, 10*time.Second, 10*time.Millisecond) n.notifier.Send(¬ifier.Alert{ - Labels: labels.Labels{labels.Label{Name: "alertname", Value: "testalert"}}, + Labels: labels.FromStrings("alertname", "testalert"), }) select { case <-time.After(5 * time.Second): @@ -2680,8 +2680,8 @@ func TestSendAlerts(t *testing.T) { { in: []*promRules.Alert{ { - Labels: []labels.Label{{Name: "l1", Value: "v1"}}, - Annotations: []labels.Label{{Name: "a2", Value: "v2"}}, + Labels: labels.FromStrings("l1", "v1"), + Annotations: labels.FromStrings("a2", "v2"), ActiveAt: time.Unix(1, 0), FiredAt: time.Unix(2, 0), ValidUntil: time.Unix(3, 0), @@ -2689,8 +2689,8 @@ func TestSendAlerts(t *testing.T) { }, exp: []*notifier.Alert{ { - Labels: []labels.Label{{Name: "l1", Value: "v1"}}, - Annotations: []labels.Label{{Name: "a2", Value: "v2"}}, + Labels: labels.FromStrings("l1", "v1"), + Annotations: labels.FromStrings("a2", "v2"), StartsAt: time.Unix(2, 0), EndsAt: time.Unix(3, 0), GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1", @@ -2700,8 +2700,8 @@ func TestSendAlerts(t *testing.T) { { in: 
[]*promRules.Alert{ { - Labels: []labels.Label{{Name: "l1", Value: "v1"}}, - Annotations: []labels.Label{{Name: "a2", Value: "v2"}}, + Labels: labels.FromStrings("l1", "v1"), + Annotations: labels.FromStrings("a2", "v2"), ActiveAt: time.Unix(1, 0), FiredAt: time.Unix(2, 0), ResolvedAt: time.Unix(4, 0), @@ -2709,8 +2709,8 @@ func TestSendAlerts(t *testing.T) { }, exp: []*notifier.Alert{ { - Labels: []labels.Label{{Name: "l1", Value: "v1"}}, - Annotations: []labels.Label{{Name: "a2", Value: "v2"}}, + Labels: labels.FromStrings("l1", "v1"), + Annotations: labels.FromStrings("a2", "v2"), StartsAt: time.Unix(2, 0), EndsAt: time.Unix(4, 0), GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1", diff --git a/pkg/storage/bucket/client_mock.go b/pkg/storage/bucket/client_mock.go index f323000db27..d641067ae05 100644 --- a/pkg/storage/bucket/client_mock.go +++ b/pkg/storage/bucket/client_mock.go @@ -5,6 +5,7 @@ import ( "context" "errors" "io" + "strings" "sync" "time" @@ -23,6 +24,10 @@ type ClientMock struct { uploaded sync.Map } +func (m *ClientMock) Provider() objstore.ObjProvider { + return objstore.FILESYSTEM +} + func (m *ClientMock) WithExpectedErrs(objstore.IsOpFailureExpectedFunc) objstore.Bucket { return m } @@ -32,16 +37,21 @@ func (m *ClientMock) ReaderWithExpectedErrs(objstore.IsOpFailureExpectedFunc) ob } // Upload mocks objstore.Bucket.Upload() -func (m *ClientMock) Upload(ctx context.Context, name string, r io.Reader) error { +func (m *ClientMock) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { if _, ok := m.uploaded.Load(name); ok { m.uploaded.Store(name, true) } - args := m.Called(ctx, name, r) - return args.Error(0) + if len(opts) > 0 { + args := m.Called(ctx, name, r, opts) + return args.Error(0) + } else { + args := m.Called(ctx, name, r) + return args.Error(0) + } } func (m *ClientMock) MockUpload(name string, err error) { - m.On("Upload", mock.Anything, name, mock.Anything).Return(err) + m.On("Upload", mock.Anything, name, mock.Anything, mock.Anything).Return(err) } // Delete mocks objstore.Bucket.Delete() @@ -73,6 +83,42 @@ func (m *ClientMock) Iter(ctx context.Context, dir string, f func(string) error, return args.Error(0) } +func (m *ClientMock) MockIterWithAttributes(prefix string, objects []string, err error, cb func()) { + m.On("IterWithAttributes", mock.Anything, prefix, mock.Anything, mock.Anything).Return(err).Run(func(args mock.Arguments) { + f := args.Get(2).(func(attrs objstore.IterObjectAttributes) error) + opts := args.Get(3).([]objstore.IterOption) + + // Determine if recursive flag is passed + params := objstore.ApplyIterOptions(opts...) 
+ recursive := params.Recursive + + for _, o := range objects { + // Check if object is under current prefix + if !strings.HasPrefix(o, prefix) { + continue + } + + // Extract the remaining path after prefix + suffix := strings.TrimPrefix(o, prefix) + + // If not recursive and there's a slash in the remaining path, skip it + if !recursive && strings.Contains(suffix, "/") { + continue + } + + attrs := objstore.IterObjectAttributes{ + Name: o, + } + if cb != nil { + cb() + } + if err := f(attrs); err != nil { + break + } + } + }) +} + // MockIter is a convenient method to mock Iter() func (m *ClientMock) MockIter(prefix string, objects []string, err error) { m.MockIterWithCallback(prefix, objects, err, nil) @@ -81,6 +127,7 @@ func (m *ClientMock) MockIter(prefix string, objects []string, err error) { // MockIterWithCallback is a convenient method to mock Iter() and get a callback called when the Iter // API is called. func (m *ClientMock) MockIterWithCallback(prefix string, objects []string, err error, cb func()) { + m.MockIterWithAttributes(prefix, objects, err, cb) m.On("Iter", mock.Anything, prefix, mock.Anything, mock.Anything).Return(err).Run(func(args mock.Arguments) { if cb != nil { cb() diff --git a/pkg/storage/bucket/prefixed_bucket_client.go b/pkg/storage/bucket/prefixed_bucket_client.go index ac3ca06ce30..1f979df3121 100644 --- a/pkg/storage/bucket/prefixed_bucket_client.go +++ b/pkg/storage/bucket/prefixed_bucket_client.go @@ -31,8 +31,8 @@ func (b *PrefixedBucketClient) Close() error { } // Upload the contents of the reader as an object into the bucket. -func (b *PrefixedBucketClient) Upload(ctx context.Context, name string, r io.Reader) (err error) { - err = b.bucket.Upload(ctx, b.fullName(name), r) +func (b *PrefixedBucketClient) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) (err error) { + err = b.bucket.Upload(ctx, b.fullName(name), r, opts...) return } @@ -44,9 +44,14 @@ func (b *PrefixedBucketClient) Delete(ctx context.Context, name string) error { // Name returns the bucket name for the provider. func (b *PrefixedBucketClient) Name() string { return b.bucket.Name() } -// TODO(Sungjin1212): Implement if needed +// IterWithAttributes calls f for each entry in the given directory (not recursive). The argument to f is the object attributes +// including the prefix of the inspected directory. The configured prefix will be stripped +// before the supplied function is applied. func (b *PrefixedBucketClient) IterWithAttributes(ctx context.Context, dir string, f func(attrs objstore.IterObjectAttributes) error, options ...objstore.IterOption) error { - return b.bucket.IterWithAttributes(ctx, dir, f, options...) + return b.bucket.IterWithAttributes(ctx, b.fullName(dir), func(attrs objstore.IterObjectAttributes) error { + attrs.Name = strings.TrimPrefix(attrs.Name, b.prefix+objstore.DirDelim) + return f(attrs) + }, options...)
} func (b *PrefixedBucketClient) SupportedIterOptions() []objstore.IterOptionType { @@ -109,3 +114,7 @@ func (b *PrefixedBucketClient) WithExpectedErrs(fn objstore.IsOpFailureExpectedF } return b } + +func (b *PrefixedBucketClient) Provider() objstore.ObjProvider { + return b.bucket.Provider() +} diff --git a/pkg/storage/bucket/s3/bucket_client.go b/pkg/storage/bucket/s3/bucket_client.go index 220afb90256..8d3ed4a6367 100644 --- a/pkg/storage/bucket/s3/bucket_client.go +++ b/pkg/storage/bucket/s3/bucket_client.go @@ -119,6 +119,10 @@ type BucketWithRetries struct { retryMaxBackoff time.Duration } +func (b *BucketWithRetries) Provider() objstore.ObjProvider { + return b.bucket.Provider() +} + func (b *BucketWithRetries) retry(ctx context.Context, f func() error, operationInfo string) error { var lastErr error retries := backoff.New(ctx, backoff.Config{ @@ -191,12 +195,12 @@ func (b *BucketWithRetries) Exists(ctx context.Context, name string) (exists boo return } -func (b *BucketWithRetries) Upload(ctx context.Context, name string, r io.Reader) error { +func (b *BucketWithRetries) Upload(ctx context.Context, name string, r io.Reader, uploadOpts ...objstore.ObjectUploadOption) error { rs, ok := r.(io.ReadSeeker) if !ok { // Skip retry if incoming Reader is not seekable to avoid // loading entire content into memory - err := b.bucket.Upload(ctx, name, r) + err := b.bucket.Upload(ctx, name, r, uploadOpts...) if err != nil { level.Warn(b.logger).Log("msg", "skip upload retry as reader is not seekable", "file", name, "err", err) } @@ -206,7 +210,7 @@ func (b *BucketWithRetries) Upload(ctx context.Context, name string, r io.Reader if _, err := rs.Seek(0, io.SeekStart); err != nil { return err } - return b.bucket.Upload(ctx, name, rs) + return b.bucket.Upload(ctx, name, rs, uploadOpts...) }, fmt.Sprintf("Upload %s", name)) } diff --git a/pkg/storage/bucket/s3/bucket_client_test.go b/pkg/storage/bucket/s3/bucket_client_test.go index ec757100a0b..50653d32665 100644 --- a/pkg/storage/bucket/s3/bucket_client_test.go +++ b/pkg/storage/bucket/s3/bucket_client_test.go @@ -184,8 +184,12 @@ type mockBucket struct { calledCount int } +func (m *mockBucket) Provider() objstore.ObjProvider { + return objstore.FILESYSTEM +} + // Upload mocks objstore.Bucket.Upload() -func (m *mockBucket) Upload(ctx context.Context, name string, r io.Reader) error { +func (m *mockBucket) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { var buf bytes.Buffer if _, err := buf.ReadFrom(r); err != nil { return err diff --git a/pkg/storage/bucket/sse_bucket_client.go b/pkg/storage/bucket/sse_bucket_client.go index 873b74e74a8..1f645ab6577 100644 --- a/pkg/storage/bucket/sse_bucket_client.go +++ b/pkg/storage/bucket/sse_bucket_client.go @@ -51,7 +51,7 @@ func (b *SSEBucketClient) Close() error { } // Upload the contents of the reader as an object into the bucket. -func (b *SSEBucketClient) Upload(ctx context.Context, name string, r io.Reader) error { +func (b *SSEBucketClient) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { if sse, err := b.getCustomS3SSEConfig(); err != nil { return err } else if sse != nil { @@ -60,7 +60,11 @@ func (b *SSEBucketClient) Upload(ctx context.Context, name string, r io.Reader) ctx = s3.ContextWithSSEConfig(ctx, sse) } - return b.bucket.Upload(ctx, name, r) + return b.bucket.Upload(ctx, name, r, opts...) 
+} + +func (b *SSEBucketClient) Provider() objstore.ObjProvider { + return b.bucket.Provider() } // Delete implements objstore.Bucket. diff --git a/pkg/storage/tsdb/bucketindex/block_ids_fetcher.go b/pkg/storage/tsdb/bucketindex/block_ids_fetcher.go index 51a333c60c1..f942b7009a9 100644 --- a/pkg/storage/tsdb/bucketindex/block_ids_fetcher.go +++ b/pkg/storage/tsdb/bucketindex/block_ids_fetcher.go @@ -33,20 +33,20 @@ func NewBlockLister(logger log.Logger, bkt objstore.Bucket, userID string, cfgPr } } -func (f *BlockLister) GetActiveAndPartialBlockIDs(ctx context.Context, ch chan<- ulid.ULID) (partialBlocks map[ulid.ULID]bool, err error) { +func (f *BlockLister) GetActiveAndPartialBlockIDs(ctx context.Context, activeBlocks chan<- block.ActiveBlockFetchData) (partialBlocks map[ulid.ULID]bool, err error) { // Fetch the bucket index. idx, err := ReadIndex(ctx, f.bkt, f.userID, f.cfgProvider, f.logger) if errors.Is(err, ErrIndexNotFound) { // This is a legit case happening when the first blocks of a tenant have recently been uploaded by ingesters // and their bucket index has not been created yet. // Fallback to BaseBlockIDsFetcher. - return f.baseLister.GetActiveAndPartialBlockIDs(ctx, ch) + return f.baseLister.GetActiveAndPartialBlockIDs(ctx, activeBlocks) } if errors.Is(err, ErrIndexCorrupted) { // In case a single tenant bucket index is corrupted, we want to return empty active blocks and parital blocks, so skipping this compaction cycle level.Error(f.logger).Log("msg", "corrupted bucket index found", "user", f.userID, "err", err) // Fallback to BaseBlockIDsFetcher. - return f.baseLister.GetActiveAndPartialBlockIDs(ctx, ch) + return f.baseLister.GetActiveAndPartialBlockIDs(ctx, activeBlocks) } if errors.Is(err, bucket.ErrCustomerManagedKeyAccessDenied) { @@ -73,7 +73,7 @@ func (f *BlockLister) GetActiveAndPartialBlockIDs(ctx context.Context, ch chan<- select { case <-ctx.Done(): return nil, ctx.Err() - case ch <- b.ID: + case activeBlocks <- block.ActiveBlockFetchData{ULID: b.ID}: } } return nil, nil diff --git a/pkg/storage/tsdb/bucketindex/block_ids_fetcher_test.go b/pkg/storage/tsdb/bucketindex/block_ids_fetcher_test.go index c3673d287ee..04c807f6d9d 100644 --- a/pkg/storage/tsdb/bucketindex/block_ids_fetcher_test.go +++ b/pkg/storage/tsdb/bucketindex/block_ids_fetcher_test.go @@ -13,6 +13,7 @@ import ( "github.com/go-kit/log" "github.com/oklog/ulid/v2" "github.com/stretchr/testify/require" + "github.com/thanos-io/thanos/pkg/block" "github.com/thanos-io/thanos/pkg/block/metadata" cortex_testutil "github.com/cortexproject/cortex/pkg/storage/tsdb/testutil" @@ -44,14 +45,14 @@ func TestBlockIDsFetcher_Fetch(t *testing.T) { })) blockIdsFetcher := NewBlockLister(logger, bkt, userID, nil) - ch := make(chan ulid.ULID) + ch := make(chan block.ActiveBlockFetchData) var wg sync.WaitGroup var blockIds []ulid.ULID wg.Add(1) go func() { defer wg.Done() for id := range ch { - blockIds = append(blockIds, id) + blockIds = append(blockIds, id.ULID) } }() _, err := blockIdsFetcher.GetActiveAndPartialBlockIDs(ctx, ch) @@ -96,14 +97,14 @@ func TestBlockIDsFetcherFetcher_Fetch_NoBucketIndex(t *testing.T) { require.NoError(t, bkt.Upload(ctx, path.Join(userID, mark.ID.String(), metadata.DeletionMarkFilename), &buf)) } blockIdsFetcher := NewBlockLister(logger, bkt, userID, nil) - ch := make(chan ulid.ULID) + ch := make(chan block.ActiveBlockFetchData) var wg sync.WaitGroup var blockIds []ulid.ULID wg.Add(1) go func() { defer wg.Done() for id := range ch { - blockIds = append(blockIds, id) + blockIds = 
append(blockIds, id.ULID) } }() _, err := blockIdsFetcher.GetActiveAndPartialBlockIDs(ctx, ch) diff --git a/pkg/storage/tsdb/bucketindex/markers_bucket_client.go b/pkg/storage/tsdb/bucketindex/markers_bucket_client.go index e2271cc3939..1773db2a680 100644 --- a/pkg/storage/tsdb/bucketindex/markers_bucket_client.go +++ b/pkg/storage/tsdb/bucketindex/markers_bucket_client.go @@ -24,11 +24,15 @@ func BucketWithGlobalMarkers(b objstore.InstrumentedBucket) objstore.Instrumente } } +func (b *globalMarkersBucket) Provider() objstore.ObjProvider { + return b.parent.Provider() +} + // Upload implements objstore.Bucket. -func (b *globalMarkersBucket) Upload(ctx context.Context, name string, r io.Reader) error { +func (b *globalMarkersBucket) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { globalMarkPath, ok := b.isMark(name) if !ok { - return b.parent.Upload(ctx, name, r) + return b.parent.Upload(ctx, name, r, opts...) } // Read the marker. @@ -38,12 +42,12 @@ func (b *globalMarkersBucket) Upload(ctx context.Context, name string, r io.Read } // Upload it to the global marker's location. - if err := b.parent.Upload(ctx, globalMarkPath, bytes.NewReader(body)); err != nil { + if err := b.parent.Upload(ctx, globalMarkPath, bytes.NewReader(body), opts...); err != nil { return err } // Upload it to the original location too. - return b.parent.Upload(ctx, name, bytes.NewReader(body)) + return b.parent.Upload(ctx, name, bytes.NewReader(body), opts...) } // Delete implements objstore.Bucket. diff --git a/pkg/storage/tsdb/cached_chunks_querier.go b/pkg/storage/tsdb/cached_chunks_querier.go index e5b230e64be..ab3b11c4fd0 100644 --- a/pkg/storage/tsdb/cached_chunks_querier.go +++ b/pkg/storage/tsdb/cached_chunks_querier.go @@ -61,7 +61,7 @@ func newBlockBaseQuerier(b prom_tsdb.BlockReader, mint, maxt int64) (*blockBaseQ } func (q *blockBaseQuerier) LabelValues(ctx context.Context, name string, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { - res, err := q.index.SortedLabelValues(ctx, name, matchers...) + res, err := q.index.SortedLabelValues(ctx, name, hints, matchers...) return res, nil, err } diff --git a/pkg/storage/tsdb/testutil/objstore.go b/pkg/storage/tsdb/testutil/objstore.go index d879ab2bb42..c2ad987f5c7 100644 --- a/pkg/storage/tsdb/testutil/objstore.go +++ b/pkg/storage/tsdb/testutil/objstore.go @@ -79,7 +79,7 @@ func (m *MockBucketFailure) Get(ctx context.Context, name string) (io.ReadCloser return m.Bucket.Get(ctx, name) } -func (m *MockBucketFailure) Upload(ctx context.Context, name string, r io.Reader) error { +func (m *MockBucketFailure) Upload(ctx context.Context, name string, r io.Reader, opts ...objstore.ObjectUploadOption) error { m.UploadCalls.Add(1) for prefix, err := range m.UploadFailures { if strings.HasPrefix(name, prefix) { @@ -90,7 +90,7 @@ func (m *MockBucketFailure) Upload(ctx context.Context, name string, r io.Reader return e } - return m.Bucket.Upload(ctx, name, r) + return m.Bucket.Upload(ctx, name, r, opts...) 
} func (m *MockBucketFailure) WithExpectedErrs(expectedFunc objstore.IsOpFailureExpectedFunc) objstore.Bucket { diff --git a/pkg/storegateway/bucket_index_metadata_fetcher_test.go b/pkg/storegateway/bucket_index_metadata_fetcher_test.go index 9a7f7dd562a..8bd23eaa44a 100644 --- a/pkg/storegateway/bucket_index_metadata_fetcher_test.go +++ b/pkg/storegateway/bucket_index_metadata_fetcher_test.go @@ -86,6 +86,7 @@ func TestBucketIndexMetadataFetcher_Fetch(t *testing.T) { blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 @@ -134,6 +135,7 @@ func TestBucketIndexMetadataFetcher_Fetch_KeyPermissionDenied(t *testing.T) { blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 # HELP blocks_meta_syncs_total Total blocks metadata synchronization attempts @@ -185,6 +187,7 @@ func TestBucketIndexMetadataFetcher_Fetch_NoBucketIndex(t *testing.T) { blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 1 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 @@ -240,6 +243,7 @@ func TestBucketIndexMetadataFetcher_Fetch_CorruptedBucketIndex(t *testing.T) { blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 @@ -287,6 +291,7 @@ func TestBucketIndexMetadataFetcher_Fetch_ShouldResetGaugeMetrics(t *testing.T) blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 `), "blocks_meta_synced")) @@ -311,6 +316,7 @@ func TestBucketIndexMetadataFetcher_Fetch_ShouldResetGaugeMetrics(t *testing.T) blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 1 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 `), "blocks_meta_synced")) @@ -343,6 +349,7 @@ func TestBucketIndexMetadataFetcher_Fetch_ShouldResetGaugeMetrics(t *testing.T) blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 `), "blocks_meta_synced")) @@ -369,6 +376,7 @@ func TestBucketIndexMetadataFetcher_Fetch_ShouldResetGaugeMetrics(t *testing.T) blocks_meta_synced{state="marked-for-no-compact"} 0 blocks_meta_synced{state="no-bucket-index"} 0 blocks_meta_synced{state="no-meta-json"} 0 + blocks_meta_synced{state="parquet-migrated"} 0 blocks_meta_synced{state="time-excluded"} 0 blocks_meta_synced{state="too-fresh"} 0 `), "blocks_meta_synced")) diff --git 
a/pkg/storegateway/bucket_stores_test.go b/pkg/storegateway/bucket_stores_test.go index 69c018ccfa4..674a2bae27b 100644 --- a/pkg/storegateway/bucket_stores_test.go +++ b/pkg/storegateway/bucket_stores_test.go @@ -659,6 +659,7 @@ func TestBucketStores_SyncBlocksWithIgnoreBlocksBefore(t *testing.T) { cortex_blocks_meta_synced{state="marked-for-deletion"} 0 cortex_blocks_meta_synced{state="marked-for-no-compact"} 0 cortex_blocks_meta_synced{state="no-meta-json"} 0 + cortex_blocks_meta_synced{state="parquet-migrated"} 0 cortex_blocks_meta_synced{state="time-excluded"} 1 cortex_blocks_meta_synced{state="too-fresh"} 0 # HELP cortex_blocks_meta_syncs_total Total blocks metadata synchronization attempts @@ -701,7 +702,7 @@ func generateStorageBlock(t *testing.T, storageDir, userID string, metricName st require.NoError(t, db.Close()) }() - series := labels.Labels{labels.Label{Name: labels.MetricName, Value: metricName}} + series := labels.FromStrings(labels.MetricName, metricName) app := db.Appender(context.Background()) for ts := minT; ts < maxT; ts += int64(step) { diff --git a/pkg/storegateway/gateway_test.go b/pkg/storegateway/gateway_test.go index 57bccae5fe3..b9070c236e7 100644 --- a/pkg/storegateway/gateway_test.go +++ b/pkg/storegateway/gateway_test.go @@ -1299,7 +1299,7 @@ func mockTSDB(t *testing.T, dir string, numSeries, numBlocks int, minT, maxT int step := (maxT - minT) / int64(numSeries) ctx := context.Background() addSample := func(i int) { - lbls := labels.Labels{labels.Label{Name: "series_id", Value: strconv.Itoa(i)}} + lbls := labels.FromStrings("series_id", strconv.Itoa(i)) app := db.Appender(ctx) _, err := app.Append(0, lbls, minT+(step*int64(i)), float64(i)) diff --git a/pkg/util/labels.go b/pkg/util/labels.go index c1bc12653f7..2e78a0aa905 100644 --- a/pkg/util/labels.go +++ b/pkg/util/labels.go @@ -10,10 +10,10 @@ import ( // LabelsToMetric converts a Labels to Metric // Don't do this on any performance sensitive paths. func LabelsToMetric(ls labels.Labels) model.Metric { - m := make(model.Metric, len(ls)) - for _, l := range ls { + m := make(model.Metric, ls.Len()) + ls.Range(func(l labels.Label) { m[model.LabelName(l.Name)] = model.LabelValue(l.Value) - } + }) return m } diff --git a/pkg/util/metrics_helper.go b/pkg/util/metrics_helper.go index 0a823920fdc..e5f9e7fb76b 100644 --- a/pkg/util/metrics_helper.go +++ b/pkg/util/metrics_helper.go @@ -723,7 +723,7 @@ func (r *UserRegistries) BuildMetricFamiliesPerUser() MetricFamiliesPerUser { // FromLabelPairsToLabels converts dto.LabelPair into labels.Labels. 
func FromLabelPairsToLabels(pairs []*dto.LabelPair) labels.Labels { - builder := labels.NewBuilder(nil) + builder := labels.NewBuilder(labels.EmptyLabels()) for _, pair := range pairs { builder.Set(pair.GetName(), pair.GetValue()) } @@ -770,7 +770,7 @@ func GetLabels(c prometheus.Collector, filter map[string]string) ([]labels.Label errs := tsdb_errors.NewMulti() var result []labels.Labels dtoMetric := &dto.Metric{} - lbls := labels.NewBuilder(nil) + lbls := labels.NewBuilder(labels.EmptyLabels()) nextMetric: for m := range ch { @@ -781,7 +781,7 @@ nextMetric: continue } - lbls.Reset(nil) + lbls.Reset(labels.EmptyLabels()) for _, lp := range dtoMetric.Label { n := lp.GetName() v := lp.GetValue() diff --git a/pkg/util/push/otlp.go b/pkg/util/push/otlp.go index e328f1ae712..9fa05148abc 100644 --- a/pkg/util/push/otlp.go +++ b/pkg/util/push/otlp.go @@ -10,6 +10,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" + "github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/prompb" "github.com/prometheus/prometheus/storage/remote/otlptranslator/prometheusremotewrite" @@ -187,7 +188,9 @@ func convertToPromTS(ctx context.Context, pmetrics pmetric.Metrics, cfg distribu if cfg.ConvertAllAttributes { annots, err = promConverter.FromMetrics(ctx, convertToMetricsAttributes(pmetrics), settings) } else { - settings.PromoteResourceAttributes = overrides.PromoteResourceAttributes(userID) + settings.PromoteResourceAttributes = prometheusremotewrite.NewPromoteResourceAttributes(config.OTLPConfig{ + PromoteResourceAttributes: overrides.PromoteResourceAttributes(userID), + }) annots, err = promConverter.FromMetrics(ctx, pmetrics, settings) } @@ -205,11 +208,11 @@ func convertToPromTS(ctx context.Context, pmetrics pmetric.Metrics, cfg distribu } func makeLabels(in []prompb.Label) []cortexpb.LabelAdapter { - out := make(labels.Labels, 0, len(in)) + builder := labels.NewBuilder(labels.EmptyLabels()) for _, l := range in { - out = append(out, labels.Label{Name: l.Name, Value: l.Value}) + builder.Set(l.Name, l.Value) } - return cortexpb.FromLabelsToLabelAdapters(out) + return cortexpb.FromLabelsToLabelAdapters(builder.Labels()) } func makeSamples(in []prompb.Sample) []cortexpb.Sample { diff --git a/pkg/util/validation/limits.go b/pkg/util/validation/limits.go index fcd96fea36b..5d999230dd1 100644 --- a/pkg/util/validation/limits.go +++ b/pkg/util/validation/limits.go @@ -1202,11 +1202,16 @@ outer: defaultPartitionIndex = i continue } - for _, lbl := range lbls.LabelSet { + found := true + lbls.LabelSet.Range(func(l labels.Label) { // We did not find some of the labels on the set - if v := metric.Get(lbl.Name); v != lbl.Value { - continue outer + if v := metric.Get(l.Name); v != l.Value { + found = false } + }) + + if !found { + continue outer } r = append(r, lbls) } diff --git a/pkg/util/validation/limits_test.go b/pkg/util/validation/limits_test.go index 308067e959e..260686fdb50 100644 --- a/pkg/util/validation/limits_test.go +++ b/pkg/util/validation/limits_test.go @@ -116,11 +116,11 @@ func TestLimits_Validate(t *testing.T) { expected: errMaxLocalNativeHistogramSeriesPerUserValidation, }, "external-labels invalid label name": { - limits: Limits{RulerExternalLabels: labels.Labels{{Name: "123invalid", Value: "good"}}}, + limits: Limits{RulerExternalLabels: labels.FromStrings("123invalid", "good")}, expected: errInvalidLabelName, }, "external-labels invalid label value": { - limits: Limits{RulerExternalLabels: labels.Labels{{Name: 
"good", Value: string([]byte{0xff, 0xfe, 0xfd})}}}, + limits: Limits{RulerExternalLabels: labels.FromStrings("good", string([]byte{0xff, 0xfe, 0xfd}))}, expected: errInvalidLabelValue, }, } diff --git a/vendor/cloud.google.com/go/auth/CHANGES.md b/vendor/cloud.google.com/go/auth/CHANGES.md index 500c34cf445..66131916eb7 100644 --- a/vendor/cloud.google.com/go/auth/CHANGES.md +++ b/vendor/cloud.google.com/go/auth/CHANGES.md @@ -1,5 +1,34 @@ # Changelog +## [0.16.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.16.1...auth/v0.16.2) (2025-06-04) + + +### Bug Fixes + +* **auth:** Add back DirectPath misconfiguration logging ([#11162](https://github.com/googleapis/google-cloud-go/issues/11162)) ([8d52da5](https://github.com/googleapis/google-cloud-go/commit/8d52da58da5a0ed77a0f6307d1b561bc045406a1)) +* **auth:** Remove s2a fallback option ([#12354](https://github.com/googleapis/google-cloud-go/issues/12354)) ([d5acc59](https://github.com/googleapis/google-cloud-go/commit/d5acc599cd775ddc404349e75906fa02e8ff133e)) + +## [0.16.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.16.0...auth/v0.16.1) (2025-04-23) + + +### Bug Fixes + +* **auth:** Clone detectopts before assigning TokenBindingType ([#11881](https://github.com/googleapis/google-cloud-go/issues/11881)) ([2167b02](https://github.com/googleapis/google-cloud-go/commit/2167b020fdc43b517c2b6ecca264a10e357ea035)) + +## [0.16.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.15.0...auth/v0.16.0) (2025-04-14) + + +### Features + +* **auth/credentials:** Return X.509 certificate chain as subject token ([#11948](https://github.com/googleapis/google-cloud-go/issues/11948)) ([d445a3f](https://github.com/googleapis/google-cloud-go/commit/d445a3f66272ffd5c39c4939af9bebad4582631c)), refs [#11757](https://github.com/googleapis/google-cloud-go/issues/11757) +* **auth:** Configure DirectPath bound credentials from AllowedHardBoundTokens ([#11665](https://github.com/googleapis/google-cloud-go/issues/11665)) ([0fc40bc](https://github.com/googleapis/google-cloud-go/commit/0fc40bcf4e4673704df0973e9fa65957395d7bb4)) + + +### Bug Fixes + +* **auth:** Allow non-default SA credentials for DP ([#11828](https://github.com/googleapis/google-cloud-go/issues/11828)) ([3a996b4](https://github.com/googleapis/google-cloud-go/commit/3a996b4129e6d0a34dfda6671f535d5aefb26a82)) +* **auth:** Restore calling DialContext ([#11930](https://github.com/googleapis/google-cloud-go/issues/11930)) ([9ec9a29](https://github.com/googleapis/google-cloud-go/commit/9ec9a29494e93197edbaf45aba28984801e9770a)), refs [#11118](https://github.com/googleapis/google-cloud-go/issues/11118) + ## [0.15.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.14.1...auth/v0.15.0) (2025-02-19) diff --git a/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/externalaccount.go b/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/externalaccount.go index a8220642348..f4f49f175dc 100644 --- a/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/externalaccount.go +++ b/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/externalaccount.go @@ -413,7 +413,10 @@ func newSubjectTokenProvider(o *Options) (subjectTokenProvider, error) { if cert.UseDefaultCertificateConfig && cert.CertificateConfigLocation != "" { return nil, errors.New("credentials: \"certificate\" object cannot specify both a certificate_config_location and use_default_certificate_config=true") } - return &x509Provider{}, 
nil + return &x509Provider{ + TrustChainPath: o.CredentialSource.Certificate.TrustChainPath, + ConfigFilePath: o.CredentialSource.Certificate.CertificateConfigLocation, + }, nil } return nil, errors.New("credentials: unable to parse credential source") } diff --git a/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/x509_provider.go b/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/x509_provider.go index 115df5881f1..d86ca593c8c 100644 --- a/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/x509_provider.go +++ b/vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/x509_provider.go @@ -17,27 +17,184 @@ package externalaccount import ( "context" "crypto/tls" + "crypto/x509" + "encoding/base64" + "encoding/json" + "encoding/pem" + "errors" + "fmt" + "io/fs" "net/http" + "os" + "strings" "time" "cloud.google.com/go/auth/internal/transport/cert" ) -// x509Provider implements the subjectTokenProvider type for -// x509 workload identity credentials. Because x509 credentials -// rely on an mTLS connection to represent the 3rd party identity -// rather than a subject token, this provider will always return -// an empty string when a subject token is requested by the external account -// token provider. +// x509Provider implements the subjectTokenProvider type for x509 workload +// identity credentials. This provider retrieves and formats a JSON array +// containing the leaf certificate and trust chain (if provided) as +// base64-encoded strings. This JSON array serves as the subject token for +// mTLS authentication. type x509Provider struct { + // TrustChainPath is the path to the file containing the trust chain certificates. + // The file should contain one or more PEM-encoded certificates. + TrustChainPath string + // ConfigFilePath is the path to the configuration file containing the path + // to the leaf certificate file. + ConfigFilePath string } +const pemCertificateHeader = "-----BEGIN CERTIFICATE-----" + func (xp *x509Provider) providerType() string { return x509ProviderType } -func (xp *x509Provider) subjectToken(ctx context.Context) (string, error) { - return "", nil +// loadLeafCertificate loads and parses the leaf certificate from the specified +// configuration file. It retrieves the certificate path from the config file, +// reads the certificate file, and parses the certificate data. +func loadLeafCertificate(configFilePath string) (*x509.Certificate, error) { + // Get the path to the certificate file from the configuration file. + path, err := cert.GetCertificatePath(configFilePath) + if err != nil { + return nil, fmt.Errorf("failed to get certificate path from config file: %w", err) + } + leafCertBytes, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("failed to read leaf certificate file: %w", err) + } + // Parse the certificate bytes. + return parseCertificate(leafCertBytes) +} + +// encodeCert encodes a x509.Certificate to a base64 string. +func encodeCert(cert *x509.Certificate) string { + // cert.Raw contains the raw DER-encoded certificate. Encode the raw certificate bytes to base64. + return base64.StdEncoding.EncodeToString(cert.Raw) +} + +// parseCertificate parses a PEM-encoded certificate from the given byte slice. +func parseCertificate(certData []byte) (*x509.Certificate, error) { + if len(certData) == 0 { + return nil, errors.New("invalid certificate data: empty input") + } + // Decode the PEM-encoded data. 
+ block, _ := pem.Decode(certData) + if block == nil { + return nil, errors.New("invalid PEM-encoded certificate data: no PEM block found") + } + if block.Type != "CERTIFICATE" { + return nil, fmt.Errorf("invalid PEM-encoded certificate data: expected CERTIFICATE block type, got %s", block.Type) + } + // Parse the DER-encoded certificate. + certificate, err := x509.ParseCertificate(block.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse certificate: %w", err) + } + return certificate, nil +} + +// readTrustChain reads a file of PEM-encoded X.509 certificates and returns a slice of parsed certificates. +// It splits the file content into PEM certificate blocks and parses each one. +func readTrustChain(trustChainPath string) ([]*x509.Certificate, error) { + certificateTrustChain := []*x509.Certificate{} + + // If no trust chain path is provided, return an empty slice. + if trustChainPath == "" { + return certificateTrustChain, nil + } + + // Read the trust chain file. + trustChainData, err := os.ReadFile(trustChainPath) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return nil, fmt.Errorf("trust chain file not found: %w", err) + } + return nil, fmt.Errorf("failed to read trust chain file: %w", err) + } + + // Split the file content into PEM certificate blocks. + certBlocks := strings.Split(string(trustChainData), pemCertificateHeader) + + // Iterate over each certificate block. + for _, certBlock := range certBlocks { + // Trim whitespace from the block. + certBlock = strings.TrimSpace(certBlock) + + if certBlock != "" { + // Add the PEM header to the block. + certData := pemCertificateHeader + "\n" + certBlock + + // Parse the certificate data. + cert, err := parseCertificate([]byte(certData)) + if err != nil { + return nil, fmt.Errorf("error parsing certificate from trust chain file: %w", err) + } + + // Append the certificate to the trust chain. + certificateTrustChain = append(certificateTrustChain, cert) + } + } + + return certificateTrustChain, nil +} + +// subjectToken retrieves the X.509 subject token. It loads the leaf +// certificate and, if a trust chain path is configured, the trust chain +// certificates. It then constructs a JSON array containing the base64-encoded +// leaf certificate and each base64-encoded certificate in the trust chain. +// The leaf certificate must be at the top of the trust chain file. This JSON +// array is used as the subject token for mTLS authentication. +func (xp *x509Provider) subjectToken(context.Context) (string, error) { + // Load the leaf certificate. + leafCert, err := loadLeafCertificate(xp.ConfigFilePath) + if err != nil { + return "", fmt.Errorf("failed to load leaf certificate: %w", err) + } + + // Read the trust chain. + trustChain, err := readTrustChain(xp.TrustChainPath) + if err != nil { + return "", fmt.Errorf("failed to read trust chain: %w", err) + } + + // Initialize the certificate chain with the leaf certificate. + certChain := []string{encodeCert(leafCert)} + + // If there is a trust chain, add certificates to the certificate chain. + if len(trustChain) > 0 { + firstCert := encodeCert(trustChain[0]) + + // If the first certificate in the trust chain is not the same as the leaf certificate, add it to the chain. + if firstCert != certChain[0] { + certChain = append(certChain, firstCert) + } + + // Iterate over the remaining certificates in the trust chain. 
+ for i := 1; i < len(trustChain); i++ { + encoded := encodeCert(trustChain[i]) + + // Return an error if the current certificate is the same as the leaf certificate. + if encoded == certChain[0] { + return "", errors.New("the leaf certificate must be at the top of the trust chain file") + } + + // Add the current certificate to the chain. + certChain = append(certChain, encoded) + } + } + + // Convert the certificate chain to a JSON array of base64-encoded strings. + jsonChain, err := json.Marshal(certChain) + if err != nil { + return "", fmt.Errorf("failed to format certificate data: %w", err) + } + + // Return the JSON-formatted certificate chain. + return string(jsonChain), nil + } // createX509Client creates a new client that is configured with mTLS, using the diff --git a/vendor/cloud.google.com/go/auth/grpctransport/directpath.go b/vendor/cloud.google.com/go/auth/grpctransport/directpath.go index c541da2b1ac..69d6d0034e4 100644 --- a/vendor/cloud.google.com/go/auth/grpctransport/directpath.go +++ b/vendor/cloud.google.com/go/auth/grpctransport/directpath.go @@ -20,13 +20,18 @@ import ( "os" "strconv" "strings" + "time" "cloud.google.com/go/auth" + "cloud.google.com/go/auth/credentials" "cloud.google.com/go/auth/internal/compute" + "golang.org/x/time/rate" "google.golang.org/grpc" grpcgoogle "google.golang.org/grpc/credentials/google" ) +var logRateLimiter = rate.Sometimes{Interval: 1 * time.Second} + func isDirectPathEnabled(endpoint string, opts *Options) bool { if opts.InternalOptions != nil && !opts.InternalOptions.EnableDirectPath { return false @@ -97,14 +102,36 @@ func isDirectPathXdsUsed(o *Options) bool { return false } +func isDirectPathBoundTokenEnabled(opts *InternalOptions) bool { + for _, ev := range opts.AllowHardBoundTokens { + if ev == "ALTS" { + return true + } + } + return false +} + // configureDirectPath returns some dial options and an endpoint to use if the // configuration allows the use of direct path. If it does not the provided // grpcOpts and endpoint are returned. -func configureDirectPath(grpcOpts []grpc.DialOption, opts *Options, endpoint string, creds *auth.Credentials) ([]grpc.DialOption, string) { +func configureDirectPath(grpcOpts []grpc.DialOption, opts *Options, endpoint string, creds *auth.Credentials) ([]grpc.DialOption, string, error) { + logRateLimiter.Do(func() { + logDirectPathMisconfig(endpoint, creds, opts) + }) if isDirectPathEnabled(endpoint, opts) && compute.OnComputeEngine() && isTokenProviderDirectPathCompatible(creds, opts) { // Overwrite all of the previously specific DialOptions, DirectPath uses its own set of credentials and certificates. 
+ defaultCredetialsOptions := grpcgoogle.DefaultCredentialsOptions{PerRPCCreds: &grpcCredentialsProvider{creds: creds}} + if isDirectPathBoundTokenEnabled(opts.InternalOptions) && isTokenProviderComputeEngine(creds) { + optsClone := opts.resolveDetectOptions() + optsClone.TokenBindingType = credentials.ALTSHardBinding + altsCreds, err := credentials.DetectDefault(optsClone) + if err != nil { + return nil, "", err + } + defaultCredetialsOptions.ALTSPerRPCCreds = &grpcCredentialsProvider{creds: altsCreds} + } grpcOpts = []grpc.DialOption{ - grpc.WithCredentialsBundle(grpcgoogle.NewDefaultCredentialsWithOptions(grpcgoogle.DefaultCredentialsOptions{PerRPCCreds: &grpcCredentialsProvider{creds: creds}}))} + grpc.WithCredentialsBundle(grpcgoogle.NewDefaultCredentialsWithOptions(defaultCredetialsOptions))} if timeoutDialerOption != nil { grpcOpts = append(grpcOpts, timeoutDialerOption) } @@ -129,5 +156,22 @@ func configureDirectPath(grpcOpts []grpc.DialOption, opts *Options, endpoint str } // TODO: add support for system parameters (quota project, request reason) via chained interceptor. } - return grpcOpts, endpoint + return grpcOpts, endpoint, nil +} + +func logDirectPathMisconfig(endpoint string, creds *auth.Credentials, o *Options) { + + // Case 1: does not enable DirectPath + if !isDirectPathEnabled(endpoint, o) { + o.logger().Warn("DirectPath is disabled. To enable, please set the EnableDirectPath option along with the EnableDirectPathXds option.") + } else { + // Case 2: credential is not correctly set + if !isTokenProviderDirectPathCompatible(creds, o) { + o.logger().Warn("DirectPath is disabled. Please make sure the token source is fetched from GCE metadata server and the default service account is used.") + } + // Case 3: not running on GCE + if !compute.OnComputeEngine() { + o.logger().Warn("DirectPath is disabled. DirectPath is only available in a GCE environment.") + } + } } diff --git a/vendor/cloud.google.com/go/auth/grpctransport/grpctransport.go b/vendor/cloud.google.com/go/auth/grpctransport/grpctransport.go index 4610a485511..834aef41c87 100644 --- a/vendor/cloud.google.com/go/auth/grpctransport/grpctransport.go +++ b/vendor/cloud.google.com/go/auth/grpctransport/grpctransport.go @@ -304,17 +304,18 @@ func dial(ctx context.Context, secure bool, opts *Options) (*grpc.ClientConn, er // This condition is only met for non-DirectPath clients because // TransportTypeMTLSS2A is used only when InternalOptions.EnableDirectPath // is false. + optsClone := opts.resolveDetectOptions() if transportCreds.TransportType == transport.TransportTypeMTLSS2A { // Check that the client allows requesting hard-bound token for the transport type mTLS using S2A. 
for _, ev := range opts.InternalOptions.AllowHardBoundTokens { if ev == "MTLS_S2A" { - opts.DetectOpts.TokenBindingType = credentials.MTLSHardBinding + optsClone.TokenBindingType = credentials.MTLSHardBinding break } } } var err error - creds, err = credentials.DetectDefault(opts.resolveDetectOptions()) + creds, err = credentials.DetectDefault(optsClone) if err != nil { return nil, err } @@ -341,7 +342,10 @@ func dial(ctx context.Context, secure bool, opts *Options) (*grpc.ClientConn, er }), ) // Attempt Direct Path - grpcOpts, transportCreds.Endpoint = configureDirectPath(grpcOpts, opts, transportCreds.Endpoint, creds) + grpcOpts, transportCreds.Endpoint, err = configureDirectPath(grpcOpts, opts, transportCreds.Endpoint, creds) + if err != nil { + return nil, err + } } // Add tracing, but before the other options, so that clients can override the @@ -350,7 +354,7 @@ func dial(ctx context.Context, secure bool, opts *Options) (*grpc.ClientConn, er grpcOpts = addOpenTelemetryStatsHandler(grpcOpts, opts) grpcOpts = append(grpcOpts, opts.GRPCDialOpts...) - return grpc.Dial(transportCreds.Endpoint, grpcOpts...) + return grpc.DialContext(ctx, transportCreds.Endpoint, grpcOpts...) } // grpcKeyProvider satisfies https://pkg.go.dev/google.golang.org/grpc/credentials#PerRPCCredentials. diff --git a/vendor/cloud.google.com/go/auth/internal/credsfile/filetype.go b/vendor/cloud.google.com/go/auth/internal/credsfile/filetype.go index 3be6e5bbb41..606347304cb 100644 --- a/vendor/cloud.google.com/go/auth/internal/credsfile/filetype.go +++ b/vendor/cloud.google.com/go/auth/internal/credsfile/filetype.go @@ -127,6 +127,7 @@ type ExecutableConfig struct { type CertificateConfig struct { UseDefaultCertificateConfig bool `json:"use_default_certificate_config"` CertificateConfigLocation string `json:"certificate_config_location"` + TrustChainPath string `json:"trust_chain_path"` } // ServiceAccountImpersonationInfo has impersonation configuration. diff --git a/vendor/cloud.google.com/go/auth/internal/transport/cba.go b/vendor/cloud.google.com/go/auth/internal/transport/cba.go index b1f0fcf9374..14bca966ecc 100644 --- a/vendor/cloud.google.com/go/auth/internal/transport/cba.go +++ b/vendor/cloud.google.com/go/auth/internal/transport/cba.go @@ -31,7 +31,6 @@ import ( "cloud.google.com/go/auth/internal" "cloud.google.com/go/auth/internal/transport/cert" "github.com/google/s2a-go" - "github.com/google/s2a-go/fallback" "google.golang.org/grpc/credentials" ) @@ -170,18 +169,9 @@ func GetGRPCTransportCredsAndEndpoint(opts *Options) (*GRPCTransportCredentials, return &GRPCTransportCredentials{defaultTransportCreds, config.endpoint, TransportTypeUnknown}, nil } - var fallbackOpts *s2a.FallbackOptions - // In case of S2A failure, fall back to the endpoint that would've been used without S2A. - if fallbackHandshake, err := fallback.DefaultFallbackClientHandshakeFunc(config.endpoint); err == nil { - fallbackOpts = &s2a.FallbackOptions{ - FallbackClientHandshakeFunc: fallbackHandshake, - } - } - s2aTransportCreds, err := s2a.NewClientCreds(&s2a.ClientOptions{ S2AAddress: s2aAddr, TransportCreds: transportCredsForS2A, - FallbackOpts: fallbackOpts, }) if err != nil { // Use default if we cannot initialize S2A client transport credentials. @@ -218,23 +208,9 @@ func GetHTTPTransportConfig(opts *Options) (cert.Provider, func(context.Context, return config.clientCertSource, nil, nil } - var fallbackOpts *s2a.FallbackOptions - // In case of S2A failure, fall back to the endpoint that would've been used without S2A. 
- if fallbackURL, err := url.Parse(config.endpoint); err == nil { - if fallbackDialer, fallbackServerAddr, err := fallback.DefaultFallbackDialerAndAddress(fallbackURL.Hostname()); err == nil { - fallbackOpts = &s2a.FallbackOptions{ - FallbackDialer: &s2a.FallbackDialer{ - Dialer: fallbackDialer, - ServerAddr: fallbackServerAddr, - }, - } - } - } - dialTLSContextFunc := s2a.NewS2ADialTLSContextFunc(&s2a.ClientOptions{ S2AAddress: s2aAddr, TransportCreds: transportCredsForS2A, - FallbackOpts: fallbackOpts, }) return nil, dialTLSContextFunc, nil } diff --git a/vendor/cloud.google.com/go/auth/internal/transport/cert/workload_cert.go b/vendor/cloud.google.com/go/auth/internal/transport/cert/workload_cert.go index 347aaced721..b2a3be23c74 100644 --- a/vendor/cloud.google.com/go/auth/internal/transport/cert/workload_cert.go +++ b/vendor/cloud.google.com/go/auth/internal/transport/cert/workload_cert.go @@ -37,6 +37,36 @@ type certificateConfig struct { CertConfigs certConfigs `json:"cert_configs"` } +// getconfigFilePath determines the path to the certificate configuration file. +// It first checks for the presence of an environment variable that specifies +// the file path. If the environment variable is not set, it falls back to +// a default configuration file path. +func getconfigFilePath() string { + envFilePath := util.GetConfigFilePathFromEnv() + if envFilePath != "" { + return envFilePath + } + return util.GetDefaultConfigFilePath() + +} + +// GetCertificatePath retrieves the certificate file path from the provided +// configuration file. If the configFilePath is empty, it attempts to load +// the configuration from a well-known gcloud location. +// This function is exposed to allow other packages, such as the +// externalaccount package, to retrieve the certificate path without needing +// to load the entire certificate configuration. +func GetCertificatePath(configFilePath string) (string, error) { + if configFilePath == "" { + configFilePath = getconfigFilePath() + } + certFile, _, err := getCertAndKeyFiles(configFilePath) + if err != nil { + return "", err + } + return certFile, nil +} + // NewWorkloadX509CertProvider creates a certificate source // that reads a certificate and private key file from the local file system. // This is intended to be used for workload identity federation. @@ -47,14 +77,8 @@ type certificateConfig struct { // a well-known gcloud location. 
func NewWorkloadX509CertProvider(configFilePath string) (Provider, error) { if configFilePath == "" { - envFilePath := util.GetConfigFilePathFromEnv() - if envFilePath != "" { - configFilePath = envFilePath - } else { - configFilePath = util.GetDefaultConfigFilePath() - } + configFilePath = getconfigFilePath() } - certFile, keyFile, err := getCertAndKeyFiles(configFilePath) if err != nil { return nil, err diff --git a/vendor/cloud.google.com/go/iam/CHANGES.md b/vendor/cloud.google.com/go/iam/CHANGES.md index 6bfd910506e..7839f3b8951 100644 --- a/vendor/cloud.google.com/go/iam/CHANGES.md +++ b/vendor/cloud.google.com/go/iam/CHANGES.md @@ -1,6 +1,50 @@ # Changes +## [1.5.2](https://github.com/googleapis/google-cloud-go/compare/iam/v1.5.1...iam/v1.5.2) (2025-04-15) + + +### Bug Fixes + +* **iam:** Update google.golang.org/api to 0.229.0 ([3319672](https://github.com/googleapis/google-cloud-go/commit/3319672f3dba84a7150772ccb5433e02dab7e201)) + +## [1.5.1](https://github.com/googleapis/google-cloud-go/compare/iam/v1.5.0...iam/v1.5.1) (2025-04-15) + + +### Documentation + +* **iam:** Formatting update for ListPolicyBindingsRequest ([dfdf404](https://github.com/googleapis/google-cloud-go/commit/dfdf404138728724aa6305c5c465ecc6fe5b1264)) +* **iam:** Minor doc update for ListPrincipalAccessBoundaryPoliciesResponse ([20f762c](https://github.com/googleapis/google-cloud-go/commit/20f762c528726a3f038d3e1f37e8a4952118badf)) +* **iam:** Minor doc update for ListPrincipalAccessBoundaryPoliciesResponse ([20f762c](https://github.com/googleapis/google-cloud-go/commit/20f762c528726a3f038d3e1f37e8a4952118badf)) + +## [1.5.0](https://github.com/googleapis/google-cloud-go/compare/iam/v1.4.2...iam/v1.5.0) (2025-03-31) + + +### Features + +* **iam:** New client(s) ([#11933](https://github.com/googleapis/google-cloud-go/issues/11933)) ([d5cb2e5](https://github.com/googleapis/google-cloud-go/commit/d5cb2e58334c6963cc46885f565fe3b19c52cb63)) + +## [1.4.2](https://github.com/googleapis/google-cloud-go/compare/iam/v1.4.1...iam/v1.4.2) (2025-03-13) + + +### Bug Fixes + +* **iam:** Update golang.org/x/net to 0.37.0 ([1144978](https://github.com/googleapis/google-cloud-go/commit/11449782c7fb4896bf8b8b9cde8e7441c84fb2fd)) + +## [1.4.1](https://github.com/googleapis/google-cloud-go/compare/iam/v1.4.0...iam/v1.4.1) (2025-03-06) + + +### Bug Fixes + +* **iam:** Fix out-of-sync version.go ([28f0030](https://github.com/googleapis/google-cloud-go/commit/28f00304ebb13abfd0da2f45b9b79de093cca1ec)) + +## [1.4.0](https://github.com/googleapis/google-cloud-go/compare/iam/v1.3.1...iam/v1.4.0) (2025-02-12) + + +### Features + +* **iam/admin:** Regenerate client ([#11570](https://github.com/googleapis/google-cloud-go/issues/11570)) ([eab87d7](https://github.com/googleapis/google-cloud-go/commit/eab87d73bea884c636ec88f03b9aa90102a2833f)), refs [#8219](https://github.com/googleapis/google-cloud-go/issues/8219) + ## [1.3.1](https://github.com/googleapis/google-cloud-go/compare/iam/v1.3.0...iam/v1.3.1) (2025-01-02) diff --git a/vendor/cloud.google.com/go/iam/apiv1/iampb/iam_policy.pb.go b/vendor/cloud.google.com/go/iam/apiv1/iampb/iam_policy.pb.go index f975d76191b..2b57ae3b82d 100644 --- a/vendor/cloud.google.com/go/iam/apiv1/iampb/iam_policy.pb.go +++ b/vendor/cloud.google.com/go/iam/apiv1/iampb/iam_policy.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/cloud.google.com/go/iam/apiv1/iampb/options.pb.go b/vendor/cloud.google.com/go/iam/apiv1/iampb/options.pb.go index 0c82db752bd..745de05ba25 100644 --- a/vendor/cloud.google.com/go/iam/apiv1/iampb/options.pb.go +++ b/vendor/cloud.google.com/go/iam/apiv1/iampb/options.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/iam/apiv1/iampb/policy.pb.go b/vendor/cloud.google.com/go/iam/apiv1/iampb/policy.pb.go index a2e42f87869..0eba150896b 100644 --- a/vendor/cloud.google.com/go/iam/apiv1/iampb/policy.pb.go +++ b/vendor/cloud.google.com/go/iam/apiv1/iampb/policy.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/iam/apiv1/iampb/resource_policy_member.pb.go b/vendor/cloud.google.com/go/iam/apiv1/iampb/resource_policy_member.pb.go index 361d79752ad..c3339e26c45 100644 --- a/vendor/cloud.google.com/go/iam/apiv1/iampb/resource_policy_member.pb.go +++ b/vendor/cloud.google.com/go/iam/apiv1/iampb/resource_policy_member.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/internal/.repo-metadata-full.json b/vendor/cloud.google.com/go/internal/.repo-metadata-full.json index b1a50e87388..d72e823299d 100644 --- a/vendor/cloud.google.com/go/internal/.repo-metadata-full.json +++ b/vendor/cloud.google.com/go/internal/.repo-metadata-full.json @@ -959,16 +959,6 @@ "release_level": "preview", "library_type": "GAPIC_AUTO" }, - "cloud.google.com/go/dataform/apiv1alpha2": { - "api_shortname": "dataform", - "distribution_name": "cloud.google.com/go/dataform/apiv1alpha2", - "description": "Dataform API", - "language": "go", - "client_library_type": "generated", - "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/dataform/latest/apiv1alpha2", - "release_level": "preview", - "library_type": "GAPIC_AUTO" - }, "cloud.google.com/go/dataform/apiv1beta1": { "api_shortname": "dataform", "distribution_name": "cloud.google.com/go/dataform/apiv1beta1", @@ -1299,6 +1289,16 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/financialservices/apiv1": { + "api_shortname": "financialservices", + "distribution_name": "cloud.google.com/go/financialservices/apiv1", + "description": "Financial Services API", + "language": "go", + "client_library_type": "generated", + "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/financialservices/latest/apiv1", + "release_level": "preview", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/firestore": { "api_shortname": "firestore", "distribution_name": "cloud.google.com/go/firestore", @@ -1789,6 +1789,16 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/modelarmor/apiv1": { + "api_shortname": "modelarmor", + "distribution_name": "cloud.google.com/go/modelarmor/apiv1", + "description": "Model Armor API", + "language": "go", + "client_library_type": "generated", + "client_documentation": 
"https://cloud.google.com/go/docs/reference/cloud.google.com/go/modelarmor/latest/apiv1", + "release_level": "preview", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/monitoring/apiv3/v2": { "api_shortname": "monitoring", "distribution_name": "cloud.google.com/go/monitoring/apiv3/v2", @@ -2269,16 +2279,6 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, - "cloud.google.com/go/resourcesettings/apiv1": { - "api_shortname": "resourcesettings", - "distribution_name": "cloud.google.com/go/resourcesettings/apiv1", - "description": "Resource Settings API", - "language": "go", - "client_library_type": "generated", - "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/resourcesettings/latest/apiv1", - "release_level": "stable", - "library_type": "GAPIC_AUTO" - }, "cloud.google.com/go/retail/apiv2": { "api_shortname": "retail", "distribution_name": "cloud.google.com/go/retail/apiv2", diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert.pb.go index 222e1d170a1..24ca1414bb3 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert_service.pb.go index 02103f8cd49..ba0c4f65f2c 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/alert_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/common.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/common.pb.go index e301262a2fa..81b8c8f5e46 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/common.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/common.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/dropped_labels.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/dropped_labels.pb.go index 0dbf58e4351..0c3ac5a1c8a 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/dropped_labels.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/dropped_labels.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group.pb.go index 11d1a62d35b..c35046ac71c 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group_service.pb.go index 3cfa112bb45..fbdf9ef54f1 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/group_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric.pb.go index 1961a1e3a5c..ae7eea5b6fa 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric_service.pb.go index 9e7cbcdd2f1..39b9595241b 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/metric_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/mutation_record.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/mutation_record.pb.go index 5fd4f338075..e03d89efe4d 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/mutation_record.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/mutation_record.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification.pb.go index 48d69d1431d..0d5cacbecb0 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification_service.pb.go index 9ae6580b1b4..fd0230036da 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/notification_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/query_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/query_service.pb.go index b1f18a6d253..6402f18ca11 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/query_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/query_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service.pb.go index aa462351d7c..a9d2ae8cb67 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service_service.pb.go index 01520d88a2c..08c2e08e264 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/service_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze.pb.go index ef7fbded0c5..861e045f2d4 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze_service.pb.go index bfe661ea702..c562d60bcc7 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/snooze_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/span_context.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/span_context.pb.go index 3555d6e0a1c..23f42835f14 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/span_context.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/span_context.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime.pb.go index 7e122ade520..f303ac25156 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime_service.pb.go b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime_service.pb.go index d2958b86589..9ea159bbd2d 100644 --- a/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime_service.pb.go +++ b/vendor/cloud.google.com/go/monitoring/apiv3/v2/monitoringpb/uptime_service.pb.go @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/vendor/cloud.google.com/go/monitoring/internal/version.go b/vendor/cloud.google.com/go/monitoring/internal/version.go index 291a237fe1c..e199c1168a1 100644 --- a/vendor/cloud.google.com/go/monitoring/internal/version.go +++ b/vendor/cloud.google.com/go/monitoring/internal/version.go @@ -15,4 +15,4 @@ package internal // Version is the current tagged release of the library. -const Version = "1.24.0" +const Version = "1.24.2" diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md index 926ed3882cd..d99d530934b 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md @@ -1,12 +1,18 @@ # Release History +## 1.18.1 (2025-07-10) + +### Bugs Fixed + +* Fixed incorrect request/response logging try info when logging a request that's being retried. 
+* Fixed a data race in `ResourceID.String()` + ## 1.18.0 (2025-04-03) ### Features Added * Added `AccessToken.RefreshOn` and updated `BearerTokenPolicy` to consider nonzero values of it when deciding whether to request a new token - ## 1.17.1 (2025-03-20) ### Other Changes diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_identifier.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_identifier.go index d9a4e36dccb..a08d3d0ffa6 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_identifier.go +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_identifier.go @@ -27,7 +27,8 @@ var RootResourceID = &ResourceID{ } // ResourceID represents a resource ID such as `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg`. -// Don't create this type directly, use ParseResourceID instead. +// Don't create this type directly, use [ParseResourceID] instead. Fields are considered immutable and shouldn't be +// modified after creation. type ResourceID struct { // Parent is the parent ResourceID of this instance. // Can be nil if there is no parent. @@ -85,28 +86,6 @@ func ParseResourceID(id string) (*ResourceID, error) { // String returns the string of the ResourceID func (id *ResourceID) String() string { - if len(id.stringValue) > 0 { - return id.stringValue - } - - if id.Parent == nil { - return "" - } - - builder := strings.Builder{} - builder.WriteString(id.Parent.String()) - - if id.isChild { - builder.WriteString(fmt.Sprintf("/%s", id.ResourceType.lastType())) - if len(id.Name) > 0 { - builder.WriteString(fmt.Sprintf("/%s", id.Name)) - } - } else { - builder.WriteString(fmt.Sprintf("/providers/%s/%s/%s", id.ResourceType.Namespace, id.ResourceType.Type, id.Name)) - } - - id.stringValue = builder.String() - return id.stringValue } @@ -185,6 +164,15 @@ func (id *ResourceID) init(parent *ResourceID, resourceType ResourceType, name s id.isChild = isChild id.ResourceType = resourceType id.Name = name + id.stringValue = id.Parent.String() + if id.isChild { + id.stringValue += "/" + id.ResourceType.lastType() + if id.Name != "" { + id.stringValue += "/" + id.Name + } + } else { + id.stringValue += fmt.Sprintf("/providers/%s/%s/%s", id.ResourceType.Namespace, id.ResourceType.Type, id.Name) + } } func appendNext(parent *ResourceID, parts []string, id string) (*ResourceID, error) { diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/ci.yml b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/ci.yml index 99348527b54..b81b6210384 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/ci.yml +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/ci.yml @@ -27,3 +27,5 @@ extends: template: /eng/pipelines/templates/jobs/archetype-sdk-client.yml parameters: ServiceDirectory: azcore + TriggeringPaths: + - /eng/ diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/request.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/request.go index e3e2d4e588a..9b3f5badb5e 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/request.go +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/request.go @@ -71,7 +71,8 @@ func (ov opValues) get(value any) bool { // NewRequestFromRequest creates a new policy.Request with an existing *http.Request // Exported as runtime.NewRequestFromRequest(). 
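[editor's note] The resource_identifier.go hunk above fixes the data race noted in the 1.18.1 changelog: String() previously memoized id.stringValue lazily with no synchronization, so concurrent callers raced on that field; the fix builds the string once in init(), after which String() only reads immutable state. A simplified sketch of the two patterns (not azcore's actual types) under that assumption:

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

// lazyID caches its string form on first call to String. Without a lock,
// concurrent String calls race on the cached field (go test -race flags it).
type lazyID struct {
	parts  []string
	cached string
}

func (id *lazyID) String() string {
	if id.cached == "" {
		id.cached = "/" + strings.Join(id.parts, "/") // unsynchronized write: data race
	}
	return id.cached
}

// eagerID precomputes the string at construction time, so String only reads
// immutable state and is safe to call from many goroutines.
type eagerID struct {
	str string
}

func newEagerID(parts ...string) *eagerID {
	return &eagerID{str: "/" + strings.Join(parts, "/")}
}

func (id *eagerID) String() string { return id.str }

func main() {
	id := newEagerID("subscriptions", "0000", "resourceGroups", "myRg")
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() { defer wg.Done(); _ = id.String() }()
	}
	wg.Wait()
	fmt.Println(id)
}
```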
func NewRequestFromRequest(req *http.Request) (*Request, error) { - policyReq := &Request{req: req} + // populate values so that the same instance is propagated across policies + policyReq := &Request{req: req, values: opValues{}} if req.Body != nil { // we can avoid a body copy here if the underlying stream is already a @@ -117,7 +118,8 @@ func NewRequest(ctx context.Context, httpMethod string, endpoint string) (*Reque if !(req.URL.Scheme == "http" || req.URL.Scheme == "https") { return nil, fmt.Errorf("unsupported protocol scheme %s", req.URL.Scheme) } - return &Request{req: req}, nil + // populate values so that the same instance is propagated across policies + return &Request{req: req, values: opValues{}}, nil } // Body returns the original body specified when the Request was created. diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go index 85514db3b84..23788b14d92 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go @@ -40,5 +40,5 @@ const ( Module = "azcore" // Version is the semantic version (see http://semver.org) of this module. - Version = "v1.18.0" + Version = "v1.18.1" ) diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/policy.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/policy.go index bb37a5efb4e..368a2199e08 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/policy.go +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/policy.go @@ -103,7 +103,7 @@ type RetryOptions struct { // RetryDelay specifies the initial amount of delay to use before retrying an operation. // The value is used only if the HTTP response does not contain a Retry-After header. // The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. - // The default value is four seconds. A value less than zero means no delay between retries. + // The default value is 800 milliseconds. A value less than zero means no delay between retries. RetryDelay time.Duration // MaxRetryDelay specifies the maximum delay allowed before retrying an operation. diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/CHANGELOG.md b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/CHANGELOG.md index f5bd8586b9d..84e7941e4f3 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/CHANGELOG.md +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 1.10.1 (2025-06-10) + +### Bugs Fixed +- `AzureCLICredential` and `AzureDeveloperCLICredential` could wait indefinitely for subprocess output + ## 1.10.0 (2025-05-14) ### Features Added diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TOKEN_CACHING.MD b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TOKEN_CACHING.MD index 2bda7f2a7f8..da2094e36b1 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TOKEN_CACHING.MD +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TOKEN_CACHING.MD @@ -27,6 +27,7 @@ Persistent caches are encrypted at rest using a mechanism that depends on the op | Linux | kernel key retention service (keyctl) | Cache data is lost on system shutdown because kernel keys are stored in memory. 
Depending on kernel compile options, data may also be lost on logout, or storage may be impossible because the key retention service isn't available. | | macOS | Keychain | Building requires cgo and native build tools. Keychain access requires a graphical session, so persistent caching isn't possible in a headless environment such as an SSH session (macOS as host). | | Windows | Data Protection API (DPAPI) | No specific limitations. | + Persistent caching requires encryption. When the required encryption facility is unuseable, or the application is running on an unsupported OS, the persistent cache constructor returns an error. This doesn't mean that authentication is impossible, only that credentials can't persist authentication data and the application will need to reauthenticate the next time it runs. See the package documentation for examples showing how to configure persistent caching and access cached data for [users][user_example] and [service principals][sp_example]. ### Credentials supporting token caching diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TROUBLESHOOTING.md b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TROUBLESHOOTING.md index 10a4009c376..91f4f05cc0c 100644 --- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TROUBLESHOOTING.md +++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TROUBLESHOOTING.md @@ -219,7 +219,7 @@ azd auth token --output json --scope https://management.core.windows.net/.defaul | Error Message |Description| Mitigation | |---|---|---| -|no client ID/tenant ID/token file specified|Incomplete configuration|In most cases these values are provided via environment variables set by Azure Workload Identity.
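[editor's note] The policy.go hunk earlier in this patch corrects the documented default for RetryOptions.RetryDelay from four seconds to 800 milliseconds. For callers who prefer explicit settings over the documented defaults, a minimal sketch of overriding the retry policy on an azcore-based client is shown below; the field names match the azcore/policy types touched in the diff, but the specific values and the availability of the module version are assumptions:

```go
package main

import (
	"fmt"
	"time"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
)

func main() {
	// Explicit retry settings; fields left zero fall back to the policy's
	// documented defaults (an initial delay of 800ms per the doc fix above).
	opts := azcore.ClientOptions{
		Retry: policy.RetryOptions{
			MaxRetries:    3,
			RetryDelay:    2 * time.Second,  // initial delay before the first retry
			MaxRetryDelay: 30 * time.Second, // cap for the exponential backoff
		},
	}
	fmt.Printf("retry config: %+v\n", opts.Retry)
}
```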