diff --git a/CHANGELOG.md b/CHANGELOG.md index 29f7547c861..cd257e7fcd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ## master / unreleased * [ENHANCEMENT] Upgraded Docker base images to `alpine:3.14`. #4514 +* [ENHANCEMENT] Updated Prometheus to latest. Includes changes from prometheus#9239, adding 15 new functions. Multiple TSDB bugfixes prometheus#9438 & prometheus#9381. #4524 ## 1.11.0-rc.0 in progress diff --git a/go.mod b/go.mod index 3bf4fdb16a2..6f92e8aabcb 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/Azure/azure-storage-blob-go v0.13.0 github.com/Masterminds/squirrel v0.0.0-20161115235646-20f192218cf5 github.com/NYTimes/gziphandler v1.1.1 - github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 + github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 github.com/alicebob/miniredis/v2 v2.14.3 github.com/aws/aws-sdk-go v1.40.37 github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b @@ -44,8 +44,8 @@ require ( github.com/prometheus/alertmanager v0.23.1-0.20210914172521-e35efbddb66a github.com/prometheus/client_golang v1.11.0 github.com/prometheus/client_model v0.2.0 - github.com/prometheus/common v0.30.0 - github.com/prometheus/prometheus v1.8.2-0.20210914090109-37468d88dce8 + github.com/prometheus/common v0.31.1 + github.com/prometheus/prometheus v1.8.2-0.20211011171444-354d8d2ecfac github.com/segmentio/fasthash v0.0.0-20180216231524-a72b379d632e github.com/sony/gobreaker v0.4.1 github.com/spf13/afero v1.3.4 diff --git a/go.sum b/go.sum index 2f9c6d814a1..7ff0f434bee 100644 --- a/go.sum +++ b/go.sum @@ -212,8 +212,9 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 h1:AUNCr9CiJuwrRYS3XieqF+Z9B9gNxo/eANAJCF2eiN4= github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 h1:8ypNbf5sd3Sm3cKJ9waOGoQv6dKAFiFty9L6NP1AqJ4= +github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= @@ -1617,8 +1618,9 @@ github.com/prometheus/common v0.21.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16 github.com/prometheus/common v0.23.0/go.mod h1:H6QK/N6XVT42whUeIdI3dp36w49c+/iMDk7UAI2qm7Q= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/common v0.29.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.30.0 h1:JEkYlQnpzrzQFxi6gnukFPdQ+ac82oRhzMcIduJu/Ug= github.com/prometheus/common v0.30.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.31.1 h1:d18hG4PkHnNAKNMOmFuXFaiY8Us0nird/2m60uS1AMs= +github.com/prometheus/common 
v0.31.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= github.com/prometheus/common/sigv4 v0.1.0 h1:qoVebwtwwEhS85Czm2dSROY5fTo2PAPEVdDeppTwGX4= github.com/prometheus/common/sigv4 v0.1.0/go.mod h1:2Jkxxk9yYvCkE5G1sQT7GuEXm57JrvHu9k5YwTjsNtI= github.com/prometheus/exporter-toolkit v0.5.0/go.mod h1:OCkM4805mmisBhLmVFw858QYi3v0wKdY6/UxrT0pZVg= @@ -1665,8 +1667,9 @@ github.com/prometheus/prometheus v1.8.2-0.20210315220929-1cba1741828b/go.mod h1: github.com/prometheus/prometheus v1.8.2-0.20210324152458-c7a62b95cea0/go.mod h1:sf7j/iAbhZahjeC0s3wwMmp5dksrJ/Za1UKdR+j6Hmw= github.com/prometheus/prometheus v1.8.2-0.20210519120135-d95b0972505f/go.mod h1:yUzDYX0hIYu5YVHmpj/JXLOclB6QcLNDgmagD3FUnSU= github.com/prometheus/prometheus v1.8.2-0.20210720123808-b1ed4a0a663d/go.mod h1:o6V+A4iPEWjLG0rSEKeev3OzfBZwP+ay+4iS4dkfLI4= -github.com/prometheus/prometheus v1.8.2-0.20210914090109-37468d88dce8 h1:U8ZpFGP11pZi0ZavWWGeAqck3dVT9AY6zbr4fbBftjA= github.com/prometheus/prometheus v1.8.2-0.20210914090109-37468d88dce8/go.mod h1:02eURgmH1YsgJ2TtWNUGMQMCnLxmtHH9nOgvYxIjGAo= +github.com/prometheus/prometheus v1.8.2-0.20211011171444-354d8d2ecfac h1:emphJoDK6yZ1GxyFbyYa7ByoWkl3dXfOmPvHAy0L0XI= +github.com/prometheus/prometheus v1.8.2-0.20211011171444-354d8d2ecfac/go.mod h1:wP6L5BiOZ1JZYadRh17u5RujSS19zNSGZfGUi/MZUpM= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/rafaeljusto/redigomock v0.0.0-20190202135759-257e089e14a1/go.mod h1:JaY6n2sDr+z2WTsXkOmNRUfDy6FN0L6Nk7x06ndm4tY= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= diff --git a/pkg/configs/legacy_promql/test.go b/pkg/configs/legacy_promql/test.go index aeaccd8deaa..5e31b520016 100644 --- a/pkg/configs/legacy_promql/test.go +++ b/pkg/configs/legacy_promql/test.go @@ -491,7 +491,8 @@ func (t *Test) exec(tc testCommand) error { func (t *Test) clear() { if t.storage != nil { if err := t.storage.Close(); err != nil { - t.T.Fatalf("closing test storage: %s", err) + t.T.Errorf("closing test storage: %s", err) + t.T.FailNow() } } if t.cancelCtx != nil { @@ -508,7 +509,8 @@ func (t *Test) Close() { t.cancelCtx() if err := t.storage.Close(); err != nil { - t.T.Fatalf("closing test storage: %s", err) + t.T.Errorf("closing test storage: %s", err) + t.T.FailNow() } } @@ -546,17 +548,13 @@ func parseNumber(s string) (float64, error) { return f, nil } -type T interface { - Fatal(args ...interface{}) - Fatalf(format string, args ...interface{}) -} - // NewStorage returns a new storage for testing purposes // that removes all associated files on closing. -func NewStorage(t T) storage.Storage { +func NewStorage(t testutil.T) storage.Storage { dir, err := ioutil.TempDir("", "test_storage") if err != nil { - t.Fatalf("Opening test dir failed: %s", err) + t.Errorf("Opening test dir failed: %s", err) + t.FailNow() } // Tests just load data for a series sequentially. 
Thus we @@ -566,7 +564,8 @@ func NewStorage(t T) storage.Storage { MaxBlockDuration: int64(24 * time.Hour / time.Millisecond), }, nil) if err != nil { - t.Fatalf("Opening test storage failed: %s", err) + t.Errorf("Opening test storage failed: %s", err) + t.FailNow() } return testStorage{Storage: Adapter(db, int64(0)), dir: dir} } diff --git a/vendor/github.com/alecthomas/units/bytes.go b/vendor/github.com/alecthomas/units/bytes.go index cd439f51c5b..f740537f03f 100644 --- a/vendor/github.com/alecthomas/units/bytes.go +++ b/vendor/github.com/alecthomas/units/bytes.go @@ -40,6 +40,12 @@ func (b Base2Bytes) String() string { return ToString(int64(b), 1024, "iB", "B") } +// MarshalText implement encoding.TextMarshaler to process json/yaml. +func (b Base2Bytes) MarshalText() ([]byte, error) { + return []byte(b.String()), nil +} + +// UnmarshalText implement encoding.TextUnmarshaler to process json/yaml. func (b *Base2Bytes) UnmarshalText(text []byte) error { n, err := ParseBase2Bytes(string(text)) *b = n diff --git a/vendor/github.com/prometheus/common/config/http_config.go b/vendor/github.com/prometheus/common/config/http_config.go index 6c0c033d481..8c07c41ffe0 100644 --- a/vendor/github.com/prometheus/common/config/http_config.go +++ b/vendor/github.com/prometheus/common/config/http_config.go @@ -110,11 +110,25 @@ func (u *URL) UnmarshalYAML(unmarshal func(interface{}) error) error { // MarshalYAML implements the yaml.Marshaler interface for URLs. func (u URL) MarshalYAML() (interface{}, error) { if u.URL != nil { - return u.String(), nil + return u.Redacted(), nil } return nil, nil } +// Redacted returns the URL but replaces any password with "xxxxx". +func (u URL) Redacted() string { + if u.URL == nil { + return "" + } + + ru := *u.URL + if _, ok := ru.User.Password(); ok { + // We can not use secretToken because it would be escaped. + ru.User = url.UserPassword(ru.User.Username(), "xxxxx") + } + return ru.String() +} + // UnmarshalJSON implements the json.Marshaler interface for URL. func (u *URL) UnmarshalJSON(data []byte) error { var s string @@ -380,17 +394,18 @@ func NewRoundTripperFromConfig(cfg HTTPClientConfig, name string, optFuncs ...HT ExpectContinueTimeout: 1 * time.Second, DialContext: dialContext, } - if opts.http2Enabled || os.Getenv("PROMETHEUS_COMMON_ENABLE_HTTP2") != "" { - // HTTP/2 support is golang has many problematic cornercases where + if opts.http2Enabled && os.Getenv("PROMETHEUS_COMMON_DISABLE_HTTP2") == "" { + // HTTP/2 support is golang had many problematic cornercases where // dead connections would be kept and used in connection pools. // https://github.com/golang/go/issues/32388 // https://github.com/golang/go/issues/39337 // https://github.com/golang/go/issues/39750 - // Enable HTTP2 if the environment variable - // PROMETHEUS_COMMON_ENABLE_HTTP2 is set. - // This is a temporary workaround so that users can safely test this - // and validate that HTTP2 can be enabled Prometheus-Wide again. + // Do not enable HTTP2 if the environment variable + // PROMETHEUS_COMMON_DISABLE_HTTP2 is set to a non-empty value. + // This allows users to easily disable HTTP2 in case they run into + // issues again, but will be removed once we are confident that + // things work as expected. 
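(Editor's note: the vendored changes above — `Base2Bytes.MarshalText`, `URL.Redacted`, and the inverted HTTP/2 environment switch — are easiest to see from the caller's side. A minimal sketch, not part of the patch; the URL, the size value, and the import aliases are illustrative assumptions.)

package main

import (
	"fmt"
	"net/url"
	"os"

	"github.com/alecthomas/units"
	"github.com/prometheus/common/config"
)

func main() {
	// Base2Bytes now round-trips through text: MarshalText mirrors the existing
	// UnmarshalText, so byte sizes survive json/yaml encoding. Values are made up.
	b, _ := units.ParseBase2Bytes("1MiB")
	txt, _ := b.MarshalText()
	fmt.Println(string(txt)) // 1MiB

	// MarshalYAML now goes through Redacted(), so credentials embedded in URLs
	// are no longer written back out in clear text. Example URL is hypothetical.
	u, _ := url.Parse("https://user:secret@example.com/api/v1/write")
	fmt.Println(config.URL{URL: u}.Redacted()) // https://user:xxxxx@example.com/api/v1/write

	// HTTP/2 is now on by default; setting this variable to any non-empty value
	// before the round tripper is built switches it off again.
	os.Setenv("PROMETHEUS_COMMON_DISABLE_HTTP2", "1")
}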
http2t, err := http2.ConfigureTransports(rt.(*http.Transport)) if err != nil { diff --git a/vendor/github.com/prometheus/prometheus/notifier/notifier.go b/vendor/github.com/prometheus/prometheus/notifier/notifier.go index 7af21c565cc..97086d56293 100644 --- a/vendor/github.com/prometheus/prometheus/notifier/notifier.go +++ b/vendor/github.com/prometheus/prometheus/notifier/notifier.go @@ -634,7 +634,7 @@ type alertmanagerSet struct { } func newAlertmanagerSet(cfg *config.AlertmanagerConfig, logger log.Logger, metrics *alertMetrics) (*alertmanagerSet, error) { - client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, "alertmanager", config_util.WithHTTP2Disabled()) + client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, "alertmanager") if err != nil { return nil, err } diff --git a/vendor/github.com/prometheus/prometheus/pkg/rulefmt/rulefmt.go b/vendor/github.com/prometheus/prometheus/pkg/rulefmt/rulefmt.go index 920c7a2038b..62cd6de0921 100644 --- a/vendor/github.com/prometheus/prometheus/pkg/rulefmt/rulefmt.go +++ b/vendor/github.com/prometheus/prometheus/pkg/rulefmt/rulefmt.go @@ -107,6 +107,7 @@ func (g *RuleGroups) Validate(node ruleGroups) (errs []error) { type RuleGroup struct { Name string `yaml:"name"` Interval model.Duration `yaml:"interval,omitempty"` + Limit int `yaml:"limit,omitempty"` Rules []RuleNode `yaml:"rules"` } @@ -239,6 +240,7 @@ func testTemplateParsing(rl *RuleNode) (errs []error) { model.Time(timestamp.FromTime(time.Now())), nil, nil, + nil, ) return tmpl.ParseTest() } diff --git a/vendor/github.com/prometheus/prometheus/promql/engine.go b/vendor/github.com/prometheus/prometheus/promql/engine.go index 44290f1638a..e5dbcd2d771 100644 --- a/vendor/github.com/prometheus/prometheus/promql/engine.go +++ b/vendor/github.com/prometheus/prometheus/promql/engine.go @@ -913,6 +913,8 @@ func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws storage.Warnings type EvalSeriesHelper struct { // The grouping key used by aggregation. groupingKey uint64 + // Used to map left-hand to right-hand in binary operations. + signature string } // EvalNodeHelper stores extra information and caches for evaluating a single node across steps. @@ -925,8 +927,6 @@ type EvalNodeHelper struct { // Caches. // DropMetricName and label_*. Dmn map[uint64]labels.Labels - // signatureFunc. - sigf map[string]string // funcHistogramQuantile. signatureToMetricWithBuckets map[string]*metricWithBuckets // label_replace. @@ -957,23 +957,6 @@ func (enh *EvalNodeHelper) DropMetricName(l labels.Labels) labels.Labels { return ret } -func (enh *EvalNodeHelper) signatureFunc(on bool, names ...string) func(labels.Labels) string { - if enh.sigf == nil { - enh.sigf = make(map[string]string, len(enh.Out)) - } - f := signatureFunc(on, enh.lblBuf, names...) - return func(l labels.Labels) string { - enh.lblBuf = l.Bytes(enh.lblBuf) - ret, ok := enh.sigf[string(enh.lblBuf)] - if ok { - return ret - } - ret = f(l) - enh.sigf[string(enh.lblBuf)] = ret - return ret - } -} - // rangeEval evaluates the given expressions, and then for each step calls // the given funcCall with the values computed for each expression at that // step. 
The return value is the combination into time series of all the @@ -1432,22 +1415,28 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { return append(enh.Out, Sample{Point: Point{V: val}}), nil }, e.LHS, e.RHS) case lt == parser.ValueTypeVector && rt == parser.ValueTypeVector: + // Function to compute the join signature for each series. + buf := make([]byte, 0, 1024) + sigf := signatureFunc(e.VectorMatching.On, buf, e.VectorMatching.MatchingLabels...) + initSignatures := func(series labels.Labels, h *EvalSeriesHelper) { + h.signature = sigf(series) + } switch e.Op { case parser.LAND: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return ev.VectorAnd(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.VectorAnd(v[0].(Vector), v[1].(Vector), e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) case parser.LOR: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return ev.VectorOr(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.VectorOr(v[0].(Vector), v[1].(Vector), e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) case parser.LUNLESS: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return ev.VectorUnless(v[0].(Vector), v[1].(Vector), e.VectorMatching, enh), nil + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.VectorUnless(v[0].(Vector), v[1].(Vector), e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) default: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return ev.VectorBinop(e.Op, v[0].(Vector), v[1].(Vector), e.VectorMatching, e.ReturnBool, enh), nil + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.VectorBinop(e.Op, v[0].(Vector), v[1].(Vector), e.VectorMatching, e.ReturnBool, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) } @@ -1774,62 +1763,72 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m return out } -func (ev *evaluator) VectorAnd(lhs, rhs Vector, matching *parser.VectorMatching, enh *EvalNodeHelper) Vector { +func (ev *evaluator) VectorAnd(lhs, rhs Vector, matching *parser.VectorMatching, lhsh, rhsh []EvalSeriesHelper, enh *EvalNodeHelper) Vector { if matching.Card != parser.CardManyToMany { panic("set operations must only use many-to-many matching") } - sigf := enh.signatureFunc(matching.On, matching.MatchingLabels...) + if len(lhs) == 0 || len(rhs) == 0 { + return nil // Short-circuit: AND with nothing is nothing. + } // The set of signatures for the right-hand side Vector. rightSigs := map[string]struct{}{} // Add all rhs samples to a map so we can easily find matches later. 
- for _, rs := range rhs { - rightSigs[sigf(rs.Metric)] = struct{}{} + for _, sh := range rhsh { + rightSigs[sh.signature] = struct{}{} } - for _, ls := range lhs { + for i, ls := range lhs { // If there's a matching entry in the right-hand side Vector, add the sample. - if _, ok := rightSigs[sigf(ls.Metric)]; ok { + if _, ok := rightSigs[lhsh[i].signature]; ok { enh.Out = append(enh.Out, ls) } } return enh.Out } -func (ev *evaluator) VectorOr(lhs, rhs Vector, matching *parser.VectorMatching, enh *EvalNodeHelper) Vector { +func (ev *evaluator) VectorOr(lhs, rhs Vector, matching *parser.VectorMatching, lhsh, rhsh []EvalSeriesHelper, enh *EvalNodeHelper) Vector { if matching.Card != parser.CardManyToMany { panic("set operations must only use many-to-many matching") } - sigf := enh.signatureFunc(matching.On, matching.MatchingLabels...) + if len(lhs) == 0 { // Short-circuit. + return rhs + } else if len(rhs) == 0 { + return lhs + } leftSigs := map[string]struct{}{} // Add everything from the left-hand-side Vector. - for _, ls := range lhs { - leftSigs[sigf(ls.Metric)] = struct{}{} + for i, ls := range lhs { + leftSigs[lhsh[i].signature] = struct{}{} enh.Out = append(enh.Out, ls) } // Add all right-hand side elements which have not been added from the left-hand side. - for _, rs := range rhs { - if _, ok := leftSigs[sigf(rs.Metric)]; !ok { + for j, rs := range rhs { + if _, ok := leftSigs[rhsh[j].signature]; !ok { enh.Out = append(enh.Out, rs) } } return enh.Out } -func (ev *evaluator) VectorUnless(lhs, rhs Vector, matching *parser.VectorMatching, enh *EvalNodeHelper) Vector { +func (ev *evaluator) VectorUnless(lhs, rhs Vector, matching *parser.VectorMatching, lhsh, rhsh []EvalSeriesHelper, enh *EvalNodeHelper) Vector { if matching.Card != parser.CardManyToMany { panic("set operations must only use many-to-many matching") } - sigf := enh.signatureFunc(matching.On, matching.MatchingLabels...) + // Short-circuit: empty rhs means we will return everything in lhs; + // empty lhs means we will return empty - don't need to build a map. + if len(lhs) == 0 || len(rhs) == 0 { + return lhs + } rightSigs := map[string]struct{}{} - for _, rs := range rhs { - rightSigs[sigf(rs.Metric)] = struct{}{} + for _, sh := range rhsh { + rightSigs[sh.signature] = struct{}{} } - for _, ls := range lhs { - if _, ok := rightSigs[sigf(ls.Metric)]; !ok { + for i, ls := range lhs { + if _, ok := rightSigs[lhsh[i].signature]; !ok { enh.Out = append(enh.Out, ls) } } @@ -1837,17 +1836,20 @@ func (ev *evaluator) VectorUnless(lhs, rhs Vector, matching *parser.VectorMatchi } // VectorBinop evaluates a binary operation between two Vectors, excluding set operators. -func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *parser.VectorMatching, returnBool bool, enh *EvalNodeHelper) Vector { +func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *parser.VectorMatching, returnBool bool, lhsh, rhsh []EvalSeriesHelper, enh *EvalNodeHelper) Vector { if matching.Card == parser.CardManyToMany { panic("many-to-many only allowed for set operators") } - sigf := enh.signatureFunc(matching.On, matching.MatchingLabels...) + if len(lhs) == 0 || len(rhs) == 0 { + return nil // Short-circuit: nothing is going to match. + } // The control flow below handles one-to-one or many-to-one matching. // For one-to-many, swap sidedness and account for the swap when calculating // values. 
if matching.Card == parser.CardOneToMany { lhs, rhs = rhs, lhs + lhsh, rhsh = rhsh, lhsh } // All samples from the rhs hashed by the matching label/values. @@ -1861,8 +1863,8 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * rightSigs := enh.rightSigs // Add all rhs samples to a map so we can easily find matches later. - for _, rs := range rhs { - sig := sigf(rs.Metric) + for i, rs := range rhs { + sig := rhsh[i].signature // The rhs is guaranteed to be the 'one' side. Having multiple samples // with the same signature means that the matching is many-to-many. if duplSample, found := rightSigs[sig]; found { @@ -1892,8 +1894,8 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * // For all lhs samples find a respective rhs sample and perform // the binary operation. - for _, ls := range lhs { - sig := sigf(ls.Metric) + for i, ls := range lhs { + sig := lhsh[i].signature rs, found := rightSigs[sig] // Look for a match in the rhs Vector. if !found { @@ -2114,6 +2116,8 @@ func vectorElemBinop(op parser.ItemType, lhs, rhs float64) (float64, bool) { return lhs, lhs >= rhs case parser.LTE: return lhs, lhs <= rhs + case parser.ATAN2: + return math.Atan2(lhs, rhs), true } panic(errors.Errorf("operator %q not allowed for operations between Vectors", op)) } @@ -2210,22 +2214,24 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without resultSize := k if k > inputVecLen { resultSize = inputVecLen + } else if k == 0 { + resultSize = 1 } switch op { case parser.STDVAR, parser.STDDEV: result[groupingKey].value = 0 case parser.TOPK, parser.QUANTILE: - result[groupingKey].heap = make(vectorByValueHeap, 0, resultSize) - heap.Push(&result[groupingKey].heap, &Sample{ + result[groupingKey].heap = make(vectorByValueHeap, 1, resultSize) + result[groupingKey].heap[0] = Sample{ Point: Point{V: s.V}, Metric: s.Metric, - }) + } case parser.BOTTOMK: - result[groupingKey].reverseHeap = make(vectorByReverseValueHeap, 0, resultSize) - heap.Push(&result[groupingKey].reverseHeap, &Sample{ + result[groupingKey].reverseHeap = make(vectorByReverseValueHeap, 1, resultSize) + result[groupingKey].reverseHeap[0] = Sample{ Point: Point{V: s.V}, Metric: s.Metric, - }) + } case parser.GROUP: result[groupingKey].value = 1 } @@ -2283,6 +2289,13 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without case parser.TOPK: if int64(len(group.heap)) < k || group.heap[0].V < s.V || math.IsNaN(group.heap[0].V) { if int64(len(group.heap)) == k { + if k == 1 { // For k==1 we can replace in-situ. + group.heap[0] = Sample{ + Point: Point{V: s.V}, + Metric: s.Metric, + } + break + } heap.Pop(&group.heap) } heap.Push(&group.heap, &Sample{ @@ -2294,6 +2307,13 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without case parser.BOTTOMK: if int64(len(group.reverseHeap)) < k || group.reverseHeap[0].V > s.V || math.IsNaN(group.reverseHeap[0].V) { if int64(len(group.reverseHeap)) == k { + if k == 1 { // For k==1 we can replace in-situ. + group.reverseHeap[0] = Sample{ + Point: Point{V: s.V}, + Metric: s.Metric, + } + break + } heap.Pop(&group.reverseHeap) } heap.Push(&group.reverseHeap, &Sample{ @@ -2327,7 +2347,9 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without case parser.TOPK: // The heap keeps the lowest value on top, so reverse it. 
- sort.Sort(sort.Reverse(aggr.heap)) + if len(aggr.heap) > 1 { + sort.Sort(sort.Reverse(aggr.heap)) + } for _, v := range aggr.heap { enh.Out = append(enh.Out, Sample{ Metric: v.Metric, @@ -2338,7 +2360,9 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without case parser.BOTTOMK: // The heap keeps the highest value on top, so reverse it. - sort.Sort(sort.Reverse(aggr.reverseHeap)) + if len(aggr.reverseHeap) > 1 { + sort.Sort(sort.Reverse(aggr.reverseHeap)) + } for _, v := range aggr.reverseHeap { enh.Out = append(enh.Out, Sample{ Metric: v.Metric, diff --git a/vendor/github.com/prometheus/prometheus/promql/functions.go b/vendor/github.com/prometheus/prometheus/promql/functions.go index d72b4caf65c..50594503dcc 100644 --- a/vendor/github.com/prometheus/prometheus/promql/functions.go +++ b/vendor/github.com/prometheus/prometheus/promql/functions.go @@ -570,6 +570,87 @@ func funcLog10(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper return simpleFunc(vals, enh, math.Log10) } +// === sin(Vector parser.ValueTypeVector) Vector === +func funcSin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Sin) +} + +// === cos(Vector parser.ValueTypeVector) Vector === +func funcCos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Cos) +} + +// === tan(Vector parser.ValueTypeVector) Vector === +func funcTan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Tan) +} + +// == asin(Vector parser.ValueTypeVector) Vector === +func funcAsin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Asin) +} + +// == acos(Vector parser.ValueTypeVector) Vector === +func funcAcos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Acos) +} + +// == atan(Vector parser.ValueTypeVector) Vector === +func funcAtan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Atan) +} + +// == sinh(Vector parser.ValueTypeVector) Vector === +func funcSinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Sinh) +} + +// == cosh(Vector parser.ValueTypeVector) Vector === +func funcCosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Cosh) +} + +// == tanh(Vector parser.ValueTypeVector) Vector === +func funcTanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Tanh) +} + +// == asinh(Vector parser.ValueTypeVector) Vector === +func funcAsinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Asinh) +} + +// == acosh(Vector parser.ValueTypeVector) Vector === +func funcAcosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Acosh) +} + +// == atanh(Vector parser.ValueTypeVector) Vector === +func funcAtanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, math.Atanh) +} + +// === rad(Vector parser.ValueTypeVector) Vector === +func funcRad(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, func(v float64) float64 { + return v * 
math.Pi / 180 + }) +} + +// === deg(Vector parser.ValueTypeVector) Vector === +func funcDeg(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return simpleFunc(vals, enh, func(v float64) float64 { + return v * 180 / math.Pi + }) +} + +// === pi() Scalar === +func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { + return Vector{Sample{Point: Point{ + V: math.Pi, + }}} +} + // === sgn(Vector parser.ValueTypeVector) Vector === func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { return simpleFunc(vals, enh, func(v float64) float64 { @@ -935,16 +1016,25 @@ var FunctionCalls = map[string]FunctionCall{ "abs": funcAbs, "absent": funcAbsent, "absent_over_time": funcAbsentOverTime, + "acos": funcAcos, + "acosh": funcAcosh, + "asin": funcAsin, + "asinh": funcAsinh, + "atan": funcAtan, + "atanh": funcAtanh, "avg_over_time": funcAvgOverTime, "ceil": funcCeil, "changes": funcChanges, "clamp": funcClamp, "clamp_max": funcClampMax, "clamp_min": funcClampMin, + "cos": funcCos, + "cosh": funcCosh, "count_over_time": funcCountOverTime, "days_in_month": funcDaysInMonth, "day_of_month": funcDayOfMonth, "day_of_week": funcDayOfWeek, + "deg": funcDeg, "delta": funcDelta, "deriv": funcDeriv, "exp": funcExp, @@ -965,20 +1055,26 @@ var FunctionCalls = map[string]FunctionCall{ "min_over_time": funcMinOverTime, "minute": funcMinute, "month": funcMonth, + "pi": funcPi, "predict_linear": funcPredictLinear, "present_over_time": funcPresentOverTime, "quantile_over_time": funcQuantileOverTime, + "rad": funcRad, "rate": funcRate, "resets": funcResets, "round": funcRound, "scalar": funcScalar, "sgn": funcSgn, + "sin": funcSin, + "sinh": funcSinh, "sort": funcSort, "sort_desc": funcSortDesc, "sqrt": funcSqrt, "stddev_over_time": funcStddevOverTime, "stdvar_over_time": funcStdvarOverTime, "sum_over_time": funcSumOverTime, + "tan": funcTan, + "tanh": funcTanh, "time": funcTime, "timestamp": funcTimestamp, "vector": funcVector, diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/functions.go b/vendor/github.com/prometheus/prometheus/promql/parser/functions.go index da5d279f313..c4bc5ddb8d2 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/functions.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/functions.go @@ -39,9 +39,34 @@ var Functions = map[string]*Function{ ArgTypes: []ValueType{ValueTypeMatrix}, ReturnType: ValueTypeVector, }, - "present_over_time": { - Name: "present_over_time", - ArgTypes: []ValueType{ValueTypeMatrix}, + "acos": { + Name: "acos", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "acosh": { + Name: "acosh", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "asin": { + Name: "asin", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "asinh": { + Name: "asinh", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "atan": { + Name: "atan", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "atanh": { + Name: "atanh", + ArgTypes: []ValueType{ValueTypeVector}, ReturnType: ValueTypeVector, }, "avg_over_time": { @@ -74,6 +99,16 @@ var Functions = map[string]*Function{ ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar}, ReturnType: ValueTypeVector, }, + "cos": { + Name: "cos", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "cosh": { + Name: "cosh", + ArgTypes: 
[]ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, "count_over_time": { Name: "count_over_time", ArgTypes: []ValueType{ValueTypeMatrix}, @@ -97,6 +132,11 @@ var Functions = map[string]*Function{ Variadic: 1, ReturnType: ValueTypeVector, }, + "deg": { + Name: "deg", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, "delta": { Name: "delta", ArgTypes: []ValueType{ValueTypeMatrix}, @@ -201,16 +241,31 @@ var Functions = map[string]*Function{ Variadic: 1, ReturnType: ValueTypeVector, }, + "pi": { + Name: "pi", + ArgTypes: []ValueType{}, + ReturnType: ValueTypeScalar, + }, "predict_linear": { Name: "predict_linear", ArgTypes: []ValueType{ValueTypeMatrix, ValueTypeScalar}, ReturnType: ValueTypeVector, }, + "present_over_time": { + Name: "present_over_time", + ArgTypes: []ValueType{ValueTypeMatrix}, + ReturnType: ValueTypeVector, + }, "quantile_over_time": { Name: "quantile_over_time", ArgTypes: []ValueType{ValueTypeScalar, ValueTypeMatrix}, ReturnType: ValueTypeVector, }, + "rad": { + Name: "rad", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, "rate": { Name: "rate", ArgTypes: []ValueType{ValueTypeMatrix}, @@ -237,6 +292,16 @@ var Functions = map[string]*Function{ ArgTypes: []ValueType{ValueTypeVector}, ReturnType: ValueTypeVector, }, + "sin": { + Name: "sin", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "sinh": { + Name: "sinh", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, "sort": { Name: "sort", ArgTypes: []ValueType{ValueTypeVector}, @@ -267,6 +332,16 @@ var Functions = map[string]*Function{ ArgTypes: []ValueType{ValueTypeMatrix}, ReturnType: ValueTypeVector, }, + "tan": { + Name: "tan", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, + "tanh": { + Name: "tanh", + ArgTypes: []ValueType{ValueTypeVector}, + ReturnType: ValueTypeVector, + }, "time": { Name: "time", ArgTypes: []ValueType{}, diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y index 75f147ee411..fcf504aca5e 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y +++ b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y @@ -84,6 +84,7 @@ NEQ_REGEX POW SUB AT +ATAN2 %token operatorsEnd // Aggregators. @@ -156,7 +157,7 @@ START_METRIC_SELECTOR %left LAND LUNLESS %left EQLC GTE GTR LSS LTE NEQ %left ADD SUB -%left MUL DIV MOD +%left MUL DIV MOD ATAN2 %right POW // Offset modifiers do not have associativity. @@ -237,6 +238,7 @@ aggregate_modifier: // Operator precedence only works if each of those is listed separately. binary_expr : expr ADD bin_modifier expr { $$ = yylex.(*parser).newBinaryExpression($1, $2, $3, $4) } + | expr ATAN2 bin_modifier expr { $$ = yylex.(*parser).newBinaryExpression($1, $2, $3, $4) } | expr DIV bin_modifier expr { $$ = yylex.(*parser).newBinaryExpression($1, $2, $3, $4) } | expr EQLC bin_modifier expr { $$ = yylex.(*parser).newBinaryExpression($1, $2, $3, $4) } | expr GTE bin_modifier expr { $$ = yylex.(*parser).newBinaryExpression($1, $2, $3, $4) } @@ -674,7 +676,7 @@ series_value : IDENTIFIER aggregate_op : AVG | BOTTOMK | COUNT | COUNT_VALUES | GROUP | MAX | MIN | QUANTILE | STDDEV | STDVAR | SUM | TOPK ; // inside of grouping options label names can be recognized as keywords by the lexer. This is a list of keywords that could also be a label name. 
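(Editor's note: with the function registrations and grammar additions above, the 15 new math functions and the `atan2` binary operator become valid PromQL. A small hedged sketch against the promql parser, not part of the patch; the metric name and queries are hypothetical.)

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	// All of these parse only with the updated Prometheus dependency.
	for _, q := range []string{
		`pi()`,                             // new zero-argument scalar function
		`sinh(rad(some_metric))`,           // new vector math functions compose
		`deg(some_metric atan2 vector(1))`, // atan2 is an infix binary operator, not a function
	} {
		expr, err := parser.ParseExpr(q)
		fmt.Printf("%q -> %v (err=%v)\n", q, expr, err)
	}
}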
-maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END; +maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2; unary_op : ADD | SUB; diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go index 5a2aafe3de4..71614913a0c 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/generated_parser.y.go @@ -1,11 +1,11 @@ -// Code generated by goyacc -o generated_parser.y.go generated_parser.y. DO NOT EDIT. +// Code generated by goyacc -o promql/parser/generated_parser.y.go promql/parser/generated_parser.y. DO NOT EDIT. -//line generated_parser.y:15 +//line promql/parser/generated_parser.y:15 package parser import __yyfmt__ "fmt" -//line generated_parser.y:15 +//line promql/parser/generated_parser.y:15 import ( "math" @@ -17,7 +17,7 @@ import ( "github.com/prometheus/prometheus/pkg/value" ) -//line generated_parser.y:28 +//line promql/parser/generated_parser.y:28 type yySymType struct { yys int node Node @@ -73,41 +73,42 @@ const NEQ_REGEX = 57382 const POW = 57383 const SUB = 57384 const AT = 57385 -const operatorsEnd = 57386 -const aggregatorsStart = 57387 -const AVG = 57388 -const BOTTOMK = 57389 -const COUNT = 57390 -const COUNT_VALUES = 57391 -const GROUP = 57392 -const MAX = 57393 -const MIN = 57394 -const QUANTILE = 57395 -const STDDEV = 57396 -const STDVAR = 57397 -const SUM = 57398 -const TOPK = 57399 -const aggregatorsEnd = 57400 -const keywordsStart = 57401 -const BOOL = 57402 -const BY = 57403 -const GROUP_LEFT = 57404 -const GROUP_RIGHT = 57405 -const IGNORING = 57406 -const OFFSET = 57407 -const ON = 57408 -const WITHOUT = 57409 -const keywordsEnd = 57410 -const preprocessorStart = 57411 -const START = 57412 -const END = 57413 -const preprocessorEnd = 57414 -const startSymbolsStart = 57415 -const START_METRIC = 57416 -const START_SERIES_DESCRIPTION = 57417 -const START_EXPRESSION = 57418 -const START_METRIC_SELECTOR = 57419 -const startSymbolsEnd = 57420 +const ATAN2 = 57386 +const operatorsEnd = 57387 +const aggregatorsStart = 57388 +const AVG = 57389 +const BOTTOMK = 57390 +const COUNT = 57391 +const COUNT_VALUES = 57392 +const GROUP = 57393 +const MAX = 57394 +const MIN = 57395 +const QUANTILE = 57396 +const STDDEV = 57397 +const STDVAR = 57398 +const SUM = 57399 +const TOPK = 57400 +const aggregatorsEnd = 57401 +const keywordsStart = 57402 +const BOOL = 57403 +const BY = 57404 +const GROUP_LEFT = 57405 +const GROUP_RIGHT = 57406 +const IGNORING = 57407 +const OFFSET = 57408 +const ON = 57409 +const WITHOUT = 57410 +const keywordsEnd = 57411 +const preprocessorStart = 57412 +const START = 57413 +const END = 57414 +const preprocessorEnd = 57415 +const startSymbolsStart = 57416 +const START_METRIC = 57417 +const START_SERIES_DESCRIPTION = 57418 +const START_EXPRESSION = 57419 +const START_METRIC_SELECTOR = 57420 +const startSymbolsEnd = 57421 var yyToknames = [...]string{ "$end", @@ -153,6 +154,7 @@ var yyToknames = [...]string{ "POW", "SUB", "AT", + "ATAN2", "operatorsEnd", "aggregatorsStart", 
"AVG", @@ -196,7 +198,7 @@ const yyEofCode = 1 const yyErrCode = 2 const yyInitialStackSize = 16 -//line generated_parser.y:747 +//line promql/parser/generated_parser.y:749 //line yacctab:1 var yyExca = [...]int{ @@ -204,258 +206,258 @@ var yyExca = [...]int{ 1, -1, -2, 0, -1, 35, - 1, 130, - 10, 130, - 22, 130, + 1, 131, + 10, 131, + 22, 131, -2, 0, -1, 58, - 2, 142, - 15, 142, - 61, 142, - 67, 142, - -2, 96, - -1, 59, 2, 143, 15, 143, - 61, 143, - 67, 143, + 62, 143, + 68, 143, -2, 97, - -1, 60, + -1, 59, 2, 144, 15, 144, - 61, 144, - 67, 144, - -2, 99, - -1, 61, + 62, 144, + 68, 144, + -2, 98, + -1, 60, 2, 145, 15, 145, - 61, 145, - 67, 145, + 62, 145, + 68, 145, -2, 100, - -1, 62, + -1, 61, 2, 146, 15, 146, - 61, 146, - 67, 146, + 62, 146, + 68, 146, -2, 101, - -1, 63, + -1, 62, 2, 147, 15, 147, - 61, 147, - 67, 147, - -2, 106, - -1, 64, + 62, 147, + 68, 147, + -2, 102, + -1, 63, 2, 148, 15, 148, - 61, 148, - 67, 148, - -2, 108, - -1, 65, + 62, 148, + 68, 148, + -2, 107, + -1, 64, 2, 149, 15, 149, - 61, 149, - 67, 149, - -2, 110, - -1, 66, + 62, 149, + 68, 149, + -2, 109, + -1, 65, 2, 150, 15, 150, - 61, 150, - 67, 150, + 62, 150, + 68, 150, -2, 111, - -1, 67, + -1, 66, 2, 151, 15, 151, - 61, 151, - 67, 151, + 62, 151, + 68, 151, -2, 112, - -1, 68, + -1, 67, 2, 152, 15, 152, - 61, 152, - 67, 152, + 62, 152, + 68, 152, -2, 113, - -1, 69, + -1, 68, 2, 153, 15, 153, - 61, 153, - 67, 153, + 62, 153, + 68, 153, -2, 114, - -1, 188, - 12, 197, - 13, 197, - 16, 197, - 17, 197, - 23, 197, - 26, 197, - 32, 197, - 33, 197, - 36, 197, - 42, 197, - 46, 197, - 47, 197, - 48, 197, - 49, 197, - 50, 197, - 51, 197, - 52, 197, - 53, 197, - 54, 197, - 55, 197, - 56, 197, - 57, 197, - 61, 197, - 65, 197, - 67, 197, - 70, 197, - 71, 197, + -1, 69, + 2, 154, + 15, 154, + 62, 154, + 68, 154, + -2, 115, + -1, 190, + 12, 199, + 13, 199, + 16, 199, + 17, 199, + 23, 199, + 26, 199, + 32, 199, + 33, 199, + 36, 199, + 42, 199, + 47, 199, + 48, 199, + 49, 199, + 50, 199, + 51, 199, + 52, 199, + 53, 199, + 54, 199, + 55, 199, + 56, 199, + 57, 199, + 58, 199, + 62, 199, + 66, 199, + 68, 199, + 71, 199, + 72, 199, -2, 0, - -1, 189, - 12, 197, - 13, 197, - 16, 197, - 17, 197, - 23, 197, - 26, 197, - 32, 197, - 33, 197, - 36, 197, - 42, 197, - 46, 197, - 47, 197, - 48, 197, - 49, 197, - 50, 197, - 51, 197, - 52, 197, - 53, 197, - 54, 197, - 55, 197, - 56, 197, - 57, 197, - 61, 197, - 65, 197, - 67, 197, - 70, 197, - 71, 197, + -1, 191, + 12, 199, + 13, 199, + 16, 199, + 17, 199, + 23, 199, + 26, 199, + 32, 199, + 33, 199, + 36, 199, + 42, 199, + 47, 199, + 48, 199, + 49, 199, + 50, 199, + 51, 199, + 52, 199, + 53, 199, + 54, 199, + 55, 199, + 56, 199, + 57, 199, + 58, 199, + 62, 199, + 66, 199, + 68, 199, + 71, 199, + 72, 199, -2, 0, - -1, 209, - 19, 195, + -1, 212, + 19, 197, -2, 0, - -1, 258, - 19, 196, + -1, 262, + 19, 198, -2, 0, } const yyPrivate = 57344 -const yyLast = 654 +const yyLast = 659 var yyAct = [...]int{ - 264, 37, 213, 140, 254, 253, 148, 112, 77, 101, - 100, 146, 186, 103, 187, 188, 189, 6, 102, 104, - 125, 267, 248, 147, 57, 99, 151, 247, 120, 51, - 72, 105, 53, 22, 52, 152, 72, 163, 265, 256, - 54, 268, 249, 70, 152, 243, 151, 205, 246, 18, - 19, 153, 95, 20, 98, 107, 105, 108, 242, 71, - 153, 106, 121, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 178, 97, 103, 13, 149, - 150, 109, 24, 104, 30, 33, 142, 31, 32, 2, - 3, 4, 5, 143, 210, 262, 177, 7, 209, 143, - 261, 168, 269, 154, 114, 79, 167, 164, 158, 161, - 156, 208, 157, 260, 113, 78, 48, 166, 81, 34, - 244, 175, 73, 185, 1, 176, 141, 184, 
190, 191, - 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, - 202, 203, 257, 47, 183, 204, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 160, 46, 170, 117, 171, 259, 8, 119, 116, 118, - 35, 155, 143, 159, 114, 245, 206, 207, 143, 115, - 36, 99, 51, 72, 113, 53, 22, 52, 250, 173, - 111, 251, 252, 54, 83, 255, 70, 10, 45, 172, - 174, 44, 18, 19, 92, 93, 20, 74, 95, 124, - 98, 56, 71, 258, 9, 9, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 215, 43, - 42, 13, 97, 79, 41, 24, 122, 30, 225, 162, - 31, 32, 231, 78, 40, 263, 270, 123, 180, 76, - 266, 39, 38, 49, 144, 182, 181, 80, 227, 228, - 151, 179, 229, 211, 271, 75, 145, 55, 272, 152, - 214, 169, 216, 218, 220, 221, 222, 230, 232, 235, - 236, 237, 238, 239, 50, 153, 217, 219, 223, 224, - 226, 233, 234, 110, 0, 0, 240, 241, 51, 72, + 268, 37, 216, 142, 258, 257, 150, 113, 77, 102, + 101, 104, 188, 271, 189, 190, 191, 105, 6, 126, + 218, 57, 253, 149, 154, 252, 251, 266, 180, 121, + 228, 260, 265, 272, 234, 103, 269, 144, 274, 247, + 155, 72, 213, 162, 145, 264, 212, 250, 106, 179, + 230, 231, 246, 153, 232, 108, 161, 109, 208, 211, + 106, 107, 245, 33, 122, 219, 221, 223, 224, 225, + 233, 235, 238, 239, 240, 241, 242, 143, 110, 220, + 222, 226, 227, 229, 236, 237, 115, 79, 7, 243, + 244, 2, 3, 4, 5, 104, 114, 78, 145, 263, + 170, 105, 248, 177, 156, 169, 145, 118, 166, 160, + 163, 158, 117, 159, 157, 10, 168, 100, 120, 273, + 119, 145, 81, 116, 187, 74, 178, 34, 186, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, + 203, 204, 205, 206, 96, 185, 99, 207, 127, 128, + 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, + 139, 140, 141, 182, 56, 1, 115, 9, 9, 98, + 184, 148, 172, 218, 173, 153, 114, 249, 209, 210, + 261, 8, 112, 228, 154, 35, 153, 234, 47, 46, + 254, 215, 79, 255, 256, 154, 45, 259, 44, 175, + 155, 125, 78, 230, 231, 43, 48, 232, 76, 174, + 176, 155, 73, 42, 41, 245, 262, 123, 219, 221, + 223, 224, 225, 233, 235, 238, 239, 240, 241, 242, + 164, 40, 220, 222, 226, 227, 229, 236, 237, 124, + 151, 152, 243, 244, 39, 38, 49, 146, 183, 267, + 80, 181, 214, 75, 270, 51, 72, 147, 53, 22, + 52, 55, 217, 165, 171, 50, 54, 111, 275, 70, + 0, 0, 276, 0, 0, 18, 19, 0, 0, 20, + 0, 0, 0, 0, 0, 71, 0, 0, 0, 0, + 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 68, 69, 0, 0, 0, 13, 0, 0, 0, 24, + 0, 30, 0, 0, 31, 32, 36, 100, 51, 72, 0, 53, 22, 52, 0, 0, 0, 0, 0, 54, - 0, 0, 70, 0, 0, 0, 0, 0, 18, 19, - 0, 0, 20, 0, 0, 0, 0, 0, 71, 0, - 0, 0, 58, 59, 60, 61, 62, 63, 64, 65, - 66, 67, 68, 69, 215, 0, 0, 13, 0, 0, - 0, 24, 0, 30, 225, 0, 31, 32, 231, 0, - 0, 0, 212, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 227, 228, 0, 0, 229, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 216, 218, - 220, 221, 222, 230, 232, 235, 236, 237, 238, 239, - 0, 0, 217, 219, 223, 224, 226, 233, 234, 0, - 17, 72, 240, 241, 22, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 18, 19, 0, 0, 20, 0, 17, 33, 0, 0, - 22, 0, 0, 0, 11, 12, 14, 15, 16, 21, - 23, 25, 26, 27, 28, 29, 18, 19, 0, 13, + 84, 0, 70, 0, 0, 0, 0, 0, 18, 19, + 93, 94, 20, 0, 96, 0, 99, 83, 71, 0, + 0, 0, 0, 58, 59, 60, 61, 62, 63, 64, + 65, 66, 67, 68, 69, 0, 0, 0, 13, 98, + 0, 0, 24, 0, 30, 0, 0, 31, 32, 51, + 72, 0, 53, 22, 52, 0, 0, 0, 0, 0, + 54, 0, 0, 70, 0, 0, 0, 0, 0, 18, + 19, 0, 0, 20, 0, 0, 17, 72, 0, 71, + 22, 0, 0, 0, 58, 59, 60, 61, 62, 63, + 64, 65, 66, 67, 68, 69, 18, 19, 0, 13, 20, 0, 0, 24, 0, 30, 0, 0, 31, 32, - 11, 12, 14, 15, 16, 21, 23, 25, 26, 27, - 28, 29, 0, 0, 99, 13, 0, 0, 0, 24, - 165, 30, 0, 0, 31, 32, 82, 83, 84, 0, - 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, - 0, 95, 
96, 98, 0, 0, 0, 0, 0, 0, - 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 82, 83, 84, 97, 85, 86, 87, 88, - 89, 90, 91, 92, 93, 94, 0, 95, 96, 98, - 0, 0, 99, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 82, 83, 84, 0, 85, 86, - 87, 97, 89, 90, 91, 92, 93, 94, 0, 95, - 96, 98, 0, 0, 99, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 82, 83, 84, 0, - 85, 86, 99, 97, 89, 90, 0, 92, 93, 94, - 0, 95, 96, 98, 82, 83, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 92, 93, 0, 0, 95, - 96, 98, 0, 0, 0, 97, 0, 0, 0, 0, + 0, 11, 12, 14, 15, 16, 21, 23, 25, 26, + 27, 28, 29, 17, 33, 0, 13, 22, 0, 0, + 24, 0, 30, 0, 0, 31, 32, 0, 0, 0, + 0, 0, 0, 18, 19, 0, 0, 20, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11, 12, + 14, 15, 16, 21, 23, 25, 26, 27, 28, 29, + 0, 0, 100, 13, 0, 0, 0, 24, 167, 30, + 0, 0, 31, 32, 82, 84, 85, 0, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 100, 96, + 97, 99, 83, 0, 0, 0, 0, 0, 0, 0, + 82, 84, 85, 0, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 98, 96, 97, 99, 83, 0, + 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 82, 84, 85, 0, 86, 87, 88, + 98, 90, 91, 92, 93, 94, 95, 100, 96, 97, + 99, 83, 0, 0, 0, 0, 0, 0, 0, 82, + 84, 85, 0, 86, 87, 0, 100, 90, 91, 0, + 93, 94, 95, 98, 96, 97, 99, 83, 82, 84, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 93, + 94, 0, 0, 96, 97, 99, 83, 0, 0, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 97, + 0, 0, 0, 0, 0, 0, 0, 0, 98, } var yyPact = [...]int{ - 15, 87, 424, 424, 170, 398, -1000, -1000, -1000, 72, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 16, 78, 441, 441, 306, 394, -1000, -1000, -1000, 50, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, 231, -1000, 116, -1000, 506, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - 16, 41, -1000, 286, -1000, 286, 23, -1000, -1000, -1000, + -1000, -1000, -1000, 190, -1000, 120, -1000, 514, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 172, -1000, -1000, 161, -1000, -1000, 165, -1000, - 6, -1000, -40, -40, -40, -40, -40, -40, -40, -40, - -40, -40, -40, -40, -40, -40, -40, 84, 9, 169, - 41, -48, -1000, 158, 158, 17, -1000, 470, 11, -1000, - 99, -1000, -1000, 160, -1000, -1000, 103, -1000, 73, -1000, - 243, 286, -1000, -52, -47, -1000, 286, 286, 286, 286, - 286, 286, 286, 286, 286, 286, 286, 286, 286, 286, - -1000, 90, -1000, -1000, -1000, 32, -1000, -1000, -1000, -1000, - -1000, -1000, 29, 29, 92, -1000, -1000, -1000, -1000, 342, - -1000, -1000, 38, -1000, 506, -1000, -1000, 102, -1000, 25, + 33, 45, -1000, 367, -1000, 367, 28, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -2, 18, -1000, -1000, -1000, 167, 158, 158, 158, 158, - 11, 588, 588, 588, 570, 538, 588, 588, 570, 11, - 11, 588, 11, 167, -1000, 19, -1000, -1000, -1000, 163, - -1000, 93, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, 164, -1000, -1000, 105, -1000, -1000, 116, -1000, + 7, -1000, -42, -42, -42, -42, -42, -42, -42, -42, + -42, -42, -42, -42, -42, -42, -42, -42, 35, 169, + 112, 45, -51, -1000, 41, 41, 243, -1000, 488, 103, + -1000, 98, -1000, -1000, 170, -1000, -1000, 85, -1000, 26, + -1000, 158, 367, -1000, -53, -48, -1000, 367, 367, 367, + 367, 367, 367, 367, 367, 367, 367, 367, 367, 367, + 367, 367, -1000, 89, -1000, -1000, -1000, 43, -1000, -1000, + -1000, -1000, -1000, -1000, 36, 36, 40, -1000, -1000, -1000, + -1000, 171, -1000, -1000, 32, -1000, 514, -1000, -1000, 84, + -1000, 24, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, 1, -2, -1000, -1000, -1000, 
303, 41, 41, + 41, 41, 103, 103, 592, 592, 592, 573, 547, 592, + 592, 573, 103, 103, 592, 103, 303, -1000, 11, -1000, + -1000, -1000, 97, -1000, 25, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, 286, -1000, -1000, -1000, -1000, 21, 21, - -3, -1000, -1000, -1000, -1000, -1000, -1000, 22, 100, -1000, - -1000, 226, -1000, 506, -1000, -1000, -1000, 21, -1000, -1000, - -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, 367, -1000, -1000, + -1000, -1000, 19, 19, -11, -1000, -1000, -1000, -1000, -1000, + -1000, 14, 117, -1000, -1000, 18, -1000, 514, -1000, -1000, + -1000, 19, -1000, -1000, -1000, -1000, -1000, } var yyPgo = [...]int{ - 0, 293, 7, 284, 2, 271, 270, 211, 267, 266, - 197, 265, 166, 8, 263, 4, 5, 261, 257, 0, - 23, 256, 6, 254, 253, 252, 10, 62, 251, 247, - 1, 244, 239, 9, 236, 24, 234, 230, 229, 209, - 201, 198, 161, 143, 116, 3, 142, 124, 119, + 0, 267, 7, 265, 2, 264, 262, 164, 261, 257, + 115, 253, 181, 8, 252, 4, 5, 251, 250, 0, + 23, 248, 6, 247, 246, 245, 10, 64, 244, 239, + 1, 231, 230, 9, 217, 21, 214, 213, 205, 201, + 198, 196, 189, 188, 206, 3, 180, 165, 127, } var yyR1 = [...]int{ @@ -463,22 +465,23 @@ var yyR1 = [...]int{ 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 25, 25, 25, 25, 26, 26, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, - 28, 27, 29, 29, 39, 39, 34, 34, 34, 34, - 15, 15, 15, 15, 14, 14, 14, 4, 4, 31, - 33, 33, 32, 32, 32, 40, 38, 38, 38, 24, - 24, 24, 9, 9, 36, 42, 42, 42, 42, 42, - 43, 44, 44, 44, 35, 35, 35, 1, 1, 1, - 2, 2, 2, 2, 12, 12, 7, 7, 7, 7, + 28, 28, 27, 29, 29, 39, 39, 34, 34, 34, + 34, 15, 15, 15, 15, 14, 14, 14, 4, 4, + 31, 33, 33, 32, 32, 32, 40, 38, 38, 38, + 24, 24, 24, 9, 9, 36, 42, 42, 42, 42, + 42, 43, 44, 44, 44, 35, 35, 35, 1, 1, + 1, 2, 2, 2, 2, 12, 12, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 7, 7, 7, 10, 10, - 10, 10, 11, 11, 11, 13, 13, 13, 13, 48, - 18, 18, 18, 18, 17, 17, 17, 17, 17, 21, - 21, 21, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, + 7, 7, 7, 7, 7, 7, 7, 7, 7, 10, + 10, 10, 10, 11, 11, 11, 13, 13, 13, 13, + 48, 18, 18, 18, 18, 17, 17, 17, 17, 17, + 21, 21, 21, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 8, 8, 5, 5, 5, 5, 37, 20, 22, 22, - 23, 23, 19, 45, 41, 46, 46, 16, 16, + 6, 6, 8, 8, 5, 5, 5, 5, 37, 20, + 22, 22, 23, 23, 19, 45, 41, 46, 46, 16, + 16, } var yyR2 = [...]int{ @@ -486,84 +489,85 @@ var yyR2 = [...]int{ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 1, 0, 1, 3, 3, 1, 1, 3, 3, - 3, 4, 2, 1, 3, 1, 2, 1, 1, 2, - 3, 2, 3, 1, 2, 3, 3, 4, 3, 3, - 5, 3, 1, 1, 4, 6, 6, 5, 4, 3, - 2, 2, 1, 1, 3, 4, 2, 3, 1, 2, - 3, 3, 2, 1, 2, 1, 1, 1, 1, 1, + 4, 4, 1, 0, 1, 3, 3, 1, 1, 3, + 3, 3, 4, 2, 1, 3, 1, 2, 1, 1, + 2, 3, 2, 3, 1, 2, 3, 3, 4, 3, + 3, 5, 3, 1, 1, 4, 6, 6, 5, 4, + 3, 2, 2, 1, 1, 3, 4, 2, 3, 1, + 2, 3, 3, 2, 1, 2, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, + 4, 2, 0, 3, 1, 2, 3, 3, 2, 1, + 2, 0, 3, 2, 1, 1, 3, 1, 3, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 3, 4, - 2, 0, 3, 1, 2, 3, 3, 2, 1, 2, - 0, 3, 2, 1, 1, 3, 1, 3, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, - 1, 1, 1, 1, 1, 0, 1, 0, 1, + 2, 2, 1, 1, 1, 1, 1, 0, 1, 0, + 1, } 
var yyChk = [...]int{ - -1000, -47, 74, 75, 76, 77, 2, 10, -12, -7, - -10, 46, 47, 61, 48, 49, 50, 12, 32, 33, - 36, 51, 16, 52, 65, 53, 54, 55, 56, 57, - 67, 70, 71, 13, -48, -12, 10, -30, -25, -28, + -1000, -47, 75, 76, 77, 78, 2, 10, -12, -7, + -10, 47, 48, 62, 49, 50, 51, 12, 32, 33, + 36, 52, 16, 53, 66, 54, 55, 56, 57, 58, + 68, 71, 72, 13, -48, -12, 10, -30, -25, -28, -31, -36, -37, -38, -40, -41, -42, -43, -44, -24, - -3, 12, 17, 15, 23, -8, -7, -35, 46, 47, - 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, + -3, 12, 17, 15, 23, -8, -7, -35, 47, 48, + 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 26, 42, 13, -44, -10, -11, 18, -13, 12, 2, - -18, 2, 26, 27, 28, 30, 31, 32, 33, 34, - 35, 36, 37, 38, 39, 41, 42, 65, 43, 14, - -26, -33, 2, 61, 67, 15, -33, -30, -30, -35, - -1, 18, -2, 12, 2, 18, 7, 2, 4, 2, - 22, -27, -34, -29, -39, 60, -27, -27, -27, -27, + -18, 2, 26, 44, 27, 28, 30, 31, 32, 33, + 34, 35, 36, 37, 38, 39, 41, 42, 66, 43, + 14, -26, -33, 2, 62, 68, 15, -33, -30, -30, + -35, -1, 18, -2, 12, 2, 18, 7, 2, 4, + 2, 22, -27, -34, -29, -39, 61, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, - -45, 42, 2, 9, -23, -9, 2, -20, -22, 70, - 71, 17, 26, 42, -45, 2, -33, -26, -15, 15, - 2, -15, -32, 20, -30, 20, 18, 7, 2, -5, - 2, 4, 39, 29, 40, 18, -13, 23, 2, -17, - 5, -21, 12, -20, -22, -30, 64, 66, 62, 63, - -30, -30, -30, -30, -30, -30, -30, -30, -30, -30, - -30, -30, -30, -30, -45, 15, -20, -20, 19, 6, - 2, -14, 20, -4, -6, 2, 46, 60, 47, 61, - 48, 49, 50, 62, 63, 12, 64, 32, 33, 36, - 51, 16, 52, 65, 66, 53, 54, 55, 56, 57, - 70, 71, 20, 7, 18, -2, 23, 2, 24, 24, - -22, -15, -15, -16, -15, -16, 20, -46, -45, 2, - 20, 7, 2, -30, -19, 17, -19, 24, 19, 2, - 20, -4, -19, + -27, -27, -45, 42, 2, 9, -23, -9, 2, -20, + -22, 71, 72, 17, 26, 42, -45, 2, -33, -26, + -15, 15, 2, -15, -32, 20, -30, 20, 18, 7, + 2, -5, 2, 4, 39, 29, 40, 18, -13, 23, + 2, -17, 5, -21, 12, -20, -22, -30, 65, 67, + 63, 64, -30, -30, -30, -30, -30, -30, -30, -30, + -30, -30, -30, -30, -30, -30, -30, -45, 15, -20, + -20, 19, 6, 2, -14, 20, -4, -6, 2, 47, + 61, 48, 62, 49, 50, 51, 63, 64, 12, 65, + 32, 33, 36, 52, 16, 53, 66, 67, 54, 55, + 56, 57, 58, 71, 72, 44, 20, 7, 18, -2, + 23, 2, 24, 24, -22, -15, -15, -16, -15, -16, + 20, -46, -45, 2, 20, 7, 2, -30, -19, 17, + -19, 24, 19, 2, 20, -4, -19, } var yyDef = [...]int{ - 0, -2, 121, 121, 0, 0, 7, 6, 1, 121, - 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 116, 117, 0, 2, -2, 3, 4, 8, 9, + 0, -2, 122, 122, 0, 0, 7, 6, 1, 122, + 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, + 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, + 116, 117, 118, 0, 2, -2, 3, 4, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 0, 102, 186, 0, 194, 0, 82, 83, -2, -2, + 0, 103, 188, 0, 196, 0, 83, 84, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - 180, 181, 0, 5, 94, 0, 120, 123, 0, 128, - 129, 133, 42, 42, 42, 42, 42, 42, 42, 42, - 42, 42, 42, 42, 42, 42, 42, 0, 0, 0, - 0, 22, 23, 0, 0, 0, 59, 0, 80, 81, - 0, 86, 88, 0, 93, 118, 0, 124, 0, 127, - 132, 0, 41, 46, 47, 43, 0, 0, 0, 0, + 182, 183, 0, 5, 95, 0, 121, 124, 0, 129, + 130, 134, 43, 43, 43, 43, 43, 43, 43, 43, + 43, 43, 43, 43, 43, 43, 43, 43, 0, 0, + 0, 0, 22, 23, 0, 0, 0, 60, 0, 81, + 82, 0, 87, 89, 0, 94, 119, 0, 125, 0, + 128, 133, 0, 42, 47, 48, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 66, 0, 68, 193, 69, 0, 71, 190, 191, 72, - 73, 187, 0, 0, 0, 79, 20, 21, 24, 0, - 53, 25, 0, 61, 63, 65, 84, 0, 89, 0, - 92, 182, 183, 184, 185, 119, 122, 125, 126, 131, - 
134, 136, 139, 140, 141, 26, 0, 0, -2, -2, - 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 67, 0, 188, 189, 74, -2, - 78, 0, 52, 55, 57, 58, 154, 155, 156, 157, - 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, - 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, - 178, 179, 60, 64, 85, 87, 90, 91, 0, 0, - 0, 44, 45, 48, 198, 49, 70, 0, -2, 77, - 50, 0, 56, 62, 135, 192, 137, 0, 75, 76, - 51, 54, 138, + 0, 0, 67, 0, 69, 195, 70, 0, 72, 192, + 193, 73, 74, 189, 0, 0, 0, 80, 20, 21, + 24, 0, 54, 25, 0, 62, 64, 66, 85, 0, + 90, 0, 93, 184, 185, 186, 187, 120, 123, 126, + 127, 132, 135, 137, 140, 141, 142, 26, 0, 0, + -2, -2, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 68, 0, 190, + 191, 75, -2, 79, 0, 53, 56, 58, 59, 155, + 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 61, 65, 86, 88, + 91, 92, 0, 0, 0, 45, 46, 49, 200, 50, + 71, 0, -2, 78, 51, 0, 57, 63, 136, 194, + 138, 0, 76, 77, 52, 55, 139, } var yyTok1 = [...]int{ @@ -578,7 +582,7 @@ var yyTok2 = [...]int{ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, + 72, 73, 74, 75, 76, 77, 78, 79, } var yyTok3 = [...]int{ @@ -924,62 +928,62 @@ yydefault: case 1: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:173 +//line promql/parser/generated_parser.y:174 { yylex.(*parser).generatedParserResult = yyDollar[2].labels } case 3: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:176 +//line promql/parser/generated_parser.y:177 { yylex.(*parser).addParseErrf(PositionRange{}, "no expression found in input") } case 4: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:178 +//line promql/parser/generated_parser.y:179 { yylex.(*parser).generatedParserResult = yyDollar[2].node } case 5: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:180 +//line promql/parser/generated_parser.y:181 { yylex.(*parser).generatedParserResult = yyDollar[2].node } case 7: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:183 +//line promql/parser/generated_parser.y:184 { yylex.(*parser).unexpected("", "") } case 20: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:206 +//line promql/parser/generated_parser.y:207 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[2].node, yyDollar[3].node) } case 21: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:208 +//line promql/parser/generated_parser.y:209 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[3].node, yyDollar[2].node) } case 22: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:210 +//line promql/parser/generated_parser.y:211 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, yyDollar[2].node) } case 23: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:212 +//line promql/parser/generated_parser.y:213 { yylex.(*parser).unexpected("aggregation", "") yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, Expressions{}) } case 24: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:220 +//line promql/parser/generated_parser.y:221 { yyVAL.node = &AggregateExpr{ Grouping: yyDollar[2].strings, @@ -987,7 +991,7 @@ yydefault: } case 25: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:226 +//line promql/parser/generated_parser.y:227 { yyVAL.node = &AggregateExpr{ Grouping: yyDollar[2].strings, @@ -996,205 
+1000,211 @@ yydefault: } case 26: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:239 +//line promql/parser/generated_parser.y:240 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 27: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:240 +//line promql/parser/generated_parser.y:241 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 28: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:241 +//line promql/parser/generated_parser.y:242 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 29: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:242 +//line promql/parser/generated_parser.y:243 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 30: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:243 +//line promql/parser/generated_parser.y:244 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 31: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:244 +//line promql/parser/generated_parser.y:245 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 32: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:245 +//line promql/parser/generated_parser.y:246 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 33: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:246 +//line promql/parser/generated_parser.y:247 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 34: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:247 +//line promql/parser/generated_parser.y:248 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 35: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:248 +//line promql/parser/generated_parser.y:249 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 36: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:249 +//line promql/parser/generated_parser.y:250 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 37: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:250 +//line promql/parser/generated_parser.y:251 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 38: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:251 +//line promql/parser/generated_parser.y:252 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 39: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:252 +//line promql/parser/generated_parser.y:253 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 40: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:253 +//line promql/parser/generated_parser.y:254 { yyVAL.node = 
yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } - case 42: + case 41: + yyDollar = yyS[yypt-4 : yypt+1] +//line promql/parser/generated_parser.y:255 + { + yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) + } + case 43: yyDollar = yyS[yypt-0 : yypt+1] -//line generated_parser.y:261 +//line promql/parser/generated_parser.y:263 { yyVAL.node = &BinaryExpr{ VectorMatching: &VectorMatching{Card: CardOneToOne}, } } - case 43: + case 44: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:266 +//line promql/parser/generated_parser.y:268 { yyVAL.node = &BinaryExpr{ VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: true, } } - case 44: + case 45: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:274 +//line promql/parser/generated_parser.y:276 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings } - case 45: + case 46: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:279 +//line promql/parser/generated_parser.y:281 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings yyVAL.node.(*BinaryExpr).VectorMatching.On = true } - case 48: + case 49: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:289 +//line promql/parser/generated_parser.y:291 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardManyToOne yyVAL.node.(*BinaryExpr).VectorMatching.Include = yyDollar[3].strings } - case 49: + case 50: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:295 +//line promql/parser/generated_parser.y:297 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardOneToMany yyVAL.node.(*BinaryExpr).VectorMatching.Include = yyDollar[3].strings } - case 50: + case 51: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:304 +//line promql/parser/generated_parser.y:306 { yyVAL.strings = yyDollar[2].strings } - case 51: + case 52: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:306 +//line promql/parser/generated_parser.y:308 { yyVAL.strings = yyDollar[2].strings } - case 52: + case 53: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:308 +//line promql/parser/generated_parser.y:310 { yyVAL.strings = []string{} } - case 53: + case 54: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:310 +//line promql/parser/generated_parser.y:312 { yylex.(*parser).unexpected("grouping opts", "\"(\"") yyVAL.strings = nil } - case 54: + case 55: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:316 +//line promql/parser/generated_parser.y:318 { yyVAL.strings = append(yyDollar[1].strings, yyDollar[3].item.Val) } - case 55: + case 56: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:318 +//line promql/parser/generated_parser.y:320 { yyVAL.strings = []string{yyDollar[1].item.Val} } - case 56: + case 57: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:320 +//line promql/parser/generated_parser.y:322 { yylex.(*parser).unexpected("grouping opts", "\",\" or \")\"") yyVAL.strings = yyDollar[1].strings } - case 57: + case 58: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:324 +//line promql/parser/generated_parser.y:326 { if !isLabel(yyDollar[1].item.Val) { yylex.(*parser).unexpected("grouping opts", "label") } yyVAL.item = yyDollar[1].item } - case 58: + case 59: yyDollar = yyS[yypt-1 : yypt+1] -//line 
generated_parser.y:331 +//line promql/parser/generated_parser.y:333 { yylex.(*parser).unexpected("grouping opts", "label") yyVAL.item = Item{} } - case 59: + case 60: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:339 +//line promql/parser/generated_parser.y:341 { fn, exist := getFunction(yyDollar[1].item.Val) if !exist { @@ -1209,88 +1219,88 @@ yydefault: }, } } - case 60: + case 61: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:356 +//line promql/parser/generated_parser.y:358 { yyVAL.node = yyDollar[2].node } - case 61: + case 62: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:358 +//line promql/parser/generated_parser.y:360 { yyVAL.node = Expressions{} } - case 62: + case 63: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:362 +//line promql/parser/generated_parser.y:364 { yyVAL.node = append(yyDollar[1].node.(Expressions), yyDollar[3].node.(Expr)) } - case 63: + case 64: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:364 +//line promql/parser/generated_parser.y:366 { yyVAL.node = Expressions{yyDollar[1].node.(Expr)} } - case 64: + case 65: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:366 +//line promql/parser/generated_parser.y:368 { yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "trailing commas not allowed in function call args") yyVAL.node = yyDollar[1].node } - case 65: + case 66: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:377 +//line promql/parser/generated_parser.y:379 { yyVAL.node = &ParenExpr{Expr: yyDollar[2].node.(Expr), PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item)} } - case 66: + case 67: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:385 +//line promql/parser/generated_parser.y:387 { yylex.(*parser).addOffset(yyDollar[1].node, yyDollar[3].duration) yyVAL.node = yyDollar[1].node } - case 67: + case 68: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:390 +//line promql/parser/generated_parser.y:392 { yylex.(*parser).addOffset(yyDollar[1].node, -yyDollar[4].duration) yyVAL.node = yyDollar[1].node } - case 68: + case 69: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:395 +//line promql/parser/generated_parser.y:397 { yylex.(*parser).unexpected("offset", "duration") yyVAL.node = yyDollar[1].node } - case 69: + case 70: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:402 +//line promql/parser/generated_parser.y:404 { yylex.(*parser).setTimestamp(yyDollar[1].node, yyDollar[3].float) yyVAL.node = yyDollar[1].node } - case 70: + case 71: yyDollar = yyS[yypt-5 : yypt+1] -//line generated_parser.y:407 +//line promql/parser/generated_parser.y:409 { yylex.(*parser).setAtModifierPreprocessor(yyDollar[1].node, yyDollar[3].item) yyVAL.node = yyDollar[1].node } - case 71: + case 72: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:412 +//line promql/parser/generated_parser.y:414 { yylex.(*parser).unexpected("@", "timestamp") yyVAL.node = yyDollar[1].node } - case 74: + case 75: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:422 +//line promql/parser/generated_parser.y:424 { var errMsg string vs, ok := yyDollar[1].node.(*VectorSelector) @@ -1313,9 +1323,9 @@ yydefault: EndPos: yylex.(*parser).lastClosing, } } - case 75: + case 76: yyDollar = yyS[yypt-6 : yypt+1] -//line generated_parser.y:447 +//line promql/parser/generated_parser.y:449 { yyVAL.node = &SubqueryExpr{ Expr: yyDollar[1].node.(Expr), @@ -1325,37 +1335,37 @@ yydefault: EndPos: yyDollar[6].item.Pos + 1, } } - case 76: + case 77: yyDollar = 
yyS[yypt-6 : yypt+1] -//line generated_parser.y:457 +//line promql/parser/generated_parser.y:459 { yylex.(*parser).unexpected("subquery selector", "\"]\"") yyVAL.node = yyDollar[1].node } - case 77: + case 78: yyDollar = yyS[yypt-5 : yypt+1] -//line generated_parser.y:459 +//line promql/parser/generated_parser.y:461 { yylex.(*parser).unexpected("subquery selector", "duration or \"]\"") yyVAL.node = yyDollar[1].node } - case 78: + case 79: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:461 +//line promql/parser/generated_parser.y:463 { yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\"") yyVAL.node = yyDollar[1].node } - case 79: + case 80: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:463 +//line promql/parser/generated_parser.y:465 { yylex.(*parser).unexpected("subquery selector", "duration") yyVAL.node = yyDollar[1].node } - case 80: + case 81: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:473 +//line promql/parser/generated_parser.y:475 { if nl, ok := yyDollar[2].node.(*NumberLiteral); ok { if yyDollar[1].item.Typ == SUB { @@ -1367,9 +1377,9 @@ yydefault: yyVAL.node = &UnaryExpr{Op: yyDollar[1].item.Typ, Expr: yyDollar[2].node.(Expr), StartPos: yyDollar[1].item.Pos} } } - case 81: + case 82: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:491 +//line promql/parser/generated_parser.y:493 { vs := yyDollar[2].node.(*VectorSelector) vs.PosRange = mergeRanges(&yyDollar[1].item, vs) @@ -1377,9 +1387,9 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 82: + case 83: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:499 +//line promql/parser/generated_parser.y:501 { vs := &VectorSelector{ Name: yyDollar[1].item.Val, @@ -1389,44 +1399,44 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 83: + case 84: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:509 +//line promql/parser/generated_parser.y:511 { vs := yyDollar[1].node.(*VectorSelector) yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 84: + case 85: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:517 +//line promql/parser/generated_parser.y:519 { yyVAL.node = &VectorSelector{ LabelMatchers: yyDollar[2].matchers, PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item), } } - case 85: + case 86: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:524 +//line promql/parser/generated_parser.y:526 { yyVAL.node = &VectorSelector{ LabelMatchers: yyDollar[2].matchers, PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[4].item), } } - case 86: + case 87: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:531 +//line promql/parser/generated_parser.y:533 { yyVAL.node = &VectorSelector{ LabelMatchers: []*labels.Matcher{}, PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[2].item), } } - case 87: + case 88: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:540 +//line promql/parser/generated_parser.y:542 { if yyDollar[1].matchers != nil { yyVAL.matchers = append(yyDollar[1].matchers, yyDollar[3].matcher) @@ -1434,196 +1444,196 @@ yydefault: yyVAL.matchers = yyDollar[1].matchers } } - case 88: + case 89: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:548 +//line promql/parser/generated_parser.y:550 { yyVAL.matchers = []*labels.Matcher{yyDollar[1].matcher} } - case 89: + case 90: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:550 +//line promql/parser/generated_parser.y:552 { yylex.(*parser).unexpected("label matching", "\",\" or 
\"}\"") yyVAL.matchers = yyDollar[1].matchers } - case 90: + case 91: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:554 +//line promql/parser/generated_parser.y:556 { yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) } - case 91: + case 92: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:556 +//line promql/parser/generated_parser.y:558 { yylex.(*parser).unexpected("label matching", "string") yyVAL.matcher = nil } - case 92: + case 93: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:558 +//line promql/parser/generated_parser.y:560 { yylex.(*parser).unexpected("label matching", "label matching operator") yyVAL.matcher = nil } - case 93: + case 94: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:560 +//line promql/parser/generated_parser.y:562 { yylex.(*parser).unexpected("label matching", "identifier or \"}\"") yyVAL.matcher = nil } - case 94: + case 95: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:568 +//line promql/parser/generated_parser.y:570 { yyVAL.labels = append(yyDollar[2].labels, labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val}) sort.Sort(yyVAL.labels) } - case 95: + case 96: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:570 +//line promql/parser/generated_parser.y:572 { yyVAL.labels = yyDollar[1].labels } - case 118: + case 119: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:577 +//line promql/parser/generated_parser.y:579 { yyVAL.labels = labels.New(yyDollar[2].labels...) } - case 119: + case 120: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:579 +//line promql/parser/generated_parser.y:581 { yyVAL.labels = labels.New(yyDollar[2].labels...) } - case 120: + case 121: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:581 +//line promql/parser/generated_parser.y:583 { yyVAL.labels = labels.New() } - case 121: + case 122: yyDollar = yyS[yypt-0 : yypt+1] -//line generated_parser.y:583 +//line promql/parser/generated_parser.y:585 { yyVAL.labels = labels.New() } - case 122: + case 123: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:587 +//line promql/parser/generated_parser.y:589 { yyVAL.labels = append(yyDollar[1].labels, yyDollar[3].label) } - case 123: + case 124: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:589 +//line promql/parser/generated_parser.y:591 { yyVAL.labels = []labels.Label{yyDollar[1].label} } - case 124: + case 125: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:591 +//line promql/parser/generated_parser.y:593 { yylex.(*parser).unexpected("label set", "\",\" or \"}\"") yyVAL.labels = yyDollar[1].labels } - case 125: + case 126: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:596 +//line promql/parser/generated_parser.y:598 { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } - case 126: + case 127: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:598 +//line promql/parser/generated_parser.y:600 { yylex.(*parser).unexpected("label set", "string") yyVAL.label = labels.Label{} } - case 127: + case 128: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:600 +//line promql/parser/generated_parser.y:602 { yylex.(*parser).unexpected("label set", "\"=\"") yyVAL.label = labels.Label{} } - case 128: + case 129: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:602 +//line promql/parser/generated_parser.y:604 { yylex.(*parser).unexpected("label set", "identifier or 
\"}\"") yyVAL.label = labels.Label{} } - case 129: + case 130: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:610 +//line promql/parser/generated_parser.y:612 { yylex.(*parser).generatedParserResult = &seriesDescription{ labels: yyDollar[1].labels, values: yyDollar[2].series, } } - case 130: + case 131: yyDollar = yyS[yypt-0 : yypt+1] -//line generated_parser.y:619 +//line promql/parser/generated_parser.y:621 { yyVAL.series = []SequenceValue{} } - case 131: + case 132: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:621 +//line promql/parser/generated_parser.y:623 { yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...) } - case 132: + case 133: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:623 +//line promql/parser/generated_parser.y:625 { yyVAL.series = yyDollar[1].series } - case 133: + case 134: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:625 +//line promql/parser/generated_parser.y:627 { yylex.(*parser).unexpected("series values", "") yyVAL.series = nil } - case 134: + case 135: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:629 +//line promql/parser/generated_parser.y:631 { yyVAL.series = []SequenceValue{{Omitted: true}} } - case 135: + case 136: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:631 +//line promql/parser/generated_parser.y:633 { yyVAL.series = []SequenceValue{} for i := uint64(0); i < yyDollar[3].uint; i++ { yyVAL.series = append(yyVAL.series, SequenceValue{Omitted: true}) } } - case 136: + case 137: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:638 +//line promql/parser/generated_parser.y:640 { yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}} } - case 137: + case 138: yyDollar = yyS[yypt-3 : yypt+1] -//line generated_parser.y:640 +//line promql/parser/generated_parser.y:642 { yyVAL.series = []SequenceValue{} for i := uint64(0); i <= yyDollar[3].uint; i++ { yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float}) } } - case 138: + case 139: yyDollar = yyS[yypt-4 : yypt+1] -//line generated_parser.y:647 +//line promql/parser/generated_parser.y:649 { yyVAL.series = []SequenceValue{} for i := uint64(0); i <= yyDollar[4].uint; i++ { @@ -1631,45 +1641,45 @@ yydefault: yyDollar[1].float += yyDollar[2].float } } - case 139: + case 140: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:657 +//line promql/parser/generated_parser.y:659 { if yyDollar[1].item.Val != "stale" { yylex.(*parser).unexpected("series values", "number or \"stale\"") } yyVAL.float = math.Float64frombits(value.StaleNaN) } - case 186: + case 188: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:688 +//line promql/parser/generated_parser.y:690 { yyVAL.node = &NumberLiteral{ Val: yylex.(*parser).number(yyDollar[1].item.Val), PosRange: yyDollar[1].item.PositionRange(), } } - case 187: + case 189: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:696 +//line promql/parser/generated_parser.y:698 { yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val) } - case 188: + case 190: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:698 +//line promql/parser/generated_parser.y:700 { yyVAL.float = yyDollar[2].float } - case 189: + case 191: yyDollar = yyS[yypt-2 : yypt+1] -//line generated_parser.y:699 +//line promql/parser/generated_parser.y:701 { yyVAL.float = -yyDollar[2].float } - case 192: + case 194: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:705 +//line promql/parser/generated_parser.y:707 { var err error yyVAL.uint, err = 
strconv.ParseUint(yyDollar[1].item.Val, 10, 64) @@ -1677,9 +1687,9 @@ yydefault: yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "invalid repetition in series values: %s", err) } } - case 193: + case 195: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:715 +//line promql/parser/generated_parser.y:717 { var err error yyVAL.duration, err = parseDuration(yyDollar[1].item.Val) @@ -1687,24 +1697,24 @@ yydefault: yylex.(*parser).addParseErr(yyDollar[1].item.PositionRange(), err) } } - case 194: + case 196: yyDollar = yyS[yypt-1 : yypt+1] -//line generated_parser.y:726 +//line promql/parser/generated_parser.y:728 { yyVAL.node = &StringLiteral{ Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val), PosRange: yyDollar[1].item.PositionRange(), } } - case 195: + case 197: yyDollar = yyS[yypt-0 : yypt+1] -//line generated_parser.y:739 +//line promql/parser/generated_parser.y:741 { yyVAL.duration = 0 } - case 197: + case 199: yyDollar = yyS[yypt-0 : yypt+1] -//line generated_parser.y:743 +//line promql/parser/generated_parser.y:745 { yyVAL.strings = nil } diff --git a/vendor/github.com/prometheus/prometheus/promql/parser/lex.go b/vendor/github.com/prometheus/prometheus/promql/parser/lex.go index 313bd8f88b0..e1dee335645 100644 --- a/vendor/github.com/prometheus/prometheus/promql/parser/lex.go +++ b/vendor/github.com/prometheus/prometheus/promql/parser/lex.go @@ -97,6 +97,7 @@ var key = map[string]ItemType{ "and": LAND, "or": LOR, "unless": LUNLESS, + "atan2": ATAN2, // Aggregators. "sum": SUM, diff --git a/vendor/github.com/prometheus/prometheus/promql/test.go b/vendor/github.com/prometheus/prometheus/promql/test.go index dc67b08bf95..d577b25d128 100644 --- a/vendor/github.com/prometheus/prometheus/promql/test.go +++ b/vendor/github.com/prometheus/prometheus/promql/test.go @@ -25,6 +25,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" "github.com/prometheus/prometheus/pkg/exemplar" "github.com/prometheus/prometheus/pkg/labels" @@ -597,9 +598,8 @@ func (t *Test) exec(tc testCommand) error { // clear the current test storage of all inserted samples. func (t *Test) clear() { if t.storage != nil { - if err := t.storage.Close(); err != nil { - t.T.Fatalf("closing test storage: %s", err) - } + err := t.storage.Close() + require.NoError(t.T, err, "Unexpected error while closing test storage.") } if t.cancelCtx != nil { t.cancelCtx() @@ -623,9 +623,8 @@ func (t *Test) clear() { func (t *Test) Close() { t.cancelCtx() - if err := t.storage.Close(); err != nil { - t.T.Fatalf("closing test storage: %s", err) - } + err := t.storage.Close() + require.NoError(t.T, err, "Unexpected error while closing test storage.") } // samplesAlmostEqual returns true if the two sample lines only differ by a @@ -722,9 +721,8 @@ func (ll *LazyLoader) parse(input string) error { // clear the current test storage of all inserted samples. func (ll *LazyLoader) clear() { if ll.storage != nil { - if err := ll.storage.Close(); err != nil { - ll.T.Fatalf("closing test storage: %s", err) - } + err := ll.storage.Close() + require.NoError(ll.T, err, "Unexpected error while closing test storage.") } if ll.cancelCtx != nil { ll.cancelCtx() @@ -798,8 +796,6 @@ func (ll *LazyLoader) Storage() storage.Storage { // Close closes resources associated with the LazyLoader. 
func (ll *LazyLoader) Close() { ll.cancelCtx() - - if err := ll.storage.Close(); err != nil { - ll.T.Fatalf("closing test storage: %s", err) - } + err := ll.storage.Close() + require.NoError(ll.T, err, "Unexpected error while closing test storage.") } diff --git a/vendor/github.com/prometheus/prometheus/rules/alerting.go b/vendor/github.com/prometheus/prometheus/rules/alerting.go index 15cd1cf6027..5e7b8975c81 100644 --- a/vendor/github.com/prometheus/prometheus/rules/alerting.go +++ b/vendor/github.com/prometheus/prometheus/rules/alerting.go @@ -297,7 +297,7 @@ const resolvedRetention = 15 * time.Minute // Eval evaluates the rule expression and then creates pending alerts and fires // or removes previously pending alerts accordingly. -func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL) (promql.Vector, error) { +func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL, limit int) (promql.Vector, error) { res, err := query(ctx, r.vector.String(), ts) if err != nil { return nil, err @@ -338,6 +338,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, model.Time(timestamp.FromTime(ts)), template.QueryFunc(query), externalURL, + nil, ) result, err := tmpl.Expand() if err != nil { @@ -414,6 +415,12 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, } } + numActive := len(r.active) + if limit != 0 && numActive > limit { + r.active = map[uint64]*Alert{} + return nil, errors.Errorf("exceeded limit of %d with %d alerts", limit, numActive) + } + return vec, nil } diff --git a/vendor/github.com/prometheus/prometheus/rules/manager.go b/vendor/github.com/prometheus/prometheus/rules/manager.go index 63186d1d806..fa8cd6763a2 100644 --- a/vendor/github.com/prometheus/prometheus/rules/manager.go +++ b/vendor/github.com/prometheus/prometheus/rules/manager.go @@ -213,7 +213,7 @@ type Rule interface { // Labels of the rule. Labels() labels.Labels // eval evaluates the rule, including any associated recording or alerting actions. - Eval(context.Context, time.Time, QueryFunc, *url.URL) (promql.Vector, error) + Eval(context.Context, time.Time, QueryFunc, *url.URL, int) (promql.Vector, error) // String returns a human-readable string representation of the rule. String() string // Query returns the rule query expression. @@ -244,6 +244,7 @@ type Group struct { name string file string interval time.Duration + limit int rules []Rule seriesInPreviousEval []map[string]labels.Labels // One per Rule. staleSeries []labels.Labels @@ -267,6 +268,7 @@ type Group struct { type GroupOptions struct { Name, File string Interval time.Duration + Limit int Rules []Rule ShouldRestore bool Opts *ManagerOptions @@ -295,6 +297,7 @@ func NewGroup(o GroupOptions) *Group { name: o.Name, file: o.File, interval: o.Interval, + limit: o.Limit, rules: o.Rules, shouldRestore: o.ShouldRestore, opts: o.Opts, @@ -319,6 +322,9 @@ func (g *Group) Rules() []Rule { return g.rules } // Interval returns the group's interval. func (g *Group) Interval() time.Duration { return g.interval } +// Limit returns the group's limit. 
+func (g *Group) Limit() int { return g.limit } + func (g *Group) run(ctx context.Context) { defer close(g.terminated) @@ -591,7 +597,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) { g.metrics.EvalTotal.WithLabelValues(GroupKey(g.File(), g.Name())).Inc() - vector, err := rule.Eval(ctx, ts, g.opts.QueryFunc, g.opts.ExternalURL) + vector, err := rule.Eval(ctx, ts, g.opts.QueryFunc, g.opts.ExternalURL, g.Limit()) if err != nil { rule.SetHealth(HealthBad) rule.SetLastError(err) @@ -850,6 +856,10 @@ func (g *Group) Equals(ng *Group) bool { return false } + if g.limit != ng.limit { + return false + } + if len(g.rules) != len(ng.rules) { return false } @@ -1086,6 +1096,7 @@ func (m *Manager) LoadGroups( Name: rg.Name, File: fn, Interval: itv, + Limit: rg.Limit, Rules: rules, ShouldRestore: shouldRestore, Opts: m.opts, diff --git a/vendor/github.com/prometheus/prometheus/rules/recording.go b/vendor/github.com/prometheus/prometheus/rules/recording.go index 1ffe98cbdae..08a7e37ca36 100644 --- a/vendor/github.com/prometheus/prometheus/rules/recording.go +++ b/vendor/github.com/prometheus/prometheus/rules/recording.go @@ -73,7 +73,7 @@ func (rule *RecordingRule) Labels() labels.Labels { } // Eval evaluates the rule and then overrides the metric names and labels accordingly. -func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, _ *url.URL) (promql.Vector, error) { +func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, _ *url.URL, limit int) (promql.Vector, error) { vector, err := query(ctx, rule.vector.String(), ts) if err != nil { return nil, err @@ -99,6 +99,13 @@ func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFu return nil, fmt.Errorf("vector contains metrics with the same labelset after applying rule labels") } + numSamples := len(vector) + if limit != 0 && numSamples > limit { + return nil, fmt.Errorf("exceeded limit %d with %d samples", limit, numSamples) + } + + rule.SetHealth(HealthGood) + rule.SetLastError(err) return vector, nil } diff --git a/vendor/github.com/prometheus/prometheus/scrape/scrape.go b/vendor/github.com/prometheus/prometheus/scrape/scrape.go index 9ae9932c694..f3622cf2ea9 100644 --- a/vendor/github.com/prometheus/prometheus/scrape/scrape.go +++ b/vendor/github.com/prometheus/prometheus/scrape/scrape.go @@ -269,7 +269,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed logger = log.NewNopLogger() } - client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, config_util.WithHTTP2Disabled()) + client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName) if err != nil { targetScrapePoolsFailed.Inc() return nil, errors.Wrap(err, "error creating HTTP client") @@ -380,7 +380,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error { targetScrapePoolReloads.Inc() start := time.Now() - client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, config_util.WithHTTP2Disabled()) + client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName) if err != nil { targetScrapePoolReloadsFailed.Inc() return errors.Wrap(err, "error creating HTTP client") @@ -721,7 +721,7 @@ var errBodySizeLimit = errors.New("body size limit exceeded") const acceptHeader = `application/openmetrics-text; version=0.0.1,text/plain;version=0.0.4;q=0.5,*/*;q=0.1` -var userAgentHeader = fmt.Sprintf("Prometheus/%s", version.Version) +var UserAgent = fmt.Sprintf("Prometheus/%s", 
version.Version) func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error) { if s.req == nil { @@ -731,7 +731,7 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error) } req.Header.Add("Accept", acceptHeader) req.Header.Add("Accept-Encoding", "gzip") - req.Header.Set("User-Agent", userAgentHeader) + req.Header.Set("User-Agent", UserAgent) req.Header.Set("X-Prometheus-Scrape-Timeout-Seconds", strconv.FormatFloat(s.timeout.Seconds(), 'f', -1, 64)) s.req = req diff --git a/vendor/github.com/prometheus/prometheus/storage/merge.go b/vendor/github.com/prometheus/prometheus/storage/merge.go index 1c08a537f4b..bf81fcc6dd7 100644 --- a/vendor/github.com/prometheus/prometheus/storage/merge.go +++ b/vendor/github.com/prometheus/prometheus/storage/merge.go @@ -18,7 +18,6 @@ import ( "container/heap" "math" "sort" - "strings" "sync" "github.com/pkg/errors" @@ -197,15 +196,13 @@ func mergeStrings(a, b []string) []string { res := make([]string, 0, maxl*10/9) for len(a) > 0 && len(b) > 0 { - d := strings.Compare(a[0], b[0]) - - if d == 0 { + if a[0] == b[0] { res = append(res, a[0]) a, b = a[1:], b[1:] - } else if d < 0 { + } else if a[0] < b[0] { res = append(res, a[0]) a = a[1:] - } else if d > 0 { + } else { res = append(res, b[0]) b = b[1:] } diff --git a/vendor/github.com/prometheus/prometheus/storage/remote/client.go b/vendor/github.com/prometheus/prometheus/storage/remote/client.go index 2d6c5a10abb..7539c2c92e7 100644 --- a/vendor/github.com/prometheus/prometheus/storage/remote/client.go +++ b/vendor/github.com/prometheus/prometheus/storage/remote/client.go @@ -110,7 +110,7 @@ type ReadClient interface { // NewReadClient creates a new client for remote read. func NewReadClient(name string, conf *ClientConfig) (ReadClient, error) { - httpClient, err := config_util.NewClientFromConfig(conf.HTTPClientConfig, "remote_storage_read_client", config_util.WithHTTP2Disabled()) + httpClient, err := config_util.NewClientFromConfig(conf.HTTPClientConfig, "remote_storage_read_client") if err != nil { return nil, err } @@ -136,7 +136,7 @@ func NewReadClient(name string, conf *ClientConfig) (ReadClient, error) { // NewWriteClient creates a new client for remote write. 
func NewWriteClient(name string, conf *ClientConfig) (WriteClient, error) { - httpClient, err := config_util.NewClientFromConfig(conf.HTTPClientConfig, "remote_storage_write_client", config_util.WithHTTP2Disabled()) + httpClient, err := config_util.NewClientFromConfig(conf.HTTPClientConfig, "remote_storage_write_client") if err != nil { return nil, err } diff --git a/vendor/github.com/prometheus/prometheus/storage/remote/codec.go b/vendor/github.com/prometheus/prometheus/storage/remote/codec.go index 0bd1b976221..545138da565 100644 --- a/vendor/github.com/prometheus/prometheus/storage/remote/codec.go +++ b/vendor/github.com/prometheus/prometheus/storage/remote/codec.go @@ -26,6 +26,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/pkg/exemplar" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/textparse" "github.com/prometheus/prometheus/prompb" @@ -450,6 +451,17 @@ func FromLabelMatchers(matchers []*prompb.LabelMatcher) ([]*labels.Matcher, erro return result, nil } +func exemplarProtoToExemplar(ep prompb.Exemplar) exemplar.Exemplar { + timestamp := ep.Timestamp + + return exemplar.Exemplar{ + Labels: labelProtosToLabels(ep.Labels), + Value: ep.Value, + Ts: timestamp, + HasTs: timestamp != 0, + } +} + // LabelProtosToMetric unpack a []*prompb.Label to a model.Metric func LabelProtosToMetric(labelPairs []*prompb.Label) model.Metric { metric := make(model.Metric, len(labelPairs)) diff --git a/vendor/github.com/prometheus/prometheus/storage/remote/write.go b/vendor/github.com/prometheus/prometheus/storage/remote/write.go index 2d96f70ae2b..b3fec364a7a 100644 --- a/vendor/github.com/prometheus/prometheus/storage/remote/write.go +++ b/vendor/github.com/prometheus/prometheus/storage/remote/write.go @@ -158,7 +158,10 @@ func (rws *WriteStorage) ApplyConfig(conf *config.Config) error { continue } - endpoint := rwConf.URL.String() + // Redacted to remove any passwords in the URL (that are + // technically accepted but not recommended) since this is + // only used for metric labels. + endpoint := rwConf.URL.Redacted() newQueues[hash] = NewQueueManager( newQueueManagerMetrics(rws.reg, name, endpoint), rws.watcherMetrics, diff --git a/vendor/github.com/prometheus/prometheus/storage/remote/write_handler.go b/vendor/github.com/prometheus/prometheus/storage/remote/write_handler.go index a3dee6136ae..92637cf471d 100644 --- a/vendor/github.com/prometheus/prometheus/storage/remote/write_handler.go +++ b/vendor/github.com/prometheus/prometheus/storage/remote/write_handler.go @@ -15,10 +15,14 @@ package remote import ( "context" + "fmt" "net/http" "github.com/go-kit/log" "github.com/go-kit/log/level" + "github.com/pkg/errors" + + "github.com/prometheus/prometheus/pkg/exemplar" "github.com/prometheus/prometheus/prompb" "github.com/prometheus/prometheus/storage" ) @@ -62,16 +66,35 @@ func (h *writeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNoContent) } +// checkAppendExemplarError modifies the AppendExamplar's returned error based on the error cause. 
+func (h *writeHandler) checkAppendExemplarError(err error, e exemplar.Exemplar, outOfOrderErrs *int) error { + switch errors.Cause(err) { + case storage.ErrNotFound: + return storage.ErrNotFound + case storage.ErrOutOfOrderExemplar: + *outOfOrderErrs++ + level.Debug(h.logger).Log("msg", "Out of order exemplar", "exemplar", fmt.Sprintf("%+v", e)) + return nil + default: + return err + } +} + func (h *writeHandler) write(ctx context.Context, req *prompb.WriteRequest) (err error) { + var ( + outOfOrderExemplarErrs = 0 + ) + app := h.appendable.Appender(ctx) defer func() { if err != nil { - app.Rollback() + _ = app.Rollback() return } err = app.Commit() }() + var exemplarErr error for _, ts := range req.Timeseries { labels := labelProtosToLabels(ts.Labels) for _, s := range ts.Samples { @@ -79,7 +102,23 @@ func (h *writeHandler) write(ctx context.Context, req *prompb.WriteRequest) (err if err != nil { return err } + } + + for _, ep := range ts.Exemplars { + e := exemplarProtoToExemplar(ep) + + _, exemplarErr = app.AppendExemplar(0, labels, e) + exemplarErr = h.checkAppendExemplarError(exemplarErr, e, &outOfOrderExemplarErrs) + if exemplarErr != nil { + // Since exemplar storage is still experimental, we don't fail the request on ingestion errors. + level.Debug(h.logger).Log("msg", "Error while adding exemplar in AddExemplar", "exemplar", fmt.Sprintf("%+v", e), "err", exemplarErr) + } + } + } + + if outOfOrderExemplarErrs > 0 { + _ = level.Warn(h.logger).Log("msg", "Error on ingesting out-of-order exemplars", "num_dropped", outOfOrderExemplarErrs) } return nil diff --git a/vendor/github.com/prometheus/prometheus/template/template.go b/vendor/github.com/prometheus/prometheus/template/template.go index b0d8f5cc037..dca5aa432a9 100644 --- a/vendor/github.com/prometheus/prometheus/template/template.go +++ b/vendor/github.com/prometheus/prometheus/template/template.go @@ -115,6 +115,7 @@ type Expander struct { name string data interface{} funcMap text_template.FuncMap + options []string } // NewTemplateExpander returns a template expander ready to use. @@ -126,7 +127,11 @@ func NewTemplateExpander( timestamp model.Time, queryFunc QueryFunc, externalURL *url.URL, + options []string, ) *Expander { + if options == nil { + options = []string{"missingkey=zero"} + } return &Expander{ text: text, name: name, @@ -291,6 +296,7 @@ func NewTemplateExpander( return externalURL.String() }, }, + options: options, } } @@ -336,7 +342,9 @@ func (te Expander) Expand() (result string, resultErr error) { templateTextExpansionTotal.Inc() - tmpl, err := text_template.New(te.name).Funcs(te.funcMap).Option("missingkey=zero").Parse(te.text) + tmpl := text_template.New(te.name).Funcs(te.funcMap) + tmpl.Option(te.options...) + tmpl, err := tmpl.Parse(te.text) if err != nil { return "", errors.Wrapf(err, "error parsing template %v", te.name) } @@ -361,7 +369,7 @@ func (te Expander) ExpandHTML(templateFiles []string) (result string, resultErr }() tmpl := html_template.New(te.name).Funcs(html_template.FuncMap(te.funcMap)) - tmpl.Option("missingkey=zero") + tmpl.Option(te.options...) 
tmpl.Funcs(html_template.FuncMap{ "tmpl": func(name string, data interface{}) (html_template.HTML, error) { var buffer bytes.Buffer diff --git a/vendor/github.com/prometheus/prometheus/tsdb/README.md b/vendor/github.com/prometheus/prometheus/tsdb/README.md index 98443f17856..59f800c7aea 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/README.md +++ b/vendor/github.com/prometheus/prometheus/tsdb/README.md @@ -17,4 +17,6 @@ A series of blog posts explaining different components of TSDB: * [WAL and Checkpoint](https://ganeshvernekar.com/blog/prometheus-tsdb-wal-and-checkpoint/) * [Memory Mapping of Head Chunks from Disk](https://ganeshvernekar.com/blog/prometheus-tsdb-mmapping-head-chunks-from-disk/) * [Persistent Block and its Index](https://ganeshvernekar.com/blog/prometheus-tsdb-persistent-block-and-its-index/) -* [Queries](https://ganeshvernekar.com/blog/prometheus-tsdb-queries/) \ No newline at end of file +* [Queries](https://ganeshvernekar.com/blog/prometheus-tsdb-queries/) +* [Compaction and Retention](https://ganeshvernekar.com/blog/prometheus-tsdb-compaction-and-retention/) +* [Snapshot on Shutdown](https://ganeshvernekar.com/blog/prometheus-tsdb-snapshot-on-shutdown/) diff --git a/vendor/github.com/prometheus/prometheus/tsdb/db.go b/vendor/github.com/prometheus/prometheus/tsdb/db.go index 046fb52710e..c69f6f464f7 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/db.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/db.go @@ -1663,9 +1663,12 @@ func (db *DB) Delete(mint, maxt int64, ms ...*labels.Matcher) error { }(b)) } } - g.Go(func() error { - return db.head.Delete(mint, maxt, ms...) - }) + if db.head.OverlapsClosedInterval(mint, maxt) { + g.Go(func() error { + return db.head.Delete(mint, maxt, ms...) + }) + } + return g.Wait() } diff --git a/vendor/github.com/prometheus/prometheus/tsdb/head.go b/vendor/github.com/prometheus/prometheus/tsdb/head.go index ec0916bba97..cd0f9bd1660 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/head.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/head.go @@ -176,6 +176,10 @@ func NewHead(r prometheus.Registerer, l log.Logger, wal *wal.WAL, opts *HeadOpti stats = NewHeadStats() } + if !opts.EnableExemplarStorage { + opts.MaxExemplars.Store(0) + } + h := &Head{ wal: wal, logger: l, @@ -211,7 +215,16 @@ func NewHead(r prometheus.Registerer, l log.Logger, wal *wal.WAL, opts *HeadOpti func (h *Head) resetInMemoryState() error { var err error - em := NewExemplarMetrics(h.reg) + var em *ExemplarMetrics + if h.exemplars != nil { + ce, ok := h.exemplars.(*CircularExemplarStorage) + if ok { + em = ce.metrics + } + } + if em == nil { + em = NewExemplarMetrics(h.reg) + } es, err := NewCircularExemplarStorage(h.opts.MaxExemplars.Load(), em) if err != nil { return err @@ -736,6 +749,11 @@ func (h *Head) Truncate(mint int64) (err error) { return h.truncateWAL(mint) } +// OverlapsClosedInterval returns true if the head overlaps [mint, maxt]. +func (h *Head) OverlapsClosedInterval(mint, maxt int64) bool { + return h.MinTime() <= maxt && mint <= h.MaxTime() +} + // truncateMemory removes old data before mint from the head. func (h *Head) truncateMemory(mint int64) (err error) { h.chunkSnapshotMtx.Lock() @@ -1101,6 +1119,10 @@ func (h *Head) gc() int64 { // Remove deleted series IDs from the postings lists. h.postings.Delete(deleted) + // Remove tombstones referring to the deleted series. 
+ h.tombstones.DeleteTombstones(deleted) + h.tombstones.TruncateBefore(mint) + if h.wal != nil { _, last, _ := wal.Segments(h.wal.Dir()) h.deletedMtx.Lock() diff --git a/vendor/github.com/prometheus/prometheus/tsdb/head_wal.go b/vendor/github.com/prometheus/prometheus/tsdb/head_wal.go index 6a647dd8527..47882982400 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/head_wal.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/head_wal.go @@ -46,6 +46,8 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64, mmappedChunks // for error reporting. var unknownRefs atomic.Uint64 var unknownExemplarRefs atomic.Uint64 + // Track number of series records that had overlapping m-map chunks. + var mmapOverlappingChunks uint64 // Start workers that each process samples for a partition of the series ID space. // They are connected through a ring of channels which ensures that all sample batches @@ -241,8 +243,6 @@ Outer: } // Checking if the new m-mapped chunks overlap with the already existing ones. - // This should never happen, but we have a check anyway to detect any - // edge cases that we might have missed. if len(mSeries.mmappedChunks) > 0 && len(mmc) > 0 { if overlapsClosedInterval( mSeries.mmappedChunks[0].minTime, @@ -250,9 +250,17 @@ Outer: mmc[0].minTime, mmc[len(mmc)-1].maxTime, ) { - // The m-map chunks for the new series ref overlaps with old m-map chunks. - seriesCreationErr = errors.Errorf("overlapping m-mapped chunks for series %s", mSeries.lset.String()) - break Outer + mmapOverlappingChunks++ + level.Debug(h.logger).Log( + "msg", "M-mapped chunks overlap on a duplicate series record", + "series", mSeries.lset.String(), + "oldref", mSeries.ref, + "oldmint", mSeries.mmappedChunks[0].minTime, + "oldmaxt", mSeries.mmappedChunks[len(mSeries.mmappedChunks)-1].maxTime, + "newref", walSeries.Ref, + "newmint", mmc[0].minTime, + "newmaxt", mmc[len(mmc)-1].maxTime, + ) } } @@ -351,6 +359,9 @@ Outer: if unknownRefs.Load() > 0 || unknownExemplarRefs.Load() > 0 { level.Warn(h.logger).Log("msg", "Unknown series references", "samples", unknownRefs.Load(), "exemplars", unknownExemplarRefs.Load()) } + if mmapOverlappingChunks > 0 { + level.Info(h.logger).Log("msg", "Overlapping m-map chunks on duplicate series records", "count", mmapOverlappingChunks) + } return nil } @@ -937,6 +948,11 @@ Outer: } } + if !h.opts.EnableExemplarStorage || h.opts.MaxExemplars.Load() <= 0 { + // Exemplar storage is disabled. 
+ continue Outer + } + decbuf := encoding.Decbuf{B: rec[1:]} exemplarBuf = exemplarBuf[:0] @@ -958,7 +974,7 @@ Outer: Value: e.V, Ts: e.T, }); err != nil { - loopErr = errors.Wrap(err, "append exemplar") + loopErr = errors.Wrap(err, "add exemplar") break Outer } } diff --git a/vendor/github.com/prometheus/prometheus/tsdb/index/postings.go b/vendor/github.com/prometheus/prometheus/tsdb/index/postings.go index c63cff71364..6c493be1df5 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/index/postings.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/index/postings.go @@ -18,7 +18,6 @@ import ( "encoding/binary" "runtime" "sort" - "strings" "sync" "github.com/prometheus/prometheus/pkg/labels" @@ -94,8 +93,8 @@ func (p *MemPostings) SortedKeys() []labels.Label { p.mtx.RUnlock() sort.Slice(keys, func(i, j int) bool { - if d := strings.Compare(keys[i].Name, keys[j].Name); d != 0 { - return d < 0 + if keys[i].Name != keys[j].Name { + return keys[i].Name < keys[j].Name } return keys[i].Value < keys[j].Value }) diff --git a/vendor/github.com/prometheus/prometheus/tsdb/tombstones/tombstones.go b/vendor/github.com/prometheus/prometheus/tsdb/tombstones/tombstones.go index 32431886b55..8b95481a4fc 100644 --- a/vendor/github.com/prometheus/prometheus/tsdb/tombstones/tombstones.go +++ b/vendor/github.com/prometheus/prometheus/tsdb/tombstones/tombstones.go @@ -252,6 +252,34 @@ func (t *MemTombstones) Get(ref uint64) (Intervals, error) { return t.intvlGroups[ref], nil } +func (t *MemTombstones) DeleteTombstones(refs map[uint64]struct{}) { + t.mtx.Lock() + defer t.mtx.Unlock() + for ref := range refs { + delete(t.intvlGroups, ref) + } +} + +func (t *MemTombstones) TruncateBefore(beforeT int64) { + t.mtx.Lock() + defer t.mtx.Unlock() + for ref, ivs := range t.intvlGroups { + i := len(ivs) - 1 + for ; i >= 0; i-- { + if beforeT > ivs[i].Maxt { + break + } + } + if len(ivs[i+1:]) == 0 { + delete(t.intvlGroups, ref) + } else { + newIvs := make(Intervals, len(ivs[i+1:])) + copy(newIvs, ivs[i+1:]) + t.intvlGroups[ref] = newIvs + } + } +} + func (t *MemTombstones) Iter(f func(uint64, Intervals) error) error { t.mtx.RLock() defer t.mtx.RUnlock() diff --git a/vendor/github.com/prometheus/prometheus/util/teststorage/storage.go b/vendor/github.com/prometheus/prometheus/util/teststorage/storage.go index 53c4ece1322..39aac596ebc 100644 --- a/vendor/github.com/prometheus/prometheus/util/teststorage/storage.go +++ b/vendor/github.com/prometheus/prometheus/util/teststorage/storage.go @@ -19,6 +19,8 @@ import ( "time" "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "github.com/prometheus/prometheus/pkg/exemplar" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/storage" @@ -30,9 +32,7 @@ import ( // that removes all associated files on closing. func New(t testutil.T) *TestStorage { dir, err := ioutil.TempDir("", "test_storage") - if err != nil { - t.Fatalf("Opening test dir failed: %s", err) - } + require.NoError(t, err, "unexpected error while opening test directory") // Tests just load data for a series sequentially. Thus we // need a long appendable window. 
@@ -40,16 +40,12 @@ func New(t testutil.T) *TestStorage { opts.MinBlockDuration = int64(24 * time.Hour / time.Millisecond) opts.MaxBlockDuration = int64(24 * time.Hour / time.Millisecond) db, err := tsdb.Open(dir, nil, nil, opts, tsdb.NewDBStats()) - if err != nil { - t.Fatalf("Opening test storage failed: %s", err) - } + require.NoError(t, err, "unexpected error while opening test storage") reg := prometheus.NewRegistry() eMetrics := tsdb.NewExemplarMetrics(reg) es, err := tsdb.NewCircularExemplarStorage(10, eMetrics) - if err != nil { - t.Fatalf("Opening test exemplar storage failed: %s", err) - } + require.NoError(t, err, "unexpected error while opening test exemplar storage") return &TestStorage{DB: db, exemplarStorage: es, dir: dir} } diff --git a/vendor/github.com/prometheus/prometheus/util/testutil/directory.go b/vendor/github.com/prometheus/prometheus/util/testutil/directory.go index 1c87e35de5d..70cf0422731 100644 --- a/vendor/github.com/prometheus/prometheus/util/testutil/directory.go +++ b/vendor/github.com/prometheus/prometheus/util/testutil/directory.go @@ -73,8 +73,8 @@ type ( // the test flags, which we do not want in non-test binaries even if // they make use of these utilities for some reason). T interface { - Fatal(args ...interface{}) - Fatalf(format string, args ...interface{}) + Errorf(format string, args ...interface{}) + FailNow() } ) @@ -105,9 +105,7 @@ func (t temporaryDirectory) Close() { err = os.RemoveAll(t.path) } } - if err != nil { - t.tester.Fatal(err) - } + require.NoError(t.tester, err) } func (t temporaryDirectory) Path() string { @@ -123,9 +121,7 @@ func NewTemporaryDirectory(name string, t T) (handler TemporaryDirectory) { ) directory, err = ioutil.TempDir(defaultDirectory, name) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) handler = temporaryDirectory{ path: directory, diff --git a/vendor/modules.txt b/vendor/modules.txt index bc78ee5c094..add27528564 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -52,7 +52,7 @@ github.com/PuerkitoBio/urlesc # github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 github.com/alecthomas/template github.com/alecthomas/template/parse -# github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 +# github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 ## explicit github.com/alecthomas/units # github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a @@ -508,7 +508,7 @@ github.com/prometheus/client_golang/prometheus/testutil/promlint # github.com/prometheus/client_model v0.2.0 ## explicit github.com/prometheus/client_model/go -# github.com/prometheus/common v0.30.0 +# github.com/prometheus/common v0.31.1 ## explicit github.com/prometheus/common/config github.com/prometheus/common/expfmt @@ -526,7 +526,7 @@ github.com/prometheus/node_exporter/https github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util -# github.com/prometheus/prometheus v1.8.2-0.20210914090109-37468d88dce8 +# github.com/prometheus/prometheus v1.8.2-0.20211011171444-354d8d2ecfac ## explicit github.com/prometheus/prometheus/config github.com/prometheus/prometheus/discovery