diff --git a/Cargo.lock b/Cargo.lock index ca4dbb2b85119..dc458afae5b86 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1100,15 +1100,15 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.25" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdbc37d37da9e5bce8173f3a41b71d9bf3c674deebbaceacd0ebdabde76efb03" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "serde", - "winapi", + "windows-targets 0.52.4", ] [[package]] @@ -9940,7 +9940,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets", + "windows-targets 0.48.0", ] [[package]] @@ -9964,7 +9964,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.0", ] [[package]] @@ -9982,6 +9982,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.0", ] +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -9994,6 +10009,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -10006,6 +10027,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -10018,6 +10045,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -10030,6 +10063,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + [[package]] name = 
"windows_x86_64_gnu" version = "0.42.2" @@ -10042,6 +10081,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -10054,6 +10099,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -10066,6 +10117,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + [[package]] name = "winnow" version = "0.5.4" diff --git a/Cargo.toml b/Cargo.toml index 2dc8703a5d3ff..5fead62b11855 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -138,7 +138,7 @@ resolver = "2" [workspace.package] edition = "2021" -rust-version = "1.76.0" +rust-version = "1.77.0" [profile.dev] split-debuginfo = "unpacked" diff --git a/bin/ci-builder b/bin/ci-builder index 44824e90b2a0b..f700d1a2a31e0 100755 --- a/bin/ci-builder +++ b/bin/ci-builder @@ -16,7 +16,7 @@ set -euo pipefail -NIGHTLY_RUST_DATE=2024-01-01 +NIGHTLY_RUST_DATE=2024-02-01 cd "$(dirname "$0")/.." diff --git a/bin/lint-versions b/bin/lint-versions index 5cf901c6d65eb..bbae5381a2abe 100755 --- a/bin/lint-versions +++ b/bin/lint-versions @@ -11,5 +11,5 @@ # # lint-versions - Check rust version -grep "rust-version = " Cargo.toml | grep -q "1\.76\.0" || \ +grep "rust-version = " Cargo.toml | grep -q "1\.77\.0" || \ (echo "Please validate new Rust versions for compilation time performance regressions or ask Team Testing to do so. 
Afterwards change the tested version in bin/lint-versions" && exit 1) diff --git a/ci/builder/Dockerfile b/ci/builder/Dockerfile index add0269b363e1..984739bf96d99 100644 --- a/ci/builder/Dockerfile +++ b/ci/builder/Dockerfile @@ -188,8 +188,8 @@ RUN mkdir rust \ && tar -xzf rust-src.tar.gz -C /usr/local/lib/rustlib/src/rust --strip-components=1 \ && rm -f rust-src.asc rust-src.tar.gz \ && case $RUST_COMPONENTS in *miri*) \ - PATH=$PATH:/root/.cargo/bin cargo miri setup \ - ;; \ + PATH=$PATH:/root/.cargo/bin cargo miri setup \ + ;; \ esac \ && curl -fsSL https://static.rust-lang.org/dist$RUST_DATE/rust-std-$RUST_VERSION-wasm32-unknown-unknown.tar.gz > rust.tar.gz \ && curl -fsSL https://static.rust-lang.org/dist$RUST_DATE/rust-std-$RUST_VERSION-wasm32-unknown-unknown.tar.gz.asc > rust.asc \ @@ -197,9 +197,9 @@ RUN mkdir rust \ && tar -xzf rust.tar.gz -C /usr/local/lib/rustlib/ --strip-components=4 \ && rm -rf rust.asc rust.tar.gz rust \ && cargo install --root /usr/local --version "=0.8.0" --locked cargo-vet \ - && cargo install --root /usr/local --version "=0.6.0" --locked cargo-about \ + && cargo install --root /usr/local --version "=0.6.1" --locked cargo-about \ && cargo install --root /usr/local --version "=2.0.2" --locked cargo-deb \ - && cargo install --root /usr/local --version "=0.12.2" --locked cargo-deny \ + && cargo install --root /usr/local --version "=0.14.20" --locked cargo-deny \ && cargo install --root /usr/local --version "=0.1.0" --locked cargo-deplint \ && cargo install --root /usr/local --version ="0.9.28" --locked cargo-hakari \ && cargo install --root /usr/local --version "=0.9.63" --locked cargo-nextest \ @@ -261,13 +261,13 @@ RUN if [ $ARCH_GCC = x86_64 ]; then \ # Install KinD and kubectl RUN curl -fsSL https://kind.sigs.k8s.io/dl/v0.14.0/kind-linux-$ARCH_GO > /usr/local/bin/kind \ - && chmod +x /usr/local/bin/kind + && chmod +x /usr/local/bin/kind RUN if [ $ARCH_GO = amd64 ]; then echo 'af5e8331f2165feab52ec2ae07c427c7b66f4ad044d09f253004a20252524c8b /usr/local/bin/kind' | sha256sum --check; fi RUN if [ $ARCH_GO = arm64 ]; then echo '95c9601f21fdb2c286442339d5e370149b4fe2fc7c49f615647e4e27bdfb17e2 /usr/local/bin/kind' | sha256sum --check; fi RUN curl -fsSL https://dl.k8s.io/release/v1.24.3/bin/linux/$ARCH_GO/kubectl > /usr/local/bin/kubectl \ - && chmod +x /usr/local/bin/kubectl + && chmod +x /usr/local/bin/kubectl RUN if [ $ARCH_GO = amd64 ]; then echo '8a45348bdaf81d46caf1706c8bf95b3f431150554f47d444ffde89e8cdd712c1 /usr/local/bin/kubectl' | sha256sum --check; fi RUN if [ $ARCH_GO = arm64 ]; then echo 'bdad4d3063ddb7bfa5ecf17fb8b029d5d81d7d4ea1650e4369aafa13ed97149a /usr/local/bin/kubectl' | sha256sum --check; fi diff --git a/deny.toml b/deny.toml index 9d5bc6911475c..d213482efef8b 100644 --- a/deny.toml +++ b/deny.toml @@ -27,6 +27,15 @@ skip = [ { name = "windows_x86_64_gnullvm", version = "0.42.0" }, { name = "windows_x86_64_gnu", version = "0.42.0" }, { name = "windows_x86_64_msvc", version = "0.42.0" }, + { name = "windows-targets", version = "0.48.0" }, + { name = "windows-sys", version = "0.48.0" }, + { name = "windows_aarch64_gnullvm", version = "0.48.0" }, + { name = "windows_aarch64_msvc", version = "0.48.0" }, + { name = "windows_i686_gnu", version = "0.48.0" }, + { name = "windows_i686_msvc", version = "0.48.0" }, + { name = "windows_x86_64_gnullvm", version = "0.48.0" }, + { name = "windows_x86_64_gnu", version = "0.48.0" }, + { name = "windows_x86_64_msvc", version = "0.48.0" }, # Newer versions of crates like `tempfile` are held back by crates 
like `atty`. # This is very Unfortunate as we don't actually use these platforms. { name = "hermit-abi", version = "0.1.6" }, diff --git a/misc/cargo-vet/audits.toml b/misc/cargo-vet/audits.toml index c9872be7ab448..fafb3c06f06f8 100644 --- a/misc/cargo-vet/audits.toml +++ b/misc/cargo-vet/audits.toml @@ -30,6 +30,26 @@ who = "Roshan Jobanputra " criteria = "safe-to-deploy" version = "1.0.1" +[[audits.chrono]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.4.35" + +[[audits.chrono-tz]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.8.1" + +[[audits.chrono-tz-build]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.1.0" + +[[audits.chrono-tz-build]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.2.1" + [[audits.core_affinity]] who = "Jan Teske " criteria = "safe-to-deploy" @@ -316,6 +336,46 @@ who = "Gus Wynn " criteria = "maintained-and-necessary" version = "0.2.4" +[[audits.windows-targets]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_aarch64_gnullvm]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_aarch64_msvc]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_i686_gnu]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_i686_msvc]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_x86_64_gnu]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_x86_64_gnullvm]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + +[[audits.windows_x86_64_msvc]] +who = "Roshan Jobanputra " +criteria = "safe-to-deploy" +version = "0.52.4" + [[audits.winreg]] who = "Matt Jibson " criteria = "safe-to-deploy" diff --git a/misc/cargo-vet/config.toml b/misc/cargo-vet/config.toml index 458481b7da034..377f425623be2 100644 --- a/misc/cargo-vet/config.toml +++ b/misc/cargo-vet/config.toml @@ -262,18 +262,6 @@ criteria = "safe-to-deploy" version = "0.3.0" criteria = "safe-to-deploy" -[[exemptions.chrono]] -version = "0.4.25" -criteria = "safe-to-deploy" - -[[exemptions.chrono-tz]] -version = "0.8.1" -criteria = "safe-to-deploy" - -[[exemptions.chrono-tz-build]] -version = "0.1.0" -criteria = "safe-to-deploy" - [[exemptions.chunked_transfer]] version = "1.4.0" criteria = "safe-to-deploy" diff --git a/misc/python/materialize/mzbuild.py b/misc/python/materialize/mzbuild.py index 23989602d2a32..73f096e89a222 100644 --- a/misc/python/materialize/mzbuild.py +++ b/misc/python/materialize/mzbuild.py @@ -281,9 +281,11 @@ def generate_cargo_build_command( rustflags = ( rustc_flags.coverage if rd.coverage - else rustc_flags.sanitizer[rd.sanitizer] - if rd.sanitizer != Sanitizer.none - else ["--cfg=tokio_unstable"] + else ( + rustc_flags.sanitizer[rd.sanitizer] + if rd.sanitizer != Sanitizer.none + else ["--cfg=tokio_unstable"] + ) ) cflags = ( [ @@ -428,7 +430,16 @@ def copy(exe: Path) -> None: # Some crates are built for both the host and the target. # Ignore the built-for-host out dir. 
continue - package = message["package_id"].split()[0] + # parse the package name from a package_id that looks like one of: + # git+https://github.com/MaterializeInc/rust-server-sdk#launchdarkly-server-sdk@1.0.0 + # path+file:///Users/roshan/materialize/src/catalog#mz-catalog@0.0.0 + # registry+https://github.com/rust-lang/crates.io-index#num-rational@0.4.0 + # file:///path/to/my-package#0.1.0 + package_id = message["package_id"] + if "@" in package_id: + package = package_id.split("@")[0].split("#")[-1] + else: + package = message["package_id"].split("#")[0].split("/")[-1] for src, dst in self.extract.get(package, {}).items(): spawn.runv(["cp", "-R", out_dir / src, self.path / dst]) diff --git a/src/adapter/Cargo.toml b/src/adapter/Cargo.toml index e160d05fa739a..a57e8de72795c 100644 --- a/src/adapter/Cargo.toml +++ b/src/adapter/Cargo.toml @@ -14,7 +14,7 @@ anyhow = "1.0.66" async-trait = "0.1.68" bytes = "1.3.0" bytesize = "1.1.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } dec = "0.4.8" deadpool-postgres = "0.10.3" derivative = "2.2.0" diff --git a/src/adapter/src/catalog.rs b/src/adapter/src/catalog.rs index 55ad42ee08a60..281b663c97d15 100644 --- a/src/adapter/src/catalog.rs +++ b/src/adapter/src/catalog.rs @@ -4134,7 +4134,6 @@ impl SessionCatalog for ConnCatalog<'_> { mod tests { use std::collections::{BTreeMap, BTreeSet}; use std::sync::Arc; - use std::time::{Duration, Instant}; use std::{env, iter}; use itertools::Itertools; @@ -5247,7 +5246,6 @@ mod tests { // Execute the function as much as possible, ensuring no panics occur, but // otherwise ignoring eval errors. We also do various other checks. - let start = Instant::now(); let res = (op.0)(&ecx, scalars, &imp.params, vec![]); if let Ok(hir) = res { if let Ok(mut mir) = hir.lower_uncorrelated() { diff --git a/src/adapter/src/catalog/builtin_table_updates.rs b/src/adapter/src/catalog/builtin_table_updates.rs index 2473dc4f780b2..78436e21b84c0 100644 --- a/src/adapter/src/catalog/builtin_table_updates.rs +++ b/src/adapter/src/catalog/builtin_table_updates.rs @@ -12,7 +12,6 @@ mod notice; use std::net::Ipv4Addr; use bytesize::ByteSize; -use chrono::{DateTime, Utc}; use mz_audit_log::{EventDetails, EventType, ObjectType, VersionedEvent, VersionedStorageUsage}; use mz_catalog::builtin::{ MZ_AGGREGATES, MZ_ARRAY_TYPES, MZ_AUDIT_EVENTS, MZ_AWS_CONNECTIONS, @@ -1324,7 +1323,7 @@ impl CatalogState { .iter() .next() .expect("details created above with a single jsonb column"); - let dt = mz_ore::now::to_datetime(occurred_at).naive_utc(); + let dt = mz_ore::now::to_datetime(occurred_at); let id = event.sortable_id(); Ok(BuiltinTableUpdate { id: self.resolve_builtin_table(&MZ_AUDIT_EVENTS), @@ -1337,7 +1336,7 @@ impl CatalogState { Some(user) => Datum::String(user), None => Datum::Null, }, - Datum::TimestampTz(DateTime::from_utc(dt, Utc).try_into().expect("must fit")), + Datum::TimestampTz(dt.try_into().expect("must fit")), ]), diff: 1, }) diff --git a/src/adapter/src/coord/appends.rs b/src/adapter/src/coord/appends.rs index 4f4a426085c9c..ba2a31100fcd9 100644 --- a/src/adapter/src/coord/appends.rs +++ b/src/adapter/src/coord/appends.rs @@ -651,4 +651,4 @@ impl GroupCommitWaiter { /// Note: We sometimes want to throttle how many group commits are running at once, which this /// permit allows us to do. 
#[derive(Debug)] -pub struct GroupCommitPermit(OwnedSemaphorePermit); +pub struct GroupCommitPermit(#[allow(dead_code)] OwnedSemaphorePermit); diff --git a/src/adapter/src/coord/timestamp_selection.rs b/src/adapter/src/coord/timestamp_selection.rs index 9574260462ad9..b3f454e957cb3 100644 --- a/src/adapter/src/coord/timestamp_selection.rs +++ b/src/adapter/src/coord/timestamp_selection.rs @@ -12,7 +12,7 @@ use std::fmt; use async_trait::async_trait; -use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::{DateTime, Utc}; use differential_dataflow::lattice::Lattice; use mz_compute_types::ComputeInstanceId; use mz_expr::MirScalarExpr; @@ -659,9 +659,11 @@ impl Coordinator { ScalarType::TimestampTz { .. } => { evaled.unwrap_timestamptz().timestamp_millis().try_into()? } - ScalarType::Timestamp { .. } => { - evaled.unwrap_timestamp().timestamp_millis().try_into()? - } + ScalarType::Timestamp { .. } => evaled + .unwrap_timestamp() + .and_utc() + .timestamp_millis() + .try_into()?, _ => coord_bail!( "can't use {} as a mz_timestamp for AS OF or UP TO", catalog.for_session(session).humanize_column_type(&ty) @@ -730,7 +732,7 @@ impl DisplayableInTimeline for mz_repr::Timestamp { if let Some(Timeline::EpochMilliseconds) = timeline { let ts_ms: u64 = self.into(); if let Ok(ts_ms) = i64::try_from(ts_ms) { - if let Some(ndt) = NaiveDateTime::from_timestamp_millis(ts_ms) { + if let Some(ndt) = DateTime::from_timestamp_millis(ts_ms) { return write!(f, "{:13} ({})", self, ndt.format("%Y-%m-%d %H:%M:%S%.3f")); } } @@ -822,7 +824,7 @@ impl Result { let result = match unit { - TsUnit::Millis => NaiveDateTime::from_timestamp_millis(value), - TsUnit::Micros => NaiveDateTime::from_timestamp_micros(value), + TsUnit::Millis => DateTime::from_timestamp_millis(value), + TsUnit::Micros => DateTime::from_timestamp_micros(value), }; let ndt = result.ok_or(AvroError::Decode(DecodeError::BadTimestamp { unit, value }))?; - Ok(Value::Timestamp(ndt)) + Ok(Value::Timestamp(ndt.naive_utc())) } /// A convenience trait for types that are both readable and skippable. @@ -176,6 +190,10 @@ pub trait Skip: Read { /// # Errors /// /// Can return an error in all the same cases that [`Read::read`] can. + /// + /// TODO: Remove this clippy suppression when the issue is fixed. 
+ /// See + #[allow(clippy::unused_io_amount)] fn skip(&mut self, mut len: usize) -> Result<(), io::Error> { const BUF_SIZE: usize = 512; let mut buf = [0; BUF_SIZE]; @@ -1545,7 +1563,10 @@ impl<'a> AvroDeserializer for GeneralDeserializer<'a> { let date = NaiveDate::from_ymd_opt(1970, 1, 1) .expect("naive date known valid") - .checked_add_signed(chrono::Duration::days(days.into())) + .checked_add_signed( + chrono::Duration::try_days(days.into()) + .ok_or(AvroError::Decode(DecodeError::BadDate(days)))?, + ) .ok_or(AvroError::Decode(DecodeError::BadDate(days)))?; let dt = date.and_hms_opt(0, 0, 0).expect("HMS known valid"); d.scalar(Scalar::Timestamp(dt)) diff --git a/src/avro/src/encode.rs b/src/avro/src/encode.rs index 2615bd9b98dbe..764c4dc3fa673 100644 --- a/src/avro/src/encode.rs +++ b/src/avro/src/encode.rs @@ -76,13 +76,14 @@ pub fn encode_ref(value: &Value, schema: SchemaNode, buffer: &mut Vec) { other => panic!("Invalid schema for timestamp: {:?}", other), }; let ts_seconds = d + .and_utc() .timestamp() .checked_mul(mult) .expect("All chrono dates can be converted to timestamps"); let sub_part: i64 = if mult == 1_000 { - d.timestamp_subsec_millis().into() + d.and_utc().timestamp_subsec_millis().into() } else { - d.timestamp_subsec_micros().into() + d.and_utc().timestamp_subsec_micros().into() }; let ts = if ts_seconds >= 0 { ts_seconds + sub_part diff --git a/src/avro/src/lib.rs b/src/avro/src/lib.rs index 6f5980ae382fa..ce711e67bad93 100644 --- a/src/avro/src/lib.rs +++ b/src/avro/src/lib.rs @@ -578,7 +578,9 @@ mod tests { let writer_schema = Schema::from_str(writer_raw_schema).unwrap(); let mut writer = Writer::with_codec(writer_schema.clone(), Vec::new(), Codec::Null); let mut record = Record::new(writer_schema.top_node()).unwrap(); - let dt = chrono::NaiveDateTime::from_timestamp_opt(1_000, 995_000_000).unwrap(); + let dt = chrono::DateTime::from_timestamp(1_000, 995_000_000) + .unwrap() + .naive_utc(); record.put("a", types::Value::Timestamp(dt)); writer.append(record).unwrap(); writer.flush().unwrap(); diff --git a/src/avro/tests/io.rs b/src/avro/tests/io.rs index aaecbdfc4b34d..ba702ce029894 100644 --- a/src/avro/tests/io.rs +++ b/src/avro/tests/io.rs @@ -26,7 +26,7 @@ use std::io::Cursor; use std::str::FromStr; -use chrono::{NaiveDate, NaiveDateTime}; +use chrono::{DateTime, NaiveDate}; use mz_avro::error::Error as AvroError; use mz_avro::schema::resolve_schemas; use mz_avro::types::{DecimalValue, Value}; @@ -445,12 +445,12 @@ fn test_datetime_resolutions() { ("f1".into(), Value::Int(1000)), ( "f2".into(), - Value::Timestamp(NaiveDateTime::from_timestamp_opt(12345, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(12345, 0).unwrap().naive_utc()), ), ("f3".into(), Value::Long(23456000)), ( "f4".into(), - Value::Timestamp(NaiveDateTime::from_timestamp_opt(34567, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(34567, 0).unwrap().naive_utc()), ), ("f5".into(), Value::Int(365 * 2)), ("f6".into(), Value::Date(365 * 3 + 1)), @@ -459,15 +459,15 @@ fn test_datetime_resolutions() { let datum_to_read = Value::Record(vec![ ( "f1".into(), - Value::Timestamp(NaiveDateTime::from_timestamp_opt(1, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(1, 0).unwrap().naive_utc()), ), ( "f2".into(), - Value::Timestamp(NaiveDateTime::from_timestamp_opt(12345, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(12345, 0).unwrap().naive_utc()), ), ( "f3".into(), - Value::Timestamp(NaiveDateTime::from_timestamp_opt(23456, 0).unwrap()), + 
Value::Timestamp(DateTime::from_timestamp(23456, 0).unwrap().naive_utc()), ), ("f4".into(), Value::Long(34567000000)), ("f5".into(), Value::Date(365 * 2)), diff --git a/src/avro/tests/schema.rs b/src/avro/tests/schema.rs index 2783c7d4b3ee3..d068ee9734cbd 100644 --- a/src/avro/tests/schema.rs +++ b/src/avro/tests/schema.rs @@ -27,7 +27,7 @@ use std::collections::BTreeMap; use std::str::FromStr; -use chrono::NaiveDateTime; +use chrono::DateTime; use mz_avro::types::{DecimalValue, Value}; use mz_avro::Schema; use once_cell::sync::Lazy; @@ -344,12 +344,12 @@ static VALID_LOGICAL_TYPES: Lazy> = Lazy::new(|| { // Timestamp millis logical type ( r#"{"type": "long", "logicalType": "timestamp-millis"}"#, - Value::Timestamp(NaiveDateTime::from_timestamp_opt(0, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(0, 0).unwrap().naive_utc()), ), // Timestamp micros logical type ( r#"{"type": "long", "logicalType": "timestamp-micros"}"#, - Value::Timestamp(NaiveDateTime::from_timestamp_opt(0, 0).unwrap()), + Value::Timestamp(DateTime::from_timestamp(0, 0).unwrap().naive_utc()), ), ] }); diff --git a/src/catalog/Cargo.toml b/src/catalog/Cargo.toml index f87d2753a7c09..762d04c987401 100644 --- a/src/catalog/Cargo.toml +++ b/src/catalog/Cargo.toml @@ -14,7 +14,7 @@ anyhow = "1.0.66" async-trait = "0.1.68" bytes = { version = "1.3.0", features = ["serde"] } bytesize = "1.1.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } derivative = "2.2.0" differential-dataflow = "0.12.0" fail = { version = "0.5.1", features = ["failpoints"] } diff --git a/src/cloud-api/Cargo.toml b/src/cloud-api/Cargo.toml index d07c22bbb6a7c..a368354c76ff7 100644 --- a/src/cloud-api/Cargo.toml +++ b/src/cloud-api/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0.44" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } reqwest = { version = "0.11.4", features = ["json"] } once_cell = "1.16.0" serde = { version = "1.0.130", features = ["derive"] } diff --git a/src/cloud-resources/Cargo.toml b/src/cloud-resources/Cargo.toml index dee1ef07e20c0..dfeee4d693225 100644 --- a/src/cloud-resources/Cargo.toml +++ b/src/cloud-resources/Cargo.toml @@ -14,7 +14,7 @@ anyhow = "1.0.66" async-trait = "0.1.68" k8s-openapi = { version = "0.20.0", features = ["schemars", "v1_26"] } kube = { version = "0.87.1", default-features = false, features = ["client", "derive", "openssl-tls", "ws"] } -chrono = { version = "0.4.23", default-features = false } +chrono = { version = "0.4.35", default-features = false } futures = "0.3.25" mz-ore = { path = "../ore", features = [] } mz-repr = { path = "../repr" } diff --git a/src/cluster-client/Cargo.toml b/src/cluster-client/Cargo.toml index 73bc78e4391d0..fe93e33dcf0b3 100644 --- a/src/cluster-client/Cargo.toml +++ b/src/cluster-client/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0.66" async-trait = "0.1.68" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } futures = "0.3.25" http = "0.2.8" itertools = "0.10.5" diff --git a/src/compute-client/Cargo.toml b/src/compute-client/Cargo.toml index daba9917b39f8..9fb92c71951df 100644 --- a/src/compute-client/Cargo.toml +++ b/src/compute-client/Cargo.toml @@ -14,7 +14,7 @@ anyhow = "1.0.66" 
async-stream = "0.3.3" async-trait = "0.1.68" bytesize = "1.1.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } crossbeam-channel = "0.5.8" differential-dataflow = "0.12.0" futures = "0.3.25" diff --git a/src/compute-client/src/controller/instance.rs b/src/compute-client/src/controller/instance.rs index b81ce5e969ece..7d94b1d062b9f 100644 --- a/src/compute-client/src/controller/instance.rs +++ b/src/compute-client/src/controller/instance.rs @@ -752,7 +752,7 @@ where .expect("replica must exist"); let now = Utc::now() - .duration_trunc(Duration::seconds(60)) + .duration_trunc(Duration::try_seconds(60).unwrap()) .expect("cannot fail"); let mut updates = Vec::new(); diff --git a/src/compute/src/compute_state.rs b/src/compute/src/compute_state.rs index be3ecf8cf7ce5..c865362dc5c14 100644 --- a/src/compute/src/compute_state.rs +++ b/src/compute/src/compute_state.rs @@ -300,7 +300,7 @@ pub(crate) struct ActiveComputeState<'a, A: Allocate> { } /// A token that keeps a sink alive. -pub struct SinkToken(Box); +pub struct SinkToken(#[allow(dead_code)] Box); impl SinkToken { /// Create a new `SinkToken`. diff --git a/src/controller/Cargo.toml b/src/controller/Cargo.toml index 4fff4380357b2..8727616a5148a 100644 --- a/src/controller/Cargo.toml +++ b/src/controller/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0.66" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } differential-dataflow = "0.12.0" futures = "0.3.25" mz-build-info = { path = "../build-info" } diff --git a/src/environmentd/Cargo.toml b/src/environmentd/Cargo.toml index 9e2dc9156e7a7..7dcb5b94f3191 100644 --- a/src/environmentd/Cargo.toml +++ b/src/environmentd/Cargo.toml @@ -22,7 +22,7 @@ axum = { version = "0.6.20", features = ["headers", "ws"] } base64 = "0.13.1" bytes = "1.3.0" bytesize = "1.1.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["wrap_help", "env", "derive"] } fail = { version = "0.5.1", features = ["failpoints"] } futures = "0.3.25" diff --git a/src/environmentd/tests/server.rs b/src/environmentd/tests/server.rs index 1e1b936f72847..8570ce5ea139c 100644 --- a/src/environmentd/tests/server.rs +++ b/src/environmentd/tests/server.rs @@ -297,7 +297,7 @@ ORDER BY mseh.began_at;", // both the start and end time, but the `NowFn` mechanism doesn't // appear to give us any way to do that. Instead, let's just check // that none of these statements took longer than 5s wall-clock time. 
- assert!(r.finished_at - r.began_at <= chrono::Duration::seconds(5)); + assert!(r.finished_at - r.began_at <= chrono::Duration::try_seconds(5).unwrap()); if !r.sql.is_empty() { let expected_redacted = mz_sql::parse::parse(&r.sql) .unwrap() @@ -433,7 +433,7 @@ ORDER BY mseh.began_at", if let Some(ts) = r.execution_timestamp { if ts != u64::MAX { let ts = to_datetime(ts); - assert!((ts - r.prepared_at).abs() < chrono::Duration::seconds(5)) + assert!((ts - r.prepared_at).abs() < chrono::Duration::try_seconds(5).unwrap()) } } } diff --git a/src/expr/Cargo.toml b/src/expr/Cargo.toml index 570c53c0992de..b73c49463d90e 100644 --- a/src/expr/Cargo.toml +++ b/src/expr/Cargo.toml @@ -18,7 +18,7 @@ aho-corasick = "0.7.20" anyhow = "1.0.66" bytes = "1.3.0" bytesize = "1.1.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } chrono-tz = { version = "0.8.1", features = ["serde", "case-insensitive"] } csv = "1.1.6" dec = "0.4.8" diff --git a/src/expr/src/scalar/func.rs b/src/expr/src/scalar/func.rs index ecbbe22bd2279..771510b49e1e7 100644 --- a/src/expr/src/scalar/func.rs +++ b/src/expr/src/scalar/func.rs @@ -1930,7 +1930,7 @@ fn timezone_interval_timestamp(a: Datum<'_>, b: Datum<'_>) -> Result Ok(DateTime::from_utc(sub, Utc).try_into()?), + Some(sub) => Ok(DateTime::from_naive_utc_and_offset(sub, Utc).try_into()?), None => Err(EvalError::TimestampOutOfRange), } } @@ -2484,18 +2484,15 @@ impl BinaryFunc { a.unwrap_interval(), b.unwrap_timestamp(), CheckedTimestamp::from_timestamplike( - NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), + DateTime::from_timestamp(0, 0).unwrap().naive_utc(), ) .expect("must fit"), ), BinaryFunc::DateBinTimestampTz => date_bin( a.unwrap_interval(), b.unwrap_timestamptz(), - CheckedTimestamp::from_timestamplike(DateTime::::from_utc( - NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), - Utc, - )) - .expect("must fit"), + CheckedTimestamp::from_timestamplike(DateTime::from_timestamp(0, 0).unwrap()) + .expect("must fit"), ), BinaryFunc::ExtractInterval => date_part_interval::(a, b), BinaryFunc::ExtractTime => date_part_time::(a, b), diff --git a/src/expr/src/scalar/func/impls/date.rs b/src/expr/src/scalar/func/impls/date.rs index 7a919f6cfd4fa..d0a24c2e28835 100644 --- a/src/expr/src/scalar/func/impls/date.rs +++ b/src/expr/src/scalar/func/impls/date.rs @@ -83,10 +83,11 @@ impl<'a> EagerUnaryFunc<'a> for CastDateToTimestampTz { type Output = Result>, EvalError>; fn call(&self, a: Date) -> Result>, EvalError> { - let out = CheckedTimestamp::from_timestamplike(DateTime::::from_utc( - NaiveDate::from(a).and_hms_opt(0, 0, 0).unwrap(), - Utc, - ))?; + let out = + CheckedTimestamp::from_timestamplike(DateTime::::from_naive_utc_and_offset( + NaiveDate::from(a).and_hms_opt(0, 0, 0).unwrap(), + Utc, + ))?; let updated = out.round_to_precision(self.0)?; Ok(updated) } diff --git a/src/expr/src/scalar/func/impls/float64.rs b/src/expr/src/scalar/func/impls/float64.rs index 5129438dd1230..28b152a462985 100644 --- a/src/expr/src/scalar/func/impls/float64.rs +++ b/src/expr/src/scalar/func/impls/float64.rs @@ -9,7 +9,7 @@ use std::fmt; -use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::{DateTime, Utc}; use mz_lowertest::MzReflect; use mz_ore::cast::TryCastFrom; use mz_repr::adt::numeric::{self, Numeric, NumericMaxScale}; @@ -483,12 +483,9 @@ sqlfunc!( .ok_or(EvalError::TimestampOutOfRange)?; nanosecs %= NANO_SECONDS_PER_SECOND; let nanosecs = u32::try_from(nanosecs).map_err(|_| 
EvalError::TimestampOutOfRange)?; - match NaiveDateTime::from_timestamp_opt(secs, nanosecs) { - Some(ts) => { - let dt = DateTime::::from_utc(ts, Utc); - CheckedTimestamp::from_timestamplike(dt) - .map_err(|_| EvalError::TimestampOutOfRange) - } + match DateTime::from_timestamp(secs, nanosecs) { + Some(dt) => CheckedTimestamp::from_timestamplike(dt) + .map_err(|_| EvalError::TimestampOutOfRange), None => Err(EvalError::TimestampOutOfRange), } } diff --git a/src/expr/src/scalar/func/impls/mz_timestamp.rs b/src/expr/src/scalar/func/impls/mz_timestamp.rs index bdfe2ad1866d1..42fe8febc06e2 100644 --- a/src/expr/src/scalar/func/impls/mz_timestamp.rs +++ b/src/expr/src/scalar/func/impls/mz_timestamp.rs @@ -107,7 +107,8 @@ sqlfunc!( fn cast_timestamp_to_mz_timestamp( a: CheckedTimestamp, ) -> Result { - a.timestamp_millis() + a.and_utc() + .timestamp_millis() .try_into() .map_err(|_| EvalError::MzTimestampOutOfRange(a.to_string())) } diff --git a/src/expr/src/scalar/func/impls/timestamp.rs b/src/expr/src/scalar/func/impls/timestamp.rs index 6005d589f5b26..571359f1c1bf4 100644 --- a/src/expr/src/scalar/func/impls/timestamp.rs +++ b/src/expr/src/scalar/func/impls/timestamp.rs @@ -85,7 +85,8 @@ impl<'a> EagerUnaryFunc<'a> for CastTimestampToTimestampTz { &self, a: CheckedTimestamp, ) -> Result>, EvalError> { - let out = CheckedTimestamp::try_from(DateTime::::from_utc(a.into(), Utc))?; + let out = + CheckedTimestamp::try_from(DateTime::::from_naive_utc_and_offset(a.into(), Utc))?; let updated = out.round_to_precision(self.to)?; Ok(updated) } @@ -626,7 +627,9 @@ pub fn timezone_timestamp( Some(offset) => offset.fix(), None => { let dt = dt - .checked_add_signed(Duration::hours(1)) + .checked_add_signed( + Duration::try_hours(1).ok_or(EvalError::TimestampOutOfRange)?, + ) .ok_or(EvalError::TimestampOutOfRange)?; tz.offset_from_local_datetime(&dt) .latest() @@ -635,7 +638,9 @@ pub fn timezone_timestamp( } }, }; - DateTime::from_utc(dt - offset, Utc).try_into().err_into() + DateTime::from_naive_utc_and_offset(dt - offset, Utc) + .try_into() + .err_into() } /// Converts the UTC timestamptz `utc` to the local timestamp of the timezone `tz`. 
@@ -654,8 +659,11 @@ fn checked_add_with_leapsecond(lhs: &NaiveDateTime, rhs: &FixedOffset) -> Option let nanos = lhs.nanosecond(); let lhs = lhs.with_nanosecond(0).unwrap(); let rhs = rhs.local_minus_utc(); - lhs.checked_add_signed(chrono::Duration::seconds(i64::from(rhs))) - .map(|dt| dt.with_nanosecond(nanos).unwrap()) + lhs.checked_add_signed(match chrono::Duration::try_seconds(i64::from(rhs)) { + Some(dur) => dur, + None => return None, + }) + .map(|dt| dt.with_nanosecond(nanos).unwrap()) } #[derive( diff --git a/src/interchange/Cargo.toml b/src/interchange/Cargo.toml index 7cdee1f9808d7..c35781320f0df 100644 --- a/src/interchange/Cargo.toml +++ b/src/interchange/Cargo.toml @@ -17,7 +17,7 @@ harness = false [dependencies] anyhow = "1.0.66" byteorder = "1.4.3" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive"] } differential-dataflow = "0.12.0" itertools = "0.10.5" diff --git a/src/interchange/src/avro.rs b/src/interchange/src/avro.rs index ea5aa647df5d5..ce39e2e5cf280 100644 --- a/src/interchange/src/avro.rs +++ b/src/interchange/src/avro.rs @@ -119,8 +119,10 @@ mod tests { ( ScalarType::TimestampTz { precision: None }, Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::from_utc(date_time, Utc)) - .unwrap(), + CheckedTimestamp::from_timestamplike(DateTime::from_naive_utc_and_offset( + date_time, Utc, + )) + .unwrap(), ), Value::Timestamp(date_time), ), diff --git a/src/interchange/src/avro/decode.rs b/src/interchange/src/avro/decode.rs index cfe1b48c25299..0ccd9c7dc25cd 100644 --- a/src/interchange/src/avro/decode.rs +++ b/src/interchange/src/avro/decode.rs @@ -222,7 +222,7 @@ impl AvroDecode for RowDecoder { // Get around orphan rule #[derive(Debug)] -pub(super) struct RowWrapper(pub Row); +pub(super) struct RowWrapper(#[allow(dead_code)] pub Row); impl StatefulAvroDecodable for RowWrapper { type Decoder = RowDecoder; diff --git a/src/interchange/src/json.rs b/src/interchange/src/json.rs index 29c6921c61d0d..336fe0f4acf55 100644 --- a/src/interchange/src/json.rs +++ b/src/interchange/src/json.rs @@ -160,17 +160,17 @@ impl ToJson for TypedDatum<'_> { ScalarType::Date => serde_json::Value::String(format!("{}", datum.unwrap_date())), ScalarType::Time => serde_json::Value::String(format!("{:?}", datum.unwrap_time())), ScalarType::Timestamp { .. } => { - let naive = datum.unwrap_timestamp().to_naive(); - let millis = naive.timestamp_millis(); - let micros = naive.timestamp_subsec_micros() - - (naive.timestamp_subsec_millis() * MICROS_PER_MILLIS); + let dt = datum.unwrap_timestamp().to_naive().and_utc(); + let millis = dt.timestamp_millis(); + let micros = dt.timestamp_subsec_micros() + - (dt.timestamp_subsec_millis() * MICROS_PER_MILLIS); serde_json::Value::String(format!("{millis}.{micros:0>3}")) } ScalarType::TimestampTz { .. 
} => { - let naive = datum.unwrap_timestamptz().to_naive(); - let millis = naive.timestamp_millis(); - let micros = naive.timestamp_subsec_micros() - - (naive.timestamp_subsec_millis() * MICROS_PER_MILLIS); + let dt = datum.unwrap_timestamptz().to_utc(); + let millis = dt.timestamp_millis(); + let micros = dt.timestamp_subsec_micros() + - (dt.timestamp_subsec_millis() * MICROS_PER_MILLIS); serde_json::Value::String(format!("{millis}.{micros:0>3}")) } ScalarType::Interval => { diff --git a/src/kafka-util/Cargo.toml b/src/kafka-util/Cargo.toml index 819bdd0d799f6..7f43bbde096f0 100644 --- a/src/kafka-util/Cargo.toml +++ b/src/kafka-util/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0.66" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive"] } crossbeam = "0.8.2" fancy-regex = "0.11.0" diff --git a/src/kafka-util/src/bin/kgen.rs b/src/kafka-util/src/bin/kgen.rs index 3bfb7ea7767a5..fc72c8a009a24 100644 --- a/src/kafka-util/src/bin/kgen.rs +++ b/src/kafka-util/src/bin/kgen.rs @@ -14,7 +14,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::time::Duration; use anyhow::bail; -use chrono::NaiveDateTime; +use chrono::DateTime; use crossbeam::thread; use mz_avro::schema::{SchemaNode, SchemaPiece, SchemaPieceOrNamed}; use mz_avro::types::{DecimalValue, Value}; @@ -114,8 +114,8 @@ impl<'a> RandomAvroGenerator<'a> { // TODO(benesch): rewrite to avoid `as`. #[allow(clippy::as_conversions)] let fraction = (millis % 1000) as u32; - let val = NaiveDateTime::from_timestamp_opt(seconds, fraction * 1_000_000).unwrap(); - Value::Timestamp(val) + let val = DateTime::from_timestamp(seconds, fraction * 1_000_000).unwrap(); + Value::Timestamp(val.naive_utc()) } SchemaPiece::TimestampMicro => { let micros = self.longs.get_mut(&p).unwrap()(rng); @@ -124,8 +124,8 @@ impl<'a> RandomAvroGenerator<'a> { // TODO(benesch): rewrite to avoid `as`. #[allow(clippy::as_conversions)] let fraction = (micros % 1_000_000) as u32; - let val = NaiveDateTime::from_timestamp_opt(seconds, fraction * 1_000).unwrap(); - Value::Timestamp(val) + let val = DateTime::from_timestamp(seconds, fraction * 1_000).unwrap(); + Value::Timestamp(val.naive_utc()) } SchemaPiece::Decimal { precision, diff --git a/src/mysql-util/Cargo.toml b/src/mysql-util/Cargo.toml index 5706cadaecbbf..3f4bb35ce7441 100644 --- a/src/mysql-util/Cargo.toml +++ b/src/mysql-util/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0.66" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } indexmap = { version = "1.9.1", default-features = false, features = ["std"] } itertools = "0.10.5" mz-cloud-resources = { path = "../cloud-resources" } diff --git a/src/mysql-util/src/decoding.rs b/src/mysql-util/src/decoding.rs index 8fd94c1feae3d..9dad646d38199 100644 --- a/src/mysql-util/src/decoding.rs +++ b/src/mysql-util/src/decoding.rs @@ -220,8 +220,9 @@ fn val_to_datum<'a>( let chrono_timestamp = match value { Value::Date(..) 
=> from_value_opt::(value)?, // old temporal format from before MySQL 5.6; didn't support fractional seconds - Value::Int(val) => chrono::NaiveDateTime::from_timestamp_opt(val, 0) - .ok_or(anyhow::anyhow!("received invalid timestamp value: {}", val))?, + Value::Int(val) => chrono::DateTime::from_timestamp(val, 0) + .ok_or(anyhow::anyhow!("received invalid timestamp value: {}", val))? + .naive_utc(), Value::Bytes(data) => { let data = std::str::from_utf8(&data)?; if data.contains('.') { diff --git a/src/mz/src/config_file.rs b/src/mz/src/config_file.rs index fb3fe71e7789a..2680ba0e893b8 100644 --- a/src/mz/src/config_file.rs +++ b/src/mz/src/config_file.rs @@ -96,6 +96,7 @@ impl ConfigFile { .read(true) .write(true) .create(true) + .truncate(false) .open(&path)?; let mut buffer = String::new(); diff --git a/src/orchestrator-kubernetes/Cargo.toml b/src/orchestrator-kubernetes/Cargo.toml index 40539140cc19b..d2d440fa7de6e 100644 --- a/src/orchestrator-kubernetes/Cargo.toml +++ b/src/orchestrator-kubernetes/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0.66" async-trait = "0.1.68" -chrono = { version = "0.4.23", default-features = false } +chrono = { version = "0.4.35", default-features = false } clap = { version = "3.2.24", features = ["derive"] } fail = { version = "0.5.1", features = ["failpoints"] } futures = "0.3.25" diff --git a/src/orchestrator-process/Cargo.toml b/src/orchestrator-process/Cargo.toml index 030214c66e9fc..69a258b4a540c 100644 --- a/src/orchestrator-process/Cargo.toml +++ b/src/orchestrator-process/Cargo.toml @@ -13,7 +13,7 @@ workspace = true anyhow = "1.0.66" async-stream = "0.3.3" async-trait = "0.1.68" -chrono = { version = "0.4.23", default_features = false, features = ["clock"] } +chrono = { version = "0.4.35", default_features = false, features = ["clock"] } futures = "0.3.25" hex = "0.4.3" itertools = "0.10.5" diff --git a/src/orchestrator/Cargo.toml b/src/orchestrator/Cargo.toml index c9d405ac04465..488128b5198fc 100644 --- a/src/orchestrator/Cargo.toml +++ b/src/orchestrator/Cargo.toml @@ -13,7 +13,7 @@ workspace = true anyhow = "1.0.66" async-trait = "0.1.68" bytesize = "1.1.0" -chrono = { version = "0.4.23", default_features = false, features = ["serde"] } +chrono = { version = "0.4.35", default_features = false, features = ["serde"] } derivative = "2.2.0" futures-core = "0.3.21" mz-ore = { path = "../ore"} diff --git a/src/ore/Cargo.toml b/src/ore/Cargo.toml index 393b671f38c1f..7c231da19bb24 100644 --- a/src/ore/Cargo.toml +++ b/src/ore/Cargo.toml @@ -20,7 +20,7 @@ anyhow = { version = "1.0.66", optional = true } # dependencies and is widely considered to be basically part of the stdlib. 
async-trait = { version = "0.1.68", optional = true } bytes = { version = "1.3.0", optional = true } -chrono = { version = "0.4.23", default-features = false, features = [ +chrono = { version = "0.4.35", default-features = false, features = [ "std", ], optional = true } clap = { version = "3.2.24", features = ["env"], optional = true } diff --git a/src/ore/src/now.rs b/src/ore/src/now.rs index 1831483662ec2..bb6b8475bc6f4 100644 --- a/src/ore/src/now.rs +++ b/src/ore/src/now.rs @@ -142,7 +142,7 @@ mod tests { assert_eq!(datetime, converted_datetime); assert_eq!( millis, - u64::try_from(converted_datetime.timestamp_millis()).unwrap() + u64::try_from(converted_datetime.and_utc().timestamp_millis()).unwrap() ) } } diff --git a/src/ore/src/panic.rs b/src/ore/src/panic.rs index 11aba32fa2277..5f9c189d14ebe 100644 --- a/src/ore/src/panic.rs +++ b/src/ore/src/panic.rs @@ -24,7 +24,7 @@ use std::process; use tokio::task_local; thread_local! { - static CATCHING_UNWIND: RefCell = RefCell::new(false); + static CATCHING_UNWIND: RefCell = const { RefCell::new(false) }; } #[cfg(feature = "async")] diff --git a/src/ore/src/time.rs b/src/ore/src/time.rs index 20d0154f9f2e0..b769f4f846c22 100644 --- a/src/ore/src/time.rs +++ b/src/ore/src/time.rs @@ -21,7 +21,7 @@ use num::Zero; /// Generic Error type returned from methods on [`DurationExt`]. #[derive(Copy, Clone, Debug)] -pub struct DurationError(&'static str); +pub struct DurationError(#[allow(dead_code)] &'static str); /// Extensions for [`std::time::Duration`]. pub trait DurationExt { diff --git a/src/persist-client/src/cache.rs b/src/persist-client/src/cache.rs index c813bbfa88dec..e83a58f60e1e5 100644 --- a/src/persist-client/src/cache.rs +++ b/src/persist-client/src/cache.rs @@ -64,7 +64,7 @@ pub struct PersistClientCache { } #[derive(Debug)] -struct RttLatencyTask(AbortOnDropHandle<()>); +struct RttLatencyTask(#[allow(dead_code)] AbortOnDropHandle<()>); impl PersistClientCache { /// Returns a new [PersistClientCache]. diff --git a/src/persist-client/src/error.rs b/src/persist-client/src/error.rs index bad0179e97978..0067e95336e82 100644 --- a/src/persist-client/src/error.rs +++ b/src/persist-client/src/error.rs @@ -165,7 +165,7 @@ impl std::fmt::Display for CodecMismatch { /// [mz_persist_types::Codec64] impl. 
#[derive(Debug)] #[cfg_attr(any(test, debug_assertions), derive(PartialEq))] -pub struct CodecConcreteType(pub(crate) &'static str); +pub struct CodecConcreteType(#[allow(dead_code)] pub(crate) &'static str); impl From for InvalidUsage { fn from(x: CodecMismatch) -> Self { diff --git a/src/persist-client/src/internal/metrics.rs b/src/persist-client/src/internal/metrics.rs index 5195b19ae1418..7360c5f3965c5 100644 --- a/src/persist-client/src/internal/metrics.rs +++ b/src/persist-client/src/internal/metrics.rs @@ -558,7 +558,7 @@ impl MetricsVecs { } #[derive(Debug)] -pub struct CmdCasMismatchMetric(pub(crate) IntCounter); +pub struct CmdCasMismatchMetric(#[allow(dead_code)] pub(crate) IntCounter); #[derive(Debug)] pub struct CmdMetrics { diff --git a/src/persist-types/Cargo.toml b/src/persist-types/Cargo.toml index c00b40c8663da..d8f8d81a98a42 100644 --- a/src/persist-types/Cargo.toml +++ b/src/persist-types/Cargo.toml @@ -15,7 +15,7 @@ workspace = true anyhow = { version = "1.0.66", features = ["backtrace"] } arrow2 = { version = "0.16.0", features = ["compute_aggregate", "io_ipc", "io_parquet"] } bytes = "1.3.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } hex = "0.4.3" mz-ore = { path = "../ore", features = ["test"] } mz-proto = { path = "../proto" } diff --git a/src/pgrepr/Cargo.toml b/src/pgrepr/Cargo.toml index b866368f918f8..555e44aa83013 100644 --- a/src/pgrepr/Cargo.toml +++ b/src/pgrepr/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] byteorder = "1.4.3" bytes = "1.3.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } dec = "0.4.8" once_cell = "1.16.0" mz-ore = { path = "../ore" } diff --git a/src/pgtz/Cargo.toml b/src/pgtz/Cargo.toml index 580cc5b0211b0..780da8d3a3fd9 100644 --- a/src/pgtz/Cargo.toml +++ b/src/pgtz/Cargo.toml @@ -10,7 +10,7 @@ publish = false workspace = true [dependencies] -chrono = { version = "0.4.23", default-features = false, features = ["serde", "std"] } +chrono = { version = "0.4.35", default-features = false, features = ["serde", "std"] } chrono-tz = { version = "0.8.1", features = ["serde", "case-insensitive"] } mz-lowertest = { path = "../lowertest" } mz-ore = { path = "../ore", features = ["test"] } diff --git a/src/proto/Cargo.toml b/src/proto/Cargo.toml index cc1fc1d7b7e24..82be18803c0ae 100644 --- a/src/proto/Cargo.toml +++ b/src/proto/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0.66" -chrono = { version = "0.4.23", default-features = false, features = ["serde", "std"], optional = true } +chrono = { version = "0.4.35", default-features = false, features = ["serde", "std"], optional = true } chrono-tz = { version = "0.8.1", features = ["serde", "case-insensitive"], optional = true } globset = "0.4.9" http = "0.2.8" diff --git a/src/proto/src/chrono.rs b/src/proto/src/chrono.rs index c70b0acbf94e4..b9d11b0a6196c 100644 --- a/src/proto/src/chrono.rs +++ b/src/proto/src/chrono.rs @@ -98,7 +98,10 @@ impl RustType for DateTime { } fn from_proto(proto: ProtoNaiveDateTime) -> Result { - Ok(DateTime::from_utc(NaiveDateTime::from_proto(proto)?, Utc)) + Ok(DateTime::from_naive_utc_and_offset( + NaiveDateTime::from_proto(proto)?, + Utc, + )) } } @@ -143,7 +146,7 @@ pub fn any_naive_datetime() -> impl Strategy { } pub fn any_datetime() -> impl Strategy> { - any_naive_datetime().prop_map(|x| 
DateTime::from_utc(x, Utc)) + any_naive_datetime().prop_map(|x| DateTime::from_naive_utc_and_offset(x, Utc)) } pub fn any_fixed_offset() -> impl Strategy { diff --git a/src/repr-test-util/Cargo.toml b/src/repr-test-util/Cargo.toml index c220c26e5ee7f..e51437a904ef8 100644 --- a/src/repr-test-util/Cargo.toml +++ b/src/repr-test-util/Cargo.toml @@ -10,7 +10,7 @@ publish = false workspace = true [dependencies] -chrono = { version = "0.4.23", default-features = false, features = ["serde", "std"] } +chrono = { version = "0.4.35", default-features = false, features = ["serde", "std"] } mz-lowertest = { path = "../lowertest" } mz-ore = { path = "../ore" } mz-repr = { path = "../repr" } diff --git a/src/repr/Cargo.toml b/src/repr/Cargo.toml index 3d9161b6568b9..173f601ffa424 100644 --- a/src/repr/Cargo.toml +++ b/src/repr/Cargo.toml @@ -23,7 +23,7 @@ bitflags = "1.3.2" bytes = "1.3.0" cfg-if = "1.0.0" columnation = { git = "https://github.com/frankmcsherry/columnation" } -chrono = { version = "0.4.23", default-features = false, features = ["serde", "std"] } +chrono = { version = "0.4.35", default-features = false, features = ["serde", "std"] } chrono-tz = { version = "0.8.1", features = ["serde", "case-insensitive"] } compact_bytes = "0.1.1" dec = "0.4.8" diff --git a/src/repr/src/adt/date.rs b/src/repr/src/adt/date.rs index c9e9e2ab9b564..356ac123fdf90 100644 --- a/src/repr/src/adt/date.rs +++ b/src/repr/src/adt/date.rs @@ -63,10 +63,10 @@ impl Date { pub const LOW_DAYS: i32 = -2451545; // 4714-11-24 BC /// Largest date support by Materialize. Although Postgres can go up to - /// 5874897-12-31, chrono is limited to December 31, 262143, which we mirror + /// 5874897-12-31, chrono is limited to December 31, 262142, which we mirror /// here so we can use chrono's formatting methods and have guaranteed safe /// conversions. - pub const HIGH_DAYS: i32 = 95_015_644; + pub const HIGH_DAYS: i32 = 95_015_279; /// Constructs a new `Date` as the days since the postgres epoch /// (2000-01-01). diff --git a/src/repr/src/adt/interval.rs b/src/repr/src/adt/interval.rs index 0e06464ebe13c..cc77eb1fccdac 100644 --- a/src/repr/src/adt/interval.rs +++ b/src/repr/src/adt/interval.rs @@ -391,7 +391,7 @@ impl Interval { /// Converts this `Interval`'s duration into `chrono::Duration`. 
pub fn duration_as_chrono(&self) -> chrono::Duration { use chrono::Duration; - Duration::days(self.days.into()) + Duration::microseconds(self.micros) + Duration::try_days(self.days.into()).unwrap() + Duration::microseconds(self.micros) } pub fn duration(&self) -> Result { diff --git a/src/repr/src/adt/timestamp.rs b/src/repr/src/adt/timestamp.rs index ae74dd707dbae..7f7b33d5c3aae 100644 --- a/src/repr/src/adt/timestamp.rs +++ b/src/repr/src/adt/timestamp.rs @@ -187,7 +187,7 @@ pub trait DateLike: chrono::Datelike { .unwrap() .and_hms_opt(0, 0, 0) .unwrap(); - naive_date.timestamp() + naive_date.and_utc().timestamp() } fn millennium(&self) -> i32 { @@ -319,7 +319,9 @@ pub trait TimestampLike: let num_days_from_monday = i64::from(self.date().weekday().num_days_from_monday()); let new_date = NaiveDate::from_ymd_opt(self.year(), self.month(), self.day()) .unwrap() - .checked_sub_signed(Duration::days(num_days_from_monday)) + .checked_sub_signed( + Duration::try_days(num_days_from_monday).ok_or(TimestampError::OutOfRange)?, + ) .ok_or(TimestampError::OutOfRange)?; Ok(Self::new( new_date, @@ -470,11 +472,11 @@ impl TimestampLike for chrono::NaiveDateTime { } fn timestamp(&self) -> i64 { - self.timestamp() + self.and_utc().timestamp() } fn timestamp_subsec_micros(&self) -> u32 { - self.timestamp_subsec_micros() + self.and_utc().timestamp_subsec_micros() } fn timezone_offset(&self) -> &'static str { @@ -516,7 +518,7 @@ impl TimestampLike for chrono::DateTime { } fn from_date_time(dt: NaiveDateTime) -> Self { - DateTime::::from_utc(dt, Utc) + DateTime::::from_naive_utc_and_offset(dt, Utc) } fn timestamp(&self) -> i64 { @@ -584,16 +586,16 @@ impl Serialize for CheckedTimestamp { // 16:47:04.192 to 292278994-08-17 07:12:55.807. // - Avro also supports i64 microseconds since the Unix epoch: -290308-12-21 // 19:59:05.224192 to 294247-01-10 04:00:54.775807. -// - chrono's NaiveDate supports January 1, 262145 BCE to December 31, 262143 +// - chrono's NaiveDate supports January 1, 262144 BCE to December 31, 262142 // CE. // // Thus on the low end we have 4713-12-31 BC from Postgres, and on the high end -// 262143-12-31 from chrono. +// 262142-12-31 from chrono. 
pub static LOW_DATE: Lazy = Lazy::new(|| NaiveDate::from_ymd_opt(-4713, 12, 31).unwrap()); pub static HIGH_DATE: Lazy = - Lazy::new(|| NaiveDate::from_ymd_opt(262143, 12, 31).unwrap()); + Lazy::new(|| NaiveDate::from_ymd_opt(262142, 12, 31).unwrap()); impl CheckedTimestamp { pub fn from_timestamplike(t: T) -> Result { @@ -823,7 +825,7 @@ impl CheckedTimestamp { } // this is copied from [`chrono::round::duration_round`] // but using microseconds instead of nanoseconds precision - let stamp = original.timestamp_micros(); + let stamp = original.and_utc().timestamp_micros(); let dt = { let delta_down = stamp % round_to_micros; if delta_down == 0 { @@ -1008,7 +1010,7 @@ mod test { let updated = dt .round_to_precision(Some(TimestampPrecision(precision))) .unwrap(); - assert_eq!(expected, updated.timestamp_micros()); + assert_eq!(expected, updated.and_utc().timestamp_micros()); } #[mz_ore::test] @@ -1042,44 +1044,44 @@ mod test { let high = CheckedTimestamp::try_from(HIGH_DATE.and_hms_nano_opt(0, 0, 0, 123456789).unwrap()) .unwrap(); - assert_round_to_precision(high, 0, 8210298326400000000); - assert_round_to_precision(high, 1, 8210298326400100000); - assert_round_to_precision(high, 2, 8210298326400120000); - assert_round_to_precision(high, 3, 8210298326400123000); - assert_round_to_precision(high, 4, 8210298326400123500); - assert_round_to_precision(high, 5, 8210298326400123460); - assert_round_to_precision(high, 6, 8210298326400123457); + assert_round_to_precision(high, 0, 8210266790400000000); + assert_round_to_precision(high, 1, 8210266790400100000); + assert_round_to_precision(high, 2, 8210266790400120000); + assert_round_to_precision(high, 3, 8210266790400123000); + assert_round_to_precision(high, 4, 8210266790400123500); + assert_round_to_precision(high, 5, 8210266790400123460); + assert_round_to_precision(high, 6, 8210266790400123457); } #[mz_ore::test] fn test_precision_edge_cases() { let result = mz_ore::panic::catch_unwind(|| { - let date = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_micros(123456).unwrap()) - .unwrap(); + let date = CheckedTimestamp::try_from( + DateTime::from_timestamp_micros(123456).unwrap().naive_utc(), + ) + .unwrap(); let _ = date.round_to_precision(Some(TimestampPrecision(7))); }); assert!(result.is_err()); - let date = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_micros(123456).unwrap()) - .unwrap(); + let date = CheckedTimestamp::try_from( + DateTime::from_timestamp_micros(123456).unwrap().naive_utc(), + ) + .unwrap(); let date = date.round_to_precision(None).unwrap(); - assert_eq!(123456, date.timestamp_micros()); + assert_eq!(123456, date.and_utc().timestamp_micros()); } #[mz_ore::test] fn test_equality_with_same_precision() { let date1 = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_opt(0, 123456).unwrap()) - .unwrap(); + CheckedTimestamp::try_from(DateTime::from_timestamp(0, 123456).unwrap()).unwrap(); let date1 = date1 .round_to_precision(Some(TimestampPrecision(0))) .unwrap(); let date2 = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_opt(0, 123456789).unwrap()) - .unwrap(); + CheckedTimestamp::try_from(DateTime::from_timestamp(0, 123456789).unwrap()).unwrap(); let date2 = date2 .round_to_precision(Some(TimestampPrecision(0))) .unwrap(); @@ -1089,15 +1091,13 @@ mod test { #[mz_ore::test] fn test_equality_with_different_precisions() { let date1 = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_opt(0, 123500000).unwrap()) - .unwrap(); + CheckedTimestamp::try_from(DateTime::from_timestamp(0, 
123500000).unwrap()).unwrap(); let date1 = date1 .round_to_precision(Some(TimestampPrecision(5))) .unwrap(); let date2 = - CheckedTimestamp::try_from(NaiveDateTime::from_timestamp_opt(0, 123456789).unwrap()) - .unwrap(); + CheckedTimestamp::try_from(DateTime::from_timestamp(0, 123456789).unwrap()).unwrap(); let date2 = date2 .round_to_precision(Some(TimestampPrecision(4))) .unwrap(); diff --git a/src/repr/src/row.rs b/src/repr/src/row.rs index 6ab429bb41fa8..66085f6f59cf9 100644 --- a/src/repr/src/row.rs +++ b/src/repr/src/row.rs @@ -866,8 +866,9 @@ pub unsafe fn read_datum<'a>(data: &'a [u8], offset: &mut usize) -> Datum<'a> { let ts = i64::from_le_bytes(read_byte_array(data, offset)); let secs = ts.div_euclid(1_000_000_000); let nsecs: u32 = ts.rem_euclid(1_000_000_000).try_into().unwrap(); - let ndt = NaiveDateTime::from_timestamp_opt(secs, nsecs) - .expect("We only write round-trippable timestamps"); + let ndt = DateTime::from_timestamp(secs, nsecs) + .expect("We only write round-trippable timestamps") + .naive_utc(); Datum::Timestamp( CheckedTimestamp::from_timestamplike(ndt).expect("unexpected timestamp"), ) @@ -876,11 +877,10 @@ pub unsafe fn read_datum<'a>(data: &'a [u8], offset: &mut usize) -> Datum<'a> { let ts = i64::from_le_bytes(read_byte_array(data, offset)); let secs = ts.div_euclid(1_000_000_000); let nsecs: u32 = ts.rem_euclid(1_000_000_000).try_into().unwrap(); - let ndt = NaiveDateTime::from_timestamp_opt(secs, nsecs) + let dt = DateTime::from_timestamp(secs, nsecs) .expect("We only write round-trippable timestamps"); Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::from_utc(ndt, Utc)) - .expect("unexpected timestamp"), + CheckedTimestamp::from_timestamplike(dt).expect("unexpected timestamp"), ) } Tag::Timestamp => { @@ -895,8 +895,11 @@ pub unsafe fn read_datum<'a>(data: &'a [u8], offset: &mut usize) -> Datum<'a> { let date = read_naive_date(data, offset); let time = read_time(data, offset); Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::from_utc(date.and_time(time), Utc)) - .expect("unexpected timestamptz"), + CheckedTimestamp::from_timestamplike(DateTime::from_naive_utc_and_offset( + date.and_time(time), + Utc, + )) + .expect("unexpected timestamptz"), ) } Tag::Interval => { @@ -1119,7 +1122,7 @@ fn checked_timestamp_nanos(dt: NaiveDateTime) -> Option { if subsec_nanos >= 1_000_000_000 { return None; } - let as_ns = dt.timestamp().checked_mul(1_000_000_000)?; + let as_ns = dt.and_utc().timestamp().checked_mul(1_000_000_000)?; as_ns.checked_add(i64::from(subsec_nanos)) } @@ -2427,7 +2430,7 @@ impl std::ops::Deref for SharedRow { #[cfg(test)] mod tests { - use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc}; + use chrono::{DateTime, NaiveDate}; use crate::ScalarType; @@ -2531,11 +2534,8 @@ mod tests { .unwrap(), ), Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::::from_utc( - NaiveDateTime::from_timestamp_opt(61, 0).unwrap(), - Utc, - )) - .unwrap(), + CheckedTimestamp::from_timestamplike(DateTime::from_timestamp(61, 0).unwrap()) + .unwrap(), ), Datum::Interval(Interval { months: 312, @@ -2777,16 +2777,13 @@ mod tests { ), Datum::Timestamp( CheckedTimestamp::from_timestamplike( - NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), + DateTime::from_timestamp(0, 0).unwrap().naive_utc(), ) .unwrap(), ), Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::from_utc( - NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), - Utc, - )) - .unwrap(), + 
CheckedTimestamp::from_timestamplike(DateTime::from_timestamp(0, 0).unwrap()) + .unwrap(), ), Datum::Interval(Interval::default()), Datum::Bytes(&[]), diff --git a/src/repr/src/row/encoding.rs b/src/repr/src/row/encoding.rs index 44e2473a7ba5e..1b35e7e4baca1 100644 --- a/src/repr/src/row/encoding.rs +++ b/src/repr/src/row/encoding.rs @@ -1103,7 +1103,7 @@ mod tests { .unwrap(), ), Datum::TimestampTz( - CheckedTimestamp::from_timestamplike(DateTime::from_utc( + CheckedTimestamp::from_timestamplike(DateTime::from_naive_utc_and_offset( NaiveDate::from_ymd_opt(18, 19 % 12, 20) .unwrap() .and_time(NaiveTime::from_hms_opt(21, 22, 23).unwrap()), diff --git a/src/repr/src/scalar.rs b/src/repr/src/scalar.rs index 2313609d48c48..6382dcf0f815f 100644 --- a/src/repr/src/scalar.rs +++ b/src/repr/src/scalar.rs @@ -3084,8 +3084,9 @@ impl ScalarType { static TIMESTAMP: Lazy = Lazy::new(|| { Row::pack_slice(&[ Datum::Timestamp( - NaiveDateTime::from_timestamp_opt(0, 0) + DateTime::from_timestamp(0, 0) .unwrap() + .naive_utc() .try_into() .unwrap(), ), @@ -3105,8 +3106,9 @@ impl ScalarType { ), // nano seconds Datum::Timestamp( - NaiveDateTime::from_timestamp_opt(0, 123456789) + DateTime::from_timestamp(0, 123456789) .unwrap() + .naive_utc() .try_into() .unwrap(), ), @@ -3115,7 +3117,7 @@ impl ScalarType { CheckedTimestamp::from_timestamplike( NaiveDate::from_isoywd_opt(2019, 30, chrono::Weekday::Wed) .unwrap() - .and_hms_milli_opt(14, 32, 11, 1234) + .and_hms_milli_opt(23, 59, 59, 1234) .unwrap(), ) .unwrap(), @@ -3124,13 +3126,9 @@ impl ScalarType { }); static TIMESTAMPTZ: Lazy = Lazy::new(|| { Row::pack_slice(&[ + Datum::TimestampTz(DateTime::from_timestamp(0, 0).unwrap().try_into().unwrap()), Datum::TimestampTz( - DateTime::from_utc(NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), Utc) - .try_into() - .unwrap(), - ), - Datum::TimestampTz( - DateTime::from_utc( + DateTime::from_naive_utc_and_offset( crate::adt::timestamp::LOW_DATE .and_hms_opt(0, 0, 0) .unwrap(), @@ -3140,7 +3138,7 @@ impl ScalarType { .unwrap(), ), Datum::TimestampTz( - DateTime::from_utc( + DateTime::from_naive_utc_and_offset( crate::adt::timestamp::HIGH_DATE .and_hms_opt(23, 59, 59) .unwrap(), @@ -3151,12 +3149,10 @@ impl ScalarType { ), // nano seconds Datum::TimestampTz( - DateTime::from_utc( - NaiveDateTime::from_timestamp_opt(0, 123456789).unwrap(), - Utc, - ) - .try_into() - .unwrap(), + DateTime::from_timestamp(0, 123456789) + .unwrap() + .try_into() + .unwrap(), ), ]) }); @@ -3655,7 +3651,7 @@ pub fn arb_datum() -> BoxedStrategy { /// Generates an arbitrary [`NaiveDateTime`]. pub fn arb_naive_date_time() -> impl Strategy { - add_arb_duration(chrono::NaiveDateTime::from_timestamp_opt(0, 0).unwrap()) + add_arb_duration(chrono::DateTime::from_timestamp(0, 0).unwrap().naive_utc()) } /// Generates an arbitrary [`DateTime`] in [`Utc`]. diff --git a/src/repr/src/strconv.rs b/src/repr/src/strconv.rs index 01d71de0d58ba..82296d235c45c 100644 --- a/src/repr/src/strconv.rs +++ b/src/repr/src/strconv.rs @@ -481,7 +481,7 @@ pub fn parse_timestamptz(s: &str) -> Result>, Par Tz(tz) => match tz.offset_from_local_datetime(&dt).latest() { Some(offset) => offset.fix(), None => { - dt += Duration::hours(1); + dt += Duration::try_hours(1).unwrap(); tz.offset_from_local_datetime(&dt) .latest() .ok_or_else(|| "invalid timezone conversion".to_owned())? 
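[Reviewer note, not part of the patch: the parse_timestamptz hunk above swaps chrono::Duration::hours, which panics on an out-of-range value, for the fallible Duration::try_hours that newer chrono versions provide; the same try_* pattern recurs in the auction source later in this diff. A minimal sketch of the difference, with the argument n invented for illustration:

    use chrono::Duration;

    // try_hours returns None when the span cannot be represented,
    // where Duration::hours(n) would panic; the caller decides how to react.
    fn hours_checked(n: i64) -> Option<Duration> {
        Duration::try_hours(n)
    }
]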
@@ -489,7 +489,7 @@ pub fn parse_timestamptz(s: &str) -> Result>, Par } }, }; - Ok(DateTime::from_utc(dt - offset, Utc)) + Ok(DateTime::from_naive_utc_and_offset(dt - offset, Utc)) }) .map_err(|e| { ParseError::invalid_input_syntax("timestamp with time zone", s).with_details(e) diff --git a/src/repr/tests/strconv.rs b/src/repr/tests/strconv.rs index b369a35f9fe49..de3a4cc268d40 100644 --- a/src/repr/tests/strconv.rs +++ b/src/repr/tests/strconv.rs @@ -282,11 +282,7 @@ fn test_parse_timestamptz() { .unwrap(); let offset = FixedOffset::east_opt(test.8).unwrap(); let dt_fixed_offset = offset.from_local_datetime(&expected).earliest().unwrap(); - let expected = CheckedTimestamp::from_timestamplike(DateTime::::from_utc( - dt_fixed_offset.naive_utc(), - Utc, - )) - .unwrap(); + let expected = CheckedTimestamp::from_timestamplike(dt_fixed_offset.to_utc()).unwrap(); assert_eq!(actual, expected); } diff --git a/src/sql/Cargo.toml b/src/sql/Cargo.toml index 3a90ce4c11787..31f6e072e8263 100644 --- a/src/sql/Cargo.toml +++ b/src/sql/Cargo.toml @@ -16,7 +16,7 @@ aws-sdk-sts = { version = "1.7.0", default-features = false, features = [ "rt-tokio", ] } bitflags = "1.3.2" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive"] } derivative = "2.2.0" dynfmt = { version = "0.1.5", features = ["curly"] } diff --git a/src/sql/src/plan/lowering.rs b/src/sql/src/plan/lowering.rs index 9187f1530440f..531333e0459b4 100644 --- a/src/sql/src/plan/lowering.rs +++ b/src/sql/src/plan/lowering.rs @@ -2393,6 +2393,7 @@ enum OnPredicate { // An equality predicate between the two sides. Eq(MirScalarExpr, MirScalarExpr), // a non-equality predicate between the two sides. 
+ #[allow(dead_code)] Theta(MirScalarExpr), } diff --git a/src/sqllogictest/Cargo.toml b/src/sqllogictest/Cargo.toml index 612b9410988ba..3ec256e22145d 100644 --- a/src/sqllogictest/Cargo.toml +++ b/src/sqllogictest/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0.66" bytes = "1.3.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive"] } dec = "0.4.8" fallible-iterator = "0.2.0" diff --git a/src/stash/src/tests.rs b/src/stash/src/tests.rs index 43463aef2a534..7a656e3d5d9ed 100644 --- a/src/stash/src/tests.rs +++ b/src/stash/src/tests.rs @@ -752,14 +752,3 @@ async fn append(stash: &mut Stash, batches: Vec) -> Result<(), Stas }) .await } - -async fn get( - typed_collection: &TypedCollection, - stash: &mut Stash, -) -> Result, StashError> -where - K: Data, - V: Data, -{ - collection(stash, typed_collection.name()).await -} diff --git a/src/storage-client/Cargo.toml b/src/storage-client/Cargo.toml index 4a081589d42fd..618e90e79e1f4 100644 --- a/src/storage-client/Cargo.toml +++ b/src/storage-client/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0.66" async-trait = "0.1.68" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } differential-dataflow = "0.12.0" http = "0.2.8" itertools = { version = "0.10.5" } diff --git a/src/storage-controller/Cargo.toml b/src/storage-controller/Cargo.toml index 0aa1f9b98f930..f1c0a0bef7407 100644 --- a/src/storage-controller/Cargo.toml +++ b/src/storage-controller/Cargo.toml @@ -13,7 +13,7 @@ workspace = true anyhow = "1.0.66" async-trait = "0.1.68" bytes = "1.3.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } derivative = "2.2.0" differential-dataflow = "0.12.0" futures = "0.3.25" diff --git a/src/storage/Cargo.toml b/src/storage/Cargo.toml index b199db7d27cfd..1a06a62bdbe1c 100644 --- a/src/storage/Cargo.toml +++ b/src/storage/Cargo.toml @@ -20,7 +20,7 @@ async-trait = "0.1.68" bytes = { version = "1.3.0", features = ["serde"] } bytesize = "1.1.0" bincode = "1" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive", "env"] } crossbeam-channel = "0.5.8" csv-core = { version = "0.1.10" } diff --git a/src/storage/src/source/generator/auction.rs b/src/storage/src/source/generator/auction.rs index 3742d7984e848..97aa46fd0c754 100644 --- a/src/storage/src/source/generator/auction.rs +++ b/src/storage/src/source/generator/auction.rs @@ -132,7 +132,7 @@ impl Generator for Auction { packer.push(Datum::Int64(rng.gen_range(1..=max_seller_id))); // seller packer.push(Datum::String(AUCTIONS.choose(&mut rng).unwrap())); // item packer.push(Datum::TimestampTz( - (now + chrono::Duration::seconds(10)) + (now + chrono::Duration::try_seconds(10).unwrap()) .try_into() .expect("timestamp must fit"), )); // end time @@ -148,9 +148,10 @@ impl Generator for Auction { packer.push(Datum::Int64(counter)); // auction id packer.push(Datum::Int32(rng.gen_range(1..100))); // amount packer.push(Datum::TimestampTz( - (now + chrono::Duration::seconds(i)) - .try_into() - .expect("timestamp must fit"), + (now + 
chrono::Duration::try_seconds(i) + .expect("time must fit")) + .try_into() + .expect("timestamp must fit"), )); // bid time bid }; diff --git a/src/storage/src/source/kafka.rs b/src/storage/src/source/kafka.rs index 4654757d1c00d..6bb034708cbbd 100644 --- a/src/storage/src/source/kafka.rs +++ b/src/storage/src/source/kafka.rs @@ -16,7 +16,7 @@ use std::thread; use std::time::Duration; use anyhow::anyhow; -use chrono::NaiveDateTime; +use chrono::{DateTime, NaiveDateTime}; use differential_dataflow::{AsCollection, Collection}; use futures::StreamExt; use maplit::btreemap; @@ -1133,9 +1133,10 @@ fn construct_source_message( .to_millis() .expect("kafka sources always have upstream_time"); - let d: Datum = NaiveDateTime::from_timestamp_millis(ts) + let d: Datum = DateTime::from_timestamp_millis(ts) .and_then(|dt| { - let ct: Option> = dt.try_into().ok(); + let ct: Option> = + dt.naive_utc().try_into().ok(); ct }) .into(); diff --git a/src/testdrive/Cargo.toml b/src/testdrive/Cargo.toml index d058523177dee..12119e2873545 100644 --- a/src/testdrive/Cargo.toml +++ b/src/testdrive/Cargo.toml @@ -20,7 +20,7 @@ aws-sdk-sts = { version = "1.7.0", default-features = false, features = ["rt-tok aws-types = "1.1.1" byteorder = "1.4.3" bytes = "1.3.0" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } clap = { version = "3.2.24", features = ["derive"] } flate2 = "1.0.24" futures = "0.3.25" diff --git a/src/testdrive/src/action/sql.rs b/src/testdrive/src/action/sql.rs index 0c0216d8027ad..fd40a891ece08 100644 --- a/src/testdrive/src/action/sql.rs +++ b/src/testdrive/src/action/sql.rs @@ -704,7 +704,7 @@ impl<'a> FromSql<'a> for MzTimestamp { } } -struct MzAclItem(String); +struct MzAclItem(#[allow(dead_code)] String); impl<'a> FromSql<'a> for MzAclItem { fn from_sql(_ty: &Type, raw: &'a [u8]) -> Result> { diff --git a/src/testdrive/src/format/avro.rs b/src/testdrive/src/format/avro.rs index 43370ce9824f0..d1549093434ca 100644 --- a/src/testdrive/src/format/avro.rs +++ b/src/testdrive/src/format/avro.rs @@ -53,26 +53,18 @@ pub fn from_json(json: &JsonValue, schema: SchemaNode) -> Result { let ts = n.as_i64().unwrap(); - // TODO(benesch): rewrite to avoid `as`. - #[allow(clippy::as_conversions)] Ok(Value::Timestamp( - chrono::NaiveDateTime::from_timestamp_opt( - ts / 1_000, - ((ts % 1_000).abs() * 1_000_000) as u32, - ) - .unwrap(), + chrono::DateTime::from_timestamp_millis(ts) + .ok_or(anyhow!("timestamp out of bounds"))? + .naive_utc(), )) } (JsonValue::Number(ref n), SchemaPiece::TimestampMicro) => { let ts = n.as_i64().unwrap(); - // TODO(benesch): rewrite to avoid `as`. - #[allow(clippy::as_conversions)] Ok(Value::Timestamp( - chrono::NaiveDateTime::from_timestamp_opt( - ts / 1_000_000, - ((ts % 1_000_000).abs() * 1_000) as u32, - ) - .unwrap(), + chrono::DateTime::from_timestamp_micros(ts) + .ok_or(anyhow!("timestamp out of bounds"))? 
+ .naive_utc(), )) } (JsonValue::Array(items), SchemaPiece::Array(inner)) => Ok(Value::Array( @@ -278,8 +270,8 @@ impl Debug for DebugValue { f, "Timestamp(\"{:?}\", {} micros, {} millis)", t, - t.timestamp_micros(), - t.timestamp_millis() + t.and_utc().timestamp_micros(), + t.and_utc().timestamp_millis() ), Value::Date(d) => write!( f, diff --git a/src/workspace-hack/Cargo.toml b/src/workspace-hack/Cargo.toml index 6ba1fd109ca84..41fe0f1851196 100644 --- a/src/workspace-hack/Cargo.toml +++ b/src/workspace-hack/Cargo.toml @@ -31,7 +31,7 @@ axum = { version = "0.6.20", features = ["headers", "ws"] } bstr = { version = "0.2.14" } byteorder = { version = "1.4.3" } bytes = { version = "1.4.0", features = ["serde"] } -chrono = { version = "0.4.25", default-features = false, features = ["alloc", "clock", "serde"] } +chrono = { version = "0.4.35", default-features = false, features = ["clock", "serde"] } clap = { version = "3.2.24", features = ["derive", "env", "wrap_help"] } console = { version = "0.15.5", default-features = false, features = ["ansi-parsing", "unicode-width"] } criterion = { version = "0.4.0", features = ["async_tokio", "html_reports"] } @@ -153,7 +153,7 @@ bstr = { version = "0.2.14" } byteorder = { version = "1.4.3" } bytes = { version = "1.4.0", features = ["serde"] } cc = { version = "1.0.83", default-features = false, features = ["parallel"] } -chrono = { version = "0.4.25", default-features = false, features = ["alloc", "clock", "serde"] } +chrono = { version = "0.4.35", default-features = false, features = ["clock", "serde"] } clap = { version = "3.2.24", features = ["derive", "env", "wrap_help"] } console = { version = "0.15.5", default-features = false, features = ["ansi-parsing", "unicode-width"] } criterion = { version = "0.4.0", features = ["async_tokio", "html_reports"] } diff --git a/test/pgtest-mz/datums.pt b/test/pgtest-mz/datums.pt index 64ad867c58cc3..0951484d9fdcf 100644 --- a/test/pgtest-mz/datums.pt +++ b/test/pgtest-mz/datums.pt @@ -35,9 +35,9 @@ DataRow {"fields":["9","NULL","NULL","NULL","NULL","NULL","NULL","NULL","Infinit DataRow {"fields":["6","NULL","32767","2147483647","9223372036854775807","NULL","NULL","NULL","3.4028235e+38","1.7976931348623157e+308","Infinity","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","00:00:00.000001","NULL","NULL","2015-09-18T23:56:04.123Z","NULL","2015-09-18T23:56:04.123Z","\"'\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","=arwdUCRBN/u42"]} DataRow {"fields":["10","NULL","NULL","NULL","NULL","NULL","NULL","NULL","-Infinity","-Infinity","-Infinity","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","-178956970 years -8 months -2147483648 days -2562047788:00:54.775808","NULL","NULL","NULL","NULL","NULL","\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL"]} DataRow {"fields":["4","NULL","-32768","-2147483648","-9223372036854775808","255","32767","2147483647","-3.4028235e+38","-1.7976931348623157e+308","-Infinity","NULL","NULL","1970-01-01 00:00:00.123457","1970-01-01 00:00:00.123457","1970-01-01 00:00:00.123457","1970-01-01 00:00:00.123457+00","1970-01-01 00:00:00.123457+00","1970-01-01 00:00:00.123457+00","1 month","NULL","NULL","\"",".","\"","\"\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","u42=arwdUCRBN/p"]} -DataRow {"fields":["5","NULL","-32767","-2147483647","-9223372036854775807","256","32768","2147483648","1.1754944e-38","2.2250738585072014e-308","0","NULL","NULL","2019-07-24 
14:32:12.1234","2019-07-24 14:32:12.1234","2019-07-24 14:32:12.1234","NULL","NULL","NULL","1 day","NULL","NULL",".","NULL",".","\" \"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","=/u42"]} +DataRow {"fields":["5","NULL","-32767","-2147483647","-9223372036854775807","256","32768","2147483648","1.1754944e-38","2.2250738585072014e-308","0","NULL","NULL","2019-07-24 23:59:60.1234","2019-07-24 23:59:60.1234","2019-07-24 23:59:60.1234","NULL","NULL","NULL","1 day","NULL","NULL",".","NULL",".","\" \"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","=/u42"]} DataRow {"fields":["1","t","0","0","0","0","0","0","0","0","0","2000-01-01","00:00:00","1970-01-01 00:00:00","1970-01-01 00:00:00","1970-01-01 00:00:00","1970-01-01 00:00:00+00","1970-01-01 00:00:00+00","1970-01-01 00:00:00+00","00:00:00","\u0000","\\x",""," ","","true","00000000-0000-0000-0000-000000000000","0","0","0","0","NULL","0","=/p"]} -DataRow {"fields":["3","NULL","-1","-1","-1","65535","4294967295","18446744073709551615","-1","-1","-1","262143-12-31","NULL","262143-12-31 23:59:59","262143-12-31 23:59:59","262143-12-31 23:59:59","262143-12-31 23:59:59+00","262143-12-31 23:59:59+00","262143-12-31 23:59:59+00","-1 months -1 days -00:00:00.000001","NULL","\\xff","'","\"","'","null","NULL","NULL","NULL","NULL","NULL","NULL","NULL","u42=/p"]} +DataRow {"fields":["3","NULL","-1","-1","-1","65535","4294967295","18446744073709551615","-1","-1","-1","262142-12-31","NULL","262142-12-31 23:59:59","262142-12-31 23:59:59","262142-12-31 23:59:59","262142-12-31 23:59:59+00","262142-12-31 23:59:59+00","262142-12-31 23:59:59+00","-1 months -1 days -00:00:00.000001","NULL","\\xff","'","\"","'","null","NULL","NULL","NULL","NULL","NULL","NULL","NULL","u42=/p"]} DataRow {"fields":["2","f","1","1","1","1","1","1","1","1","1","4714-11-24 BC","23:59:59.999999","4714-12-31 00:00:00 BC","4714-12-31 00:00:00 BC","4714-12-31 00:00:00 BC","4714-12-31 00:00:00+00 BC","4714-12-31 00:00:00+00 BC","4714-12-31 00:00:00+00 BC","1 month 1 day 00:00:00.000001","[255]","\\x00"," ","'"," ","false","ffffffff-ffff-ffff-ffff-ffffffffffff","4294967295","4294967295","4294967295","4294967295","NULL","18446744073709551615","=arwdUCRBN/p"]} DataRow {"fields":["7","NULL","127","32767","2147483647","NULL","NULL","NULL","1.1920929e-7","2.220446049250313e-16","0.0000000000000002220446049250313","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","-1 months","NULL","NULL","xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx","NULL","xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx","\"\\\"\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL"]} CommandComplete {"tag":"SELECT 21"} @@ -72,9 +72,9 @@ DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\t","NULL","NULL", DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0006","NULL","[127, 255]","[127, 255, 255, 255]","[127, 255, 255, 255, 255, 255, 255, 255]","NULL","NULL","NULL","[127, 127, 255, 255]","[127, 239, 255, 255, 255, 255, 255, 255]","[0, 0, 255, 255, 208, 0, 0, 0]","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","NULL","NULL","2015-09-18T23:56:04.123Z","NULL","2015-09-18T23:56:04.123Z","\u0001\"'\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","[112, 0, 0, 0, 0, 0, 0, 0, 0, 117, 42, 0, 0, 0, 0, 0, 0, 0, 15, 3, 0, 224, 0, 0, 0, 0]"]} DataRow 
{"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\n","NULL","NULL","NULL","NULL","NULL","NULL","NULL","[255, 128, 0, 0]","[255, 240, 0, 0, 0, 0, 0, 0]","[0, 0, 255, 255, 240, 0, 0, 0]","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","[128, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 128, 0, 0, 0]","NULL","NULL","NULL","NULL","NULL","\u0001\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL"]} DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0004","NULL","[128, 0]","[128, 0, 0, 0]","[128, 0, 0, 0, 0, 0, 0, 0]","[0, 255]","[0, 0, 127, 255]","[0, 0, 0, 0, 127, 255, 255, 255]","[255, 127, 255, 255]","[255, 239, 255, 255, 255, 255, 255, 255]","[0, 0, 255, 255, 240, 0, 0, 0]","NULL","NULL","[255, 252, 162, 254, 196, 202, 2, 65]","[255, 252, 162, 254, 196, 202, 2, 65]","[255, 252, 162, 254, 196, 202, 2, 65]","[255, 252, 162, 254, 196, 202, 2, 65]","[255, 252, 162, 254, 196, 202, 2, 65]","[255, 252, 162, 254, 196, 202, 2, 65]","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001","NULL","NULL","\"",".","\"","\u0001\"\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","[117, 42, 0, 0, 0, 0, 0, 0, 0, 112, 0, 0, 0, 0, 0, 0, 0, 0, 15, 3, 0, 224, 0, 0, 0, 0]"]} -DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0005","NULL","[128, 1]","[128, 0, 0, 1]","[128, 0, 0, 0, 0, 0, 0, 1]","\u0001\u0000","[0, 0, 128, 0]","[0, 0, 0, 0, 128, 0, 0, 0]","[0, 128, 0, 0]","\u0000\u0010\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","NULL","NULL","[0, 2, 49, 108, 241, 141, 133, 16]","[0, 2, 49, 108, 241, 141, 133, 16]","[0, 2, 49, 108, 241, 141, 133, 16]","NULL","NULL","NULL","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001\u0000\u0000\u0000\u0000","NULL","NULL",".","NULL",".","\u0001\" \"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000u*\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000"]} +DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0005","NULL","[128, 1]","[128, 0, 0, 1]","[128, 0, 0, 0, 0, 0, 0, 1]","\u0001\u0000","[0, 0, 128, 0]","[0, 0, 0, 0, 128, 0, 0, 0]","[0, 128, 0, 0]","\u0000\u0010\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","NULL","NULL","[0, 2, 49, 116, 224, 41, 242, 16]","[0, 2, 49, 116, 224, 41, 242, 16]","[0, 2, 49, 116, 224, 41, 242, 16]","NULL","NULL","NULL","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001\u0000\u0000\u0000\u0000","NULL","NULL",".","NULL",".","\u0001\" \"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000u*\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000"]} DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001","\u0001","\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","[255, 252, 162, 254, 196, 200, 32, 0]","[255, 252, 162, 254, 196, 200, 32, 0]","[255, 252, 162, 254, 196, 200, 32, 0]","[255, 252, 162, 254, 196, 
200, 32, 0]","[255, 252, 162, 254, 196, 200, 32, 0]","[255, 252, 162, 254, 196, 200, 32, 0]","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000","",""," ","","\u0001true","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","\u0000\u0000\u0000\u0000","NULL","0","p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000"]} -DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0003","NULL","[255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255, 255, 255, 255, 255]","[255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255, 255, 255, 255, 255]","[191, 128, 0, 0]","[191, 240, 0, 0, 0, 0, 0, 0]","\u0000\u0001\u0000\u0000@\u0000\u0000\u0000\u0000\u0001","[5, 169, 210, 220]","NULL","[113, 237, 121, 230, 207, 158, 157, 192]","[113, 237, 121, 230, 207, 158, 157, 192]","[113, 237, 121, 230, 207, 158, 157, 192]","[113, 237, 121, 230, 207, 158, 157, 192]","[113, 237, 121, 230, 207, 158, 157, 192]","[113, 237, 121, 230, 207, 158, 157, 192]","[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]","NULL","[255]","'","\"","'","\u0001null","NULL","NULL","NULL","NULL","NULL","NULL","NULL","u*\u0000\u0000\u0000\u0000\u0000\u0000\u0000p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000"]} +DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0003","NULL","[255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255, 255, 255, 255, 255]","[255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255, 255, 255, 255, 255]","[191, 128, 0, 0]","[191, 240, 0, 0, 0, 0, 0, 0]","\u0000\u0001\u0000\u0000@\u0000\u0000\u0000\u0000\u0001","[5, 169, 209, 111]","NULL","[113, 237, 93, 56, 67, 138, 189, 192]","[113, 237, 93, 56, 67, 138, 189, 192]","[113, 237, 93, 56, 67, 138, 189, 192]","[113, 237, 93, 56, 67, 138, 189, 192]","[113, 237, 93, 56, 67, 138, 189, 192]","[113, 237, 93, 56, 67, 138, 189, 192]","[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]","NULL","[255]","'","\"","'","\u0001null","NULL","NULL","NULL","NULL","NULL","NULL","NULL","u*\u0000\u0000\u0000\u0000\u0000\u0000\u0000p\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000"]} DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0002","\u0000","\u0000\u0001","\u0000\u0000\u0000\u0001","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001","\u0000\u0001","\u0000\u0000\u0000\u0001","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001","[63, 128, 0, 0]","[63, 240, 0, 0, 0, 0, 0, 0]","\u0000\u0001\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001","[255, 218, 151, 167]","[0, 0, 0, 20, 29, 215, 95, 255]","[253, 15, 127, 169, 145, 64, 128, 0]","[253, 15, 127, 169, 145, 64, 128, 0]","[253, 15, 127, 169, 145, 64, 128, 0]","[253, 15, 127, 169, 145, 64, 128, 0]","[253, 15, 127, 169, 145, 64, 128, 0]","[253, 15, 127, 169, 145, 64, 128, 0]","\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0001\u0000\u0000\u0000\u0001\u0000\u0000\u0000\u0001","[255]","\u0000"," ","'"," ","\u0001false","[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255]","[255, 255, 255, 255]","NULL","18446744073709551615","[112, 0, 0, 0, 0, 0, 0, 0, 0, 112, 0, 0, 0, 0, 0, 0, 0, 0, 15, 
3, 0, 224, 0, 0, 0, 0]"]} DataRow {"fields":["\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0007","NULL","\u0000","[0, 0, 127, 255]","[0, 0, 0, 0, 127, 255, 255, 255]","NULL","NULL","NULL","4\u0000\u0000\u0000","[60, 176, 0, 0, 0, 0, 0, 0]","[0, 9, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 8, 156, 17, 252, 36, 34, 12, 58]","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL","[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255]","NULL","NULL","xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx","NULL","xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx","\u0001\"\\\"\"","NULL","NULL","NULL","NULL","NULL","NULL","NULL","NULL"]} CommandComplete {"tag":"SELECT 21"} diff --git a/test/sqllogictest/dates-times.slt b/test/sqllogictest/dates-times.slt index b8962c0a4bdc0..9ff204ef5c054 100644 --- a/test/sqllogictest/dates-times.slt +++ b/test/sqllogictest/dates-times.slt @@ -1391,21 +1391,21 @@ select ('0001-01-01'::date - '1721389days'::interval)::date 4714-12-31 BC query T -select ('0001-01-01'::date + '262142years 11months 30days'::interval)::date +select ('0001-01-01'::date + '262141years 11months 30days'::interval)::date ---- -262143-12-31 +262142-12-31 # Out of range for both dates and timestamps, but timestamp triggers first. query error timestamp out of range select ('0001-01-01'::date - '4713years 1months 8days')::date query error timestamp out of range -select ('0001-01-01'::date + '262142years 11months 30days')::date + '1day' +select ('0001-01-01'::date + '262141years 11months 30days')::date + '1day' query II -select ('0001-01-01'::date - '1721389days'::interval)::date - ('0001-01-01'::date + '262142years 11months 30days'::interval)::date, ('0001-01-01'::date + '262142years 11months 30days'::interval)::date - ('0001-01-01'::date - '1721389days'::interval)::date +select ('0001-01-01'::date - '1721389days'::interval)::date - ('0001-01-01'::date + '262141years 11months 30days'::interval)::date, ('0001-01-01'::date + '262141years 11months 30days'::interval)::date - ('0001-01-01'::date - '1721389days'::interval)::date ---- --97467152 97467152 +-97466787 97466787 query error timestamp out of range SELECT to_timestamp(9223372036854775808::float8); diff --git a/test/sqllogictest/funcs.slt b/test/sqllogictest/funcs.slt index 2357a4e927665..20bd17e1f7da0 100644 --- a/test/sqllogictest/funcs.slt +++ b/test/sqllogictest/funcs.slt @@ -60,7 +60,7 @@ SELECT date_trunc('week', TIMESTAMP '2020-08-02 00:00:00') 2020-07-27 00:00:00 query error timestamp out of range -SELECT date_trunc('week', make_timestamp(-262144, 1, 1, 0, 0, 0)) +SELECT date_trunc('week', make_timestamp(-262143, 1, 1, 0, 0, 0)) query T SELECT date_trunc('month', TIMESTAMP '2019-11-26 15:56:46.241150') diff --git a/test/sqllogictest/range.slt b/test/sqllogictest/range.slt index 995255f88c943..fa4272b6789fb 100644 --- a/test/sqllogictest/range.slt +++ b/test/sqllogictest/range.slt @@ -2410,7 +2410,7 @@ NULL # Test incrementing max date query error date out of range -SELECT daterange(('0001-01-01'::date + '262142years 11months 30days'::interval)::date, null, '()'); +SELECT daterange(('0001-01-01'::date + '262141years 11months 30days'::interval)::date, null, '()'); #parameterized input diff --git a/test/sqllogictest/timestamp.slt b/test/sqllogictest/timestamp.slt index 7e91ca1bc2836..83f01898f28ec 100644 --- a/test/sqllogictest/timestamp.slt +++ b/test/sqllogictest/timestamp.slt @@ -194,12 +194,12 @@ true true query T -SELECT 
'99999-01-01'::TIMESTAMP + '162144 y'; +SELECT '99999-01-01'::TIMESTAMP + '162143 y'; ---- -262143-01-01 00:00:00 +262142-01-01 00:00:00 query error timestamp out of range -SELECT '99999-01-01'::TIMESTAMP + '162145 y'; +SELECT '99999-01-01'::TIMESTAMP + '162144 y'; # date_bin_hopping simple conn=mz_system,user=mz_system diff --git a/test/sqllogictest/timestamptz.slt b/test/sqllogictest/timestamptz.slt index 98cc050358dcf..2a462b1924864 100644 --- a/test/sqllogictest/timestamptz.slt +++ b/test/sqllogictest/timestamptz.slt @@ -194,4 +194,4 @@ true true query error timestamp out of range -select timezone('1 day'::interval, '1-12-31'::timestamptz+'262142 years'::interval) +select timezone('1 day'::interval, '1-12-31'::timestamptz+'262141 years'::interval) diff --git a/test/test-util/Cargo.toml b/test/test-util/Cargo.toml index 04b8fb293355b..fc6cb9d8c0f84 100644 --- a/test/test-util/Cargo.toml +++ b/test/test-util/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0.66" -chrono = { version = "0.4.23", default-features = false, features = ["std"] } +chrono = { version = "0.4.35", default-features = false, features = ["std"] } mz-kafka-util = { path = "../../src/kafka-util" } mz-ore = { path = "../../src/ore", features = ["async"] } rand = "0.8.5" diff --git a/test/testdrive/kafka-avro-sinks.td b/test/testdrive/kafka-avro-sinks.td index b7dd2de87a8e5..30e80aa453269 100644 --- a/test/testdrive/kafka-avro-sinks.td +++ b/test/testdrive/kafka-avro-sinks.td @@ -85,7 +85,7 @@ $ kafka-verify-data format=avro sink=materialize.public.interval_data_sink sort- (DATE '2000-01-01', TIMESTAMP '2000-01-01 10:10:10.111', TIMESTAMPTZ '2000-01-01 10:10:10.111+02'), (DATE '2000-02-01', TIMESTAMP '2000-02-01 10:10:10.111', TIMESTAMPTZ '2000-02-01 10:10:10.111+02'), (('0001-01-01'::DATE - '1721389days'::INTERVAL)::DATE, ('0001-01-01'::DATE - '1721389days'::INTERVAL)::TIMESTAMP, ('0001-01-01'::DATE - '1721389days'::INTERVAL)::TIMESTAMPTZ), - (('0001-01-01'::DATE + '262142years 11months 30days'::INTERVAL)::DATE, ('0001-01-01'::DATE + '262142years 11months 30days'::INTERVAL)::TIMESTAMP, ('0001-01-01'::DATE + '262142years 11months 30days'::INTERVAL)::TIMESTAMPTZ) + (('0001-01-01'::DATE + '262141years 11months 30days'::INTERVAL)::DATE, ('0001-01-01'::DATE + '262141years 11months 30days'::INTERVAL)::TIMESTAMP, ('0001-01-01'::DATE + '262141years 11months 30days'::INTERVAL)::TIMESTAMPTZ) > CREATE CLUSTER datetime_data_sink_cluster SIZE '${arg.default-storage-size}'; > CREATE SINK datetime_data_sink @@ -99,7 +99,7 @@ $ kafka-verify-data format=avro sink=materialize.public.datetime_data_sink sort- {"before": null, "after": {"row": {"date": -2440551, "ts": -210863606400000000, "ts_tz": -210863606400000000}}} {"before": null, "after": {"row": {"date": 10957, "ts": 946721410111000, "ts_tz": 946714210111000}}} {"before": null, "after": {"row": {"date": 10988, "ts": 949399810111000, "ts_tz": 949392610111000}}} -{"before": null, "after": {"row": {"date": 95026601, "ts": 8210298326400000000, "ts_tz": 8210298326400000000}}} +{"before": null, "after": {"row": {"date": 95026236, "ts": 8210266790400000000, "ts_tz": 8210266790400000000}}} > CREATE MATERIALIZED VIEW time_data (time) AS VALUES (TIME '01:02:03'), (TIME '01:02:04'), (TIME '00:00:00'), (TIME '23:59:59')
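[Reviewer note, not part of the patch: most of the Rust hunks in this change are mechanical fallout from the chrono 0.4.25 -> 0.4.35 upgrade, which deprecates the timestamp helpers on NaiveDateTime and DateTime::from_utc in favor of DateTime<Utc>-centric equivalents. The lowered HIGH_DATE (year 262143 -> 262142) and the matching test and sqllogictest expectations track what appears to be a smaller maximum date in newer chrono, and the scattered #[allow(dead_code)] attributes presumably quiet the stricter dead_code lint that accompanies the Rust 1.77.0 / newer nightly bump. A minimal sketch of the recurring migrations, illustrative only and with the function name made up:

    use chrono::{DateTime, NaiveDateTime, Utc};

    fn chrono_0_4_35_patterns() {
        // Old: NaiveDateTime::from_timestamp_opt(secs, nsecs).
        // New: build a DateTime<Utc> and drop the offset when a naive value is needed.
        let ndt: NaiveDateTime = DateTime::from_timestamp(0, 123_456_789)
            .expect("timestamp in range")
            .naive_utc();

        // Old: ndt.timestamp_micros().
        // New: attach the Utc offset first, then read the epoch value.
        let micros: i64 = ndt.and_utc().timestamp_micros();

        // Old: DateTime::<Utc>::from_utc(ndt, Utc).
        // New: DateTime::from_naive_utc_and_offset.
        let dt: DateTime<Utc> = DateTime::from_naive_utc_and_offset(ndt, Utc);

        let _ = (micros, dt);
    }
]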