diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 465c88f3..6d22f448 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -9,26 +9,6 @@ updates: schedule: interval: weekly - package-ecosystem: pip - directory: "/stac_fastapi/api" - schedule: - interval: weekly - - package-ecosystem: pip - directory: "/stac_fastapi/api" - schedule: - interval: weekly - - package-ecosystem: pip - directory: "/stac_fastapi/types" - schedule: - interval: weekly - - package-ecosystem: pip - directory: "/stac_fastapi/extensions" - schedule: - interval: weekly - - package-ecosystem: pip - directory: "/stac_fastapi/pgstac" - schedule: - interval: weekly - - package-ecosystem: pip - directory: "/stac_fastapi/sqlalchemy" + directory: "/stac_fastapi" schedule: interval: weekly diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml index 4c5c705b..d250cd64 100644 --- a/.github/workflows/cicd.yaml +++ b/.github/workflows/cicd.yaml @@ -1,9 +1,9 @@ -name: stac-fastapi +name: stac-fastapi-pgstac on: push: - branches: [ master ] + branches: [ main ] pull_request: - branches: [ master ] + branches: [ main ] jobs: test: @@ -46,61 +46,13 @@ jobs: - name: Lint code uses: pre-commit/action@v2.0.0 - - name: Install pipenv + - name: Install stac-fastapi.pgstac run: | - python -m pip install --upgrade pipenv wheel - - - name: Install types - run: | - pip install ./stac_fastapi/types[dev] - - - name: Install core api - run: | - pip install ./stac_fastapi/api[dev] - - - name: Install Extensions - run: | - pip install ./stac_fastapi/extensions[dev] - - - name: Install sqlalchemy stac-fastapi - run: | - pip install ./stac_fastapi/sqlalchemy[dev,server] - - - name: Install pgstac stac-fastapi - run: | - pip install ./stac_fastapi/pgstac[dev,server] - - - name: Run migration - run: | - cd stac_fastapi/sqlalchemy && alembic upgrade head - env: - POSTGRES_USER: username - POSTGRES_PASS: password - POSTGRES_DBNAME: postgis - POSTGRES_HOST: localhost - POSTGRES_PORT: 
5432 - - - name: Run test suite - run: | - cd stac_fastapi/api && pipenv run pytest -svvv - env: - ENVIRONMENT: testing - - - name: Run test suite - run: | - cd stac_fastapi/sqlalchemy && pipenv run pytest -svvv - env: - ENVIRONMENT: testing - POSTGRES_USER: username - POSTGRES_PASS: password - POSTGRES_DBNAME: postgis - POSTGRES_HOST_READER: localhost - POSTGRES_HOST_WRITER: localhost - POSTGRES_PORT: 5432 + pip install .[dev,server] - name: Run test suite run: | - cd stac_fastapi/pgstac && pipenv run pytest -svvv + pytest -svvv env: ENVIRONMENT: testing POSTGRES_USER: username diff --git a/.github/workflows/deploy_mkdocs.yml b/.github/workflows/deploy_mkdocs.yml index 00aa6ddd..e4b54bac 100644 --- a/.github/workflows/deploy_mkdocs.yml +++ b/.github/workflows/deploy_mkdocs.yml @@ -3,7 +3,7 @@ name: Publish docs via GitHub Pages on: push: branches: - - master + - main paths: # Rebuild website when docs have changed or code has changed - 'README.md' @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - - name: Checkout master + - name: Checkout main uses: actions/checkout@v3 - name: Set up Python 3.8 @@ -28,11 +28,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install -e \ - stac_fastapi/api \ - stac_fastapi/types \ - stac_fastapi/extensions \ - stac_fastapi/sqlalchemy + python -m pip install -e . python -m pip install mkdocs mkdocs-material pdocs - name: update API docs diff --git a/CHANGES.md b/CHANGES.md index 6bffd8bc..80e9d3b5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,6 +4,27 @@ ### Added +### Changed + +### Removed + +### Fixed + +## [2.4.1] + +### Added + +### Changed + +### Removed + +### Fixed +* `ciso8601` fails to build in some environments, instead use `pyiso8601` to parse datetimes. + +## [2.4.0] + +### Added + * Add hook to allow adding dependencies to routes. 
([#295](https://github.com/stac-utils/stac-fastapi/pull/295)) * Ability to POST an ItemCollection to the collections/{collectionId}/items route. ([#367](https://github.com/stac-utils/stac-fastapi/pull/367)) * Add STAC API - Collections conformance class. ([383](https://github.com/stac-utils/stac-fastapi/pull/383)) @@ -43,6 +64,7 @@ from the request body, if present, and falls back to using the path parameter if no `"collection"` property is found in the body ([#425](https://github.com/stac-utils/stac-fastapi/pull/425)) * PGStac Backend Transactions endpoints return added Item/Collection instead of Item/Collection from request ([#424](https://github.com/stac-utils/stac-fastapi/pull/424)) +* Application no longer breaks on startup when pagination extension is not included ([#444](https://github.com/stac-utils/stac-fastapi/pull/444)) ## [2.3.0] diff --git a/Dockerfile b/Dockerfile index 5c218e27..1bd83318 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,8 +16,4 @@ WORKDIR /app COPY . /app -RUN pip install -e ./stac_fastapi/types[dev] && \ - pip install -e ./stac_fastapi/api[dev] && \ - pip install -e ./stac_fastapi/extensions[dev] && \ - pip install -e ./stac_fastapi/sqlalchemy[dev,server] && \ - pip install -e ./stac_fastapi/pgstac[dev,server] +RUN pip install -e .[dev,server] diff --git a/Dockerfile.docs b/Dockerfile.docs index f145b311..7640c06b 100644 --- a/Dockerfile.docs +++ b/Dockerfile.docs @@ -1,7 +1,7 @@ FROM python:3.8-slim # build-essential is required to build a wheel for ciso8601 -RUN apt update && apt install -y build-essential +RUN apt update && apt install -y build-essential git RUN python -m pip install --upgrade pip RUN python -m pip install mkdocs mkdocs-material pdocs @@ -10,11 +10,7 @@ COPY . /opt/src WORKDIR /opt/src -RUN python -m pip install \ - stac_fastapi/api \ - stac_fastapi/types \ - stac_fastapi/extensions \ - stac_fastapi/sqlalchemy +RUN python -m pip install . 
CMD ["pdocs", \ "as_markdown", \ diff --git a/Makefile b/Makefile index 36187c2e..6c503d6a 100644 --- a/Makefile +++ b/Makefile @@ -2,17 +2,12 @@ APP_HOST ?= 0.0.0.0 APP_PORT ?= 8080 EXTERNAL_APP_PORT ?= ${APP_PORT} -run_sqlalchemy = docker-compose run --rm \ - -p ${EXTERNAL_APP_PORT}:${APP_PORT} \ - -e APP_HOST=${APP_HOST} \ - -e APP_PORT=${APP_PORT} \ - app-sqlalchemy -run_pgstac = docker-compose run --rm \ +run_app = docker-compose run --rm \ -p ${EXTERNAL_APP_PORT}:${APP_PORT} \ -e APP_HOST=${APP_HOST} \ -e APP_PORT=${APP_PORT} \ - app-pgstac + app .PHONY: image image: @@ -22,63 +17,29 @@ image: docker-run-all: docker-compose up -.PHONY: docker-run-sqlalchemy -docker-run-sqlalchemy: image - $(run_sqlalchemy) - -.PHONY: docker-run-pgstac -docker-run-pgstac: image - $(run_pgstac) - -.PHONY: docker-shell-sqlalchemy -docker-shell-sqlalchemy: - $(run_sqlalchemy) /bin/bash - -.PHONY: docker-shell-pgstac -docker-shell-pgstac: - $(run_pgstac) /bin/bash - -.PHONY: test-sqlalchemy -test-sqlalchemy: run-joplin-sqlalchemy - $(run_sqlalchemy) /bin/bash -c 'export && ./scripts/wait-for-it.sh database:5432 && cd /app/stac_fastapi/sqlalchemy/tests/ && pytest -vvv' +.PHONY: docker-run-app +docker-run-app: image + $(run_app) -.PHONY: test-pgstac -test-pgstac: - $(run_pgstac) /bin/bash -c 'export && ./scripts/wait-for-it.sh database:5432 && cd /app/stac_fastapi/pgstac/tests/ && pytest -vvv' +.PHONY: docker-shell +docker-shell: + $(run_app) /bin/bash -.PHONY: test-api -test-api: - $(run_sqlalchemy) /bin/bash -c 'cd /app/stac_fastapi/api && pytest -svvv' +.PHONY: test +test: + $(run_app) /bin/bash -c 'export && ./scripts/wait-for-it.sh database:5432 && cd /app/tests/ && pytest -vvv' .PHONY: run-database run-database: docker-compose run --rm database -.PHONY: run-joplin-sqlalchemy -run-joplin-sqlalchemy: - docker-compose run --rm loadjoplin-sqlalchemy - -.PHONY: run-joplin-pgstac -run-joplin-pgstac: - docker-compose run --rm loadjoplin-pgstac - -.PHONY: test -test: 
test-sqlalchemy test-pgstac - -.PHONY: pybase-install -pybase-install: - pip install wheel && \ - pip install -e ./stac_fastapi/api[dev] && \ - pip install -e ./stac_fastapi/types[dev] && \ - pip install -e ./stac_fastapi/extensions[dev] - -.PHONY: pgstac-install -pgstac-install: pybase-install - pip install -e ./stac_fastapi/pgstac[dev,server] +.PHONY: run-joplin +run-joplin: + docker-compose run --rm loadjoplin -.PHONY: sqlalchemy-install -sqlalchemy-install: pybase-install - pip install -e ./stac_fastapi/sqlalchemy[dev,server] +.PHONY: install +install: + pip install -e .[dev,server] .PHONY: docs-image docs-image: diff --git a/README.md b/README.md index 4db19e73..c90e0194 100644 --- a/README.md +++ b/README.md @@ -1,37 +1,28 @@
-
FastAPI implemention of the STAC API spec.
+FastAPI implementation of the STAC API spec using PgSTAC.
--- -**Documentation**: [https://stac-utils.github.io/stac-fastapi/](https://stac-utils.github.io/stac-fastapi/) +**Documentation**: [https://stac-utils.github.io/stac-fastapi-pgstac/](https://stac-utils.github.io/stac-fastapi-pgstac/) -**Source Code**: [https://github.com/stac-utils/stac-fastapi](https://github.com/stac-utils/stac-fastapi) +**Source Code**: [https://github.com/stac-utils/stac-fastapi-pgstac](https://github.com/stac-utils/stac-fastapi-pgstac) --- -Python library for building a STAC compliant FastAPI application. The project is split up into several namespace -packages: - -- **stac_fastapi.api**: An API layer which enforces the [stac-api-spec](https://github.com/radiantearth/stac-api-spec). -- **stac_fastapi.extensions**: Abstract base classes for [STAC API extensions](https://github.com/radiantearth/stac-api-spec/blob/master/extensions.md) and third-party extensions. -- **stac_fastapi.types**: Shared types and abstract base classes used by the library. - -#### Backends -- **stac_fastapi.sqlalchemy**: Postgres backend implementation with sqlalchemy. -- **stac_fastapi.pgstac**: Postgres backend implementation with [PGStac](https://github.com/stac-utils/pgstac). +PostgreSQL/PostGIS backend implementation for the [stac-fastapi](https://github.com/stac-utils/stac-fastapi) library. `stac-fastapi` was initially developed by [arturo-ai](https://github.com/arturo-ai). 
@@ -39,26 +30,13 @@ packages: ```bash # Install from pypi.org -pip install stac-fastapi.api stac-fastapi.types stac-fastapi.extensions - -# Install a backend of your choice -pip install stac-fastapi.sqlalchemy -# or pip install stac-fastapi.pgstac #///////////////////// -# Install from sources - -git clone https://github.com/stac-utils/stac-fastapi.git && cd stac-fastapi -pip install \ - -e stac_fastapi/api \ - -e stac_fastapi/types \ - -e stac_fastapi/extensions - -# Install a backend of your choice -pip install -e stac_fastapi/sqlalchemy -# or -pip install -e stac_fastapi/pgstac +# Install from source + +git clone https://github.com/stac-utils/stac-fastapi-pgstac.git && cd stac-fastapi-pgstac +pip install -e . ``` ## Local Development @@ -69,14 +47,12 @@ make image make docker-run-all ``` -- The SQLAlchemy backend app will be available on
-
-
FastAPI implemention of the STAC API spec using PGStac
- - - ---- - -**Documentation**: [https://stac-utils.github.io/stac-fastapi/](https://stac-utils.github.io/stac-fastapi/) - -**Source Code**: [https://github.com/stac-utils/stac-fastapi](https://github.com/stac-utils/stac-fastapi) - ---- - -Stac FastAPI using the [PGStac](https://github.com/stac-utils/pgstac) backend. - -[PGStac](https://github.com/stac-utils/pgstac) is a separately managed PostgreSQL database that is designed for enhanced performance to be able to scale Stac FastAPI to be able to efficiently handle hundreds of millions of records. [PGStac](https://github.com/stac-utils/pgstac) automatically includes indexes on Item id, Collection id, Item Geometry, Item Datetime, and an Index for equality checks on any key in Item Properties. Additional indexes may be added to Item Properties to speed up the use of order, <, <=, >, and >= queries. - -Stac FastAPI acts as the HTTP interface validating any requests and data that is sent to the [PGStac](https://github.com/stac-utils/pgstac) backend and adds in Link items on data return relative to the service host. All other processing and search is provided directly using PGStac procedural sql / plpgsql functions on the database. - -PGStac stores all collection and item records as jsonb fields exactly as they come in allowing for any custom fields to be stored and retrieved transparently. - -While the Stac Sort Extension is fully supported, [PGStac](https://github.com/stac-utils/pgstac) is particularly enhanced to be able to sort by datetime (either ascending or descending). Sorting by anything other than datetime (the default if no sort is specified) on very large Stac repositories without very specific query limits (ie selecting a single day date range) will not have the same performance. For more than millions of records it is recommended to either set a low connection timeout on PostgreSQL or to disable use of the Sort Extension. 
- -`stac-fastapi pgstac` was initially added to `stac-fastapi` by [developmentseed](https://github.com/developmentseed). - -## Installation - -```shell -git clone https://github.com/stac-utils/stac-fastapi.git -cd stac-fastapi -pip install -e \ - stac_fastapi/api[dev] \ - stac_fastapi/types[dev] \ - stac_fastapi/extensions[dev] \ - stac_fastapi/pgstac[dev,server] -``` - -### Settings - -To configure PGStac stac-fastapi to [hydrate search result items in the API](https://github.com/stac-utils/pgstac#runtime-configurations), set the `USE_API_HYDRATE` environment variable to `true` or explicitly set the option in the PGStac Settings object. - -### Migrations - -PGStac is an external project and the may be used by multiple front ends. -For Stac FastAPI development, a docker image (which is pulled as part of the docker-compose) is available at -bitner/pgstac:[version] that has the full database already set up for PGStac. - -There is also a python utility as part of PGStac (pypgstac) that includes a migration utility. The pgstac -version required by stac-fastapi/pgstac is pinned by using the pinned version of pypgstac in the [setup](setup.py) file. 
- -In order to migrate database versions you can use the migration utility: - -```shell -pypgstac migrate -``` diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/__init__.py b/stac_fastapi/pgstac/__init__.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/__init__.py rename to stac_fastapi/pgstac/__init__.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/app.py b/stac_fastapi/pgstac/app.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/app.py rename to stac_fastapi/pgstac/app.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/config.py b/stac_fastapi/pgstac/config.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/config.py rename to stac_fastapi/pgstac/config.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/core.py b/stac_fastapi/pgstac/core.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/core.py rename to stac_fastapi/pgstac/core.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/db.py b/stac_fastapi/pgstac/db.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/db.py rename to stac_fastapi/pgstac/db.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/extensions/__init__.py b/stac_fastapi/pgstac/extensions/__init__.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/extensions/__init__.py rename to stac_fastapi/pgstac/extensions/__init__.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/extensions/query.py b/stac_fastapi/pgstac/extensions/query.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/extensions/query.py rename to stac_fastapi/pgstac/extensions/query.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/models/__init__.py b/stac_fastapi/pgstac/models/__init__.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/models/__init__.py rename to stac_fastapi/pgstac/models/__init__.py diff --git 
a/stac_fastapi/pgstac/stac_fastapi/pgstac/models/links.py b/stac_fastapi/pgstac/models/links.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/models/links.py rename to stac_fastapi/pgstac/models/links.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/version.py b/stac_fastapi/pgstac/stac_fastapi/pgstac/version.py deleted file mode 100644 index 3c7bccdd..00000000 --- a/stac_fastapi/pgstac/stac_fastapi/pgstac/version.py +++ /dev/null @@ -1,2 +0,0 @@ -"""library version.""" -__version__ = "2.3.0" diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/transactions.py b/stac_fastapi/pgstac/transactions.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/transactions.py rename to stac_fastapi/pgstac/transactions.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py b/stac_fastapi/pgstac/types/base_item_cache.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py rename to stac_fastapi/pgstac/types/base_item_cache.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/types/search.py b/stac_fastapi/pgstac/types/search.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/types/search.py rename to stac_fastapi/pgstac/types/search.py diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py b/stac_fastapi/pgstac/utils.py similarity index 100% rename from stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py rename to stac_fastapi/pgstac/utils.py diff --git a/stac_fastapi/api/stac_fastapi/api/version.py b/stac_fastapi/pgstac/version.py similarity index 51% rename from stac_fastapi/api/stac_fastapi/api/version.py rename to stac_fastapi/pgstac/version.py index 3c7bccdd..895f63a3 100644 --- a/stac_fastapi/api/stac_fastapi/api/version.py +++ b/stac_fastapi/pgstac/version.py @@ -1,2 +1,2 @@ """library version.""" -__version__ = "2.3.0" +__version__ = "2.4.1" diff --git a/stac_fastapi/sqlalchemy/README.md 
b/stac_fastapi/sqlalchemy/README.md deleted file mode 100644 index 40bd804e..00000000 --- a/stac_fastapi/sqlalchemy/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Requirements - -The SQLAlchemy backend requires **PostGIS>=3**. diff --git a/stac_fastapi/sqlalchemy/alembic.ini b/stac_fastapi/sqlalchemy/alembic.ini deleted file mode 100644 index 7dec6353..00000000 --- a/stac_fastapi/sqlalchemy/alembic.ini +++ /dev/null @@ -1,85 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# timezone to use when rendering the date -# within the migration file as well as the filename. -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; this defaults -# to alembic/versions. When using multiple version -# directories, initial revisions must be specified with --version-path -# version_locations = %(here)s/bar %(here)s/bat alembic/versions - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -;sqlalchemy.url = postgresql://alex:password@localhost:5432/postgres - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks=black -# black.type=console_scripts -# black.entrypoint=black -# black.options=-l 79 - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/stac_fastapi/sqlalchemy/alembic/README b/stac_fastapi/sqlalchemy/alembic/README deleted file mode 100644 index 98e4f9c4..00000000 --- a/stac_fastapi/sqlalchemy/alembic/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/stac_fastapi/sqlalchemy/alembic/env.py b/stac_fastapi/sqlalchemy/alembic/env.py deleted file mode 100644 index 20af555b..00000000 --- a/stac_fastapi/sqlalchemy/alembic/env.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Migration environment.""" -import os -from logging.config import fileConfig - -from alembic import context -from sqlalchemy import engine_from_config, pool - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_connection_url() -> str: - """ - Get connection URL from environment variables - (see environment variables set in docker-compose) - """ - postgres_user = os.environ["POSTGRES_USER"] - postgres_pass = os.environ["POSTGRES_PASS"] - postgres_host = os.environ["POSTGRES_HOST"] - postgres_port = os.environ["POSTGRES_PORT"] - postgres_dbname = os.environ["POSTGRES_DBNAME"] - return f"postgresql://{postgres_user}:{postgres_pass}@{postgres_host}:{postgres_port}/{postgres_dbname}" - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = get_connection_url() - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - configuration = config.get_section(config.config_ini_section) - configuration["sqlalchemy.url"] = get_connection_url() - connectable = engine_from_config( - configuration, - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/stac_fastapi/sqlalchemy/alembic/script.py.mako b/stac_fastapi/sqlalchemy/alembic/script.py.mako deleted file mode 100644 index 2c015630..00000000 --- a/stac_fastapi/sqlalchemy/alembic/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/stac_fastapi/sqlalchemy/alembic/versions/131aab4d9e49_create_tables.py b/stac_fastapi/sqlalchemy/alembic/versions/131aab4d9e49_create_tables.py deleted file mode 100644 index efc33380..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/131aab4d9e49_create_tables.py +++ /dev/null @@ -1,76 +0,0 @@ -"""create initial schema - -Revision ID: 131aab4d9e49 -Revises: -Create Date: 2020-02-09 13:03:09.336631 - -""" # noqa -import sqlalchemy as sa -from alembic import op -from geoalchemy2.types import Geometry -from sqlalchemy.dialects.postgresql import JSONB - -# revision identifiers, used by Alembic. 
-revision = "131aab4d9e49" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - """upgrade to this revision""" - op.execute("CREATE SCHEMA data") - op.execute("CREATE EXTENSION IF NOT EXISTS postgis") - - # Create collections table - op.create_table( - "collections", - sa.Column("id", sa.VARCHAR(1024), nullable=False, primary_key=True), - sa.Column("stac_version", sa.VARCHAR(300)), - sa.Column("title", sa.VARCHAR(1024)), - sa.Column("stac_extensions", sa.ARRAY(sa.VARCHAR(300)), nullable=True), - sa.Column("description", sa.VARCHAR(1024), nullable=False), - sa.Column("keywords", sa.ARRAY(sa.VARCHAR(300))), - sa.Column("version", sa.VARCHAR(300)), - sa.Column("license", sa.VARCHAR(300), nullable=False), - sa.Column("providers", JSONB), - sa.Column("summaries", JSONB, nullable=True), - sa.Column("extent", JSONB), - sa.Column("links", JSONB, nullable=True), - schema="data", - ) - - # Create items table - op.create_table( - "items", - sa.Column("id", sa.VARCHAR(1024), nullable=False, primary_key=True), - sa.Column("stac_version", sa.VARCHAR(300)), - sa.Column("stac_extensions", sa.ARRAY(sa.VARCHAR(300)), nullable=True), - sa.Column("geometry", Geometry("POLYGON", srid=4326, spatial_index=True)), - sa.Column("bbox", sa.ARRAY(sa.NUMERIC), nullable=False), - sa.Column("properties", JSONB), - sa.Column("assets", JSONB), - sa.Column("collection_id", sa.VARCHAR(1024), nullable=False, index=True), - # These are usually in properties but defined as their own fields for indexing - sa.Column("datetime", sa.TIMESTAMP, nullable=False, index=True), - sa.Column("links", JSONB, nullable=True), - sa.ForeignKeyConstraint(["collection_id"], ["data.collections.id"]), - schema="data", - ) - - # Create pagination token table - op.create_table( - "tokens", - sa.Column("id", sa.VARCHAR(100), nullable=False, primary_key=True), - sa.Column("keyset", sa.VARCHAR(1000), nullable=False), - schema="data", - ) - - -def downgrade(): - """downgrade to previous 
revision""" - op.execute("DROP TABLE data.items") - op.execute("DROP TABLE data.collections") - op.execute("DROP TABLE data.tokens") - op.execute("DROP SCHEMA data") - op.execute("DROP EXTENSION IF EXISTS postgis") diff --git a/stac_fastapi/sqlalchemy/alembic/versions/407037cb1636_add_stac_1_0_0_fields.py b/stac_fastapi/sqlalchemy/alembic/versions/407037cb1636_add_stac_1_0_0_fields.py deleted file mode 100644 index fdf15cde..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/407037cb1636_add_stac_1_0_0_fields.py +++ /dev/null @@ -1,27 +0,0 @@ -"""add-stac-1.0.0-fields - -Revision ID: 407037cb1636 -Revises: 77c019af60bf -Create Date: 2021-07-07 16:10:03.196942 - -""" -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. -revision = "407037cb1636" -down_revision = "77c019af60bf" -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column( - "collections", - sa.Column("type", sa.VARCHAR(300), default="collection", nullable=False), - schema="data", - ) - - -def downgrade(): - op.drop_column("collections", "type") diff --git a/stac_fastapi/sqlalchemy/alembic/versions/5909bd10f2e6_change_item_geometry_column_type.py b/stac_fastapi/sqlalchemy/alembic/versions/5909bd10f2e6_change_item_geometry_column_type.py deleted file mode 100644 index 2c1edd98..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/5909bd10f2e6_change_item_geometry_column_type.py +++ /dev/null @@ -1,34 +0,0 @@ -"""change item geometry column type - -Revision ID: 5909bd10f2e6 -Revises: 821aa04011e8 -Create Date: 2021-11-23 10:14:17.974565 - -""" -from alembic import op - -from stac_fastapi.sqlalchemy.models.database import GeojsonGeometry - -# revision identifiers, used by Alembic. 
-revision = "5909bd10f2e6" -down_revision = "821aa04011e8" -branch_labels = None -depends_on = None - - -def upgrade(): - op.alter_column( - schema="data", - table_name="items", - column_name="geometry", - type_=GeojsonGeometry("Geometry", srid=4326, spatial_index=True), - ) - - -def downgrade(): - op.alter_column( - schema="data", - table_name="items", - column_name="geometry", - type_=GeojsonGeometry("Polygon", srid=4326, spatial_index=True), - ) diff --git a/stac_fastapi/sqlalchemy/alembic/versions/7016c1bf3fbf_make_item_geometry_and_bbox_nullable.py b/stac_fastapi/sqlalchemy/alembic/versions/7016c1bf3fbf_make_item_geometry_and_bbox_nullable.py deleted file mode 100644 index 804361b0..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/7016c1bf3fbf_make_item_geometry_and_bbox_nullable.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Make item geometry and bbox nullable - -Revision ID: 7016c1bf3fbf -Revises: 5909bd10f2e6 -Create Date: 2022-04-28 10:40:06.856826 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "7016c1bf3fbf" -down_revision = "5909bd10f2e6" -branch_labels = None -depends_on = None - - -def upgrade(): - op.alter_column( - schema="data", - table_name="items", - column_name="geometry", - nullable=True, - ) - op.alter_column( - schema="data", - table_name="items", - column_name="bbox", - nullable=True, - ) - - -def downgrade(): - # Downgrading will require the user to update or remove all null geometry - # cases from the DB, otherwise the downgrade migration will fail. 
- op.alter_column( - schema="data", - table_name="items", - column_name="geometry", - nullable=False, - ) - op.alter_column( - schema="data", - table_name="items", - column_name="bbox", - nullable=False, - ) diff --git a/stac_fastapi/sqlalchemy/alembic/versions/77c019af60bf_use_timestamptz_rather_than_timestamp.py b/stac_fastapi/sqlalchemy/alembic/versions/77c019af60bf_use_timestamptz_rather_than_timestamp.py deleted file mode 100644 index 0c6085fb..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/77c019af60bf_use_timestamptz_rather_than_timestamp.py +++ /dev/null @@ -1,40 +0,0 @@ -"""use timestamptz rather than timestamp - -Revision ID: 77c019af60bf -Revises: 131aab4d9e49 -Create Date: 2021-03-02 11:51:43.539119 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "77c019af60bf" -down_revision = "131aab4d9e49" -branch_labels = None -depends_on = None - - -def upgrade(): - """upgrade to this revision""" - op.execute( - """ - ALTER TABLE - data.items - ALTER COLUMN datetime - TYPE timestamptz - ; - """ - ) - - -def downgrade(): - """downgrade from this revision""" - op.execute( - """ - ALTER TABLE - data.items - ALTER COLUMN datetime - TYPE timestamp - ; - """ - ) diff --git a/stac_fastapi/sqlalchemy/alembic/versions/821aa04011e8_change_pri_key_for_item.py b/stac_fastapi/sqlalchemy/alembic/versions/821aa04011e8_change_pri_key_for_item.py deleted file mode 100644 index 335b3e62..00000000 --- a/stac_fastapi/sqlalchemy/alembic/versions/821aa04011e8_change_pri_key_for_item.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Change pri key for Item - -Revision ID: 821aa04011e8 -Revises: 407037cb1636 -Create Date: 2021-10-11 12:10:34.148098 - -""" -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "821aa04011e8" -down_revision = "407037cb1636" -branch_labels = None -depends_on = None - - -def upgrade(): - op.drop_constraint("items_pkey", "items", schema="data") - op.create_primary_key("items_pkey", "items", ["id", "collection_id"], schema="data") - - -def downgrade(): - op.drop_constraint("items_pkey", "items", schema="data") - op.create_primary_key("items_pkey", "items", ["id"], schema="data") diff --git a/stac_fastapi/sqlalchemy/pytest.ini b/stac_fastapi/sqlalchemy/pytest.ini deleted file mode 100644 index f11bd4ce..00000000 --- a/stac_fastapi/sqlalchemy/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -testpaths = tests -addopts = -sv \ No newline at end of file diff --git a/stac_fastapi/sqlalchemy/setup.cfg b/stac_fastapi/sqlalchemy/setup.cfg deleted file mode 100644 index 46ac9c3b..00000000 --- a/stac_fastapi/sqlalchemy/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -version = attr: stac_fastapi.sqlalchemy.version.__version__ diff --git a/stac_fastapi/sqlalchemy/setup.py b/stac_fastapi/sqlalchemy/setup.py deleted file mode 100644 index c7d6e98b..00000000 --- a/stac_fastapi/sqlalchemy/setup.py +++ /dev/null @@ -1,62 +0,0 @@ -"""stac_fastapi: sqlalchemy module.""" - -from setuptools import find_namespace_packages, setup - -with open("README.md") as f: - desc = f.read() - -install_requires = [ - "attrs", - "pydantic[dotenv]", - "stac_pydantic>=2.0.3", - "stac-fastapi.types", - "stac-fastapi.api", - "stac-fastapi.extensions", - "sqlakeyset", - "geoalchemy2<0.8.0", - "sqlalchemy==1.3.23", - "shapely", - "psycopg2-binary", - "alembic", - "fastapi-utils", -] - -extra_reqs = { - "dev": [ - "pytest", - "pytest-cov", - "pre-commit", - "requests", - ], - "docs": ["mkdocs", "mkdocs-material", "pdocs"], - "server": ["uvicorn[standard]==0.17.0"], -} - - -setup( - name="stac-fastapi.sqlalchemy", - description="An implementation of STAC API based on the FastAPI framework.", - long_description=desc, - long_description_content_type="text/markdown", - 
python_requires=">=3.8", - classifiers=[ - "Intended Audience :: Developers", - "Intended Audience :: Information Technology", - "Intended Audience :: Science/Research", - "Programming Language :: Python :: 3.8", - "License :: OSI Approved :: MIT License", - ], - keywords="STAC FastAPI COG", - author="Arturo Engineering", - author_email="engineering@arturo.ai", - url="https://github.com/stac-utils/stac-fastapi", - license="MIT", - packages=find_namespace_packages(exclude=["alembic", "tests", "scripts"]), - zip_safe=False, - install_requires=install_requires, - tests_require=extra_reqs["dev"], - extras_require=extra_reqs, - entry_points={ - "console_scripts": ["stac-fastapi-sqlalchemy=stac_fastapi.sqlalchemy.app:run"] - }, -) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/__init__.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/__init__.py deleted file mode 100644 index ee2522f7..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""sqlalchemy submodule.""" diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/app.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/app.py deleted file mode 100644 index 29a0894a..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/app.py +++ /dev/null @@ -1,77 +0,0 @@ -"""FastAPI application.""" -from stac_fastapi.api.app import StacApi -from stac_fastapi.api.models import create_get_request_model, create_post_request_model -from stac_fastapi.extensions.core import ( - ContextExtension, - FieldsExtension, - SortExtension, - TokenPaginationExtension, - TransactionExtension, -) -from stac_fastapi.extensions.third_party import BulkTransactionExtension -from stac_fastapi.sqlalchemy.config import SqlalchemySettings -from stac_fastapi.sqlalchemy.core import CoreCrudClient -from stac_fastapi.sqlalchemy.extensions import QueryExtension -from stac_fastapi.sqlalchemy.session import Session -from stac_fastapi.sqlalchemy.transactions import ( - 
BulkTransactionsClient, - TransactionsClient, -) - -settings = SqlalchemySettings() -session = Session.create_from_settings(settings) -extensions = [ - TransactionExtension(client=TransactionsClient(session=session), settings=settings), - BulkTransactionExtension(client=BulkTransactionsClient(session=session)), - FieldsExtension(), - QueryExtension(), - SortExtension(), - TokenPaginationExtension(), - ContextExtension(), -] - -post_request_model = create_post_request_model(extensions) - -api = StacApi( - settings=settings, - extensions=extensions, - client=CoreCrudClient( - session=session, extensions=extensions, post_request_model=post_request_model - ), - search_get_request_model=create_get_request_model(extensions), - search_post_request_model=post_request_model, -) -app = api.app - - -def run(): - """Run app from command line using uvicorn if available.""" - try: - import uvicorn - - uvicorn.run( - "stac_fastapi.sqlalchemy.app:app", - host=settings.app_host, - port=settings.app_port, - log_level="info", - reload=settings.reload, - ) - except ImportError: - raise RuntimeError("Uvicorn must be installed in order to use command") - - -if __name__ == "__main__": - run() - - -def create_handler(app): - """Create a handler to use with AWS Lambda if mangum available.""" - try: - from mangum import Mangum - - return Mangum(app) - except ImportError: - return None - - -handler = create_handler(app) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/config.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/config.py deleted file mode 100644 index 340ef62b..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/config.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Postgres API configuration.""" -from typing import Set - -from stac_fastapi.types.config import ApiSettings - - -class SqlalchemySettings(ApiSettings): - """Postgres-specific API settings. - - Attributes: - postgres_user: postgres username. - postgres_pass: postgres password. 
- postgres_host_reader: hostname for the reader connection. - postgres_host_writer: hostname for the writer connection. - postgres_port: database port. - postgres_dbname: database name. - """ - - postgres_user: str - postgres_pass: str - postgres_host_reader: str - postgres_host_writer: str - postgres_port: str - postgres_dbname: str - - # Fields which are defined by STAC but not included in the database model - forbidden_fields: Set[str] = {"type"} - - # Fields which are item properties but indexed as distinct fields in the database model - indexed_fields: Set[str] = {"datetime"} - - @property - def reader_connection_string(self): - """Create reader psql connection string.""" - return f"postgresql://{self.postgres_user}:{self.postgres_pass}@{self.postgres_host_reader}:{self.postgres_port}/{self.postgres_dbname}" - - @property - def writer_connection_string(self): - """Create writer psql connection string.""" - return f"postgresql://{self.postgres_user}:{self.postgres_pass}@{self.postgres_host_writer}:{self.postgres_port}/{self.postgres_dbname}" diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/core.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/core.py deleted file mode 100644 index cd1ca9ee..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/core.py +++ /dev/null @@ -1,461 +0,0 @@ -"""Item crud client.""" -import json -import logging -import operator -from datetime import datetime -from typing import List, Optional, Set, Type, Union -from urllib.parse import urlencode, urljoin - -import attr -import geoalchemy2 as ga -import sqlalchemy as sa -import stac_pydantic -from fastapi import HTTPException -from pydantic import ValidationError -from shapely.geometry import Polygon as ShapelyPolygon -from shapely.geometry import shape -from sqlakeyset import get_page -from sqlalchemy import func -from sqlalchemy.orm import Session as SqlSession -from stac_pydantic.links import Relations -from stac_pydantic.shared import MimeTypes - -from 
stac_fastapi.sqlalchemy import serializers -from stac_fastapi.sqlalchemy.extensions.query import Operator -from stac_fastapi.sqlalchemy.models import database -from stac_fastapi.sqlalchemy.session import Session -from stac_fastapi.sqlalchemy.tokens import PaginationTokenClient -from stac_fastapi.types.config import Settings -from stac_fastapi.types.core import BaseCoreClient -from stac_fastapi.types.errors import NotFoundError -from stac_fastapi.types.search import BaseSearchPostRequest -from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection - -logger = logging.getLogger(__name__) - -NumType = Union[float, int] - - -@attr.s -class CoreCrudClient(PaginationTokenClient, BaseCoreClient): - """Client for core endpoints defined by stac.""" - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - item_table: Type[database.Item] = attr.ib(default=database.Item) - collection_table: Type[database.Collection] = attr.ib(default=database.Collection) - item_serializer: Type[serializers.Serializer] = attr.ib( - default=serializers.ItemSerializer - ) - collection_serializer: Type[serializers.Serializer] = attr.ib( - default=serializers.CollectionSerializer - ) - - @staticmethod - def _lookup_id( - id: str, table: Type[database.BaseModel], session: SqlSession - ) -> Type[database.BaseModel]: - """Lookup row by id.""" - row = session.query(table).filter(table.id == id).first() - if not row: - raise NotFoundError(f"{table.__name__} {id} not found") - return row - - def all_collections(self, **kwargs) -> Collections: - """Read all collections from the database.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - collections = session.query(self.collection_table).all() - serialized_collections = [ - self.collection_serializer.db_to_stac(collection, base_url=base_url) - for collection in collections - ] - links = [ - { - "rel": Relations.root.value, - "type": MimeTypes.json, - 
"href": base_url, - }, - { - "rel": Relations.parent.value, - "type": MimeTypes.json, - "href": base_url, - }, - { - "rel": Relations.self.value, - "type": MimeTypes.json, - "href": urljoin(base_url, "collections"), - }, - ] - collection_list = Collections( - collections=serialized_collections or [], links=links - ) - return collection_list - - def get_collection(self, collection_id: str, **kwargs) -> Collection: - """Get collection by id.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - collection = self._lookup_id(collection_id, self.collection_table, session) - return self.collection_serializer.db_to_stac(collection, base_url) - - def item_collection( - self, collection_id: str, limit: int = 10, token: str = None, **kwargs - ) -> ItemCollection: - """Read an item collection from the database.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - collection_children = ( - session.query(self.item_table) - .join(self.collection_table) - .filter(self.collection_table.id == collection_id) - .order_by(self.item_table.datetime.desc(), self.item_table.id) - ) - count = None - if self.extension_is_enabled("ContextExtension"): - count_query = collection_children.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = collection_children.session.execute(count_query).scalar() - token = self.get_token(token) if token else token - page = get_page(collection_children, per_page=limit, page=(token or False)) - # Create dynamic attributes for each page - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - - links = [] - if page.next: - links.append( - { - "rel": Relations.next.value, - "type": "application/geo+json", - "href": 
f"{kwargs['request'].base_url}collections/{collection_id}/items?token={page.next}&limit={limit}", - "method": "GET", - } - ) - if page.previous: - links.append( - { - "rel": Relations.previous.value, - "type": "application/geo+json", - "href": f"{kwargs['request'].base_url}collections/{collection_id}/items?token={page.previous}&limit={limit}", - "method": "GET", - } - ) - - response_features = [] - for item in page: - response_features.append( - self.item_serializer.db_to_stac(item, base_url=base_url) - ) - - context_obj = None - if self.extension_is_enabled("ContextExtension"): - context_obj = { - "returned": len(page), - "limit": limit, - "matched": count, - } - - return ItemCollection( - type="FeatureCollection", - features=response_features, - links=links, - context=context_obj, - ) - - def get_item(self, item_id: str, collection_id: str, **kwargs) -> Item: - """Get item by id.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - db_query = session.query(self.item_table) - db_query = db_query.filter(self.item_table.collection_id == collection_id) - db_query = db_query.filter(self.item_table.id == item_id) - item = db_query.first() - if not item: - raise NotFoundError(f"{self.item_table.__name__} {item_id} not found") - return self.item_serializer.db_to_stac(item, base_url=base_url) - - def get_search( - self, - collections: Optional[List[str]] = None, - ids: Optional[List[str]] = None, - bbox: Optional[List[NumType]] = None, - datetime: Optional[Union[str, datetime]] = None, - limit: Optional[int] = 10, - query: Optional[str] = None, - token: Optional[str] = None, - fields: Optional[List[str]] = None, - sortby: Optional[str] = None, - **kwargs, - ) -> ItemCollection: - """GET search catalog.""" - # Parse request parameters - base_args = { - "collections": collections, - "ids": ids, - "bbox": bbox, - "limit": limit, - "token": token, - "query": json.loads(query) if query else query, - } - - if datetime: - 
base_args["datetime"] = datetime - - if sortby: - # https://github.com/radiantearth/stac-spec/tree/master/api-spec/extensions/sort#http-get-or-post-form - sort_param = [] - for sort in sortby: - sort_param.append( - { - "field": sort[1:], - "direction": "asc" if sort[0] == "+" else "desc", - } - ) - base_args["sortby"] = sort_param - - if fields: - includes = set() - excludes = set() - for field in fields: - if field[0] == "-": - excludes.add(field[1:]) - elif field[0] == "+": - includes.add(field[1:]) - else: - includes.add(field) - base_args["fields"] = {"include": includes, "exclude": excludes} - - # Do the request - try: - search_request = self.post_request_model(**base_args) - except ValidationError: - raise HTTPException(status_code=400, detail="Invalid parameters provided") - resp = self.post_search(search_request, request=kwargs["request"]) - - # Pagination - page_links = [] - for link in resp["links"]: - if link["rel"] == Relations.next or link["rel"] == Relations.previous: - query_params = dict(kwargs["request"].query_params) - if link["body"] and link["merge"]: - query_params.update(link["body"]) - link["method"] = "GET" - link["href"] = f"{link['body']}?{urlencode(query_params)}" - link["body"] = None - link["merge"] = False - page_links.append(link) - else: - page_links.append(link) - resp["links"] = page_links - return resp - - def post_search( - self, search_request: BaseSearchPostRequest, **kwargs - ) -> ItemCollection: - """POST search catalog.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - token = ( - self.get_token(search_request.token) if search_request.token else False - ) - query = session.query(self.item_table) - - # Filter by collection - count = None - if search_request.collections: - query = query.join(self.collection_table).filter( - sa.or_( - *[ - self.collection_table.id == col_id - for col_id in search_request.collections - ] - ) - ) - - # Sort - if search_request.sortby: - 
sort_fields = [ - getattr( - self.item_table.get_field(sort.field), - sort.direction.value, - )() - for sort in search_request.sortby - ] - sort_fields.append(self.item_table.id) - query = query.order_by(*sort_fields) - else: - # Default sort is date - query = query.order_by( - self.item_table.datetime.desc(), self.item_table.id - ) - - # Ignore other parameters if ID is present - if search_request.ids: - id_filter = sa.or_( - *[self.item_table.id == i for i in search_request.ids] - ) - items = query.filter(id_filter).order_by(self.item_table.id) - page = get_page(items, per_page=search_request.limit, page=token) - if self.extension_is_enabled("ContextExtension"): - count = len(search_request.ids) - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - - else: - # Spatial query - geom = None - if search_request.intersects is not None: - geom = shape(search_request.intersects) - elif search_request.bbox: - if len(search_request.bbox) == 4: - geom = ShapelyPolygon.from_bounds(*search_request.bbox) - elif len(search_request.bbox) == 6: - """Shapely doesn't support 3d bounding boxes we'll just use the 2d portion""" - bbox_2d = [ - search_request.bbox[0], - search_request.bbox[1], - search_request.bbox[3], - search_request.bbox[4], - ] - geom = ShapelyPolygon.from_bounds(*bbox_2d) - - if geom: - filter_geom = ga.shape.from_shape(geom, srid=4326) - query = query.filter( - ga.func.ST_Intersects(self.item_table.geometry, filter_geom) - ) - - # Temporal query - if search_request.datetime: - # Two tailed query (between) - dts = search_request.datetime.split("/") - # Non-interval date ex. "2000-02-02T00:00:00.00Z" - if len(dts) == 1: - query = query.filter(self.item_table.datetime == dts[0]) - # is there a benefit to between instead of >= and <= ? 
- elif dts[0] not in ["", ".."] and dts[1] not in ["", ".."]: - query = query.filter(self.item_table.datetime.between(*dts)) - # All items after the start date - elif dts[0] not in ["", ".."]: - query = query.filter(self.item_table.datetime >= dts[0]) - # All items before the end date - elif dts[1] not in ["", ".."]: - query = query.filter(self.item_table.datetime <= dts[1]) - - # Query fields - if search_request.query: - for (field_name, expr) in search_request.query.items(): - field = self.item_table.get_field(field_name) - for (op, value) in expr.items(): - if op == Operator.gte: - query = query.filter(operator.ge(field, value)) - elif op == Operator.lte: - query = query.filter(operator.le(field, value)) - else: - query = query.filter(op.operator(field, value)) - - if self.extension_is_enabled("ContextExtension"): - count_query = query.statement.with_only_columns( - [func.count()] - ).order_by(None) - count = query.session.execute(count_query).scalar() - page = get_page(query, per_page=search_request.limit, page=token) - # Create dynamic attributes for each page - page.next = ( - self.insert_token(keyset=page.paging.bookmark_next) - if page.paging.has_next - else None - ) - page.previous = ( - self.insert_token(keyset=page.paging.bookmark_previous) - if page.paging.has_previous - else None - ) - - links = [] - if page.next: - links.append( - { - "rel": Relations.next.value, - "type": "application/geo+json", - "href": f"{kwargs['request'].base_url}search", - "method": "POST", - "body": {"token": page.next}, - "merge": True, - } - ) - if page.previous: - links.append( - { - "rel": Relations.previous.value, - "type": "application/geo+json", - "href": f"{kwargs['request'].base_url}search", - "method": "POST", - "body": {"token": page.previous}, - "merge": True, - } - ) - - response_features = [] - filter_kwargs = {} - - for item in page: - response_features.append( - self.item_serializer.db_to_stac(item, base_url=base_url) - ) - - # Use pydantic includes/excludes 
syntax to implement fields extension - if self.extension_is_enabled("FieldsExtension"): - if search_request.query is not None: - query_include: Set[str] = set( - [ - k - if k in Settings.get().indexed_fields - else f"properties.{k}" - for k in search_request.query.keys() - ] - ) - if not search_request.fields.include: - search_request.fields.include = query_include - else: - search_request.fields.include.union(query_include) - - filter_kwargs = search_request.fields.filter_fields - # Need to pass through `.json()` for proper serialization - # of datetime - response_features = [ - json.loads(stac_pydantic.Item(**feat).json(**filter_kwargs)) - for feat in response_features - ] - - context_obj = None - if self.extension_is_enabled("ContextExtension"): - context_obj = { - "returned": len(page), - "limit": search_request.limit, - "matched": count, - } - - return ItemCollection( - type="FeatureCollection", - features=response_features, - links=links, - context=context_obj, - ) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/__init__.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/__init__.py deleted file mode 100644 index d97a001c..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""sqlalchemy extensions modifications.""" - -from .query import Operator, QueryableTypes, QueryExtension - -__all__ = ["Operator", "QueryableTypes", "QueryExtension"] diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/query.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/query.py deleted file mode 100644 index 36f7a771..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/extensions/query.py +++ /dev/null @@ -1,124 +0,0 @@ -"""STAC SQLAlchemy specific query search model. 
- -# TODO: replace with stac-pydantic -""" - -import logging -import operator -from dataclasses import dataclass -from enum import auto -from types import DynamicClassAttribute -from typing import Any, Callable, Dict, Optional, Union - -import sqlalchemy as sa -from pydantic import BaseModel, ValidationError, root_validator -from pydantic.error_wrappers import ErrorWrapper -from stac_pydantic.utils import AutoValueEnum - -from stac_fastapi.extensions.core.query import QueryExtension as QueryExtensionBase - -logger = logging.getLogger("uvicorn") -logger.setLevel(logging.INFO) -# Be careful: https://github.com/samuelcolvin/pydantic/issues/1423#issuecomment-642797287 -NumType = Union[float, int] - - -class Operator(str, AutoValueEnum): - """Defines the set of operators supported by the API.""" - - eq = auto() - ne = auto() - lt = auto() - lte = auto() - gt = auto() - gte = auto() - - # TODO: These are defined in the spec but aren't currently implemented by the api - # startsWith = auto() - # endsWith = auto() - # contains = auto() - # in = auto() - - @DynamicClassAttribute - def operator(self) -> Callable[[Any, Any], bool]: - """Return python operator.""" - return getattr(operator, self._value_) - - -class Queryables(str, AutoValueEnum): - """Queryable fields. - - Define an enum of queryable fields and their data type. Queryable fields are explicitly defined for two reasons: - 1. So the caller knows which fields they can query by - 2. Because JSONB queries with sqlalchemy ORM require casting the type of the field at runtime - (see ``QueryableTypes``) - - # TODO: Let the user define these in a config file - """ - - orientation = auto() - gsd = auto() - epsg = "proj:epsg" - height = auto() - width = auto() - minzoom = "cog:minzoom" - maxzoom = "cog:maxzoom" - dtype = "cog:dtype" - foo = "foo" - - -@dataclass -class QueryableTypes: - """Defines a set of queryable fields. 
- - # TODO: Let the user define these in a config file - # TODO: There is a much better way of defining this field <> type mapping than two enums with same keys - """ - - orientation = sa.String - gsd = sa.Float - epsg = sa.Integer - height = sa.Integer - width = sa.Integer - minzoom = sa.Integer - maxzoom = sa.Integer - dtype = sa.String - - -class QueryExtensionPostRequest(BaseModel): - """Queryable validation. - - Add queryables validation to the POST request - to raise errors for unsupported querys. - """ - - query: Optional[Dict[Queryables, Dict[Operator, Any]]] - - @root_validator(pre=True) - def validate_query_fields(cls, values: Dict) -> Dict: - """Validate query fields.""" - logger.debug(f"Validating SQLAlchemySTACSearch {cls} {values}") - if "query" in values and values["query"]: - queryable_fields = Queryables.__members__.values() - for field_name in values["query"]: - if field_name not in queryable_fields: - raise ValidationError( - [ - ErrorWrapper( - ValueError(f"Cannot search on field: {field_name}"), - "STACSearch", - ) - ], - QueryExtensionPostRequest, - ) - return values - - -class QueryExtension(QueryExtensionBase): - """Query Extenson. 
- - Override the POST request model to add validation against - supported fields - """ - - POST = QueryExtensionPostRequest diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/__init__.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/__init__.py deleted file mode 100644 index 67d205ef..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""stac_fastapi.postgres.models module.""" diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/database.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/database.py deleted file mode 100644 index ed9d8cef..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/database.py +++ /dev/null @@ -1,99 +0,0 @@ -"""SQLAlchemy ORM models.""" - -import json -from typing import Optional - -import geoalchemy2 as ga -import sqlalchemy as sa -from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.ext.declarative import declarative_base - -from stac_fastapi.sqlalchemy.extensions.query import Queryables, QueryableTypes - -BaseModel = declarative_base() - - -class GeojsonGeometry(ga.Geometry): - """Custom geoalchemy type which returns GeoJSON.""" - - from_text = "ST_GeomFromGeoJSON" - - def result_processor(self, dialect: str, coltype): - """Override default processer to return GeoJSON.""" - - def process(value: Optional[bytes]): - if value is not None: - geom = ga.shape.to_shape( - ga.elements.WKBElement( - value, srid=self.srid, extended=self.extended - ) - ) - return json.loads(json.dumps(geom.__geo_interface__)) - - return process - - -class Collection(BaseModel): # type:ignore - """Collection orm model.""" - - __tablename__ = "collections" - __table_args__ = {"schema": "data"} - - id = sa.Column(sa.VARCHAR(1024), nullable=False, primary_key=True) - stac_version = sa.Column(sa.VARCHAR(300)) - stac_extensions = sa.Column(sa.ARRAY(sa.VARCHAR(300)), nullable=True) - title = sa.Column(sa.VARCHAR(1024)) - 
description = sa.Column(sa.VARCHAR(1024), nullable=False) - keywords = sa.Column(sa.ARRAY(sa.VARCHAR(300))) - version = sa.Column(sa.VARCHAR(300)) - license = sa.Column(sa.VARCHAR(300), nullable=False) - providers = sa.Column(JSONB) - summaries = sa.Column(JSONB, nullable=True) - extent = sa.Column(JSONB) - links = sa.Column(JSONB) - children = sa.orm.relationship("Item", lazy="dynamic") - type = sa.Column(sa.VARCHAR(300), nullable=False) - - -class Item(BaseModel): # type:ignore - """Item orm model.""" - - __tablename__ = "items" - __table_args__ = {"schema": "data"} - - id = sa.Column(sa.VARCHAR(1024), nullable=False, primary_key=True) - stac_version = sa.Column(sa.VARCHAR(300)) - stac_extensions = sa.Column(sa.ARRAY(sa.VARCHAR(300)), nullable=True) - geometry = sa.Column( - GeojsonGeometry("GEOMETRY", srid=4326, spatial_index=True), nullable=True - ) - bbox = sa.Column(sa.ARRAY(sa.NUMERIC), nullable=True) - properties = sa.Column(JSONB) - assets = sa.Column(JSONB) - collection_id = sa.Column( - sa.VARCHAR(1024), sa.ForeignKey(Collection.id), nullable=False, primary_key=True - ) - parent_collection = sa.orm.relationship("Collection", back_populates="children") - datetime = sa.Column(sa.TIMESTAMP(timezone=True), nullable=False) - links = sa.Column(JSONB) - - @classmethod - def get_field(cls, field_name): - """Get a model field.""" - try: - return getattr(cls, field_name) - except AttributeError: - # Use a JSONB field - return cls.properties[(field_name)].cast( - getattr(QueryableTypes, Queryables(field_name).name) - ) - - -class PaginationToken(BaseModel): # type:ignore - """Pagination orm model.""" - - __tablename__ = "tokens" - __table_args__ = {"schema": "data"} - - id = sa.Column(sa.VARCHAR(100), nullable=False, primary_key=True) - keyset = sa.Column(sa.VARCHAR(1000), nullable=False) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/search.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/search.py deleted file mode 100644 index 
f87f7a8b..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/models/search.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Queryable data types for sqlalchemy backend.""" - -from dataclasses import dataclass - -import sqlalchemy as sa - - -@dataclass -class QueryableTypes: - """Defines a set of queryable fields. - - # TODO: Let the user define these in a config file - # TODO: There is a much better way of defining this field <> type mapping than two enums with same keys - """ - - orientation = sa.String - gsd = sa.Float - epsg = sa.Integer - height = sa.Integer - width = sa.Integer - minzoom = sa.Integer - maxzoom = sa.Integer - dtype = sa.String diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/serializers.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/serializers.py deleted file mode 100644 index 948d06e3..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/serializers.py +++ /dev/null @@ -1,171 +0,0 @@ -"""Serializers.""" -import abc -import json -from typing import TypedDict - -import attr -import geoalchemy2 as ga -from pystac.utils import datetime_to_str - -from stac_fastapi.sqlalchemy.models import database -from stac_fastapi.types import stac as stac_types -from stac_fastapi.types.config import Settings -from stac_fastapi.types.links import CollectionLinks, ItemLinks, resolve_links -from stac_fastapi.types.rfc3339 import now_to_rfc3339_str, rfc3339_str_to_datetime - - -@attr.s # type:ignore -class Serializer(abc.ABC): - """Defines serialization methods between the API and the data model.""" - - @classmethod - @abc.abstractmethod - def db_to_stac(cls, db_model: database.BaseModel, base_url: str) -> TypedDict: - """Transform database model to stac.""" - ... - - @classmethod - @abc.abstractmethod - def stac_to_db( - cls, stac_data: TypedDict, exclude_geometry: bool = False - ) -> database.BaseModel: - """Transform stac to database model.""" - ... 
- - @classmethod - def row_to_dict(cls, db_model: database.BaseModel): - """Transform a database model to it's dictionary representation.""" - d = {} - for column in db_model.__table__.columns: - value = getattr(db_model, column.name) - if value: - d[column.name] = value - return d - - -class ItemSerializer(Serializer): - """Serialization methods for STAC items.""" - - @classmethod - def db_to_stac(cls, db_model: database.Item, base_url: str) -> stac_types.Item: - """Transform database model to stac item.""" - properties = db_model.properties.copy() - indexed_fields = Settings.get().indexed_fields - for field in indexed_fields: - # Use getattr to accommodate extension namespaces - field_value = getattr(db_model, field.split(":")[-1]) - if field == "datetime": - field_value = datetime_to_str(field_value) - properties[field] = field_value - item_id = db_model.id - collection_id = db_model.collection_id - item_links = ItemLinks( - collection_id=collection_id, item_id=item_id, base_url=base_url - ).create_links() - - db_links = db_model.links - if db_links: - item_links += resolve_links(db_links, base_url) - - stac_extensions = db_model.stac_extensions or [] - - # The custom geometry we are using emits geojson if the geometry is bound to the database - # Otherwise it will return a geoalchemy2 WKBElement - # TODO: It's probably best to just remove the custom geometry type - geometry = db_model.geometry - if isinstance(geometry, ga.elements.WKBElement): - geometry = ga.shape.to_shape(geometry).__geo_interface__ - if isinstance(geometry, str): - geometry = json.loads(geometry) - - bbox = db_model.bbox - if bbox is not None: - bbox = [float(x) for x in db_model.bbox] - - return stac_types.Item( - type="Feature", - stac_version=db_model.stac_version, - stac_extensions=stac_extensions, - id=db_model.id, - collection=db_model.collection_id, - geometry=geometry, - bbox=bbox, - properties=properties, - links=item_links, - assets=db_model.assets, - ) - - @classmethod - def 
stac_to_db( - cls, stac_data: TypedDict, exclude_geometry: bool = False - ) -> database.Item: - """Transform stac item to database model.""" - indexed_fields = {} - for field in Settings.get().indexed_fields: - # Use getattr to accommodate extension namespaces - field_value = stac_data["properties"][field] - if field == "datetime": - field_value = rfc3339_str_to_datetime(field_value) - indexed_fields[field.split(":")[-1]] = field_value - - # TODO: Exclude indexed fields from the properties jsonb field to prevent duplication - - now = now_to_rfc3339_str() - if "created" not in stac_data["properties"]: - stac_data["properties"]["created"] = now - stac_data["properties"]["updated"] = now - - geometry = stac_data["geometry"] - if geometry is not None: - geometry = json.dumps(geometry) - - return database.Item( - id=stac_data["id"], - collection_id=stac_data["collection"], - stac_version=stac_data["stac_version"], - stac_extensions=stac_data.get("stac_extensions"), - geometry=geometry, - bbox=stac_data.get("bbox"), - properties=stac_data["properties"], - assets=stac_data["assets"], - **indexed_fields, - ) - - -class CollectionSerializer(Serializer): - """Serialization methods for STAC collections.""" - - @classmethod - def db_to_stac(cls, db_model: database.Collection, base_url: str) -> TypedDict: - """Transform database model to stac collection.""" - collection_links = CollectionLinks( - collection_id=db_model.id, base_url=base_url - ).create_links() - - db_links = db_model.links - if db_links: - collection_links += resolve_links(db_links, base_url) - - stac_extensions = db_model.stac_extensions or [] - - return stac_types.Collection( - type="Collection", - id=db_model.id, - stac_extensions=stac_extensions, - stac_version=db_model.stac_version, - title=db_model.title, - description=db_model.description, - keywords=db_model.keywords, - license=db_model.license, - providers=db_model.providers, - summaries=db_model.summaries, - extent=db_model.extent, - 
links=collection_links, - ) - - @classmethod - def stac_to_db( - cls, stac_data: TypedDict, exclude_geometry: bool = False - ) -> database.Collection: - """Transform stac collection to database model.""" - return database.Collection(**dict(stac_data)) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/session.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/session.py deleted file mode 100644 index 79119c4a..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/session.py +++ /dev/null @@ -1,62 +0,0 @@ -"""database session management.""" -import logging -import os -from contextlib import contextmanager -from typing import Iterator - -import attr -import psycopg2 -import sqlalchemy as sa -from fastapi_utils.session import FastAPISessionMaker as _FastAPISessionMaker -from sqlalchemy.orm import Session as SqlSession - -from stac_fastapi.sqlalchemy.config import SqlalchemySettings -from stac_fastapi.types import errors - -logger = logging.getLogger(__name__) - - -class FastAPISessionMaker(_FastAPISessionMaker): - """FastAPISessionMaker.""" - - @contextmanager - def context_session(self) -> Iterator[SqlSession]: - """Override base method to include exception handling.""" - try: - yield from self.get_db() - except sa.exc.StatementError as e: - if isinstance(e.orig, psycopg2.errors.UniqueViolation): - raise errors.ConflictError("resource already exists") from e - elif isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): - raise errors.ForeignKeyError("collection does not exist") from e - logger.error(e, exc_info=True) - raise errors.DatabaseError("unhandled database error") - - -@attr.s -class Session: - """Database session management.""" - - reader_conn_string: str = attr.ib() - writer_conn_string: str = attr.ib() - - @classmethod - def create_from_env(cls): - """Create from environment.""" - return cls( - reader_conn_string=os.environ["READER_CONN_STRING"], - writer_conn_string=os.environ["WRITER_CONN_STRING"], - ) - - @classmethod - def 
create_from_settings(cls, settings: SqlalchemySettings) -> "Session": - """Create a Session object from settings.""" - return cls( - reader_conn_string=settings.reader_connection_string, - writer_conn_string=settings.writer_connection_string, - ) - - def __attrs_post_init__(self): - """Post init handler.""" - self.reader: FastAPISessionMaker = FastAPISessionMaker(self.reader_conn_string) - self.writer: FastAPISessionMaker = FastAPISessionMaker(self.writer_conn_string) diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/tokens.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/tokens.py deleted file mode 100644 index 19920ab9..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/tokens.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Pagination token client.""" -import abc -import logging -import os -from base64 import urlsafe_b64encode -from typing import Type - -import attr -from sqlalchemy.orm import Session as SqlSession - -from stac_fastapi.sqlalchemy.models import database -from stac_fastapi.sqlalchemy.session import Session -from stac_fastapi.types.errors import DatabaseError - -logger = logging.getLogger(__name__) - - -@attr.s -class PaginationTokenClient(abc.ABC): - """Pagination token specific CRUD operations.""" - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - token_table: Type[database.PaginationToken] = attr.ib( - default=database.PaginationToken - ) - - @staticmethod - @abc.abstractmethod - def _lookup_id( - id: str, table: Type[database.BaseModel], session: SqlSession - ) -> Type[database.BaseModel]: - """Lookup row by id.""" - ... 
- - def insert_token(self, keyset: str, tries: int = 0) -> str: # type:ignore - """Insert a keyset into the database.""" - # uid has collision chance of 1e-7 percent - uid = urlsafe_b64encode(os.urandom(6)).decode() - with self.session.writer.context_session() as session: - try: - token = database.PaginationToken(id=uid, keyset=keyset) - session.add(token) - return uid - except DatabaseError: - # Try again if uid already exists in the database - # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) - if tries > 5: - raise - self.insert_token(keyset, tries=tries + 1) - - def get_token(self, token_id: str) -> str: - """Retrieve a keyset from the database.""" - with self.session.reader.context_session() as session: - token = self._lookup_id(token_id, self.token_table, session) - return token.keyset diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/transactions.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/transactions.py deleted file mode 100644 index 644b82f2..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/transactions.py +++ /dev/null @@ -1,201 +0,0 @@ -"""transactions extension client.""" - -import logging -from typing import Optional, Type, Union - -import attr -from fastapi import HTTPException -from starlette.responses import Response - -from stac_fastapi.extensions.third_party.bulk_transactions import ( - BaseBulkTransactionsClient, - Items, -) -from stac_fastapi.sqlalchemy import serializers -from stac_fastapi.sqlalchemy.models import database -from stac_fastapi.sqlalchemy.session import Session -from stac_fastapi.types import stac as stac_types -from stac_fastapi.types.core import BaseTransactionsClient -from stac_fastapi.types.errors import NotFoundError - -logger = logging.getLogger(__name__) - - -@attr.s -class TransactionsClient(BaseTransactionsClient): - """Transactions extension specific CRUD operations.""" - - session: Session = 
attr.ib(default=attr.Factory(Session.create_from_env)) - collection_table: Type[database.Collection] = attr.ib(default=database.Collection) - item_table: Type[database.Item] = attr.ib(default=database.Item) - item_serializer: Type[serializers.Serializer] = attr.ib( - default=serializers.ItemSerializer - ) - collection_serializer: Type[serializers.Serializer] = attr.ib( - default=serializers.CollectionSerializer - ) - - def create_item( - self, - collection_id: str, - item: Union[stac_types.Item, stac_types.ItemCollection], - **kwargs, - ) -> Optional[stac_types.Item]: - """Create item.""" - base_url = str(kwargs["request"].base_url) - - # If a feature collection is posted - if item["type"] == "FeatureCollection": - bulk_client = BulkTransactionsClient(session=self.session) - bulk_client.bulk_item_insert(items=item["features"]) - return None - - # Otherwise a single item has been posted - body_collection_id = item.get("collection") - if body_collection_id is not None and collection_id != body_collection_id: - raise HTTPException( - status_code=400, - detail=f"Collection ID from path parameter ({collection_id}) does not match Collection ID from Item ({body_collection_id})", - ) - item["collection"] = collection_id - data = self.item_serializer.stac_to_db(item) - with self.session.writer.context_session() as session: - session.add(data) - return self.item_serializer.db_to_stac(data, base_url) - - def create_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Create collection.""" - base_url = str(kwargs["request"].base_url) - data = self.collection_serializer.stac_to_db(collection) - with self.session.writer.context_session() as session: - session.add(data) - return self.collection_serializer.db_to_stac(data, base_url=base_url) - - def update_item( - self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Update item.""" - 
body_collection_id = item.get("collection") - if body_collection_id is not None and collection_id != body_collection_id: - raise HTTPException( - status_code=400, - detail=f"Collection ID from path parameter ({collection_id}) does not match Collection ID from Item ({body_collection_id})", - ) - item["collection"] = collection_id - body_item_id = item["id"] - if body_item_id != item_id: - raise HTTPException( - status_code=400, - detail=f"Item ID from path parameter ({item_id}) does not match Item ID from Item ({body_item_id})", - ) - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - query = session.query(self.item_table).filter( - self.item_table.id == item["id"] - ) - query = query.filter(self.item_table.collection_id == item["collection"]) - if not query.scalar(): - raise NotFoundError( - f"Item {item['id']} in collection {item['collection']}" - ) - # SQLAlchemy orm updates don't seem to like geoalchemy types - db_model = self.item_serializer.stac_to_db(item) - query.update(self.item_serializer.row_to_dict(db_model)) - stac_item = self.item_serializer.db_to_stac(db_model, base_url) - - return stac_item - - def update_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Update collection.""" - base_url = str(kwargs["request"].base_url) - with self.session.reader.context_session() as session: - query = session.query(self.collection_table).filter( - self.collection_table.id == collection["id"] - ) - if not query.scalar(): - raise NotFoundError(f"Item {collection['id']} not found") - - # SQLAlchemy orm updates don't seem to like geoalchemy types - db_model = self.collection_serializer.stac_to_db(collection) - query.update(self.collection_serializer.row_to_dict(db_model)) - - return self.collection_serializer.db_to_stac(db_model, base_url) - - def delete_item( - self, item_id: str, collection_id: str, **kwargs - ) -> 
Optional[Union[stac_types.Item, Response]]: - """Delete item.""" - base_url = str(kwargs["request"].base_url) - with self.session.writer.context_session() as session: - query = session.query(self.item_table).filter( - self.item_table.collection_id == collection_id - ) - query = query.filter(self.item_table.id == item_id) - data = query.first() - if not data: - raise NotFoundError( - f"Item {item_id} not found in collection {collection_id}" - ) - query.delete() - return self.item_serializer.db_to_stac(data, base_url=base_url) - - def delete_collection( - self, collection_id: str, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Delete collection.""" - base_url = str(kwargs["request"].base_url) - with self.session.writer.context_session() as session: - query = session.query(self.collection_table).filter( - self.collection_table.id == collection_id - ) - data = query.first() - if not data: - raise NotFoundError(f"Collection {collection_id} not found") - query.delete() - return self.collection_serializer.db_to_stac(data, base_url=base_url) - - -@attr.s -class BulkTransactionsClient(BaseBulkTransactionsClient): - """Postgres bulk transactions.""" - - session: Session = attr.ib(default=attr.Factory(Session.create_from_env)) - debug: bool = attr.ib(default=False) - item_table: Type[database.Item] = attr.ib(default=database.Item) - item_serializer: Type[serializers.Serializer] = attr.ib( - default=serializers.ItemSerializer - ) - - def __attrs_post_init__(self): - """Create sqlalchemy engine.""" - self.engine = self.session.writer.cached_engine - - def _preprocess_item(self, item: stac_types.Item) -> stac_types.Item: - """Preprocess items to match data model. 
- - # TODO: dedup with GetterDict logic (ref #58) - """ - db_model = self.item_serializer.stac_to_db(item) - return self.item_serializer.row_to_dict(db_model) - - def bulk_item_insert( - self, items: Items, chunk_size: Optional[int] = None, **kwargs - ) -> str: - """Bulk item insertion using sqlalchemy core. - - https://docs.sqlalchemy.org/en/13/faq/performance.html#i-m-inserting-400-000-rows-with-the-orm-and-it-s-really-slow - """ - # Use items.items because schemas.Items is a model with an items key - processed_items = [self._preprocess_item(item) for item in items] - return_msg = f"Successfully added {len(processed_items)} items." - if chunk_size: - for chunk in self._chunks(processed_items, chunk_size): - self.engine.execute(self.item_table.__table__.insert(), chunk) - return return_msg - - self.engine.execute(self.item_table.__table__.insert(), processed_items) - return return_msg diff --git a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/version.py b/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/version.py deleted file mode 100644 index 3c7bccdd..00000000 --- a/stac_fastapi/sqlalchemy/stac_fastapi/sqlalchemy/version.py +++ /dev/null @@ -1,2 +0,0 @@ -"""library version.""" -__version__ = "2.3.0" diff --git a/stac_fastapi/sqlalchemy/tests/__init__.py b/stac_fastapi/sqlalchemy/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/sqlalchemy/tests/api/__init__.py b/stac_fastapi/sqlalchemy/tests/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/sqlalchemy/tests/api/test_api.py b/stac_fastapi/sqlalchemy/tests/api/test_api.py deleted file mode 100644 index 09304169..00000000 --- a/stac_fastapi/sqlalchemy/tests/api/test_api.py +++ /dev/null @@ -1,436 +0,0 @@ -from datetime import datetime, timedelta - -from ..conftest import MockStarletteRequest - -STAC_CORE_ROUTES = [ - "GET /", - "GET /collections", - "GET /collections/{collection_id}", - "GET /collections/{collection_id}/items", 
- "GET /collections/{collection_id}/items/{item_id}", - "GET /conformance", - "GET /search", - "POST /search", -] - -STAC_TRANSACTION_ROUTES = [ - "DELETE /collections/{collection_id}", - "DELETE /collections/{collection_id}/items/{item_id}", - "POST /collections", - "POST /collections/{collection_id}/items", - "PUT /collections", - "PUT /collections/{collection_id}/items/{item_id}", -] - - -def test_post_search_content_type(app_client): - params = {"limit": 1} - resp = app_client.post("search", json=params) - assert resp.headers["content-type"] == "application/geo+json" - - -def test_get_search_content_type(app_client): - resp = app_client.get("search") - assert resp.headers["content-type"] == "application/geo+json" - - -def test_api_headers(app_client): - resp = app_client.get("/api") - assert ( - resp.headers["content-type"] == "application/vnd.oai.openapi+json;version=3.0" - ) - assert resp.status_code == 200 - - -def test_core_router(api_client): - core_routes = set(STAC_CORE_ROUTES) - api_routes = set( - [f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes] - ) - assert not core_routes - api_routes - - -def test_transactions_router(api_client): - transaction_routes = set(STAC_TRANSACTION_ROUTES) - api_routes = set( - [f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes] - ) - assert not transaction_routes - api_routes - - -def test_app_transaction_extension(app_client, load_test_data): - item = load_test_data("test_item.json") - resp = app_client.post(f"/collections/{item['collection']}/items", json=item) - assert resp.status_code == 200 - - -def test_app_search_response(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - - assert 
resp_json.get("type") == "FeatureCollection" - # stac_version and stac_extensions were removed in v1.0.0-beta.3 - assert resp_json.get("stac_version") is None - assert resp_json.get("stac_extensions") is None - - -def test_app_search_response_multipolygon( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item_multipolygon.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - - assert resp_json.get("type") == "FeatureCollection" - assert resp_json.get("features")[0]["geometry"]["type"] == "MultiPolygon" - - -def test_app_search_response_geometry_null( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item_geometry_null.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - - assert resp_json.get("type") == "FeatureCollection" - assert resp_json.get("features")[0]["geometry"] is None - assert resp_json.get("features")[0]["bbox"] is None - - -def test_app_context_extension(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert "context" in resp_json - assert resp_json["context"]["returned"] == resp_json["context"]["matched"] == 1 - - -def test_app_fields_extension(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, 
request=MockStarletteRequest - ) - - resp = app_client.get("/search", params={"collections": ["test-collection"]}) - assert resp.status_code == 200 - resp_json = resp.json() - assert list(resp_json["features"][0]["properties"]) == ["datetime"] - - -def test_app_query_extension_gt(load_test_data, app_client, postgres_transactions): - test_item = load_test_data("test_item.json") - postgres_transactions.create_item( - test_item["collection"], test_item, request=MockStarletteRequest - ) - - params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"]}}} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -def test_app_query_extension_gte(load_test_data, app_client, postgres_transactions): - test_item = load_test_data("test_item.json") - postgres_transactions.create_item( - test_item["collection"], test_item, request=MockStarletteRequest - ) - - params = {"query": {"proj:epsg": {"gte": test_item["properties"]["proj:epsg"]}}} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -def test_app_query_extension_limit_eq0(app_client): - params = {"limit": 0} - resp = app_client.post("/search", json=params) - assert resp.status_code == 400 - - -def test_app_query_extension_limit_lt0( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - params = {"limit": -1} - resp = app_client.post("/search", json=params) - assert resp.status_code == 400 - - -def test_app_query_extension_limit_gt10000( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - params = {"limit": 10001} - resp = 
app_client.post("/search", json=params) - assert resp.status_code == 400 - - -def test_app_query_extension_limit_10000( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - params = {"limit": 10000} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - - -def test_app_sort_extension(load_test_data, app_client, postgres_transactions): - first_item = load_test_data("test_item.json") - item_date = datetime.strptime( - first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ" - ) - postgres_transactions.create_item( - first_item["collection"], first_item, request=MockStarletteRequest - ) - - second_item = load_test_data("test_item.json") - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = another_item_date.strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - postgres_transactions.create_item( - second_item["collection"], second_item, request=MockStarletteRequest - ) - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "datetime", "direction": "desc"}], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -def test_search_invalid_date(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - params = { - "datetime": "2020-XX-01/2020-10-30", - "collections": [item["collection"]], - } - - resp = app_client.post("/search", json=params) - assert resp.status_code == 400 - - -def test_search_point_intersects(load_test_data, app_client, postgres_transactions): - item = 
load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - point = [150.04, -33.14] - intersects = {"type": "Point", "coordinates": point} - - params = { - "intersects": intersects, - "collections": [item["collection"]], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -def test_datetime_non_interval(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - alternate_formats = [ - "2020-02-12T12:30:22+00:00", - "2020-02-12T12:30:22.00Z", - "2020-02-12T12:30:22Z", - "2020-02-12T12:30:22.00+00:00", - ] - for date in alternate_formats: - params = { - "datetime": date, - "collections": [item["collection"]], - } - - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - # datetime is returned in this format "2020-02-12T12:30:22+00:00" - assert resp_json["features"][0]["properties"]["datetime"][0:19] == date[0:19] - - -def test_bbox_3d(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1] - params = { - "bbox": australia_bbox, - "collections": [item["collection"]], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -def test_search_line_string_intersects( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - line = [[150.04, -33.14], [150.22, 
-33.89]] - intersects = {"type": "LineString", "coordinates": line} - - params = { - "intersects": intersects, - "collections": [item["collection"]], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 1 - - -def test_app_fields_extension_return_all_properties( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get( - "/search", params={"collections": ["test-collection"], "fields": "properties"} - ) - assert resp.status_code == 200 - resp_json = resp.json() - feature = resp_json["features"][0] - assert len(feature["properties"]) >= len(item["properties"]) - for expected_prop, expected_value in item["properties"].items(): - if expected_prop in ("datetime", "created", "updated"): - assert feature["properties"][expected_prop][0:19] == expected_value[0:19] - else: - assert feature["properties"][expected_prop] == expected_value - - -def test_landing_forwarded_header(load_test_data, app_client, postgres_transactions): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - response = app_client.get( - "/", - headers={ - "Forwarded": "proto=https;host=test:1234", - "X-Forwarded-Proto": "http", - "X-Forwarded-Port": "4321", - }, - ).json() - for link in response["links"]: - assert link["href"].startswith("https://test:1234/") - - -def test_app_search_response_forwarded_header( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get( - "/search", - params={"collections": ["test-collection"]}, - headers={"Forwarded": "proto=https;host=testserver:1234"}, - ) - for 
feature in resp.json()["features"]: - for link in feature["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_app_search_response_x_forwarded_headers( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get( - "/search", - params={"collections": ["test-collection"]}, - headers={ - "X-Forwarded-Port": "1234", - "X-Forwarded-Proto": "https", - }, - ) - for feature in resp.json()["features"]: - for link in feature["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_app_search_response_duplicate_forwarded_headers( - load_test_data, app_client, postgres_transactions -): - item = load_test_data("test_item.json") - postgres_transactions.create_item( - item["collection"], item, request=MockStarletteRequest - ) - - resp = app_client.get( - "/search", - params={"collections": ["test-collection"]}, - headers={ - "Forwarded": "proto=https;host=testserver:1234", - "X-Forwarded-Port": "4321", - "X-Forwarded-Proto": "http", - }, - ) - for feature in resp.json()["features"]: - for link in feature["links"]: - assert link["href"].startswith("https://testserver:1234/") diff --git a/stac_fastapi/sqlalchemy/tests/clients/__init__.py b/stac_fastapi/sqlalchemy/tests/clients/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/sqlalchemy/tests/clients/test_postgres.py b/stac_fastapi/sqlalchemy/tests/clients/test_postgres.py deleted file mode 100644 index da69c78b..00000000 --- a/stac_fastapi/sqlalchemy/tests/clients/test_postgres.py +++ /dev/null @@ -1,376 +0,0 @@ -import uuid -from copy import deepcopy -from typing import Callable - -import pytest -from stac_pydantic import Collection, Item -from tests.conftest import MockStarletteRequest - -from stac_fastapi.api.app import StacApi -from 
stac_fastapi.extensions.third_party.bulk_transactions import Items -from stac_fastapi.sqlalchemy.core import CoreCrudClient -from stac_fastapi.sqlalchemy.transactions import ( - BulkTransactionsClient, - TransactionsClient, -) -from stac_fastapi.types.errors import ConflictError, NotFoundError - - -def test_create_collection( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - resp = postgres_transactions.create_collection(data, request=MockStarletteRequest) - assert Collection(**data).dict(exclude={"links"}) == Collection(**resp).dict( - exclude={"links"} - ) - coll = postgres_core.get_collection(data["id"], request=MockStarletteRequest) - assert coll["id"] == data["id"] - - -def test_create_collection_already_exists( - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - postgres_transactions.create_collection(data, request=MockStarletteRequest) - - with pytest.raises(ConflictError): - postgres_transactions.create_collection(data, request=MockStarletteRequest) - - -def test_update_collection( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - postgres_transactions.create_collection(data, request=MockStarletteRequest) - - data["keywords"].append("new keyword") - postgres_transactions.update_collection(data, request=MockStarletteRequest) - - coll = postgres_core.get_collection(data["id"], request=MockStarletteRequest) - assert "new keyword" in coll["keywords"] - - -def test_delete_collection( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - postgres_transactions.create_collection(data, request=MockStarletteRequest) - - deleted = postgres_transactions.delete_collection( - 
data["id"], request=MockStarletteRequest - ) - - with pytest.raises(NotFoundError): - postgres_core.get_collection(deleted["id"], request=MockStarletteRequest) - - -def test_get_collection( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - data = load_test_data("test_collection.json") - postgres_transactions.create_collection(data, request=MockStarletteRequest) - coll = postgres_core.get_collection(data["id"], request=MockStarletteRequest) - assert Collection(**data).dict(exclude={"links"}) == Collection(**coll).dict( - exclude={"links"} - ) - assert coll["id"] == data["id"] - - -def test_get_item( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - collection_data = load_test_data("test_collection.json") - postgres_transactions.create_collection( - collection_data, request=MockStarletteRequest - ) - data = load_test_data("test_item.json") - postgres_transactions.create_item( - collection_data["id"], data, request=MockStarletteRequest - ) - coll = postgres_core.get_item( - item_id=data["id"], - collection_id=data["collection"], - request=MockStarletteRequest, - ) - assert coll["id"] == data["id"] - assert coll["collection"] == data["collection"] - - -def test_get_collection_items( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - - for _ in range(5): - item["id"] = str(uuid.uuid4()) - postgres_transactions.create_item( - coll["id"], item, request=MockStarletteRequest - ) - - fc = postgres_core.item_collection(coll["id"], request=MockStarletteRequest) - assert len(fc["features"]) == 5 - - for item in fc["features"]: - assert item["collection"] == coll["id"] - - -def test_create_item( - postgres_core: CoreCrudClient, - 
postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - item = load_test_data("test_item.json") - postgres_transactions.create_item(coll["id"], item, request=MockStarletteRequest) - resp = postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - assert Item(**item).dict( - exclude={"links": ..., "properties": {"created", "updated"}} - ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) - - -def test_create_item_already_exists( - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - postgres_transactions.create_item(coll["id"], item, request=MockStarletteRequest) - - with pytest.raises(ConflictError): - postgres_transactions.create_item( - coll["id"], item, request=MockStarletteRequest - ) - - -def test_create_duplicate_item_different_collections( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - # create test-collection - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - # create test-collection-2 - coll["id"] = "test-collection-2" - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - # add item to test-collection - item = load_test_data("test_item.json") - postgres_transactions.create_item( - "test-collection", item, request=MockStarletteRequest - ) - - # get item from test-collection - resp = postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - assert Item(**item).dict( - exclude={"links": ..., "properties": {"created", "updated"}} - ) == 
Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) - - # add item to test-collection-2 - item["collection"] = "test-collection-2" - postgres_transactions.create_item( - "test-collection-2", item, request=MockStarletteRequest - ) - - # get item with same id from test-collection-2 - resp = postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - assert Item(**item).dict( - exclude={"links": ..., "properties": {"created", "updated"}} - ) == Item(**resp).dict(exclude={"links": ..., "properties": {"created", "updated"}}) - - -def test_update_item( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - postgres_transactions.create_item(coll["id"], item, request=MockStarletteRequest) - - item["properties"]["foo"] = "bar" - postgres_transactions.update_item( - coll["id"], item["id"], item, request=MockStarletteRequest - ) - - updated_item = postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - assert updated_item["properties"]["foo"] == "bar" - - -def test_update_geometry( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - postgres_transactions.create_item(coll["id"], item, request=MockStarletteRequest) - - item["geometry"]["coordinates"] = [[[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]] - postgres_transactions.update_item( - coll["id"], item["id"], item, request=MockStarletteRequest - ) - - updated_item = postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - assert 
updated_item["geometry"]["coordinates"] == item["geometry"]["coordinates"] - - -def test_delete_item( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - postgres_transactions.create_item(coll["id"], item, request=MockStarletteRequest) - - postgres_transactions.delete_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - - with pytest.raises(NotFoundError): - postgres_core.get_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - - -def test_bulk_item_insert( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - postgres_bulk_transactions: BulkTransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - - items = {} - for _ in range(10): - _item = deepcopy(item) - _item["id"] = str(uuid.uuid4()) - items[_item["id"]] = _item - - fc = postgres_core.item_collection(coll["id"], request=MockStarletteRequest) - assert len(fc["features"]) == 0 - - postgres_bulk_transactions.bulk_item_insert(Items(items=items)) - - fc = postgres_core.item_collection(coll["id"], request=MockStarletteRequest) - assert len(fc["features"]) == 10 - - for item in items.values(): - postgres_transactions.delete_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - - -def test_bulk_item_insert_chunked( - postgres_transactions: TransactionsClient, - postgres_bulk_transactions: BulkTransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - - items = [] - for _ in 
range(10): - _item = deepcopy(item) - _item["id"] = str(uuid.uuid4()) - items.append(_item) - - postgres_bulk_transactions.bulk_item_insert(items=items, chunk_size=2) - - for item in items: - postgres_transactions.delete_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - - -def test_feature_collection_insert( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, -): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - item = load_test_data("test_item.json") - - features = [] - for _ in range(10): - _item = deepcopy(item) - _item["id"] = str(uuid.uuid4()) - features.append(_item) - - feature_collection = {"type": "FeatureCollection", "features": features} - - postgres_transactions.create_item( - coll["id"], feature_collection, request=MockStarletteRequest - ) - - fc = postgres_core.item_collection(coll["id"], request=MockStarletteRequest) - assert len(fc["features"]) >= 10 - - for item in features: - postgres_transactions.delete_item( - item["id"], item["collection"], request=MockStarletteRequest - ) - - -def test_landing_page_no_collection_title( - postgres_core: CoreCrudClient, - postgres_transactions: TransactionsClient, - load_test_data: Callable, - api_client: StacApi, -): - class MockStarletteRequestWithApp(MockStarletteRequest): - app = api_client.app - - coll = load_test_data("test_collection.json") - del coll["title"] - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - landing_page = postgres_core.landing_page(request=MockStarletteRequestWithApp) - for link in landing_page["links"]: - if link["href"].split("/")[-1] == coll["id"]: - assert link["title"] diff --git a/stac_fastapi/sqlalchemy/tests/conftest.py b/stac_fastapi/sqlalchemy/tests/conftest.py deleted file mode 100644 index 7abd9150..00000000 --- a/stac_fastapi/sqlalchemy/tests/conftest.py +++ /dev/null @@ -1,154 +0,0 @@ 
-import json -import os -from typing import Callable, Dict - -import pytest -from starlette.testclient import TestClient - -from stac_fastapi.api.app import StacApi -from stac_fastapi.api.models import create_request_model -from stac_fastapi.extensions.core import ( - ContextExtension, - FieldsExtension, - SortExtension, - TokenPaginationExtension, - TransactionExtension, -) -from stac_fastapi.sqlalchemy.config import SqlalchemySettings -from stac_fastapi.sqlalchemy.core import CoreCrudClient -from stac_fastapi.sqlalchemy.extensions import QueryExtension -from stac_fastapi.sqlalchemy.models import database -from stac_fastapi.sqlalchemy.session import Session -from stac_fastapi.sqlalchemy.transactions import ( - BulkTransactionsClient, - TransactionsClient, -) -from stac_fastapi.types.config import Settings -from stac_fastapi.types.search import BaseSearchGetRequest, BaseSearchPostRequest - -DATA_DIR = os.path.join(os.path.dirname(__file__), "data") - - -class TestSettings(SqlalchemySettings): - class Config: - env_file = ".env.test" - - -settings = TestSettings() -Settings.set(settings) - - -@pytest.fixture(autouse=True) -def cleanup(postgres_core: CoreCrudClient, postgres_transactions: TransactionsClient): - yield - collections = postgres_core.all_collections(request=MockStarletteRequest) - for coll in collections["collections"]: - if coll["id"].split("-")[0] == "test": - # Delete the items - items = postgres_core.item_collection( - coll["id"], limit=100, request=MockStarletteRequest - ) - for feat in items["features"]: - postgres_transactions.delete_item( - feat["id"], feat["collection"], request=MockStarletteRequest - ) - - # Delete the collection - postgres_transactions.delete_collection( - coll["id"], request=MockStarletteRequest - ) - - -@pytest.fixture -def load_test_data() -> Callable[[str], Dict]: - def load_file(filename: str) -> Dict: - with open(os.path.join(DATA_DIR, filename)) as file: - return json.load(file) - - return load_file - - -class 
MockStarletteRequest: - base_url = "http://test-server" - - -@pytest.fixture -def db_session() -> Session: - return Session( - reader_conn_string=settings.reader_connection_string, - writer_conn_string=settings.writer_connection_string, - ) - - -@pytest.fixture -def postgres_core(db_session): - return CoreCrudClient( - session=db_session, - item_table=database.Item, - collection_table=database.Collection, - token_table=database.PaginationToken, - ) - - -@pytest.fixture -def postgres_transactions(db_session): - return TransactionsClient( - session=db_session, - item_table=database.Item, - collection_table=database.Collection, - ) - - -@pytest.fixture -def postgres_bulk_transactions(db_session): - return BulkTransactionsClient(session=db_session) - - -@pytest.fixture -def api_client(db_session): - settings = SqlalchemySettings() - extensions = [ - TransactionExtension( - client=TransactionsClient(session=db_session), settings=settings - ), - ContextExtension(), - SortExtension(), - FieldsExtension(), - QueryExtension(), - TokenPaginationExtension(), - ] - - get_request_model = create_request_model( - "SearchGetRequest", - base_model=BaseSearchGetRequest, - extensions=extensions, - request_type="GET", - ) - - post_request_model = create_request_model( - "SearchPostRequest", - base_model=BaseSearchPostRequest, - extensions=extensions, - request_type="POST", - ) - - return StacApi( - settings=settings, - client=CoreCrudClient( - session=db_session, - extensions=extensions, - post_request_model=post_request_model, - ), - extensions=extensions, - search_get_request_model=get_request_model, - search_post_request_model=post_request_model, - ) - - -@pytest.fixture -def app_client(api_client, load_test_data, postgres_transactions): - coll = load_test_data("test_collection.json") - postgres_transactions.create_collection(coll, request=MockStarletteRequest) - - with TestClient(api_client.app) as test_app: - yield test_app diff --git 
a/stac_fastapi/sqlalchemy/tests/data/test_collection.json b/stac_fastapi/sqlalchemy/tests/data/test_collection.json deleted file mode 100644 index 5028bfea..00000000 --- a/stac_fastapi/sqlalchemy/tests/data/test_collection.json +++ /dev/null @@ -1,167 +0,0 @@ -{ - "id": "test-collection", - "stac_extensions": ["https://stac-extensions.github.io/eo/v1.0.0/schema.json"], - "type": "Collection", - "description": "Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", - "stac_version": "1.0.0", - "license": "PDDL-1.0", - "summaries": { - "platform": ["landsat-8"], - "instruments": ["oli", "tirs"], - "gsd": [30], - "eo:bands": [ - { - "name": "B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - }, - { - "name": "B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - }, - { - "name": "B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - }, - { - "name": "B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - }, - { - "name": "B5", - "common_name": "nir", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - }, - { - "name": "B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - }, - { - "name": "B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - }, - { - "name": "B8", - "common_name": "pan", - "center_wavelength": 0.59, - "full_width_half_max": 0.18 - }, - { - "name": "B9", - "common_name": "cirrus", - "center_wavelength": 1.37, - "full_width_half_max": 0.02 - }, - { - "name": "B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - }, - { - "name": "B11", - "common_name": "lwir12", - "center_wavelength": 12, - "full_width_half_max": 1 - } - ] - }, - "extent": { - "spatial": { - "bbox": [ - [ - -180.0, - 
-90.0, - 180.0, - 90.0 - ] - ] - }, - "temporal": { - "interval": [ - [ - "2013-06-01", - null - ] - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "self", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1/items", - "rel": "item", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ], - "title": "Landsat 8 L1", - "keywords": [ - "landsat", - "earth observation", - "usgs" - ], - "providers": [ - { - "name": "USGS", - "roles": [ - "producer" - ], - "url": "https://landsat.usgs.gov/" - }, - { - "name": "Planet Labs", - "roles": [ - "processor" - ], - "url": "https://github.com/landsat-pds/landsat_ingestor" - }, - { - "name": "AWS", - "roles": [ - "host" - ], - "url": "https://landsatonaws.com/" - }, - { - "name": "Development Seed", - "roles": [ - "processor" - ], - "url": "https://github.com/sat-utils/sat-api" - }, - { - "name": "Earth Search by Element84", - "description": "API of Earth on AWS datasets", - "roles": [ - "host" - ], - "url": "https://element84.com" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/sqlalchemy/tests/data/test_item.json b/stac_fastapi/sqlalchemy/tests/data/test_item.json deleted file mode 100644 index 2b7fdd86..00000000 --- a/stac_fastapi/sqlalchemy/tests/data/test_item.json +++ /dev/null @@ -1,505 +0,0 @@ -{ - "type": "Feature", - "id": "test-item", - "stac_version": "1.0.0", - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "geometry": { - "coordinates": [ - [ - [ - 152.15052873427666, - -33.82243006904891 - ], - [ - 150.1000346138806, - -34.257132625788756 - ], - [ - 149.5776607193635, - -32.514709769700254 - ], - [ - 151.6262528041627, - 
-32.08081674221862 - ], - [ - 152.15052873427666, - -33.82243006904891 - ] - ] - ], - "type": "Polygon" - }, - "properties": { - "datetime": "2020-02-12T12:30:22Z", - "landsat:scene_id": "LC82081612020043LGN00", - "landsat:row": "161", - "gsd": 15, - "eo:bands": [ - { - "gsd": 30, - "name": "B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - }, - { - "gsd": 30, - "name": "B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - }, - { - "gsd": 30, - "name": "B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - }, - { - "gsd": 30, - "name": "B5", - "common_name": "nir", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - }, - { - "gsd": 30, - "name": "B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - }, - { - "gsd": 30, - "name": "B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - }, - { - "gsd": 15, - "name": "B8", - "common_name": "pan", - "center_wavelength": 0.59, - "full_width_half_max": 0.18 - }, - { - "gsd": 30, - "name": "B9", - "common_name": "cirrus", - "center_wavelength": 1.37, - "full_width_half_max": 0.02 - }, - { - "gsd": 100, - "name": "B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - }, - { - "gsd": 100, - "name": "B11", - "common_name": "lwir12", - "center_wavelength": 12, - "full_width_half_max": 1 - } - ], - "landsat:revision": "00", - "view:sun_azimuth": -148.83296771, - "instrument": "OLI_TIRS", - "landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", - "eo:cloud_cover": 0, - "landsat:tier": "RT", - "landsat:processing_level": "L1GT", - "landsat:column": "208", - "platform": "landsat-8", - "proj:epsg": 32756, - "view:sun_elevation": -37.30791534, - "view:off_nadir": 0, 
- "height": 2500, - "width": 2500 - }, - "bbox": [ - 149.57574, - -34.25796, - 152.15194, - -32.07915 - ], - "collection": "test-collection", - "assets": { - "ANG": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt", - "type": "text/plain", - "title": "Angle Coefficients File", - "description": "Collection 2 Level-1 Angle Coefficients File (ANG)" - }, - "SR_B1": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Coastal/Aerosol Band (B1)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B1", - "common_name": "coastal", - "center_wavelength": 0.44, - "full_width_half_max": 0.02 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B2": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Blue Band (B2)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B2", - "common_name": "blue", - "center_wavelength": 0.48, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B3": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Green Band (B3)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B3", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.06 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B4": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Red Band (B4)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B4", - "common_name": "red", - "center_wavelength": 0.65, - "full_width_half_max": 0.04 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B5": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Near Infrared Band 0.8 (B5)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B5", - "common_name": "nir08", - "center_wavelength": 0.86, - "full_width_half_max": 0.03 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B6": { - "gsd": 30, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 1.6 (B6)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B6", - "common_name": "swir16", - "center_wavelength": 1.6, - "full_width_half_max": 0.08 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "SR_B7": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Short-wave Infrared Band 2.2 (B7)", - "eo:bands": [ - { - "gsd": 30, - "name": "SR_B7", - "common_name": "swir22", - "center_wavelength": 2.2, - "full_width_half_max": 0.2 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_QA": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Surface Temperature Quality Assessment Band", - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_B10": { - "gsd": 100, - "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Surface Temperature Band (B10)", - "eo:bands": [ - { - "gsd": 100, - "name": "ST_B10", - "common_name": "lwir11", - "center_wavelength": 10.9, - "full_width_half_max": 0.8 - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "MTL.txt": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt", - "type": "text/plain", - "title": "Product Metadata File", - "description": "Collection 2 Level-1 Product Metadata File (MTL)" - }, - "MTL.xml": { - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml", - "type": "application/xml", - "title": "Product Metadata File (xml)", - "description": "Collection 2 Level-1 Product Metadata File (xml)" - }, - "ST_DRAD": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Downwelled Radiance Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_DRAD", - "description": "downwelled radiance" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 
0, - -30, - -843585 - ] - }, - "ST_EMIS": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMIS", - "description": "emissivity" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - }, - "ST_EMSD": { - "gsd": 30, - "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF", - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "title": "Emissivity Standard Deviation Band", - "eo:bands": [ - { - "gsd": 30, - "name": "ST_EMSD", - "description": "emissivity standard deviation" - } - ], - "proj:shape": [ - 7731, - 7591 - ], - "description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product", - "proj:transform": [ - 30, - 0, - 304185, - 0, - -30, - -843585 - ] - } - }, - "links": [ - { - "href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", - "rel": "self", - "type": "application/geo+json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "parent", - "type": "application/json" - }, - { - "href": "http://localhost:8081/collections/landsat-8-l1", - "rel": "collection", - "type": "application/json" - }, - { - "href": "http://localhost:8081/", - "rel": "root", - "type": "application/json" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/sqlalchemy/tests/data/test_item_geometry_null.json 
b/stac_fastapi/sqlalchemy/tests/data/test_item_geometry_null.json deleted file mode 100644 index 27ef327a..00000000 --- a/stac_fastapi/sqlalchemy/tests/data/test_item_geometry_null.json +++ /dev/null @@ -1,169 +0,0 @@ -{ - "type": "Feature", - "stac_version": "1.0.0", - "stac_extensions": [ - "https://landsat.usgs.gov/stac/landsat-ard-extension/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json", - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/alternate-assets/v1.1.0/schema.json", - "https://stac-extensions.github.io/storage/v1.0.0/schema.json" - ], - "id": "LE07_CU_002012_20150101_20210502_02_BA", - "description": "Landsat Collection 2 Level-3 Burned Area Product", - "geometry": null, - "properties": { - "datetime": "2015-01-01T18:39:12.4885358Z", - "platform": "LANDSAT_7", - "instruments": [ - "ETM" - ], - "landsat:grid_horizontal": "02", - "landsat:grid_vertical": "12", - "landsat:grid_region": "CU", - "landsat:scene_count": 1, - "eo:cloud_cover": 0.0759, - "landsat:cloud_shadow_cover": 0.1394, - "landsat:snow_ice_cover": 0, - "landsat:fill": 95.4286, - "proj:epsg": null, - "proj:shape": [ - 5000, - 5000 - ], - "proj:transform": [ - 30, - 0, - -2265585, - 0, - -30, - 1514805 - ], - "created": "2022-02-08T20:07:38.885Z", - "updated": "2022-02-08T20:07:38.885Z" - }, - "assets": { - "index": { - "title": "HTML index page", - "type": "text/html", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/stac-browser/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02" - }, - "bp": { - "title": "Burn Probability", - "description": "Collection 2 Level-3 Albers Burn Probability Burned Area", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "href": 
"https://landsatlook.usgs.gov/level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_BP.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat-level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_BP.TIF" - } - } - }, - "bc": { - "title": "Burn Classification", - "description": "Collection 2 Level-3 Albers Burn Classification Burned Area", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "href": "https://landsatlook.usgs.gov/level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_BC.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat-level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_BC.TIF" - } - } - }, - "quick_look": { - "title": "Quick Look File", - "description": "Collection 2 Level-3 Albers Quick Look File Burned Area", - "type": "image/png", - "roles": [ - "data" - ], - "href": "https://landsatlook.usgs.gov/level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_QuickLook.png", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat-level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02_QuickLook.png" - } - } - }, - "xml": { - "title": "Extensible Metadata File", - "description": "Collection 2 Level-3 Albers Extensible Metadata File Burned Area", - "type": "application/xml", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02.xml", 
- "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat-level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02.xml" - } - } - }, - "json": { - "title": "Extensible Metadata File (json)", - "description": "Collection 2 Level-3 Albers Extensible Metadata File (json) Burned Area", - "type": "application/json", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02.json", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat-level-3/collection02/BA/2015/CU/002/012/LE07_CU_002012_20150101_20210502_02_BA/LE07_CU_002012_20150101_20210502_02.json" - } - } - } - }, - "links": [ - { - "rel": "self", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2l3-ba/items/LE07_CU_002012_20150101_20210502_02_BA" - }, - { - "rel": "derived_from", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2ard-sr/items/LE07_CU_002012_20150101_20210502_02_SR" - }, - { - "rel": "derived_from", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2ard-st/items/LE07_CU_002012_20150101_20210502_02_ST" - }, - { - "rel": "derived_from", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2ard-ta/items/LE07_CU_002012_20150101_20210502_02_TOA" - }, - { - "rel": "derived_from", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2ard-bt/items/LE07_CU_002012_20150101_20210502_02_BT" - }, - { - "rel": "parent", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2l3-ba" - }, - { - "rel": "collection", - "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2l3-ba" - }, - { - "rel": "root", - "href": "https://landsatlook.usgs.gov/stac-server/" - } - ], - 
"collection": "test-collection" - } \ No newline at end of file diff --git a/stac_fastapi/sqlalchemy/tests/data/test_item_multipolygon.json b/stac_fastapi/sqlalchemy/tests/data/test_item_multipolygon.json deleted file mode 100644 index f5701c3a..00000000 --- a/stac_fastapi/sqlalchemy/tests/data/test_item_multipolygon.json +++ /dev/null @@ -1,454 +0,0 @@ -{ - "type": "Feature", - "stac_version": "1.0.0", - "stac_extensions": [ - "https://landsat.usgs.gov/stac/landsat-extension/v1.1.1/schema.json", - "https://stac-extensions.github.io/view/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json", - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/alternate-assets/v1.1.0/schema.json", - "https://stac-extensions.github.io/storage/v1.0.0/schema.json" - ], - "id": "LE07_L2SP_092013_20211007_20211104_02_T2_SR", - "description": "Landsat Collection 2 Level-2 Surface Reflectance Product", - "bbox": [ - 175.93215804933186, - 65.93036549677463, - -178.26673562596073, - 68.07019813171695 - ], - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [ - 180.0, - 67.67956138964027 - ], - [ - 177.4008122028755, - 68.07019813171695 - ], - [ - 175.93215804933186, - 66.54096344674578 - ], - [ - 180.0, - 65.93733582837588 - ], - [ - 180.0, - 67.67956138964027 - ] - ] - ], - [ - [ - [ - -180.0, - 65.93733582837588 - ], - [ - -179.95302698810534, - 65.93036549677463 - ], - [ - -178.3207049853914, - 67.36419976494292 - ], - [ - -178.26673562596073, - 67.41036545485302 - ], - [ - -178.27732165481333, - 67.42065687448587 - ], - [ - -180.0, - 67.67956138964027 - ], - [ - -180.0, - 65.93733582837588 - ] - ] - ] - ] - }, - "properties": { - "datetime": "2021-10-07T22:29:48Z", - "eo:cloud_cover": 50.0, - "view:sun_azimuth": 158.59868248, - "view:sun_elevation": 15.64343101, - "platform": "LANDSAT_7", - "instruments": [ - "ETM" - ], - "view:off_nadir": 0, - "landsat:cloud_cover_land": 0.0, - 
"landsat:wrs_type": "2", - "landsat:wrs_path": "092", - "landsat:wrs_row": "013", - "landsat:scene_id": "LE70920132021280ASN00", - "landsat:collection_category": "T2", - "landsat:collection_number": "02", - "landsat:correction": "L2SP", - "proj:epsg": 32660, - "proj:shape": [ - 8011, - 8731 - ], - "proj:transform": [ - 30.0, - 0.0, - 446085.0, - 0.0, - -30.0, - 7553415.0 - ] - }, - "assets": { - "thumbnail": { - "title": "Thumbnail image", - "type": "image/jpeg", - "roles": [ - "thumbnail" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_thumb_small.jpeg", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_thumb_small.jpeg" - } - } - }, - "reduced_resolution_browse": { - "title": "Reduced resolution browse image", - "type": "image/jpeg", - "roles": [ - "overview" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_thumb_large.jpeg", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_thumb_large.jpeg" - } - } - }, - "index": { - "title": "HTML index page", - "type": "text/html", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/stac-browser/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2" - }, - "blue": { - "title": "Blue Band (B1)", - "description": "Collection 2 Level-2 Blue Band (B1) Surface Reflectance", - "type": 
"image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "eo:bands": [ - { - "name": "B1", - "common_name": "blue", - "gsd": 30, - "center_wavelength": 0.48 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B1.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B1.TIF" - } - } - }, - "green": { - "title": "Green Band (B2)", - "description": "Collection 2 Level-2 Green Band (B2) Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "eo:bands": [ - { - "name": "B2", - "common_name": "green", - "gsd": 30, - "center_wavelength": 0.56 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B2.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B2.TIF" - } - } - }, - "red": { - "title": "Red Band (B3)", - "description": "Collection 2 Level-2 Red Band (B3) Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "eo:bands": [ - { - "name": "B3", - "common_name": "red", - "gsd": 30, - "center_wavelength": 0.65 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B3.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": 
true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B3.TIF" - } - } - }, - "nir08": { - "title": "Near Infrared Band 0.8 (B4)", - "description": "Collection 2 Level-2 Near Infrared Band 0.8 (B4) Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data", - "reflectance" - ], - "eo:bands": [ - { - "name": "B4", - "common_name": "nir08", - "gsd": 30, - "center_wavelength": 0.86 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B4.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B4.TIF" - } - } - }, - "swir16": { - "title": "Short-wave Infrared Band 1.6 (B5)", - "description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data", - "reflectance" - ], - "eo:bands": [ - { - "name": "B5", - "common_name": "swir16", - "gsd": 30, - "center_wavelength": 1.6 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B5.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B5.TIF" - } - } - }, - "swir22": { - "title": "Short-wave Infrared Band 2.2 (B7)", - "description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance", - "type": 
"image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data", - "reflectance" - ], - "eo:bands": [ - { - "name": "B7", - "common_name": "swir22", - "gsd": 30, - "center_wavelength": 2.2 - } - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B7.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_B7.TIF" - } - } - }, - "atmos_opacity": { - "title": "Atmospheric Opacity Band", - "description": "Collection 2 Level-2 Atmospheric Opacity Band Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "data" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_ATMOS_OPACITY.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_ATMOS_OPACITY.TIF" - } - } - }, - "cloud_qa": { - "title": "Cloud Quality Analysis Band", - "description": "Collection 2 Level-2 Cloud Quality Opacity Band Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "metadata", - "cloud", - "cloud-shadow", - "snow-ice", - "water-mask" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_CLOUD_QA.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": 
"s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_CLOUD_QA.TIF" - } - } - }, - "ANG.txt": { - "title": "Angle Coefficients File", - "description": "Collection 2 Level-2 Angle Coefficients File (ANG)", - "type": "text/plain", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_ANG.txt", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_ANG.txt" - } - } - }, - "MTL.txt": { - "title": "Product Metadata File", - "description": "Collection 2 Level-2 Product Metadata File (MTL)", - "type": "text/plain", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.txt", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.txt" - } - } - }, - "MTL.xml": { - "title": "Product Metadata File (xml)", - "description": "Collection 2 Level-1 Product Metadata File (xml)", - "type": "application/xml", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.xml", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": 
"s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.xml" - } - } - }, - "MTL.json": { - "title": "Product Metadata File (json)", - "description": "Collection 2 Level-2 Product Metadata File (json)", - "type": "application/json", - "roles": [ - "metadata" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.json", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_MTL.json" - } - } - }, - "qa_pixel": { - "title": "Pixel Quality Assessment Band", - "description": "Collection 2 Level-2 Pixel Quality Assessment Band Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "cloud", - "cloud-shadow", - "snow-ice", - "water-mask" - ], - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_QA_PIXEL.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_QA_PIXEL.TIF" - } - } - }, - "qa_radsat": { - "title": "Radiometric Saturation Quality Assessment Band", - "description": "Collection 2 Level-2 Radiometric Saturation Quality Assessment Band Surface Reflectance", - "type": "image/vnd.stac.geotiff; cloud-optimized=true", - "roles": [ - "saturation" - ], - "href": 
"https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_QA_RADSAT.TIF", - "alternate": { - "s3": { - "storage:platform": "AWS", - "storage:requester_pays": true, - "href": "s3://usgs-landsat/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_QA_RADSAT.TIF" - } - } - } - }, - "links": [ - { - "rel": "root", - "href": "https://landsatlook.usgs.gov/data/catalog.json" - }, - { - "rel": "parent", - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/catalog.json" - }, - { - "rel": "collection", - "href": "https://landsatlook.usgs.gov/data/collection02/landsat-c2l2-sr.json" - }, - { - "rel": "self", - "href": "https://landsatlook.usgs.gov/data/collection02/level-2/standard/etm/2021/092/013/LE07_L2SP_092013_20211007_20211104_02_T2/LE07_L2SP_092013_20211007_20211104_02_T2_SR_stac.json" - } - ], - "collection": "test-collection" -} diff --git a/stac_fastapi/sqlalchemy/tests/features/__init__.py b/stac_fastapi/sqlalchemy/tests/features/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/sqlalchemy/tests/features/test_custom_models.py b/stac_fastapi/sqlalchemy/tests/features/test_custom_models.py deleted file mode 100644 index 400c14ec..00000000 --- a/stac_fastapi/sqlalchemy/tests/features/test_custom_models.py +++ /dev/null @@ -1,75 +0,0 @@ -# from typing import Type -# -# import sqlalchemy as sa -# from starlette.testclient import TestClient -# -# # TODO: move these -# from stac_api.models.database import Item -# from stac_api.models.schemas import Collection -# -# from stac_fastapi.api.app import StacApi -# from stac_fastapi.extensions.core import TransactionExtension -# from stac_fastapi.postgres.core import CoreCrudClient, Session -# from stac_fastapi.postgres.transactions import TransactionsClient -# from 
stac_fastapi.postgres.config import PostgresSettings -# -# -# from ..conftest import MockStarletteRequest -# -# -# class CustomItem(Item): -# foo = sa.Column(sa.VARCHAR(10)) -# -# -# def create_app(item_model: Type[Item], db_session: Session) -> StacApi: -# """Create application with a custom sqlalchemy item""" -# api = StacApi( -# settings=PostgresSettings(indexed_fields={"datetime", "foo"}), -# extensions=[ -# TransactionExtension( -# client=TransactionsClient(item_table=item_model, session=db_session) -# ) -# ], -# client=CoreCrudClient(item_table=item_model, session=db_session), -# ) -# return api -# -# -# def test_custom_item(load_test_data, postgres_transactions, db_session): -# api = create_app(CustomItem, db_session) -# transactions = TransactionsClient(item_table=CustomItem, session=db_session) -# -# with TestClient(api.app) as test_client: -# # Ingest a collection -# coll = Collection.parse_obj(load_test_data("test_collection.json")) -# transactions.create_collection(coll, request=MockStarletteRequest) -# -# # Modify the table to match our custom item -# # This would typically be done with alembic -# db_session.writer.cached_engine.execute( -# "ALTER TABLE data.items ADD COLUMN foo VARCHAR(10)" -# ) -# -# # Post an item -# test_item = load_test_data("test_item.json") -# test_item["properties"]["foo"] = "hello" -# resp = test_client.post( -# f"/collections/{test_item['collection']}/items", json=test_item -# ) -# assert resp.status_code == 200 -# assert resp.json()["properties"]["foo"] == "hello" -# -# # Search for the item -# body = {"query": {"foo": {"eq": "hello"}}} -# resp = test_client.post("/search", json=body) -# assert resp.status_code == 200 -# resp_json = resp.json() -# assert len(resp_json["features"]) == 1 -# assert resp_json["features"][0]["properties"]["foo"] == "hello" -# -# # Cleanup -# transactions.delete_item(test_item["id"], request=MockStarletteRequest) -# transactions.delete_collection(coll.id, request=MockStarletteRequest) -# 
db_session.writer.cached_engine.execute( -# "ALTER TABLE data.items DROP COLUMN foo" -# ) diff --git a/stac_fastapi/sqlalchemy/tests/resources/__init__.py b/stac_fastapi/sqlalchemy/tests/resources/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/sqlalchemy/tests/resources/test_collection.py b/stac_fastapi/sqlalchemy/tests/resources/test_collection.py deleted file mode 100644 index 275b2684..00000000 --- a/stac_fastapi/sqlalchemy/tests/resources/test_collection.py +++ /dev/null @@ -1,118 +0,0 @@ -import pystac - - -def test_create_and_delete_collection(app_client, load_test_data): - """Test creation and deletion of a collection""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "test" - - resp = app_client.post("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = app_client.delete(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - - -def test_create_collection_conflict(app_client, load_test_data): - """Test creation of a collection which already exists""" - # This collection ID is created in the fixture, so this should be a conflict - test_collection = load_test_data("test_collection.json") - resp = app_client.post("/collections", json=test_collection) - assert resp.status_code == 409 - - -def test_delete_missing_collection(app_client): - """Test deletion of a collection which does not exist""" - resp = app_client.delete("/collections/missing-collection") - assert resp.status_code == 404 - - -def test_update_collection_already_exists(app_client, load_test_data): - """Test updating a collection which already exists""" - test_collection = load_test_data("test_collection.json") - test_collection["keywords"].append("test") - resp = app_client.put("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - 
assert "test" in resp_json["keywords"] - - -def test_update_new_collection(app_client, load_test_data): - """Test updating a collection which does not exist (same as creation)""" - test_collection = load_test_data("test_collection.json") - test_collection["id"] = "new-test-collection" - - resp = app_client.put("/collections", json=test_collection) - assert resp.status_code == 404 - - -def test_collection_not_found(app_client): - """Test read a collection which does not exist""" - resp = app_client.get("/collections/does-not-exist") - assert resp.status_code == 404 - - -def test_returns_valid_collection(app_client, load_test_data): - """Test validates fetched collection with jsonschema""" - test_collection = load_test_data("test_collection.json") - resp = app_client.put("/collections", json=test_collection) - assert resp.status_code == 200 - - resp = app_client.get(f"/collections/{test_collection['id']}") - assert resp.status_code == 200 - resp_json = resp.json() - - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", href="https://example.com" - ) - collection = pystac.Collection.from_dict( - resp_json, root=mock_root, preserve_dict=False - ) - collection.validate() - - -def test_get_collection_forwarded_header(app_client, load_test_data): - test_collection = load_test_data("test_collection.json") - app_client.put("/collections", json=test_collection) - - resp = app_client.get( - f"/collections/{test_collection['id']}", - headers={"Forwarded": "proto=https;host=testserver:1234"}, - ) - for link in resp.json()["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_get_collection_x_forwarded_headers(app_client, load_test_data): - test_collection = load_test_data("test_collection.json") - app_client.put("/collections", json=test_collection) - - resp = app_client.get( - f"/collections/{test_collection['id']}", - headers={ - "X-Forwarded-Port": "1234", - "X-Forwarded-Proto": "https", - }, - ) 
- for link in resp.json()["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_get_collection_duplicate_forwarded_headers(app_client, load_test_data): - test_collection = load_test_data("test_collection.json") - app_client.put("/collections", json=test_collection) - - resp = app_client.get( - f"/collections/{test_collection['id']}", - headers={ - "Forwarded": "proto=https;host=testserver:1234", - "X-Forwarded-Port": "4321", - "X-Forwarded-Proto": "http", - }, - ) - for link in resp.json()["links"]: - assert link["href"].startswith("https://testserver:1234/") diff --git a/stac_fastapi/sqlalchemy/tests/resources/test_conformance.py b/stac_fastapi/sqlalchemy/tests/resources/test_conformance.py deleted file mode 100644 index cb85c744..00000000 --- a/stac_fastapi/sqlalchemy/tests/resources/test_conformance.py +++ /dev/null @@ -1,68 +0,0 @@ -import urllib.parse - -import pytest - - -@pytest.fixture -def response(app_client): - return app_client.get("/") - - -@pytest.fixture -def response_json(response): - return response.json() - - -def get_link(landing_page, rel_type): - return next( - filter(lambda link: link["rel"] == rel_type, landing_page["links"]), None - ) - - -def test_landing_page_health(response): - """Test landing page""" - assert response.status_code == 200 - assert response.headers["content-type"] == "application/json" - - -# Parameters for test_landing_page_links test below. 
-# Each tuple has the following values (in this order): -# - Rel type of link to test -# - Expected MIME/Media Type -# - Expected relative path -link_tests = [ - ("root", "application/json", "/"), - ("conformance", "application/json", "/conformance"), - ("service-doc", "text/html", "/api.html"), - ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"), -] - - -@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests) -def test_landing_page_links( - response_json, app_client, rel_type, expected_media_type, expected_path -): - link = get_link(response_json, rel_type) - - assert link is not None, f"Missing {rel_type} link in landing page" - assert link.get("type") == expected_media_type - - link_path = urllib.parse.urlsplit(link.get("href")).path - assert link_path == expected_path - - resp = app_client.get(link_path) - assert resp.status_code == 200 - - -# This endpoint currently returns a 404 for empty result sets, but testing for this response -# code here seems meaningless since it would be the same as if the endpoint did not exist. Once -# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the -# parameterized tests above. 
-def test_search_link(response_json): - search_link = get_link(response_json, "search") - - assert search_link is not None - assert search_link.get("type") == "application/geo+json" - - search_path = urllib.parse.urlsplit(search_link.get("href")).path - assert search_path == "/search" diff --git a/stac_fastapi/sqlalchemy/tests/resources/test_item.py b/stac_fastapi/sqlalchemy/tests/resources/test_item.py deleted file mode 100644 index e19ac774..00000000 --- a/stac_fastapi/sqlalchemy/tests/resources/test_item.py +++ /dev/null @@ -1,992 +0,0 @@ -import json -import os -import time -import uuid -from copy import deepcopy -from datetime import datetime, timedelta, timezone -from random import randint -from urllib.parse import parse_qs, urlparse, urlsplit - -import pystac -from pydantic.datetime_parse import parse_datetime -from pystac.utils import datetime_to_str -from shapely.geometry import Polygon - -from stac_fastapi.sqlalchemy.core import CoreCrudClient -from stac_fastapi.types.core import LandingPageMixin -from stac_fastapi.types.rfc3339 import rfc3339_str_to_datetime - - -def test_create_and_delete_item(app_client, load_test_data): - """Test creation and deletion of a single item (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - resp = app_client.delete( - f"/collections/{test_item['collection']}/items/{resp.json()['id']}" - ) - assert resp.status_code == 200 - - -def test_create_item_conflict(app_client, load_test_data): - """Test creation of an item which already exists (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 409 - - 
-def test_create_item_duplicate(app_client, load_test_data): - """Test creation of an item id which already exists but in a different collection(transactions extension)""" - - # add test_item to test-collection - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # add test_item to test-collection again, resource already exists - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 409 - - # create "test-collection-2" - collection_2 = load_test_data("test_collection.json") - collection_2["id"] = "test-collection-2" - resp = app_client.post("/collections", json=collection_2) - assert resp.status_code == 200 - - # add test_item to test-collection-2, posts successfully - test_item["collection"] = "test-collection-2" - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - -def test_delete_item_duplicate(app_client, load_test_data): - """Test creation of an item id which already exists but in a different collection(transactions extension)""" - - # add test_item to test-collection - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # create "test-collection-2" - collection_2 = load_test_data("test_collection.json") - collection_2["id"] = "test-collection-2" - resp = app_client.post("/collections", json=collection_2) - assert resp.status_code == 200 - - # add test_item to test-collection-2 - test_item["collection"] = "test-collection-2" - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # delete test_item from test-collection - test_item["collection"] 
= "test-collection" - resp = app_client.delete( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - - # test-item in test-collection has already been deleted - resp = app_client.delete( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 404 - - # test-item in test-collection-2 still exists, was not deleted - test_item["collection"] = "test-collection-2" - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 409 - - -def test_update_item_duplicate(app_client, load_test_data): - """Test creation of an item id which already exists but in a different collection(transactions extension)""" - - # add test_item to test-collection - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # create "test-collection-2" - collection_2 = load_test_data("test_collection.json") - collection_2["id"] = "test-collection-2" - resp = app_client.post("/collections", json=collection_2) - assert resp.status_code == 200 - - # add test_item to test-collection-2 - test_item["collection"] = "test-collection-2" - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # update gsd in test_item, test-collection-2 - test_item["properties"]["gsd"] = 16 - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 200 - updated_item = resp.json() - assert updated_item["properties"]["gsd"] == 16 - - # update gsd in test_item, test-collection - test_item["collection"] = "test-collection" - test_item["properties"]["gsd"] = 17 - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert 
resp.status_code == 200 - updated_item = resp.json() - assert updated_item["properties"]["gsd"] == 17 - - # test_item in test-collection, updated gsd = 17 - resp = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - item = resp.json() - assert item["properties"]["gsd"] == 17 - - # test_item in test-collection-2, updated gsd = 16 - test_item["collection"] = "test-collection-2" - resp = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - item = resp.json() - assert item["properties"]["gsd"] == 16 - - -def test_delete_missing_item(app_client, load_test_data): - """Test deletion of an item which does not exist (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.delete(f"/collections/{test_item['collection']}/items/hijosh") - assert resp.status_code == 404 - - -def test_create_item_missing_collection(app_client, load_test_data): - """Test creation of an item without a parent collection (transactions extension)""" - test_item = load_test_data("test_item.json") - test_item["collection"] = "stac is cool" - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 424 - - -def test_update_item_already_exists(app_client, load_test_data): - """Test updating an item which already exists (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - assert test_item["properties"]["gsd"] != 16 - test_item["properties"]["gsd"] = 16 - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - updated_item = resp.json() - assert updated_item["properties"]["gsd"] == 16 - - -def test_update_new_item(app_client, load_test_data): - """Test 
updating an item which does not exist (transactions extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 404 - - -def test_update_item_missing_collection(app_client, load_test_data): - """Test updating an item without a parent collection (transactions extension)""" - test_item = load_test_data("test_item.json") - - # Create the item - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # Try to update collection of the item - test_item["collection"] = "stac is cool" - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 404 - - -def test_update_item_geometry(app_client, load_test_data): - test_item = load_test_data("test_item.json") - - # Create the item - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # Update the geometry of the item - test_item["geometry"]["coordinates"] = [[[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]] - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - json=test_item, - ) - assert resp.status_code == 200 - - # Fetch the updated item - resp = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert resp.status_code == 200 - assert resp.json()["geometry"]["coordinates"] == [ - [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]] - ] - - -def test_get_item(app_client, load_test_data): - """Test read an item by id (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - get_item = app_client.get( - 
f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert get_item.status_code == 200 - - -def test_returns_valid_item(app_client, load_test_data): - """Test validates fetched item with jsonschema""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - get_item = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}" - ) - assert get_item.status_code == 200 - item_dict = get_item.json() - # Mock root to allow validation - mock_root = pystac.Catalog( - id="test", description="test desc", href="https://example.com" - ) - item = pystac.Item.from_dict(item_dict, preserve_dict=False, root=mock_root) - item.validate() - - -def test_get_item_collection(app_client, load_test_data): - """Test read an item collection (core)""" - item_count = randint(1, 4) - test_item = load_test_data("test_item.json") - - for idx in range(item_count): - _test_item = deepcopy(test_item) - _test_item["id"] = test_item["id"] + str(idx) - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=_test_item - ) - assert resp.status_code == 200 - - resp = app_client.get(f"/collections/{test_item['collection']}/items") - assert resp.status_code == 200 - - item_collection = resp.json() - assert item_collection["context"]["matched"] == len(range(item_count)) - - -def test_pagination(app_client, load_test_data): - """Test item collection pagination (paging extension)""" - item_count = 10 - test_item = load_test_data("test_item.json") - - for idx in range(item_count): - _test_item = deepcopy(test_item) - _test_item["id"] = test_item["id"] + str(idx) - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=_test_item - ) - assert resp.status_code == 200 - - resp = app_client.get( - f"/collections/{test_item['collection']}/items", params={"limit": 3} - ) - assert resp.status_code == 
200 - first_page = resp.json() - assert first_page["context"]["returned"] == 3 - - url_components = urlsplit(first_page["links"][0]["href"]) - resp = app_client.get(f"{url_components.path}?{url_components.query}") - assert resp.status_code == 200 - second_page = resp.json() - assert second_page["context"]["returned"] == 3 - - -def test_item_timestamps(app_client, load_test_data): - """Test created and updated timestamps (common metadata)""" - test_item = load_test_data("test_item.json") - start_time = datetime.now(timezone.utc) - time.sleep(2) - # Confirm `created` timestamp - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - item = resp.json() - created_dt = parse_datetime(item["properties"]["created"]) - assert resp.status_code == 200 - assert start_time < created_dt < datetime.now(timezone.utc) - - time.sleep(2) - # Confirm `updated` timestamp - item["properties"]["proj:epsg"] = 4326 - resp = app_client.put( - f"/collections/{test_item['collection']}/items/{item['id']}", json=item - ) - assert resp.status_code == 200 - updated_item = resp.json() - - # Created shouldn't change on update - assert item["properties"]["created"] == updated_item["properties"]["created"] - assert parse_datetime(updated_item["properties"]["updated"]) > created_dt - - -def test_item_search_by_id_post(app_client, load_test_data): - """Test POST search by item id (core)""" - ids = ["test1", "test2", "test3"] - for id in ids: - test_item = load_test_data("test_item.json") - test_item["id"] = id - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = {"collections": [test_item["collection"]], "ids": ids} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) - - -def 
test_item_search_spatial_query_post(app_client, load_test_data): - """Test POST search with spatial query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_temporal_query_post(app_client, load_test_data): - """Test POST search with single-tailed spatio-temporal query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date = item_date + timedelta(seconds=1) - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": f"../{datetime_to_str(item_date)}", - } - resp = app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_temporal_window_post(app_client, load_test_data): - """Test POST search with two-tailed spatio-temporal query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - "collections": [test_item["collection"]], - "intersects": test_item["geometry"], - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp 
= app_client.post("/search", json=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_temporal_open_window(app_client, load_test_data): - """Test POST search with open spatio-temporal query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - for dt in ["/", "../", "/..", "../.."]: - resp = app_client.post("/search", json={"datetime": dt}) - assert resp.status_code == 400 - - -def test_item_search_sort_post(app_client, load_test_data): - """Test POST search with sorting (sort extension)""" - first_item = load_test_data("test_item.json") - item_date = rfc3339_str_to_datetime(first_item["properties"]["datetime"]) - resp = app_client.post( - f"/collections/{first_item['collection']}/items", json=first_item - ) - assert resp.status_code == 200 - - second_item = load_test_data("test_item.json") - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = datetime_to_str(another_item_date) - resp = app_client.post( - f"/collections/{second_item['collection']}/items", json=second_item - ) - assert resp.status_code == 200 - - params = { - "collections": [first_item["collection"]], - "sortby": [{"field": "datetime", "direction": "desc"}], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -def test_item_search_by_id_get(app_client, load_test_data): - """Test GET search by item id (core)""" - ids = ["test1", "test2", "test3"] - for id in ids: - test_item = load_test_data("test_item.json") - test_item["id"] = id - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert 
resp.status_code == 200 - - params = {"collections": test_item["collection"], "ids": ",".join(ids)} - resp = app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == len(ids) - assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) - - -def test_item_search_bbox_get(app_client, load_test_data): - """Test GET search with spatial query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = { - "collections": test_item["collection"], - "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), - } - resp = app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_get_without_collections(app_client, load_test_data): - """Test GET search without specifying collections""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = { - "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), - } - resp = app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_temporal_window_get(app_client, load_test_data): - """Test GET search with spatio-temporal query (core)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - item_date_before = item_date - timedelta(seconds=1) - item_date_after = item_date + timedelta(seconds=1) - - params = { - 
"collections": test_item["collection"], - "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), - "datetime": f"{datetime_to_str(item_date_before)}/{datetime_to_str(item_date_after)}", - } - resp = app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_sort_get(app_client, load_test_data): - """Test GET search with sorting (sort extension)""" - first_item = load_test_data("test_item.json") - item_date = rfc3339_str_to_datetime(first_item["properties"]["datetime"]) - resp = app_client.post( - f"/collections/{first_item['collection']}/items", json=first_item - ) - assert resp.status_code == 200 - - second_item = load_test_data("test_item.json") - second_item["id"] = "another-item" - another_item_date = item_date - timedelta(days=1) - second_item["properties"]["datetime"] = datetime_to_str(another_item_date) - resp = app_client.post( - f"/collections/{second_item['collection']}/items", json=second_item - ) - assert resp.status_code == 200 - params = {"collections": [first_item["collection"]], "sortby": "-datetime"} - resp = app_client.get("/search", params=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == first_item["id"] - assert resp_json["features"][1]["id"] == second_item["id"] - - -def test_item_search_post_without_collection(app_client, load_test_data): - """Test POST search without specifying a collection""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = { - "bbox": test_item["bbox"], - } - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert resp_json["features"][0]["id"] == test_item["id"] - - -def test_item_search_properties_jsonb(app_client, load_test_data): - """Test POST search with JSONB 
query (query extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # EPSG is a JSONB key - params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -def test_item_search_properties_field(app_client, load_test_data): - """Test POST search indexed field with query (query extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # Orientation is an indexed field - params = {"query": {"orientation": {"eq": "south"}}} - resp = app_client.post("/search", json=params) - assert resp.status_code == 200 - resp_json = resp.json() - assert len(resp_json["features"]) == 0 - - -def test_item_search_get_query_extension(app_client, load_test_data): - """Test GET search with JSONB query (query extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - # EPSG is a JSONB key - params = { - "collections": [test_item["collection"]], - "query": json.dumps( - {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} - ), - } - resp = app_client.get("/search", params=params) - assert resp.json()["context"]["returned"] == 0 - - params["query"] = json.dumps( - {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} - ) - resp = app_client.get("/search", params=params) - resp_json = resp.json() - assert resp_json["context"]["returned"] == 1 - assert ( - resp_json["features"][0]["properties"]["proj:epsg"] - == test_item["properties"]["proj:epsg"] - ) - - -def 
test_get_missing_item_collection(app_client): - """Test reading a collection which does not exist""" - resp = app_client.get("/collections/invalid-collection/items") - assert resp.status_code == 200 - - -def test_pagination_item_collection(app_client, load_test_data): - """Test item collection pagination links (paging extension)""" - test_item = load_test_data("test_item.json") - ids = [] - - # Ingest 5 items - for idx in range(5): - uid = str(uuid.uuid4()) - test_item["id"] = uid - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - ids.append(uid) - - # Paginate through all 5 items with a limit of 1 (expecting 5 requests) - page = app_client.get( - f"/collections/{test_item['collection']}/items", params={"limit": 1} - ) - idx = 0 - item_ids = [] - while True: - idx += 1 - page_data = page.json() - item_ids.append(page_data["features"][0]["id"]) - next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) - if not next_link: - break - query_params = parse_qs(urlparse(next_link[0]["href"]).query) - page = app_client.get( - f"/collections/{test_item['collection']}/items", - params=query_params, - ) - - # Our limit is 1 so we expect len(ids) number of requests before we run out of pages - assert idx == len(ids) - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -def test_pagination_post(app_client, load_test_data): - """Test POST pagination (paging extension)""" - test_item = load_test_data("test_item.json") - ids = [] - - # Ingest 5 items - for idx in range(5): - uid = str(uuid.uuid4()) - test_item["id"] = uid - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - ids.append(uid) - - # Paginate through all 5 items with a limit of 1 (expecting 5 requests) - request_body = {"ids": ids, "limit": 1} - page = app_client.post("/search", json=request_body) - idx = 0 
- item_ids = [] - while True: - idx += 1 - page_data = page.json() - item_ids.append(page_data["features"][0]["id"]) - next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) - if not next_link: - break - # Merge request bodies - request_body.update(next_link[0]["body"]) - page = app_client.post("/search", json=request_body) - - # Our limit is 1 so we expect len(ids) number of requests before we run out of pages - assert idx == len(ids) - - # Confirm we have paginated through all items - assert not set(item_ids) - set(ids) - - -def test_pagination_token_idempotent(app_client, load_test_data): - """Test that pagination tokens are idempotent (paging extension)""" - test_item = load_test_data("test_item.json") - ids = [] - - # Ingest 5 items - for idx in range(5): - uid = str(uuid.uuid4()) - test_item["id"] = uid - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - ids.append(uid) - - page = app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) - page_data = page.json() - next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) - - # Confirm token is idempotent - resp1 = app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp2 = app_client.get( - "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) - ) - resp1_data = resp1.json() - resp2_data = resp2.json() - - # Two different requests with the same pagination token should return the same items - assert [item["id"] for item in resp1_data["features"]] == [ - item["id"] for item in resp2_data["features"] - ] - - -def test_field_extension_get(app_client, load_test_data): - """Test GET search with included fields (fields extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - params = {"fields": 
"+properties.proj:epsg,+properties.gsd"} - resp = app_client.get("/search", params=params) - feat_properties = resp.json()["features"][0]["properties"] - assert not set(feat_properties) - {"proj:epsg", "gsd", "datetime"} - - -def test_field_extension_post(app_client, load_test_data): - """Test POST search with included and excluded fields (fields extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - body = { - "fields": { - "exclude": ["assets.B1"], - "include": ["properties.eo:cloud_cover", "properties.orientation"], - } - } - - resp = app_client.post("/search", json=body) - resp_json = resp.json() - assert "B1" not in resp_json["features"][0]["assets"].keys() - assert not set(resp_json["features"][0]["properties"]) - { - "orientation", - "eo:cloud_cover", - "datetime", - } - - -def test_field_extension_exclude_and_include(app_client, load_test_data): - """Test POST search including/excluding same field (fields extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - body = { - "fields": { - "exclude": ["properties.eo:cloud_cover"], - "include": ["properties.eo:cloud_cover"], - } - } - - resp = app_client.post("/search", json=body) - resp_json = resp.json() - assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] - - -def test_field_extension_exclude_default_includes(app_client, load_test_data): - """Test POST search excluding a forbidden field (fields extension)""" - test_item = load_test_data("test_item.json") - resp = app_client.post( - f"/collections/{test_item['collection']}/items", json=test_item - ) - assert resp.status_code == 200 - - body = {"fields": {"exclude": ["geometry"]}} - - resp = app_client.post("/search", json=body) - resp_json = resp.json() - assert 
"geometry" not in resp_json["features"][0] - - -def test_search_intersects_and_bbox(app_client): - """Test POST search intersects and bbox are mutually exclusive (core)""" - bbox = [-118, 34, -117, 35] - geoj = Polygon.from_bounds(*bbox).__geo_interface__ - params = {"bbox": bbox, "intersects": geoj} - resp = app_client.post("/search", json=params) - assert resp.status_code == 400 - - -def test_get_missing_item(app_client, load_test_data): - """Test read item which does not exist (transactions extension)""" - test_coll = load_test_data("test_collection.json") - resp = app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") - assert resp.status_code == 404 - - -def test_search_invalid_query_field(app_client): - body = {"query": {"gsd": {"lt": 100}, "invalid-field": {"eq": 50}}} - resp = app_client.post("/search", json=body) - assert resp.status_code == 400 - - -def test_search_bbox_errors(app_client): - body = {"query": {"bbox": [0]}} - resp = app_client.post("/search", json=body) - assert resp.status_code == 400 - - body = {"query": {"bbox": [100.0, 0.0, 0.0, 105.0, 1.0, 1.0]}} - resp = app_client.post("/search", json=body) - assert resp.status_code == 400 - - params = {"bbox": "100.0,0.0,0.0,105.0"} - resp = app_client.get("/search", params=params) - assert resp.status_code == 400 - - -def test_conformance_classes_configurable(): - """Test conformance class configurability""" - landing = LandingPageMixin() - landing_page = landing._landing_page( - base_url="http://test/test", - conformance_classes=["this is a test"], - extension_schemas=[], - ) - assert landing_page["conformsTo"][0] == "this is a test" - - # Update environment to avoid key error on client instantiation - os.environ["READER_CONN_STRING"] = "testing" - os.environ["WRITER_CONN_STRING"] = "testing" - client = CoreCrudClient(base_conformance_classes=["this is a test"]) - assert client.conformance_classes()[0] == "this is a test" - - -def test_search_datetime_validation_errors(app_client): 
- bad_datetimes = [ - "37-01-01T12:00:27.87Z", - "1985-13-12T23:20:50.52Z", - "1985-12-32T23:20:50.52Z", - "1985-12-01T25:20:50.52Z", - "1985-12-01T00:60:50.52Z", - "1985-12-01T00:06:61.52Z", - "1990-12-31T23:59:61Z", - "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", - ] - for dt in bad_datetimes: - body = {"query": {"datetime": dt}} - resp = app_client.post("/search", json=body) - assert resp.status_code == 400 - - resp = app_client.get("/search?datetime={}".format(dt)) - assert resp.status_code == 400 - - -def test_get_item_forwarded_header(app_client, load_test_data): - test_item = load_test_data("test_item.json") - app_client.post(f"/collections/{test_item['collection']}/items", json=test_item) - get_item = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - headers={"Forwarded": "proto=https;host=testserver:1234"}, - ) - for link in get_item.json()["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_get_item_x_forwarded_headers(app_client, load_test_data): - test_item = load_test_data("test_item.json") - app_client.post(f"/collections/{test_item['collection']}/items", json=test_item) - get_item = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - headers={ - "X-Forwarded-Port": "1234", - "X-Forwarded-Proto": "https", - }, - ) - for link in get_item.json()["links"]: - assert link["href"].startswith("https://testserver:1234/") - - -def test_get_item_duplicate_forwarded_headers(app_client, load_test_data): - test_item = load_test_data("test_item.json") - app_client.post(f"/collections/{test_item['collection']}/items", json=test_item) - get_item = app_client.get( - f"/collections/{test_item['collection']}/items/{test_item['id']}", - headers={ - "Forwarded": "proto=https;host=testserver:1234", - "X-Forwarded-Port": "4321", - "X-Forwarded-Proto": "http", - }, - ) - for link in get_item.json()["links"]: - assert 
link["href"].startswith("https://testserver:1234/") diff --git a/stac_fastapi/sqlalchemy/tests/resources/test_mgmt.py b/stac_fastapi/sqlalchemy/tests/resources/test_mgmt.py deleted file mode 100644 index 0a11e38e..00000000 --- a/stac_fastapi/sqlalchemy/tests/resources/test_mgmt.py +++ /dev/null @@ -1,9 +0,0 @@ -def test_ping_no_param(app_client): - """ - Test ping endpoint with a mocked client. - Args: - app_client (TestClient): mocked client fixture - """ - res = app_client.get("/_mgmt/ping") - assert res.status_code == 200 - assert res.json() == {"message": "PONG"} diff --git a/stac_fastapi/testdata/joplin/collection.json b/stac_fastapi/testdata/joplin/collection.json deleted file mode 100644 index 992e64b9..00000000 --- a/stac_fastapi/testdata/joplin/collection.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "id": "joplin", - "description": "This imagery was acquired by the NOAA Remote Sensing Division to support NOAA national security and emergency response requirements. In addition, it will be used for ongoing research efforts for testing and developing standards for airborne digital imagery. Individual images have been combined into a larger mosaic and tiled for distribution. 
The approximate ground sample distance (GSD) for each pixel is 35 cm (1.14 feet).", - "stac_version": "1.0.0", - "license": "public-domain", - "links": [ - { - "rel": "license", - "href": "https://creativecommons.org/licenses/publicdomain/", - "title": "public domain" - } - ], - "type": "Collection", - "extent": { - "spatial": { - "bbox": [ - [ - -94.6911621, - 37.0332547, - -94.402771, - 37.1077651 - ] - ] - }, - "temporal": { - "interval": [ - [ - "2000-02-01T00:00:00Z", - "2000-02-12T00:00:00Z" - ] - ] - } - } -} diff --git a/stac_fastapi/testdata/joplin/feature.geojson b/stac_fastapi/testdata/joplin/feature.geojson deleted file mode 100644 index 47db3190..00000000 --- a/stac_fastapi/testdata/joplin/feature.geojson +++ /dev/null @@ -1,59 +0,0 @@ -{ - "id": "f2cca2a3-288b-4518-8a3e-a4492bb60b08", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6884155, - 37.0595608 - ], - [ - -94.6884155, - 37.0332547 - ], - [ - -94.6554565, - 37.0332547 - ], - [ - -94.6554565, - 37.0595608 - ], - [ - -94.6884155, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C350000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6884155, - 37.0332547, - -94.6554565, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" -} \ No newline at end of file diff --git a/stac_fastapi/testdata/joplin/index.geojson b/stac_fastapi/testdata/joplin/index.geojson deleted file mode 100644 index 1bc8dde5..00000000 --- 
a/stac_fastapi/testdata/joplin/index.geojson +++ /dev/null @@ -1,1775 +0,0 @@ -{ - "type": "FeatureCollection", - "features": [ - { - "id": "f2cca2a3-288b-4518-8a3e-a4492bb60b08", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6884155, - 37.0595608 - ], - [ - -94.6884155, - 37.0332547 - ], - [ - -94.6554565, - 37.0332547 - ], - [ - -94.6554565, - 37.0595608 - ], - [ - -94.6884155, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C350000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6884155, - 37.0332547, - -94.6554565, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "a7e125ba-565d-4aa2-bbf3-c57a9087c2e3", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6884155, - 37.0814756 - ], - [ - -94.6884155, - 37.0551771 - ], - [ - -94.6582031, - 37.0551771 - ], - [ - -94.6582031, - 37.0814756 - ], - [ - -94.6884155, - 37.0814756 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C350000e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6884155, - 37.0551771, - 
-94.6582031, - 37.0814756 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "f7f164c9-cfdf-436d-a3f0-69864c38ba2a", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6911621, - 37.1033841 - ], - [ - -94.6911621, - 37.0770932 - ], - [ - -94.6582031, - 37.0770932 - ], - [ - -94.6582031, - 37.1033841 - ], - [ - -94.6911621, - 37.1033841 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C350000e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6911621, - 37.0770932, - -94.6582031, - 37.1033841 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "ea0fddf4-56f9-4a16-8a0b-f6b0b123b7cf", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6609497, - 37.0595608 - ], - [ - -94.6609497, - 37.0332547 - ], - [ - -94.6279907, - 37.0332547 - ], - [ - -94.6279907, - 37.0595608 - ], - [ - -94.6609497, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C352500e4102500n.tif", 
- "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6609497, - 37.0332547, - -94.6279907, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "c811e716-ab07-4d80-ac95-6670f8713bc4", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6609497, - 37.0814756 - ], - [ - -94.6609497, - 37.0551771 - ], - [ - -94.6279907, - 37.0551771 - ], - [ - -94.6279907, - 37.0814756 - ], - [ - -94.6609497, - 37.0814756 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C352500e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6609497, - 37.0551771, - -94.6279907, - 37.0814756 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "d4eccfa2-7d77-4624-9e2a-3f59102285bb", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6609497, - 37.1033841 - ], - [ - -94.6609497, - 37.0770932 - ], - [ - -94.6279907, - 37.0770932 - ], - [ - -94.6279907, - 37.1033841 - ], - [ - -94.6609497, - 37.1033841 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": 
"https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C352500e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6609497, - 37.0770932, - -94.6279907, - 37.1033841 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "fe916452-ba6f-4631-9154-c249924a122d", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6334839, - 37.0595608 - ], - [ - -94.6334839, - 37.0332547 - ], - [ - -94.6005249, - 37.0332547 - ], - [ - -94.6005249, - 37.0595608 - ], - [ - -94.6334839, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C355000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6334839, - 37.0332547, - -94.6005249, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "85f923a5-a81f-4acd-bc7f-96c7c915f357", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6334839, - 37.0814756 - ], - [ - -94.6334839, - 37.0551771 - ], - [ - -94.6005249, - 37.0551771 - ], - [ - -94.6005249, - 37.0814756 - ], - [ - -94.6334839, - 37.0814756 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { 
- "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C355000e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6334839, - 37.0551771, - -94.6005249, - 37.0814756 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "29c53e17-d7d1-4394-a80f-36763c8f42dc", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6334839, - 37.1055746 - ], - [ - -94.6334839, - 37.0792845 - ], - [ - -94.6005249, - 37.0792845 - ], - [ - -94.6005249, - 37.1055746 - ], - [ - -94.6334839, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C355000e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6334839, - 37.0792845, - -94.6005249, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "e0a02e4e-aa0c-412e-8f63-6f5344f829df", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6060181, - 37.0595608 - ], - [ - -94.6060181, - 37.0332547 - ], - [ - -94.5730591, - 37.0332547 - ], - [ - -94.5730591, - 37.0595608 - ], - [ - -94.6060181, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": 
"2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C357500e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6060181, - 37.0332547, - -94.5730591, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "047ab5f0-dce1-4166-a00d-425a3dbefe02", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6060181, - 37.0814756 - ], - [ - -94.6060181, - 37.057369 - ], - [ - -94.5730591, - 37.057369 - ], - [ - -94.5730591, - 37.0814756 - ], - [ - -94.6060181, - 37.0814756 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C357500e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6060181, - 37.057369, - -94.5730591, - 37.0814756 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "57f88dd2-e4e0-48e6-a2b6-7282d4ab8ea4", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.6060181, - 37.1055746 - ], - [ - -94.6060181, - 37.0792845 - ], - [ - -94.5730591, - 37.0792845 - ], - [ - -94.5730591, - 37.1055746 - ], - [ - -94.6060181, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 
3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C357500e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.6060181, - 37.0792845, - -94.5730591, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "68f2c2b2-4bce-4c40-9a0d-782c1be1f4f2", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.5758057, - 37.0595608 - ], - [ - -94.5758057, - 37.0332547 - ], - [ - -94.5428467, - 37.0332547 - ], - [ - -94.5428467, - 37.0595608 - ], - [ - -94.5758057, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C360000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5758057, - 37.0332547, - -94.5428467, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "d8461d8c-3d2b-4e4e-a931-7ae61ca06dbf", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.5758057, - 37.0836668 - ], - [ - -94.5758057, - 37.057369 - ], - [ - -94.5455933, - 37.057369 - ], - [ - -94.5455933, - 37.0836668 - ], - [ 
- -94.5758057, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C360000e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5758057, - 37.057369, - -94.5455933, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "aeedef30-cbdd-4364-8781-dbb42d148c99", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.5785522, - 37.1055746 - ], - [ - -94.5785522, - 37.0792845 - ], - [ - -94.5455933, - 37.0792845 - ], - [ - -94.5455933, - 37.1055746 - ], - [ - -94.5785522, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C360000e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5785522, - 37.0792845, - -94.5455933, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "9ef4279f-386c-40c7-ad71-8de5d9543aa4", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.5483398, - 37.0595608 - ], - [ - -94.5483398, - 37.0354472 - 
], - [ - -94.5153809, - 37.0354472 - ], - [ - -94.5153809, - 37.0595608 - ], - [ - -94.5483398, - 37.0595608 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C362500e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5483398, - 37.0354472, - -94.5153809, - 37.0595608 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "70cc6c05-9fe0-436a-a264-a52515f3f242", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.5483398, - 37.0836668 - ], - [ - -94.5483398, - 37.057369 - ], - [ - -94.5153809, - 37.057369 - ], - [ - -94.5153809, - 37.0836668 - ], - [ - -94.5483398, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C362500e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5483398, - 37.057369, - -94.5153809, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "d191a6fd-7881-4421-805c-e246371e5cc4", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": 
[ - [ - [ - -94.5483398, - 37.1055746 - ], - [ - -94.5483398, - 37.0792845 - ], - [ - -94.5181274, - 37.0792845 - ], - [ - -94.5181274, - 37.1055746 - ], - [ - -94.5483398, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C362500e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.5483398, - 37.0792845, - -94.5181274, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "d144adde-df4a-45e8-bed9-f085f91486a2", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.520874, - 37.0617526 - ], - [ - -94.520874, - 37.0354472 - ], - [ - -94.487915, - 37.0354472 - ], - [ - -94.487915, - 37.0617526 - ], - [ - -94.520874, - 37.0617526 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C365000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.520874, - 37.0354472, - -94.487915, - 37.0617526 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "a4c32abd-9791-422b-87ab-b0f3fa36f053", - "type": "Feature", - "collection": 
"joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.520874, - 37.0836668 - ], - [ - -94.520874, - 37.057369 - ], - [ - -94.487915, - 37.057369 - ], - [ - -94.487915, - 37.0836668 - ], - [ - -94.520874, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C365000e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.520874, - 37.057369, - -94.487915, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "4610c58e-39f4-4d9d-94ba-ceddbf9ac570", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.520874, - 37.1055746 - ], - [ - -94.520874, - 37.0792845 - ], - [ - -94.487915, - 37.0792845 - ], - [ - -94.487915, - 37.1055746 - ], - [ - -94.520874, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C365000e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.520874, - 37.0792845, - -94.487915, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": 
"145fa700-16d4-4d34-98e0-7540d5c0885f", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4934082, - 37.0617526 - ], - [ - -94.4934082, - 37.0354472 - ], - [ - -94.4604492, - 37.0354472 - ], - [ - -94.4604492, - 37.0617526 - ], - [ - -94.4934082, - 37.0617526 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C367500e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.4934082, - 37.0354472, - -94.4604492, - 37.0617526 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "a89dc7b8-a580-435b-8176-d8e4386d620c", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4934082, - 37.0836668 - ], - [ - -94.4934082, - 37.057369 - ], - [ - -94.4604492, - 37.057369 - ], - [ - -94.4604492, - 37.0836668 - ], - [ - -94.4934082, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C367500e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.4934082, - 37.057369, - -94.4604492, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - 
"https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "386dfa13-c2b4-4ce6-8e6f-fcac73f4e64e", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4934082, - 37.1055746 - ], - [ - -94.4934082, - 37.0792845 - ], - [ - -94.4604492, - 37.0792845 - ], - [ - -94.4604492, - 37.1055746 - ], - [ - -94.4934082, - 37.1055746 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C367500e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.4934082, - 37.0792845, - -94.4604492, - 37.1055746 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "4d8a8e40-d089-4ca7-92c8-27d810ee07bf", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4631958, - 37.0617526 - ], - [ - -94.4631958, - 37.0354472 - ], - [ - -94.4329834, - 37.0354472 - ], - [ - -94.4329834, - 37.0617526 - ], - [ - -94.4631958, - 37.0617526 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C370000e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.4631958, - 37.0354472, - -94.4329834, - 37.0617526 - ], - 
"stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "f734401c-2df0-4694-a353-cdd3ea760cdc", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4631958, - 37.0836668 - ], - [ - -94.4631958, - 37.057369 - ], - [ - -94.4329834, - 37.057369 - ], - [ - -94.4329834, - 37.0836668 - ], - [ - -94.4631958, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C370000e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.4631958, - 37.057369, - -94.4329834, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "da6ef938-c58f-4bab-9d4e-89f6ae667da2", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.4659424, - 37.1077651 - ], - [ - -94.4659424, - 37.0814756 - ], - [ - -94.4329834, - 37.0814756 - ], - [ - -94.4329834, - 37.1077651 - ], - [ - -94.4659424, - 37.1077651 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C370000e4107500n.tif", - "title": "NOAA STORM COG" - } - 
}, - "bbox": [ - -94.4659424, - 37.0814756, - -94.4329834, - 37.1077651 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "ad420ced-b005-472b-a6df-3838c2b74504", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.43573, - 37.0617526 - ], - [ - -94.43573, - 37.0354472 - ], - [ - -94.402771, - 37.0354472 - ], - [ - -94.402771, - 37.0617526 - ], - [ - -94.43573, - 37.0617526 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C372500e4102500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.43573, - 37.0354472, - -94.402771, - 37.0617526 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "f490b7af-0019-45e2-854b-3854d07fd063", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.43573, - 37.0836668 - ], - [ - -94.43573, - 37.0595608 - ], - [ - -94.402771, - 37.0595608 - ], - [ - -94.402771, - 37.0836668 - ], - [ - -94.43573, - 37.0836668 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": 
"https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C372500e4105000n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.43573, - 37.0595608, - -94.402771, - 37.0836668 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - }, - { - "id": "b853f353-4b72-44d5-aa44-c07dfd307138", - "type": "Feature", - "collection": "joplin", - "links": [], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -94.43573, - 37.1077651 - ], - [ - -94.43573, - 37.0814756 - ], - [ - -94.4055176, - 37.0814756 - ], - [ - -94.4055176, - 37.1077651 - ], - [ - -94.43573, - 37.1077651 - ] - ] - ] - }, - "properties": { - "proj:epsg": 3857, - "orientation": "nadir", - "height": 2500, - "width": 2500, - "datetime": "2000-02-02T00:00:00Z", - "gsd": 0.5971642834779395 - }, - "assets": { - "COG": { - "type": "image/tiff; application=geotiff; profile=cloud-optimized", - "href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C372500e4107500n.tif", - "title": "NOAA STORM COG" - } - }, - "bbox": [ - -94.43573, - 37.0814756, - -94.4055176, - 37.1077651 - ], - "stac_extensions": [ - "https://stac-extensions.github.io/eo/v1.0.0/schema.json", - "https://stac-extensions.github.io/projection/v1.0.0/schema.json" - ], - "stac_version": "1.0.0" - } - ] -} \ No newline at end of file diff --git a/stac_fastapi/types/README.md b/stac_fastapi/types/README.md deleted file mode 100644 index e69de29b..00000000 diff --git a/stac_fastapi/types/setup.cfg b/stac_fastapi/types/setup.cfg deleted file mode 100644 index d65b1217..00000000 --- a/stac_fastapi/types/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -version = attr: stac_fastapi.types.version.__version__ diff --git a/stac_fastapi/types/setup.py b/stac_fastapi/types/setup.py deleted file mode 100644 index 3d9f77ef..00000000 --- a/stac_fastapi/types/setup.py +++ 
/dev/null @@ -1,52 +0,0 @@ -"""stac_fastapi: types module.""" - -from setuptools import find_namespace_packages, setup - -with open("README.md") as f: - desc = f.read() - -install_requires = [ - "fastapi>=0.73.0", - "attrs", - "pydantic[dotenv]", - "stac_pydantic==2.0.*", - "pystac==1.*", - "ciso8601~=2.2.0", -] - -extra_reqs = { - "dev": [ - "pytest", - "pytest-cov", - "pytest-asyncio", - "pre-commit", - "requests", - ], - "docs": ["mkdocs", "mkdocs-material", "pdocs"], -} - - -setup( - name="stac-fastapi.types", - description="An implementation of STAC API based on the FastAPI framework.", - long_description=desc, - long_description_content_type="text/markdown", - python_requires=">=3.8", - classifiers=[ - "Intended Audience :: Developers", - "Intended Audience :: Information Technology", - "Intended Audience :: Science/Research", - "Programming Language :: Python :: 3.8", - "License :: OSI Approved :: MIT License", - ], - keywords="STAC FastAPI COG", - author="Arturo Engineering", - author_email="engineering@arturo.ai", - url="https://github.com/stac-utils/stac-fastapi", - license="MIT", - packages=find_namespace_packages(exclude=["alembic", "tests", "scripts"]), - zip_safe=False, - install_requires=install_requires, - tests_require=extra_reqs["dev"], - extras_require=extra_reqs, -) diff --git a/stac_fastapi/types/stac_fastapi/types/__init__.py b/stac_fastapi/types/stac_fastapi/types/__init__.py deleted file mode 100644 index e1a54d43..00000000 --- a/stac_fastapi/types/stac_fastapi/types/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""backend submodule.""" diff --git a/stac_fastapi/types/stac_fastapi/types/config.py b/stac_fastapi/types/stac_fastapi/types/config.py deleted file mode 100644 index a5ffbb95..00000000 --- a/stac_fastapi/types/stac_fastapi/types/config.py +++ /dev/null @@ -1,54 +0,0 @@ -"""stac_fastapi.types.config module.""" -from typing import Optional, Set - -from pydantic import BaseSettings - - -class ApiSettings(BaseSettings): - """ApiSettings. 
- - Defines api configuration, potentially through environment variables. - See https://pydantic-docs.helpmanual.io/usage/settings/. - Attributes: - environment: name of the environment (ex. dev/prod). - debug: toggles debug mode. - forbidden_fields: set of fields defined by STAC but not included in the database. - indexed_fields: - set of fields which are usually in `item.properties` but are indexed as distinct columns in - the database. - """ - - # TODO: Remove `default_includes` attribute so we can use `pydantic.BaseSettings` instead - default_includes: Optional[Set[str]] = None - - app_host: str = "0.0.0.0" - app_port: int = 8000 - reload: bool = True - enable_response_models: bool = False - - openapi_url: str = "/api" - docs_url: str = "/api.html" - - class Config: - """model config (https://pydantic-docs.helpmanual.io/usage/model_config/).""" - - extra = "allow" - env_file = ".env" - - -class Settings: - """Holds the global instance of settings.""" - - _instance: Optional[ApiSettings] = None - - @classmethod - def set(cls, base_settings: ApiSettings): - """Set the global settings.""" - cls._instance = base_settings - - @classmethod - def get(cls) -> ApiSettings: - """Get the settings. 
If they have not yet been set, throws an exception.""" - if cls._instance is None: - raise ValueError("Settings have not yet been set.") - return cls._instance diff --git a/stac_fastapi/types/stac_fastapi/types/conformance.py b/stac_fastapi/types/stac_fastapi/types/conformance.py deleted file mode 100644 index 49f1323b..00000000 --- a/stac_fastapi/types/stac_fastapi/types/conformance.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Conformance Classes.""" -from enum import Enum - - -class STACConformanceClasses(str, Enum): - """Conformance classes for the STAC API spec.""" - - CORE = "https://api.stacspec.org/v1.0.0-rc.1/core" - OGC_API_FEAT = "https://api.stacspec.org/v1.0.0-rc.1/ogcapi-features" - COLLECTIONS = "https://api.stacspec.org/v1.0.0-rc.1/collections" - ITEM_SEARCH = "https://api.stacspec.org/v1.0.0-rc.1/item-search" - - -class OAFConformanceClasses(str, Enum): - """Conformance classes for OGC API - Features.""" - - CORE = "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core" - OPEN_API = "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30" - GEOJSON = "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson" - - -BASE_CONFORMANCE_CLASSES = [ - STACConformanceClasses.CORE, - STACConformanceClasses.OGC_API_FEAT, - STACConformanceClasses.COLLECTIONS, - STACConformanceClasses.ITEM_SEARCH, - OAFConformanceClasses.CORE, - OAFConformanceClasses.OPEN_API, - OAFConformanceClasses.GEOJSON, -] diff --git a/stac_fastapi/types/stac_fastapi/types/core.py b/stac_fastapi/types/stac_fastapi/types/core.py deleted file mode 100644 index bce7ca2a..00000000 --- a/stac_fastapi/types/stac_fastapi/types/core.py +++ /dev/null @@ -1,755 +0,0 @@ -"""Base clients.""" -import abc -from datetime import datetime -from typing import Any, Dict, List, Optional, Union -from urllib.parse import urljoin - -import attr -from fastapi import Request -from stac_pydantic.links import Relations -from stac_pydantic.shared import MimeTypes -from stac_pydantic.version import 
STAC_VERSION -from starlette.responses import Response - -from stac_fastapi.types import stac as stac_types -from stac_fastapi.types.conformance import BASE_CONFORMANCE_CLASSES -from stac_fastapi.types.extension import ApiExtension -from stac_fastapi.types.requests import get_base_url -from stac_fastapi.types.search import BaseSearchPostRequest -from stac_fastapi.types.stac import Conformance - -NumType = Union[float, int] -StacType = Dict[str, Any] - - -@attr.s # type:ignore -class BaseTransactionsClient(abc.ABC): - """Defines a pattern for implementing the STAC API Transaction Extension.""" - - @abc.abstractmethod - def create_item( - self, collection_id: str, item: stac_types.Item, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Create a new item. - - Called with `POST /collections/{collection_id}/items`. - - Args: - item: the item - collection_id: the id of the collection from the resource path - - Returns: - The item that was created. - - """ - ... - - @abc.abstractmethod - def update_item( - self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Perform a complete update on an existing item. - - Called with `PUT /collections/{collection_id}/items`. It is expected that this item already exists. The update - should do a diff against the saved item and perform any necessary updates. Partial updates are not supported - by the transactions extension. - - Args: - item: the item (must be complete) - collection_id: the id of the collection from the resource path - - Returns: - The updated item. - """ - ... - - @abc.abstractmethod - def delete_item( - self, item_id: str, collection_id: str, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Delete an item from a collection. - - Called with `DELETE /collections/{collection_id}/items/{item_id}` - - Args: - item_id: id of the item. - collection_id: id of the collection. - - Returns: - The deleted item. - """ - ... 
- - @abc.abstractmethod - def create_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Create a new collection. - - Called with `POST /collections`. - - Args: - collection: the collection - - Returns: - The collection that was created. - """ - ... - - @abc.abstractmethod - def update_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Perform a complete update on an existing collection. - - Called with `PUT /collections`. It is expected that this item already exists. The update should do a diff - against the saved collection and perform any necessary updates. Partial updates are not supported by the - transactions extension. - - Args: - collection: the collection (must be complete) - collection_id: the id of the collection from the resource path - - Returns: - The updated collection. - """ - ... - - @abc.abstractmethod - def delete_collection( - self, collection_id: str, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Delete a collection. - - Called with `DELETE /collections/{collection_id}` - - Args: - collection_id: id of the collection. - - Returns: - The deleted collection. - """ - ... - - -@attr.s # type:ignore -class AsyncBaseTransactionsClient(abc.ABC): - """Defines a pattern for implementing the STAC transaction extension.""" - - @abc.abstractmethod - async def create_item( - self, collection_id: str, item: stac_types.Item, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Create a new item. - - Called with `POST /collections/{collection_id}/items`. - - Args: - item: the item - - Returns: - The item that was created. - - """ - ... - - @abc.abstractmethod - async def update_item( - self, collection_id: str, item_id: str, item: stac_types.Item, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Perform a complete update on an existing item. 
- - Called with `PUT /collections/{collection_id}/items`. It is expected that this item already exists. The update - should do a diff against the saved item and perform any necessary updates. Partial updates are not supported - by the transactions extension. - - Args: - item: the item (must be complete) - - Returns: - The updated item. - """ - ... - - @abc.abstractmethod - async def delete_item( - self, item_id: str, collection_id: str, **kwargs - ) -> Optional[Union[stac_types.Item, Response]]: - """Delete an item from a collection. - - Called with `DELETE /collections/{collection_id}/items/{item_id}` - - Args: - item_id: id of the item. - collection_id: id of the collection. - - Returns: - The deleted item. - """ - ... - - @abc.abstractmethod - async def create_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Create a new collection. - - Called with `POST /collections`. - - Args: - collection: the collection - - Returns: - The collection that was created. - """ - ... - - @abc.abstractmethod - async def update_collection( - self, collection: stac_types.Collection, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Perform a complete update on an existing collection. - - Called with `PUT /collections`. It is expected that this item already exists. The update should do a diff - against the saved collection and perform any necessary updates. Partial updates are not supported by the - transactions extension. - - Args: - collection: the collection (must be complete) - - Returns: - The updated collection. - """ - ... - - @abc.abstractmethod - async def delete_collection( - self, collection_id: str, **kwargs - ) -> Optional[Union[stac_types.Collection, Response]]: - """Delete a collection. - - Called with `DELETE /collections/{collection_id}` - - Args: - collection_id: id of the collection. - - Returns: - The deleted collection. - """ - ... 
- - -@attr.s -class LandingPageMixin(abc.ABC): - """Create a STAC landing page (GET /).""" - - stac_version: str = attr.ib(default=STAC_VERSION) - landing_page_id: str = attr.ib(default="stac-fastapi") - title: str = attr.ib(default="stac-fastapi") - description: str = attr.ib(default="stac-fastapi") - - def _landing_page( - self, - base_url: str, - conformance_classes: List[str], - extension_schemas: List[str], - ) -> stac_types.LandingPage: - landing_page = stac_types.LandingPage( - type="Catalog", - id=self.landing_page_id, - title=self.title, - description=self.description, - stac_version=self.stac_version, - conformsTo=conformance_classes, - links=[ - { - "rel": Relations.self.value, - "type": MimeTypes.json, - "href": base_url, - }, - { - "rel": Relations.root.value, - "type": MimeTypes.json, - "href": base_url, - }, - { - "rel": "data", - "type": MimeTypes.json, - "href": urljoin(base_url, "collections"), - }, - { - "rel": Relations.conformance.value, - "type": MimeTypes.json, - "title": "STAC/WFS3 conformance classes implemented by this server", - "href": urljoin(base_url, "conformance"), - }, - { - "rel": Relations.search.value, - "type": MimeTypes.geojson, - "title": "STAC search", - "href": urljoin(base_url, "search"), - "method": "GET", - }, - { - "rel": Relations.search.value, - "type": MimeTypes.geojson, - "title": "STAC search", - "href": urljoin(base_url, "search"), - "method": "POST", - }, - ], - stac_extensions=extension_schemas, - ) - return landing_page - - -@attr.s # type:ignore -class BaseCoreClient(LandingPageMixin, abc.ABC): - """Defines a pattern for implementing STAC api core endpoints. - - Attributes: - extensions: list of registered api extensions. 
- """ - - base_conformance_classes: List[str] = attr.ib( - factory=lambda: BASE_CONFORMANCE_CLASSES - ) - extensions: List[ApiExtension] = attr.ib(default=attr.Factory(list)) - post_request_model = attr.ib(default=BaseSearchPostRequest) - - def conformance_classes(self) -> List[str]: - """Generate conformance classes by adding extension conformance to base conformance classes.""" - base_conformance_classes = self.base_conformance_classes.copy() - - for extension in self.extensions: - extension_classes = getattr(extension, "conformance_classes", []) - base_conformance_classes.extend(extension_classes) - - return list(set(base_conformance_classes)) - - def extension_is_enabled(self, extension: str) -> bool: - """Check if an api extension is enabled.""" - return any([type(ext).__name__ == extension for ext in self.extensions]) - - def list_conformance_classes(self): - """Return a list of conformance classes, including implemented extensions.""" - base_conformance = BASE_CONFORMANCE_CLASSES - - for extension in self.extensions: - extension_classes = getattr(extension, "conformance_classes", []) - base_conformance.extend(extension_classes) - - return base_conformance - - def landing_page(self, **kwargs) -> stac_types.LandingPage: - """Landing page. - - Called with `GET /`. - - Returns: - API landing page, serving as an entry point to the API. 
- """ - request: Request = kwargs["request"] - base_url = get_base_url(request) - extension_schemas = [ - schema.schema_href for schema in self.extensions if schema.schema_href - ] - landing_page = self._landing_page( - base_url=base_url, - conformance_classes=self.conformance_classes(), - extension_schemas=extension_schemas, - ) - - # Add Collections links - collections = self.all_collections(request=kwargs["request"]) - for collection in collections["collections"]: - landing_page["links"].append( - { - "rel": Relations.child.value, - "type": MimeTypes.json.value, - "title": collection.get("title") or collection.get("id"), - "href": urljoin(base_url, f"collections/{collection['id']}"), - } - ) - - # Add OpenAPI URL - landing_page["links"].append( - { - "rel": "service-desc", - "type": "application/vnd.oai.openapi+json;version=3.0", - "title": "OpenAPI service description", - "href": urljoin( - str(request.base_url), request.app.openapi_url.lstrip("/") - ), - } - ) - - # Add human readable service-doc - landing_page["links"].append( - { - "rel": "service-doc", - "type": "text/html", - "title": "OpenAPI service documentation", - "href": urljoin( - str(request.base_url), request.app.docs_url.lstrip("/") - ), - } - ) - - return landing_page - - def conformance(self, **kwargs) -> stac_types.Conformance: - """Conformance classes. - - Called with `GET /conformance`. - - Returns: - Conformance classes which the server conforms to. - """ - return Conformance(conformsTo=self.conformance_classes()) - - @abc.abstractmethod - def post_search( - self, search_request: BaseSearchPostRequest, **kwargs - ) -> stac_types.ItemCollection: - """Cross catalog search (POST). - - Called with `POST /search`. - - Args: - search_request: search request parameters. - - Returns: - ItemCollection containing items which match the search criteria. - """ - ... 
- - @abc.abstractmethod - def get_search( - self, - collections: Optional[List[str]] = None, - ids: Optional[List[str]] = None, - bbox: Optional[List[NumType]] = None, - datetime: Optional[Union[str, datetime]] = None, - limit: Optional[int] = 10, - query: Optional[str] = None, - token: Optional[str] = None, - fields: Optional[List[str]] = None, - sortby: Optional[str] = None, - **kwargs, - ) -> stac_types.ItemCollection: - """Cross catalog search (GET). - - Called with `GET /search`. - - Returns: - ItemCollection containing items which match the search criteria. - """ - ... - - @abc.abstractmethod - def get_item(self, item_id: str, collection_id: str, **kwargs) -> stac_types.Item: - """Get item by id. - - Called with `GET /collections/{collection_id}/items/{item_id}`. - - Args: - item_id: Id of the item. - collection_id: Id of the collection. - - Returns: - Item. - """ - ... - - @abc.abstractmethod - def all_collections(self, **kwargs) -> stac_types.Collections: - """Get all available collections. - - Called with `GET /collections`. - - Returns: - A list of collections. - """ - ... - - @abc.abstractmethod - def get_collection(self, collection_id: str, **kwargs) -> stac_types.Collection: - """Get collection by id. - - Called with `GET /collections/{collection_id}`. - - Args: - collection_id: Id of the collection. - - Returns: - Collection. - """ - ... - - @abc.abstractmethod - def item_collection( - self, collection_id: str, limit: int = 10, token: str = None, **kwargs - ) -> stac_types.ItemCollection: - """Get all items from a specific collection. - - Called with `GET /collections/{collection_id}/items` - - Args: - collection_id: id of the collection. - limit: number of items to return. - token: pagination token. - - Returns: - An ItemCollection. - """ - ... - - -@attr.s # type:ignore -class AsyncBaseCoreClient(LandingPageMixin, abc.ABC): - """Defines a pattern for implementing STAC api core endpoints. 
- - Attributes: - extensions: list of registered api extensions. - """ - - base_conformance_classes: List[str] = attr.ib( - factory=lambda: BASE_CONFORMANCE_CLASSES - ) - extensions: List[ApiExtension] = attr.ib(default=attr.Factory(list)) - post_request_model = attr.ib(default=BaseSearchPostRequest) - - def conformance_classes(self) -> List[str]: - """Generate conformance classes by adding extension conformance to base conformance classes.""" - conformance_classes = self.base_conformance_classes.copy() - - for extension in self.extensions: - extension_classes = getattr(extension, "conformance_classes", []) - conformance_classes.extend(extension_classes) - - return list(set(conformance_classes)) - - def extension_is_enabled(self, extension: str) -> bool: - """Check if an api extension is enabled.""" - return any([type(ext).__name__ == extension for ext in self.extensions]) - - async def landing_page(self, **kwargs) -> stac_types.LandingPage: - """Landing page. - - Called with `GET /`. - - Returns: - API landing page, serving as an entry point to the API. 
- """ - request: Request = kwargs["request"] - base_url = get_base_url(request) - extension_schemas = [ - schema.schema_href for schema in self.extensions if schema.schema_href - ] - landing_page = self._landing_page( - base_url=base_url, - conformance_classes=self.conformance_classes(), - extension_schemas=extension_schemas, - ) - collections = await self.all_collections(request=kwargs["request"]) - for collection in collections["collections"]: - landing_page["links"].append( - { - "rel": Relations.child.value, - "type": MimeTypes.json.value, - "title": collection.get("title") or collection.get("id"), - "href": urljoin(base_url, f"collections/{collection['id']}"), - } - ) - - # Add OpenAPI URL - landing_page["links"].append( - { - "rel": "service-desc", - "type": "application/vnd.oai.openapi+json;version=3.0", - "title": "OpenAPI service description", - "href": urljoin( - str(request.base_url), request.app.openapi_url.lstrip("/") - ), - } - ) - - # Add human readable service-doc - landing_page["links"].append( - { - "rel": "service-doc", - "type": "text/html", - "title": "OpenAPI service documentation", - "href": urljoin( - str(request.base_url), request.app.docs_url.lstrip("/") - ), - } - ) - - return landing_page - - async def conformance(self, **kwargs) -> stac_types.Conformance: - """Conformance classes. - - Called with `GET /conformance`. - - Returns: - Conformance classes which the server conforms to. - """ - return Conformance(conformsTo=self.conformance_classes()) - - @abc.abstractmethod - async def post_search( - self, search_request: BaseSearchPostRequest, **kwargs - ) -> stac_types.ItemCollection: - """Cross catalog search (POST). - - Called with `POST /search`. - - Args: - search_request: search request parameters. - - Returns: - ItemCollection containing items which match the search criteria. - """ - ... 
- - @abc.abstractmethod - async def get_search( - self, - collections: Optional[List[str]] = None, - ids: Optional[List[str]] = None, - bbox: Optional[List[NumType]] = None, - datetime: Optional[Union[str, datetime]] = None, - limit: Optional[int] = 10, - query: Optional[str] = None, - token: Optional[str] = None, - fields: Optional[List[str]] = None, - sortby: Optional[str] = None, - **kwargs, - ) -> stac_types.ItemCollection: - """Cross catalog search (GET). - - Called with `GET /search`. - - Returns: - ItemCollection containing items which match the search criteria. - """ - ... - - @abc.abstractmethod - async def get_item( - self, item_id: str, collection_id: str, **kwargs - ) -> stac_types.Item: - """Get item by id. - - Called with `GET /collections/{collection_id}/items/{item_id}`. - - Args: - item_id: Id of the item. - collection_id: Id of the collection. - - Returns: - Item. - """ - ... - - @abc.abstractmethod - async def all_collections(self, **kwargs) -> stac_types.Collections: - """Get all available collections. - - Called with `GET /collections`. - - Returns: - A list of collections. - """ - ... - - @abc.abstractmethod - async def get_collection( - self, collection_id: str, **kwargs - ) -> stac_types.Collection: - """Get collection by id. - - Called with `GET /collections/{collection_id}`. - - Args: - collection_id: Id of the collection. - - Returns: - Collection. - """ - ... - - @abc.abstractmethod - async def item_collection( - self, collection_id: str, limit: int = 10, token: str = None, **kwargs - ) -> stac_types.ItemCollection: - """Get all items from a specific collection. - - Called with `GET /collections/{collection_id}/items` - - Args: - collection_id: id of the collection. - limit: number of items to return. - token: pagination token. - - Returns: - An ItemCollection. - """ - ... 
- - -@attr.s -class AsyncBaseFiltersClient(abc.ABC): - """Defines a pattern for implementing the STAC filter extension.""" - - async def get_queryables( - self, collection_id: Optional[str] = None, **kwargs - ) -> Dict[str, Any]: - """Get the queryables available for the given collection_id. - - If collection_id is None, returns the intersection of all - queryables over all collections. - - This base implementation returns a blank queryable schema. This is not allowed - under OGC CQL but it is allowed by the STAC API Filter Extension - - https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables - """ - return { - "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "https://example.org/queryables", - "type": "object", - "title": "Queryables for Example STAC API", - "description": "Queryable names for the example STAC API Item Search filter.", - "properties": {}, - } - - -@attr.s -class BaseFiltersClient(abc.ABC): - """Defines a pattern for implementing the STAC filter extension.""" - - def get_queryables( - self, collection_id: Optional[str] = None, **kwargs - ) -> Dict[str, Any]: - """Get the queryables available for the given collection_id. - - If collection_id is None, returns the intersection of all - queryables over all collections. - - This base implementation returns a blank queryable schema. 
This is not allowed - under OGC CQL but it is allowed by the STAC API Filter Extension - - https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables - """ - return { - "$schema": "https://json-schema.org/draft/2019-09/schema", - "$id": "https://example.org/queryables", - "type": "object", - "title": "Queryables for Example STAC API", - "description": "Queryable names for the example STAC API Item Search filter.", - "properties": {}, - } diff --git a/stac_fastapi/types/stac_fastapi/types/errors.py b/stac_fastapi/types/stac_fastapi/types/errors.py deleted file mode 100644 index 9bd51ed0..00000000 --- a/stac_fastapi/types/stac_fastapi/types/errors.py +++ /dev/null @@ -1,41 +0,0 @@ -"""stac_fastapi.types.errors module.""" - - -class StacApiError(Exception): - """Generic API error.""" - - pass - - -class ConflictError(StacApiError): - """Database conflict.""" - - pass - - -class NotFoundError(StacApiError): - """Resource not found.""" - - pass - - -class ForeignKeyError(StacApiError): - """Foreign key error (collection does not exist).""" - - pass - - -class DatabaseError(StacApiError): - """Generic database errors.""" - - pass - - -class InvalidQueryParameter(StacApiError): - """Error for unknown or invalid query parameters. 
- - Used to capture errors that should respond according to - http://docs.opengeospatial.org/is/17-069r3/17-069r3.html#query_parameters - """ - - pass diff --git a/stac_fastapi/types/stac_fastapi/types/extension.py b/stac_fastapi/types/stac_fastapi/types/extension.py deleted file mode 100644 index 1e4774b4..00000000 --- a/stac_fastapi/types/stac_fastapi/types/extension.py +++ /dev/null @@ -1,37 +0,0 @@ -"""base api extension.""" -import abc -from typing import List, Optional - -import attr -from fastapi import FastAPI -from pydantic import BaseModel - - -@attr.s -class ApiExtension(abc.ABC): - """Abstract base class for defining API extensions.""" - - GET = None - POST = None - - def get_request_model(self, verb: Optional[str] = "GET") -> Optional[BaseModel]: - """Return the request model for the extension.method. - - The model can differ based on HTTP verb - """ - return getattr(self, verb) - - conformance_classes: List[str] = attr.ib(factory=list) - schema_href: Optional[str] = attr.ib(default=None) - - @abc.abstractmethod - def register(self, app: FastAPI) -> None: - """Register the extension with a FastAPI application. - - Args: - app: target FastAPI application. 
- - Returns: - None - """ - pass diff --git a/stac_fastapi/types/stac_fastapi/types/links.py b/stac_fastapi/types/stac_fastapi/types/links.py deleted file mode 100644 index 0349984b..00000000 --- a/stac_fastapi/types/stac_fastapi/types/links.py +++ /dev/null @@ -1,110 +0,0 @@ -"""link helpers.""" - -from typing import Any, Dict, List -from urllib.parse import urljoin - -import attr -from stac_pydantic.links import Relations -from stac_pydantic.shared import MimeTypes - -# These can be inferred from the item/collection so they aren't included in the database -# Instead they are dynamically generated when querying the database using the classes defined below -INFERRED_LINK_RELS = ["self", "item", "parent", "collection", "root"] - - -def filter_links(links: List[Dict]) -> List[Dict]: - """Remove inferred links.""" - return [link for link in links if link["rel"] not in INFERRED_LINK_RELS] - - -def resolve_links(links: list, base_url: str) -> List[Dict]: - """Convert relative links to absolute links.""" - filtered_links = filter_links(links) - for link in filtered_links: - link.update({"href": urljoin(base_url, link["href"])}) - return filtered_links - - -@attr.s -class BaseLinks: - """Create inferred links common to collections and items.""" - - collection_id: str = attr.ib() - base_url: str = attr.ib() - - def root(self) -> Dict[str, Any]: - """Return the catalog root.""" - return dict(rel=Relations.root, type=MimeTypes.json, href=self.base_url) - - -@attr.s -class CollectionLinks(BaseLinks): - """Create inferred links specific to collections.""" - - def self(self) -> Dict[str, Any]: - """Create the `self` link.""" - return dict( - rel=Relations.self, - type=MimeTypes.json, - href=urljoin(self.base_url, f"collections/{self.collection_id}"), - ) - - def parent(self) -> Dict[str, Any]: - """Create the `parent` link.""" - return dict(rel=Relations.parent, type=MimeTypes.json, href=self.base_url) - - def items(self) -> Dict[str, Any]: - """Create the `items` link.""" - 
return dict( - rel="items", - type=MimeTypes.geojson, - href=urljoin(self.base_url, f"collections/{self.collection_id}/items"), - ) - - def create_links(self) -> List[Dict[str, Any]]: - """Return all inferred links.""" - return [self.self(), self.parent(), self.items(), self.root()] - - -@attr.s -class ItemLinks(BaseLinks): - """Create inferred links specific to items.""" - - item_id: str = attr.ib() - - def self(self) -> Dict[str, Any]: - """Create the `self` link.""" - return dict( - rel=Relations.self, - type=MimeTypes.geojson, - href=urljoin( - self.base_url, - f"collections/{self.collection_id}/items/{self.item_id}", - ), - ) - - def parent(self) -> Dict[str, Any]: - """Create the `parent` link.""" - return dict( - rel=Relations.parent, - type=MimeTypes.json, - href=urljoin(self.base_url, f"collections/{self.collection_id}"), - ) - - def collection(self) -> Dict[str, Any]: - """Create the `collection` link.""" - return dict( - rel=Relations.collection, - type=MimeTypes.json, - href=urljoin(self.base_url, f"collections/{self.collection_id}"), - ) - - def create_links(self) -> List[Dict[str, Any]]: - """Return all inferred links.""" - links = [ - self.self(), - self.parent(), - self.collection(), - self.root(), - ] - return links diff --git a/stac_fastapi/types/stac_fastapi/types/requests.py b/stac_fastapi/types/stac_fastapi/types/requests.py deleted file mode 100644 index 7ce0e81a..00000000 --- a/stac_fastapi/types/stac_fastapi/types/requests.py +++ /dev/null @@ -1,14 +0,0 @@ -"""requests helpers.""" - -from starlette.requests import Request - - -def get_base_url(request: Request) -> str: - """Get base URL with respect of APIRouter prefix.""" - app = request.app - if not app.state.router_prefix: - return str(request.base_url) - else: - return "{}{}/".format( - str(request.base_url), app.state.router_prefix.lstrip("/") - ) diff --git a/stac_fastapi/types/stac_fastapi/types/rfc3339.py b/stac_fastapi/types/stac_fastapi/types/rfc3339.py deleted file mode 100644 
index 0ba46003..00000000 --- a/stac_fastapi/types/stac_fastapi/types/rfc3339.py +++ /dev/null @@ -1,75 +0,0 @@ -"""rfc3339.""" - -from datetime import datetime, timezone -from typing import Optional, Tuple - -import ciso8601 -from pystac.utils import datetime_to_str - - -def rfc3339_str_to_datetime(s: str) -> datetime: - """Convert a string conforming to RFC 3339 to a :class:`datetime.datetime`. - - Uses :meth:`ciso8601.parse_rfc3339` under the hood. - - Args: - s (str) : The string to convert to :class:`datetime.datetime`. - - Returns: - str: The datetime represented by the ISO8601 (RFC 3339) formatted string. - - Raises: - ValueError: If the string is not a valid RFC 3339 string. - """ - return ciso8601.parse_rfc3339(s) - - -def str_to_interval( - interval: str, -) -> Optional[Tuple[Optional[datetime], Optional[datetime]]]: - """Extract a tuple of datetimes from an interval string. - - Interval strings are defined by - OGC API - Features Part 1 for the datetime query parameter value. These follow the - form '1985-04-12T23:20:50.52Z/1986-04-12T23:20:50.52Z', and allow either the start - or end (but not both) to be open-ended with '..' or ''. - - Args: - interval (str) : The interval string to convert to a :class:`datetime.datetime` - tuple. - - Raises: - ValueError: If the string is not a valid interval string. - """ - if not interval: - raise ValueError("Empty interval string is invalid.") - - values = interval.split("/") - if len(values) != 2: - raise ValueError( - f"Interval string '{interval}' contains more than one forward slash." 
- ) - - start = None - end = None - if not values[0] in ["..", ""]: - start = rfc3339_str_to_datetime(values[0]) - if not values[1] in ["..", ""]: - end = rfc3339_str_to_datetime(values[1]) - - if start is None and end is None: - raise ValueError("Double open-ended intervals are not allowed.") - if start is not None and end is not None and start > end: - raise ValueError("Start datetime cannot be before end datetime.") - else: - return start, end - - -def now_in_utc() -> datetime: - """Return a datetime value of now with the UTC timezone applied.""" - return datetime.now(timezone.utc) - - -def now_to_rfc3339_str() -> str: - """Return an RFC 3339 string representing now.""" - return datetime_to_str(now_in_utc()) diff --git a/stac_fastapi/types/stac_fastapi/types/search.py b/stac_fastapi/types/stac_fastapi/types/search.py deleted file mode 100644 index f12c3c51..00000000 --- a/stac_fastapi/types/stac_fastapi/types/search.py +++ /dev/null @@ -1,201 +0,0 @@ -"""stac_fastapi.types.search module. 
- -# TODO: replace with stac-pydantic -""" - -import abc -import operator -from datetime import datetime -from enum import auto -from types import DynamicClassAttribute -from typing import Any, Callable, Dict, List, Optional, Union - -import attr -from geojson_pydantic.geometries import ( - LineString, - MultiLineString, - MultiPoint, - MultiPolygon, - Point, - Polygon, - _GeometryBase, -) -from pydantic import BaseModel, conint, validator -from stac_pydantic.shared import BBox -from stac_pydantic.utils import AutoValueEnum - -from stac_fastapi.types.rfc3339 import rfc3339_str_to_datetime, str_to_interval - -# Be careful: https://github.com/samuelcolvin/pydantic/issues/1423#issuecomment-642797287 -NumType = Union[float, int] - - -class Operator(str, AutoValueEnum): - """Defines the set of operators supported by the API.""" - - eq = auto() - ne = auto() - lt = auto() - lte = auto() - gt = auto() - gte = auto() - - # TODO: These are defined in the spec but aren't currently implemented by the api - # startsWith = auto() - # endsWith = auto() - # contains = auto() - # in = auto() - - @DynamicClassAttribute - def operator(self) -> Callable[[Any, Any], bool]: - """Return python operator.""" - return getattr(operator, self._value_) - - -def str2list(x: str) -> Optional[List]: - """Convert string to list base on , delimiter.""" - if x: - return x.split(",") - - -@attr.s # type:ignore -class APIRequest(abc.ABC): - """Generic API Request base class.""" - - def kwargs(self) -> Dict: - """Transform api request params into format which matches the signature of the endpoint.""" - return self.__dict__ - - -@attr.s -class BaseSearchGetRequest(APIRequest): - """Base arguments for GET Request.""" - - collections: Optional[str] = attr.ib(default=None, converter=str2list) - ids: Optional[str] = attr.ib(default=None, converter=str2list) - bbox: Optional[str] = attr.ib(default=None, converter=str2list) - intersects: Optional[str] = attr.ib(default=None, converter=str2list) - datetime: 
Optional[str] = attr.ib(default=None) - limit: Optional[int] = attr.ib(default=10) - - -class BaseSearchPostRequest(BaseModel): - """Search model. - - Replace base model in STAC-pydantic as it includes additional fields, - not in the core model. - https://github.com/radiantearth/stac-api-spec/tree/master/item-search#query-parameter-table - - PR to fix this: - https://github.com/stac-utils/stac-pydantic/pull/100 - """ - - collections: Optional[List[str]] - ids: Optional[List[str]] - bbox: Optional[BBox] - intersects: Optional[ - Union[Point, MultiPoint, LineString, MultiLineString, Polygon, MultiPolygon] - ] - datetime: Optional[str] - limit: Optional[conint(gt=0, le=10000)] = 10 - - @property - def start_date(self) -> Optional[datetime]: - """Extract the start date from the datetime string.""" - interval = str_to_interval(self.datetime) - return interval[0] if interval else None - - @property - def end_date(self) -> Optional[datetime]: - """Extract the end date from the datetime string.""" - interval = str_to_interval(self.datetime) - return interval[1] if interval else None - - @validator("intersects") - def validate_spatial(cls, v, values): - """Check bbox and intersects are not both supplied.""" - if v and values["bbox"]: - raise ValueError("intersects and bbox parameters are mutually exclusive") - return v - - @validator("bbox") - def validate_bbox(cls, v: BBox): - """Check order of supplied bbox coordinates.""" - if v: - # Validate order - if len(v) == 4: - xmin, ymin, xmax, ymax = v - else: - xmin, ymin, min_elev, xmax, ymax, max_elev = v - if max_elev < min_elev: - raise ValueError( - "Maximum elevation must greater than minimum elevation" - ) - - if xmax < xmin: - raise ValueError( - "Maximum longitude must be greater than minimum longitude" - ) - - if ymax < ymin: - raise ValueError( - "Maximum longitude must be greater than minimum longitude" - ) - - # Validate against WGS84 - if xmin < -180 or ymin < -90 or xmax > 180 or ymax > 90: - raise 
ValueError("Bounding box must be within (-180, -90, 180, 90)") - - return v - - @validator("datetime") - def validate_datetime(cls, v): - """Validate datetime.""" - if "/" in v: - values = v.split("/") - else: - # Single date is interpreted as end date - values = ["..", v] - - dates = [] - for value in values: - if value == ".." or value == "": - dates.append("..") - continue - - # throws ValueError if invalid RFC 3339 string - dates.append(rfc3339_str_to_datetime(value)) - - if dates[0] == ".." and dates[1] == "..": - raise ValueError( - "Invalid datetime range, both ends of range may not be open" - ) - - if ".." not in dates and dates[0] > dates[1]: - raise ValueError( - "Invalid datetime range, must match format (begin_date, end_date)" - ) - - return v - - @property - def spatial_filter(self) -> Optional[_GeometryBase]: - """Return a geojson-pydantic object representing the spatial filter for the search request. - - Check for both because the ``bbox`` and ``intersects`` parameters are mutually exclusive. - """ - if self.bbox: - return Polygon( - coordinates=[ - [ - [self.bbox[0], self.bbox[3]], - [self.bbox[2], self.bbox[3]], - [self.bbox[2], self.bbox[1]], - [self.bbox[0], self.bbox[1]], - [self.bbox[0], self.bbox[3]], - ] - ] - ) - if self.intersects: - return self.intersects - return diff --git a/stac_fastapi/types/stac_fastapi/types/stac.py b/stac_fastapi/types/stac_fastapi/types/stac.py deleted file mode 100644 index ef61c2f3..00000000 --- a/stac_fastapi/types/stac_fastapi/types/stac.py +++ /dev/null @@ -1,89 +0,0 @@ -"""STAC types.""" -import sys -from typing import Any, Dict, List, Optional, Union - -# Avoids a Pydantic error: -# TypeError: You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. -# Without it, there is no way to differentiate required and optional fields when subclassed. 
-if sys.version_info < (3, 9, 2): - from typing_extensions import TypedDict -else: - from typing import TypedDict - -NumType = Union[float, int] - - -class LandingPage(TypedDict, total=False): - """STAC Landing Page.""" - - type: str - stac_version: str - stac_extensions: Optional[List[str]] - id: str - title: str - description: str - conformsTo: List[str] - links: List[Dict[str, Any]] - - -class Conformance(TypedDict): - """STAC Conformance Classes.""" - - conformsTo: List[str] - - -class Catalog(TypedDict, total=False): - """STAC Catalog.""" - - type: str - stac_version: str - stac_extensions: Optional[List[str]] - id: str - title: Optional[str] - description: str - links: List[Dict[str, Any]] - - -class Collection(Catalog, total=False): - """STAC Collection.""" - - keywords: List[str] - license: str - providers: List[Dict[str, Any]] - extent: Dict[str, Any] - summaries: Dict[str, Any] - assets: Dict[str, Any] - - -class Item(TypedDict, total=False): - """STAC Item.""" - - type: str - stac_version: str - stac_extensions: Optional[List[str]] - id: str - geometry: Dict[str, Any] - bbox: List[NumType] - properties: Dict[str, Any] - links: List[Dict[str, Any]] - assets: Dict[str, Any] - collection: str - - -class ItemCollection(TypedDict, total=False): - """STAC Item Collection.""" - - type: str - features: List[Item] - links: List[Dict[str, Any]] - context: Optional[Dict[str, int]] - - -class Collections(TypedDict, total=False): - """All collections endpoint. 
- - https://github.com/radiantearth/stac-api-spec/tree/master/collections - """ - - collections: List[Collection] - links: List[Dict[str, Any]] diff --git a/stac_fastapi/types/stac_fastapi/types/version.py b/stac_fastapi/types/stac_fastapi/types/version.py deleted file mode 100644 index 3c7bccdd..00000000 --- a/stac_fastapi/types/stac_fastapi/types/version.py +++ /dev/null @@ -1,2 +0,0 @@ -"""library version.""" -__version__ = "2.3.0" diff --git a/stac_fastapi/types/tests/test_rfc3339.py b/stac_fastapi/types/tests/test_rfc3339.py deleted file mode 100644 index 0a402699..00000000 --- a/stac_fastapi/types/tests/test_rfc3339.py +++ /dev/null @@ -1,105 +0,0 @@ -from datetime import timezone - -import pytest - -from stac_fastapi.types.rfc3339 import ( - now_in_utc, - now_to_rfc3339_str, - rfc3339_str_to_datetime, - str_to_interval, -) - -invalid_datetimes = [ - "1985-04-12", # date only - "1937-01-01T12:00:27.87+0100", # invalid TZ format, no sep : - "37-01-01T12:00:27.87Z", # invalid year, must be 4 digits - "1985-12-12T23:20:50.52", # no TZ - "21985-12-12T23:20:50.52Z", # year must be 4 digits - "1985-13-12T23:20:50.52Z", # month > 12 - "1985-12-32T23:20:50.52Z", # day > 31 - "1985-12-01T25:20:50.52Z", # hour > 24 - "1985-12-01T00:60:50.52Z", # minute > 59 - "1985-12-01T00:06:61.52Z", # second > 60 - "1985-04-12T23:20:50.Z", # fractional sec . 
but no frac secs - "1985-04-12T23:20:50,Z", # fractional sec , but no frac secs - "1990-12-31T23:59:61Z", # second > 60 w/o fractional seconds - "1985-04-12T23:20:50,52Z", # comma as frac sec sep allowed in ISO8601 but not RFC3339 -] - -valid_datetimes = [ - "1985-04-12T23:20:50.52Z", - "1996-12-19T16:39:57-00:00", - "1996-12-19T16:39:57+00:00", - "1996-12-19T16:39:57-08:00", - "1996-12-19T16:39:57+08:00", - "1937-01-01T12:00:27.87+01:00", - "1985-04-12T23:20:50.52Z", - "1937-01-01T12:00:27.8710+01:00", - "1937-01-01T12:00:27.8+01:00", - "1937-01-01T12:00:27.8Z", - "2020-07-23T00:00:00.000+03:00", - "2020-07-23T00:00:00+03:00", - "1985-04-12t23:20:50.000z", - "2020-07-23T00:00:00Z", - "2020-07-23T00:00:00.0Z", - "2020-07-23T00:00:00.01Z", - "2020-07-23T00:00:00.012Z", - "2020-07-23T00:00:00.0123Z", - "2020-07-23T00:00:00.01234Z", - "2020-07-23T00:00:00.012345Z", - "2020-07-23T00:00:00.0123456Z", - "2020-07-23T00:00:00.01234567Z", - "2020-07-23T00:00:00.012345678Z", -] - -invalid_intervals = [ - "/" - "../" - "/.." - "../.." 
- "/1984-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", # extra start / - "1984-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z/", # extra end / - "1986-04-12T23:20:50.52Z/1985-04-12T23:20:50.52Z", # start > end -] - -valid_intervals = [ - "../1985-04-12T23:20:50.52Z", - "1985-04-12T23:20:50.52Z/..", - "/1985-04-12T23:20:50.52Z", - "1985-04-12T23:20:50.52Z/", - "1985-04-12T23:20:50.52Z/1986-04-12T23:20:50.52Z", - "1985-04-12T23:20:50.52+01:00/1986-04-12T23:20:50.52+01:00", - "1985-04-12T23:20:50.52-01:00/1986-04-12T23:20:50.52-01:00", -] - - -@pytest.mark.parametrize("test_input", invalid_datetimes) -def test_parse_invalid_str_to_datetime(test_input): - with pytest.raises(ValueError): - rfc3339_str_to_datetime(test_input) - - -@pytest.mark.parametrize("test_input", valid_datetimes) -def test_parse_valid_str_to_datetime(test_input): - assert rfc3339_str_to_datetime(test_input) - - -@pytest.mark.parametrize("test_input", invalid_intervals) -def test_parse_invalid_interval_to_datetime(test_input): - with pytest.raises(ValueError): - str_to_interval(test_input) - - -@pytest.mark.parametrize("test_input", valid_intervals) -def test_parse_valid_interval_to_datetime(test_input): - assert str_to_interval(test_input) - - -def test_now_functions() -> None: - now1 = now_in_utc() - now2 = now_in_utc() - - assert now1 < now2 - assert now1.tzinfo == timezone.utc - - rfc3339_str_to_datetime(now_to_rfc3339_str()) diff --git a/stac_fastapi/pgstac/tests/__init__.py b/tests/__init__.py similarity index 100% rename from stac_fastapi/pgstac/tests/__init__.py rename to tests/__init__.py diff --git a/stac_fastapi/pgstac/tests/api/__init__.py b/tests/api/__init__.py similarity index 100% rename from stac_fastapi/pgstac/tests/api/__init__.py rename to tests/api/__init__.py diff --git a/stac_fastapi/pgstac/tests/api/test_api.py b/tests/api/test_api.py similarity index 100% rename from stac_fastapi/pgstac/tests/api/test_api.py rename to tests/api/test_api.py diff --git 
a/stac_fastapi/pgstac/tests/clients/__init__.py b/tests/clients/__init__.py similarity index 100% rename from stac_fastapi/pgstac/tests/clients/__init__.py rename to tests/clients/__init__.py diff --git a/stac_fastapi/pgstac/tests/clients/test_postgres.py b/tests/clients/test_postgres.py similarity index 100% rename from stac_fastapi/pgstac/tests/clients/test_postgres.py rename to tests/clients/test_postgres.py diff --git a/stac_fastapi/pgstac/tests/conftest.py b/tests/conftest.py similarity index 100% rename from stac_fastapi/pgstac/tests/conftest.py rename to tests/conftest.py diff --git a/stac_fastapi/pgstac/tests/data/joplin/collection.json b/tests/data/joplin/collection.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/joplin/collection.json rename to tests/data/joplin/collection.json diff --git a/stac_fastapi/pgstac/tests/data/joplin/index.geojson b/tests/data/joplin/index.geojson similarity index 100% rename from stac_fastapi/pgstac/tests/data/joplin/index.geojson rename to tests/data/joplin/index.geojson diff --git a/stac_fastapi/pgstac/tests/data/test2_collection.json b/tests/data/test2_collection.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/test2_collection.json rename to tests/data/test2_collection.json diff --git a/stac_fastapi/pgstac/tests/data/test2_item.json b/tests/data/test2_item.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/test2_item.json rename to tests/data/test2_item.json diff --git a/stac_fastapi/pgstac/tests/data/test_collection.json b/tests/data/test_collection.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/test_collection.json rename to tests/data/test_collection.json diff --git a/stac_fastapi/pgstac/tests/data/test_item.json b/tests/data/test_item.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/test_item.json rename to tests/data/test_item.json diff --git a/stac_fastapi/pgstac/tests/data/test_item2.json 
b/tests/data/test_item2.json similarity index 100% rename from stac_fastapi/pgstac/tests/data/test_item2.json rename to tests/data/test_item2.json diff --git a/stac_fastapi/pgstac/tests/resources/__init__.py b/tests/resources/__init__.py similarity index 100% rename from stac_fastapi/pgstac/tests/resources/__init__.py rename to tests/resources/__init__.py diff --git a/stac_fastapi/pgstac/tests/resources/test_collection.py b/tests/resources/test_collection.py similarity index 100% rename from stac_fastapi/pgstac/tests/resources/test_collection.py rename to tests/resources/test_collection.py diff --git a/stac_fastapi/pgstac/tests/resources/test_conformance.py b/tests/resources/test_conformance.py similarity index 100% rename from stac_fastapi/pgstac/tests/resources/test_conformance.py rename to tests/resources/test_conformance.py diff --git a/stac_fastapi/pgstac/tests/resources/test_item.py b/tests/resources/test_item.py similarity index 100% rename from stac_fastapi/pgstac/tests/resources/test_item.py rename to tests/resources/test_item.py diff --git a/stac_fastapi/pgstac/tests/resources/test_mgmt.py b/tests/resources/test_mgmt.py similarity index 100% rename from stac_fastapi/pgstac/tests/resources/test_mgmt.py rename to tests/resources/test_mgmt.py