diff --git a/databend_sqlalchemy/databend_dialect.py b/databend_sqlalchemy/databend_dialect.py index 516c1a3..ca3bbac 100644 --- a/databend_sqlalchemy/databend_dialect.py +++ b/databend_sqlalchemy/databend_dialect.py @@ -23,7 +23,6 @@ Table("some_table", metadata, ..., databend_transient=True|False) """ - import decimal import re import operator @@ -60,6 +59,17 @@ CHAR, TIMESTAMP, ) + +import sqlalchemy +from sqlalchemy import types as sqltypes +from sqlalchemy.sql.base import Executable + +# Check SQLAlchemy version +if sqlalchemy.__version__.startswith('2.'): + from sqlalchemy.types import DOUBLE +else: + from .types import DOUBLE + from sqlalchemy.engine import ExecutionContext, default from sqlalchemy.exc import DBAPIError, NoSuchTableError @@ -71,7 +81,7 @@ AzureBlobStorage, AmazonS3, ) -from .types import INTERVAL +from .types import INTERVAL, TINYINT, BITMAP, GEOMETRY, GEOGRAPHY RESERVED_WORDS = { "Error", @@ -693,6 +703,7 @@ def __init__(self, key_type, value_type): super(MAP, self).__init__() + class DatabendDate(sqltypes.DATE): __visit_name__ = "DATE" @@ -793,12 +804,26 @@ class DatabendInterval(INTERVAL): render_bind_cast = True +class DatabendBitmap(BITMAP): + render_bind_cast = True + + +class DatabendTinyInt(TINYINT): + render_bind_cast = True + + +class DatabendGeometry(GEOMETRY): + render_bind_cast = True + +class DatabendGeography(GEOGRAPHY): + render_bind_cast = True + # Type converters ischema_names = { "bigint": BIGINT, "int": INTEGER, "smallint": SMALLINT, - "tinyint": SMALLINT, + "tinyint": DatabendTinyInt, "int64": BIGINT, "int32": INTEGER, "int16": SMALLINT, @@ -813,7 +838,7 @@ class DatabendInterval(INTERVAL): "datetime": DatabendDateTime, "timestamp": DatabendDateTime, "float": FLOAT, - "double": FLOAT, + "double": DOUBLE, "float64": FLOAT, "float32": FLOAT, "string": VARCHAR, @@ -826,8 +851,13 @@ class DatabendInterval(INTERVAL): "binary": BINARY, "time": DatabendTime, "interval": DatabendInterval, + "bitmap": DatabendBitmap, + 
"geometry": DatabendGeometry, + "geography": DatabendGeography } + + # Column spec colspecs = { sqltypes.Interval: DatabendInterval, @@ -1227,6 +1257,29 @@ def visit_TIME(self, type_, **kw): def visit_INTERVAL(self, type, **kw): return "INTERVAL" + def visit_DOUBLE(self, type_, **kw): + return "DOUBLE" + + def visit_TINYINT(self, type_, **kw): + return "TINYINT" + + def visit_FLOAT(self, type_, **kw): + return "FLOAT" + + def visit_BITMAP(self, type_, **kw): + return "BITMAP" + + def visit_GEOMETRY(self, type_, **kw): + if type_.srid is not None: + return f"GEOMETRY(SRID {type_.srid})" + return "GEOMETRY" + + def visit_GEOGRAPHY(self, type_, **kw): + if type_.srid is not None: + return f"GEOGRAPHY(SRID {type_.srid})" + return "GEOGRAPHY" + + class DatabendDDLCompiler(compiler.DDLCompiler): def visit_primary_key_constraint(self, constraint, **kw): diff --git a/databend_sqlalchemy/dml.py b/databend_sqlalchemy/dml.py index 6e5aae4..cb2ad2b 100644 --- a/databend_sqlalchemy/dml.py +++ b/databend_sqlalchemy/dml.py @@ -251,6 +251,7 @@ class Compression(Enum): RAW_DEFLATE = "RAW_DEFLATE" XZ = "XZ" SNAPPY = "SNAPPY" + ZIP = "ZIP" class CopyFormat(ClauseElement): diff --git a/databend_sqlalchemy/types.py b/databend_sqlalchemy/types.py index e4f637d..e5a737f 100644 --- a/databend_sqlalchemy/types.py +++ b/databend_sqlalchemy/types.py @@ -3,6 +3,7 @@ import datetime as dt from typing import Optional, Type, Any +from sqlalchemy import func from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.sql import sqltypes from sqlalchemy.sql import type_api @@ -73,3 +74,80 @@ def process(value: dt.timedelta) -> str: return f"to_interval('{value.total_seconds()} seconds')" return process + + +class TINYINT(sqltypes.Integer): + __visit_name__ = "TINYINT" + native = True + + +class DOUBLE(sqltypes.Float): + __visit_name__ = "DOUBLE" + native = True + + +class FLOAT(sqltypes.Float): + __visit_name__ = "FLOAT" + native = True + + +# The “CamelCase” types are to the greatest degree 
possible database agnostic + +# For these datatypes, specific SQLAlchemy dialects provide backend-specific “UPPERCASE” datatypes, for a SQL type that has no analogue on other backends + + +class BITMAP(sqltypes.TypeEngine): + __visit_name__ = "BITMAP" + render_bind_cast = True + + def __init__(self, **kwargs): + super(BITMAP, self).__init__() + + def process_result_value(self, value, dialect): + if value is None: + return None + # Databend returns bitmaps as strings of comma-separated integers + return set(int(x) for x in value.split(',') if x) + + def bind_expression(self, bindvalue): + return func.to_bitmap(bindvalue, type_=self) + + def column_expression(self, col): + # Convert bitmap to string using a custom function + return func.to_string(col, type_=sqltypes.String) + + def bind_processor(self, dialect): + def process(value): + if value is None: + return None + if isinstance(value, set): + return ','.join(str(x) for x in sorted(value)) + return str(value) + return process + + def result_processor(self, dialect, coltype): + def process(value): + if value is None: + return None + return set(int(x) for x in value.split(',') if x) + return process + + +class GEOMETRY(sqltypes.TypeEngine): + __visit_name__ = "GEOMETRY" + + def __init__(self, srid=None): + super(GEOMETRY, self).__init__() + self.srid = srid + + + +class GEOGRAPHY(sqltypes.TypeEngine): + __visit_name__ = "GEOGRAPHY" + native = True + + def __init__(self, srid=None): + super(GEOGRAPHY, self).__init__() + self.srid = srid + + diff --git a/tests/conftest.py b/tests/conftest.py index 2b6d9a5..ab4b9aa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,4 @@ from sqlalchemy.dialects import registry -from sqlalchemy import event, Engine, text import pytest registry.register("databend.databend", "databend_sqlalchemy.databend_dialect", "DatabendDialect") @@ -9,9 +8,18 @@ from sqlalchemy.testing.plugin.pytestplugin import * +from packaging import version +import sqlalchemy +if 
version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'): + from sqlalchemy import event, text + from sqlalchemy import Engine + + + @event.listens_for(Engine, "connect") + def receive_engine_connect(conn, r): + cur = conn.cursor() + cur.execute('SET global format_null_as_str = 0') + cur.execute('SET global enable_geo_create_table = 1') + cur.close() + -@event.listens_for(Engine, "connect") -def receive_engine_connect(conn, r): - cur = conn.cursor() - cur.execute('SET global format_null_as_str = 0') - cur.close() diff --git a/tests/test_copy_into.py b/tests/test_copy_into.py index 3d193cf..c679510 100644 --- a/tests/test_copy_into.py +++ b/tests/test_copy_into.py @@ -27,6 +27,8 @@ FileColumnClause, StageClause, ) +import sqlalchemy +from packaging import version class CompileDatabendCopyIntoTableTest(fixtures.TestBase, AssertsCompiledSQL): @@ -215,51 +217,52 @@ def define_tables(cls, metadata): Column("data", String(50)), ) - def test_copy_into_stage_and_table(self, connection): - # create stage - connection.execute(text('CREATE OR REPLACE STAGE mystage')) - # copy into stage from random table limiting 1000 - table = self.tables.random_data - query = table.select().limit(1000) + if version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'): + def test_copy_into_stage_and_table(self, connection): + # create stage + connection.execute(text('CREATE OR REPLACE STAGE mystage')) + # copy into stage from random table limiting 1000 + table = self.tables.random_data + query = table.select().limit(1000) - copy_into = CopyIntoLocation( - target=StageClause( - name='mystage' - ), - from_=query, - file_format=ParquetFormat(), - options=CopyIntoLocationOptions() - ) - r = connection.execute( - copy_into - ) - eq_(r.rowcount, 1000) - copy_into_results = r.context.copy_into_location_results() - eq_(copy_into_results['rows_unloaded'], 1000) - # eq_(copy_into_results['input_bytes'], 16250) # input bytes will differ, the table is random - # 
eq_(copy_into_results['output_bytes'], 4701) # output bytes differs + copy_into = CopyIntoLocation( + target=StageClause( + name='mystage' + ), + from_=query, + file_format=ParquetFormat(), + options=CopyIntoLocationOptions() + ) + r = connection.execute( + copy_into + ) + eq_(r.rowcount, 1000) + copy_into_results = r.context.copy_into_location_results() + eq_(copy_into_results['rows_unloaded'], 1000) + # eq_(copy_into_results['input_bytes'], 16250) # input bytes will differ, the table is random + # eq_(copy_into_results['output_bytes'], 4701) # output bytes differs - # now copy into table + # now copy into table - copy_into_table = CopyIntoTable( - target=self.tables.loaded, - from_=StageClause( - name='mystage' - ), - file_format=ParquetFormat(), - options=CopyIntoTableOptions() - ) - r = connection.execute( - copy_into_table - ) - eq_(r.rowcount, 1000) - copy_into_table_results = r.context.copy_into_table_results() - assert len(copy_into_table_results) == 1 - result = copy_into_table_results[0] - assert result['file'].endswith('.parquet') - eq_(result['rows_loaded'], 1000) - eq_(result['errors_seen'], 0) - eq_(result['first_error'], None) - eq_(result['first_error_line'], None) + copy_into_table = CopyIntoTable( + target=self.tables.loaded, + from_=StageClause( + name='mystage' + ), + file_format=ParquetFormat(), + options=CopyIntoTableOptions() + ) + r = connection.execute( + copy_into_table + ) + eq_(r.rowcount, 1000) + copy_into_table_results = r.context.copy_into_table_results() + assert len(copy_into_table_results) == 1 + result = copy_into_table_results[0] + assert result['file'].endswith('.parquet') + eq_(result['rows_loaded'], 1000) + eq_(result['errors_seen'], 0) + eq_(result['first_error'], None) + eq_(result['first_error_line'], None) diff --git a/tests/test_sqlalchemy.py b/tests/test_sqlalchemy.py index 6b9625e..64a04c2 100644 --- a/tests/test_sqlalchemy.py +++ b/tests/test_sqlalchemy.py @@ -13,15 +13,33 @@ from sqlalchemy.testing.suite import 
LongNameBlowoutTest as _LongNameBlowoutTest
 from sqlalchemy.testing.suite import QuotedNameArgumentTest as _QuotedNameArgumentTest
 from sqlalchemy.testing.suite import JoinTest as _JoinTest
-from sqlalchemy.testing.suite import BizarroCharacterFKResolutionTest as _BizarroCharacterFKResolutionTest
+
 from sqlalchemy.testing.suite import ServerSideCursorsTest as _ServerSideCursorsTest
-from sqlalchemy.testing.suite import EnumTest as _EnumTest
+
 from sqlalchemy.testing.suite import CTETest as _CTETest
 from sqlalchemy.testing.suite import JSONTest as _JSONTest
+from sqlalchemy.testing.suite import IntegerTest as _IntegerTest
+
 from sqlalchemy import types as sql_types
-from sqlalchemy import testing, select
-from sqlalchemy.testing import config, eq_
+from sqlalchemy.testing import config
+from sqlalchemy import testing, select, func, Table, Column, Integer
+from sqlalchemy.testing import eq_, fixtures, assertions
+
+from databend_sqlalchemy.types import TINYINT, BITMAP, DOUBLE, GEOMETRY, GEOGRAPHY
+from packaging import version
+import sqlalchemy
+if version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'):
+    from sqlalchemy.testing.suite import BizarroCharacterFKResolutionTest as _BizarroCharacterFKResolutionTest
+    from sqlalchemy.testing.suite import EnumTest as _EnumTest
+else:
+    from sqlalchemy.testing.suite import ComponentReflectionTest as _ComponentReflectionTest
+
+    class ComponentReflectionTest(_ComponentReflectionTest):
+
+        @testing.skip("databend")
+        def test_get_indexes(self):
+            pass
 
 
 class ComponentReflectionTestExtra(_ComponentReflectionTestExtra):
@@ -186,9 +204,9 @@ def test_get_indexes(self, name):
 class JoinTest(_JoinTest):
     __requires__ = ("foreign_keys",)
 
-
-class BizarroCharacterFKResolutionTest(_BizarroCharacterFKResolutionTest):
-    __requires__ = ("foreign_keys",)
+if version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'):
+    class BizarroCharacterFKResolutionTest(_BizarroCharacterFKResolutionTest):
+        __requires__ = ("foreign_keys",)
 
 
 class 
BinaryTest(_BinaryTest): @@ -274,13 +292,13 @@ def test_roundtrip_fetchall(self): def test_roundtrip_fetchmany(self): pass +if version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'): + class EnumTest(_EnumTest): + __backend__ = True -class EnumTest(_EnumTest): - __backend__ = True - - @testing.skip("databend") # Skipped because no supporting enums yet - def test_round_trip_executemany(self, connection): - pass + @testing.skip("databend") # Skipped because no supporting enums yet + def test_round_trip_executemany(self, connection): + pass class CTETest(_CTETest): @@ -318,3 +336,332 @@ def define_tables(cls, metadata): # ToDo - this does not yet work def test_path_typed_comparison(self, datatype, value): pass + + +class IntegerTest(_IntegerTest, fixtures.TablesTest): + + @classmethod + def define_tables(cls, metadata): + Table( + "tiny_int_table", + metadata, + Column("id", TINYINT) + ) + + def test_tinyint_write_and_read(self, connection): + tiny_int_table = self.tables.tiny_int_table + + # Insert a value + connection.execute( + tiny_int_table.insert(), + [{"id": 127}] # 127 is typically the maximum value for a signed TINYINT + ) + + # Read the value back + result = connection.execute(select(tiny_int_table.c.id)).scalar() + + # Verify the value + eq_(result, 127) + + # Test with minimum value + connection.execute( + tiny_int_table.insert(), + [{"id": -128}] # -128 is typically the minimum value for a signed TINYINT + ) + + result = connection.execute(select(tiny_int_table.c.id).order_by(tiny_int_table.c.id)).first()[0] + eq_(result, -128) + + def test_tinyint_overflow(self, connection): + tiny_int_table = self.tables.tiny_int_table + + # This should raise an exception as it's outside the TINYINT range + with assertions.expect_raises(Exception): # Replace with specific exception if known + connection.execute( + tiny_int_table.insert(), + [{"id": 128}] # 128 is typically outside the range of a signed TINYINT + ) + + with assertions.expect_raises(Exception): # 
Replace with specific exception if known + connection.execute( + tiny_int_table.insert(), + [{"id": -129}] # -129 is typically outside the range of a signed TINYINT + ) + + +class BitmapTest(fixtures.TablesTest): + + @classmethod + def define_tables(cls, metadata): + Table( + "bitmap_table", + metadata, + Column("id", Integer), + Column("bitmap_data", BITMAP) + ) + + """ + Perform a simple test using Databend's bitmap data type to check + that the bitmap data is correctly inserted and retrieved.' + """ + def test_bitmap_write_and_read(self, connection): + bitmap_table = self.tables.bitmap_table + + # Insert a value + connection.execute( + bitmap_table.insert(), + [{"id": 1, "bitmap_data": '1,2,3'}] + ) + + # Read the value back + result = connection.execute( + select(bitmap_table.c.bitmap_data).where(bitmap_table.c.id == 1) + ).scalar() + + # Verify the value + eq_(result, ('1,2,3')) + + """ + Perform a simple test using one of Databend's bitmap operations to check + that the Bitmap data is correctly manipulated.' 
+ """ + def test_bitmap_operations(self, connection): + bitmap_table = self.tables.bitmap_table + + # Insert two values + connection.execute( + bitmap_table.insert(), + [ + {"id": 1, "bitmap_data": "1,4,5"}, + {"id": 2, "bitmap_data": "4,5"} + ] + ) + + # Perform a bitmap AND operation and convert the result to a string + result = connection.execute( + select(func.to_string(func.bitmap_and( + bitmap_table.c.bitmap_data, + func.to_bitmap("3,4,5") + ))).where(bitmap_table.c.id == 1) + ).scalar() + + # Verify the result + eq_(result, "4,5") + + +class DoubleTest(fixtures.TablesTest): + + @classmethod + def define_tables(cls, metadata): + Table( + "double_table", + metadata, + Column("id", Integer), + Column("double_data", DOUBLE) + ) + + def test_double_write_and_read(self, connection): + double_table = self.tables.double_table + + # Insert a value + connection.execute( + double_table.insert(), + [{"id": 1, "double_data": -1.7976931348623157E+308}] + ) + + connection.execute( + double_table.insert(), + [{"id": 2, "double_data": 1.7976931348623157E+308}] + ) + + # Read the value back + result = connection.execute( + select(double_table.c.double_data).where(double_table.c.id == 1) + ).scalar() + + # Verify the value + eq_(result, -1.7976931348623157E+308) + + # Read the value back + result = connection.execute( + select(double_table.c.double_data).where(double_table.c.id == 2) + ).scalar() + + # Verify the value + eq_(result, 1.7976931348623157E+308) + + + def test_double_overflow(self, connection): + double_table = self.tables.double_table + + # This should raise an exception as it's outside the DOUBLE range + with assertions.expect_raises(Exception): + connection.execute( + double_table.insert(), + [{"id": 3, "double_data": float('inf')}] + ) + + with assertions.expect_raises(Exception): + connection.execute( + double_table.insert(), + [{"id": 3, "double_data": float('-inf')}] + ) + + +class GeometryTest(fixtures.TablesTest): + + @classmethod + def define_tables(cls, 
metadata): + Table( + "geometry_table", + metadata, + Column("id", Integer), + Column("geometry_data", GEOMETRY) + ) + + """ + Perform a simple test using Databend's Geometry data type to check + that the data is correctly inserted and retrieved.' + """ + def test_geometry_write_and_read(self, connection): + geometry_table = self.tables.geometry_table + + # Insert a value + connection.execute( + geometry_table.insert(), + [{"id": 1, "geometry_data": 'POINT(10 20)'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 2, "geometry_data": 'LINESTRING(10 20, 30 40, 50 60)'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 3, "geometry_data": 'POLYGON((10 20, 30 40, 50 60, 10 20))'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 4, "geometry_data": 'MULTIPOINT((10 20), (30 40), (50 60))'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 5, "geometry_data": 'MULTILINESTRING((10 20, 30 40), (50 60, 70 80))'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 6, "geometry_data": 'MULTIPOLYGON(((10 20, 30 40, 50 60, 10 20)), ((15 25, 25 35, 35 45, 15 25)))'}] + ) + connection.execute( + geometry_table.insert(), + [{"id": 7, "geometry_data": 'GEOMETRYCOLLECTION(POINT(10 20), LINESTRING(10 20, 30 40), POLYGON((10 20, 30 40, 50 60, 10 20)))'}] + ) + + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 1) + ).scalar() + eq_(result, ('{"type": "Point", "coordinates": [10,20]}')) + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 2) + ).scalar() + eq_(result, ('{"type": "LineString", "coordinates": [[10,20],[30,40],[50,60]]}')) + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 3) + ).scalar() + eq_(result, ('{"type": "Polygon", "coordinates": [[[10,20],[30,40],[50,60],[10,20]]]}')) + result = connection.execute( + 
select(geometry_table.c.geometry_data).where(geometry_table.c.id == 4) + ).scalar() + eq_(result, ('{"type": "MultiPoint", "coordinates": [[10,20],[30,40],[50,60]]}')) + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 5) + ).scalar() + eq_(result, ('{"type": "MultiLineString", "coordinates": [[[10,20],[30,40]],[[50,60],[70,80]]]}')) + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 6) + ).scalar() + eq_(result, ('{"type": "MultiPolygon", "coordinates": [[[[10,20],[30,40],[50,60],[10,20]]],[[[15,25],[25,35],[35,45],[15,25]]]]}')) + result = connection.execute( + select(geometry_table.c.geometry_data).where(geometry_table.c.id == 7) + ).scalar() + eq_(result, ('{"type": "GeometryCollection", "geometries": [{"type": "Point", "coordinates": [10,20]},{"type": "LineString", "coordinates": [[10,20],[30,40]]},{"type": "Polygon", "coordinates": [[[10,20],[30,40],[50,60],[10,20]]]}]}')) + + + + + +class GeographyTest(fixtures.TablesTest): + + @classmethod + def define_tables(cls, metadata): + Table( + "geography_table", + metadata, + Column("id", Integer), + Column("geography_data", GEOGRAPHY) + ) + + """ + Perform a simple test using Databend's Geography data type to check + that the data is correctly inserted and retrieved.' 
+ """ + def test_geography_write_and_read(self, connection): + geography_table = self.tables.geography_table + + # Insert a value + connection.execute( + geography_table.insert(), + [{"id": 1, "geography_data": 'POINT(10 20)'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 2, "geography_data": 'LINESTRING(10 20, 30 40, 50 60)'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 3, "geography_data": 'POLYGON((10 20, 30 40, 50 60, 10 20))'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 4, "geography_data": 'MULTIPOINT((10 20), (30 40), (50 60))'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 5, "geography_data": 'MULTILINESTRING((10 20, 30 40), (50 60, 70 80))'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 6, "geography_data": 'MULTIPOLYGON(((10 20, 30 40, 50 60, 10 20)), ((15 25, 25 35, 35 45, 15 25)))'}] + ) + connection.execute( + geography_table.insert(), + [{"id": 7, "geography_data": 'GEOMETRYCOLLECTION(POINT(10 20), LINESTRING(10 20, 30 40), POLYGON((10 20, 30 40, 50 60, 10 20)))'}] + ) + + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 1) + ).scalar() + eq_(result, ('{"type": "Point", "coordinates": [10,20]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 2) + ).scalar() + eq_(result, ('{"type": "LineString", "coordinates": [[10,20],[30,40],[50,60]]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 3) + ).scalar() + eq_(result, ('{"type": "Polygon", "coordinates": [[[10,20],[30,40],[50,60],[10,20]]]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 4) + ).scalar() + eq_(result, ('{"type": "MultiPoint", "coordinates": [[10,20],[30,40],[50,60]]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id 
== 5) + ).scalar() + eq_(result, ('{"type": "MultiLineString", "coordinates": [[[10,20],[30,40]],[[50,60],[70,80]]]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 6) + ).scalar() + eq_(result, ('{"type": "MultiPolygon", "coordinates": [[[[10,20],[30,40],[50,60],[10,20]]],[[[15,25],[25,35],[35,45],[15,25]]]]}')) + result = connection.execute( + select(geography_table.c.geography_data).where(geography_table.c.id == 7) + ).scalar() + eq_(result, ('{"type": "GeometryCollection", "geometries": [{"type": "Point", "coordinates": [10,20]},{"type": "LineString", "coordinates": [[10,20],[30,40]]},{"type": "Polygon", "coordinates": [[[10,20],[30,40],[50,60],[10,20]]]}]}')) \ No newline at end of file