From 90d4dc0f34b225ae05197820c50a4d56e28ce2a2 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Mon, 14 Mar 2022 00:02:14 +0100 Subject: [PATCH 01/18] Fix reflection for SpatiaLite columns --- geoalchemy2/__init__.py | 121 +++++++++++- setup.cfg | 4 +- tests/__init__.py | 8 + tests/conftest.py | 8 +- tests/data/spatialite_ge_4.sqlite | Bin 0 -> 221184 bytes tests/data/spatialite_lt_4.sqlite | Bin 0 -> 44032 bytes tests/schema_fixtures.py | 19 ++ tests/test_functional.py | 48 ++++- tests/test_functional_sqlite.py | 307 ++++++++++++++++++++++++++++++ 9 files changed, 501 insertions(+), 14 deletions(-) create mode 100644 tests/data/spatialite_ge_4.sqlite create mode 100644 tests/data/spatialite_lt_4.sqlite diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index d16b244e..0d8145e1 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -5,6 +5,7 @@ from sqlalchemy import Index from sqlalchemy import Table from sqlalchemy import event +from sqlalchemy import text from sqlalchemy.sql import expression from sqlalchemy.sql import func from sqlalchemy.sql import select @@ -176,10 +177,20 @@ def dispatch(current_event, table, bind): _check_spatial_type(col.type, Geometry, bind.dialect) and check_management(col, bind.dialect.name) ): + dimension = col.type.dimension if bind.dialect.name == 'sqlite': col.type = col._actual_type del col._actual_type create_func = func.RecoverGeometryColumn + if col.type.dimension == 4: + dimension = 'XYZM' + elif col.type.dimension == 2: + dimension = 'XY' + else: + if col.type.geometry_type.endswith('M'): + dimension = 'XYM' + else: + dimension = 'XYZ' else: create_func = func.AddGeometryColumn args = [table.schema] if table.schema else [] @@ -188,7 +199,7 @@ def dispatch(current_event, table, bind): col.name, col.type.srid, col.type.geometry_type, - col.type.dimension + dimension ]) if col.type.use_typmod is not None: args.append(col.type.use_typmod) @@ -241,9 +252,117 @@ def dispatch(current_event, table, bind): idx.create(bind=bind) elif current_event == 'after-drop': + if bind.dialect.name == 'sqlite': + # Remove spatial index tables + for idx in table.indexes: + if any( + [ + _check_spatial_type(i.type, (Geometry, Geography, Raster)) + for i in idx.columns.values() + ] + ): + bind.execute(text("""DROP TABLE IF EXISTS {};""".format(idx.name))) # Restore original column list including managed Geometry columns table.columns = table.info.pop('_saved_columns') + @event.listens_for(Table, "column_reflect") + def _reflect_geometry_column(inspector, table, column_info): + if not isinstance(column_info.get("type"), Geometry): + return + + if inspector.bind.dialect.name == "postgresql": + geo_type = column_info["type"] + geometry_type = geo_type.geometry_type + coord_dimension = geo_type.dimension + if geometry_type.endswith("ZM"): + coord_dimension = 4 + elif geometry_type[-1] in ["Z", "M"]: + coord_dimension = 3 + + # Query to check a given column has spatial index + # has_index_query = """SELECT (indexrelid IS NOT NULL) AS has_index + # FROM ( + # SELECT + # n.nspname, + # c.relname, + # c.oid AS relid, + # a.attname, + # a.attnum + # FROM pg_attribute a + # INNER JOIN pg_class c ON (a.attrelid=c.oid) + # INNER JOIN pg_type t ON (a.atttypid=t.oid) + # INNER JOIN pg_namespace n ON (c.relnamespace=n.oid) + # WHERE t.typname='geometry' + # AND c.relkind='r' + # ) g + # LEFT JOIN pg_index i ON (g.relid = i.indrelid AND g.attnum = ANY(i.indkey)) + # WHERE relname = '{}' AND attname = '{}'""".format( + # table.name, column_info["name"] + # ) + # 
if table.schema is not None: + # has_index_query += " AND nspname = '{}'".format(table.schema) + # spatial_index = inspector.bind.execute(text(has_index_query)).scalar() + + # NOTE: For now we just set the spatial_index attribute to False because the indexes + # are already retrieved by the reflection process. + + # Set attributes + column_info["type"].geometry_type = geometry_type + column_info["type"].dimension = coord_dimension + column_info["type"].spatial_index = False + # column_info["type"].spatial_index = bool(spatial_index) + elif inspector.bind.dialect.name == "sqlite": + # Get geometry type, SRID and spatial index from the SpatiaLite metadata + col_attributes = inspector.bind.execute( + text("""SELECT * FROM "geometry_columns" + WHERE f_table_name = '{}' and f_geometry_column = '{}' + """.format( + table.name, column_info["name"] + )) + ).fetchone() + if col_attributes is not None: + _, _, geometry_type, coord_dimension, srid, spatial_index = col_attributes + + if isinstance(geometry_type, int): + geometry_type_str = str(geometry_type) + if geometry_type >= 1000: + first_digit = geometry_type_str[0] + has_z = first_digit in ["1", "3"] + has_m = first_digit in ["2", "3"] + else: + has_z = has_m = False + geometry_type = { + "0": "GEOMETRY", + "1": "POINT", + "2": "LINESTRING", + "3": "POLYGON", + "4": "MULTIPOINT", + "5": "MULTILINESTRING", + "6": "MULTIPOLYGON", + "7": "GEOMETRYCOLLECTION", + }[geometry_type_str[-1]] + if has_z: + geometry_type += "Z" + if has_m: + geometry_type += "M" + else: + if "Z" in coord_dimension: + geometry_type += "Z" + if "M" in coord_dimension: + geometry_type += "M" + coord_dimension = { + "XY": 2, + "XYZ": 3, + "XYM": 3, + "XYZM": 4, + }.get(coord_dimension, coord_dimension) + + # Set attributes + column_info["type"].geometry_type = geometry_type + column_info["type"].dimension = coord_dimension + column_info["type"].srid = srid + column_info["type"].spatial_index = bool(spatial_index) + _setup_ddl_event_listeners() diff --git a/setup.cfg b/setup.cfg index 2ce7b65b..472fcb5b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,6 +22,8 @@ passenv= SPATIALITE_DB_PATH setenv= COVERAGE_FILE = {env:COVERAGE_FILE:.coverage-{envname}} + EXPECTED_COV = 93 + pypy3: EXPECTED_COV = 92 deps= sqla11: SQLAlchemy==1.1.2 sqlalatest: SQLAlchemy @@ -38,7 +40,7 @@ commands= --cov-report term-missing \ --cov-report html:reports/coverage-{envname} \ --cov-report xml:reports/coverage-{envname}.xml \ - --cov-fail-under=93 \ + --cov-fail-under={env:EXPECTED_COV} \ --html reports/pytest-{envname}.html \ --junit-xml=reports/pytest-{envname}.xml \ --self-contained-html \ diff --git a/tests/__init__.py b/tests/__init__.py index eee6ea87..f2b7eee7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,4 @@ +import os import re import pytest @@ -76,3 +77,10 @@ def select(args): def format_wkt(wkt): return wkt.replace(", ", ",") + + +def load_spatialite(dbapi_conn, connection_record): + """Load SpatiaLite extension in SQLite DB.""" + dbapi_conn.enable_load_extension(True) + dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) + dbapi_conn.enable_load_extension(False) diff --git a/tests/conftest.py b/tests/conftest.py index 1694e9fc..ee185773 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from . import get_postgis_version from . import get_postgres_major_version +from . 
import load_spatialite from .schema_fixtures import * # noqa @@ -80,13 +81,6 @@ def _engine_echo(request): return _engine_echo -def load_spatialite(dbapi_conn, connection_record): - """Load SpatiaLite extension in SQLite DB.""" - dbapi_conn.enable_load_extension(True) - dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) - dbapi_conn.enable_load_extension(False) - - @pytest.fixture def engine(db_url, _engine_echo): """Provide an engine to test database.""" diff --git a/tests/data/spatialite_ge_4.sqlite b/tests/data/spatialite_ge_4.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..836094330559d43c2c6548c4aab56d7b0e40aeba GIT binary patch literal 221184 zcmeI5UvL{qe%}F7BtS?Mw-JuU@ld7?mxe^#sA)8as4 zNMaWRxBw`TmvXsQ5>J+rir<_(cXp!7oSbH*dhdQI??p7wQ;W{%#LsI5F^tB-@37|1IMP^}f#t93IB%q^v7SJKAH?7ItTqi%L%Bti0IOSx^+ zSV@1dVq_NKztx3>q?N#IkrRu#YAt)utQ0m1X5LttzmZ;EnZ0o{nW&WSgWbEOnlYbQ zNncAZB@^ae%`DZj+l5kg&$zO<3YG~M_dOSLdy1TJ(akxxm|0#~nuU89JD+Cn-QQg2 zC>b}G=5Nd{-7&7G?-(P_112dC2k&Sy!PTx>1FKc-F zm|!esj4SDdG_=^Cow&X$^w(DW-J z6TL&(8C}b&xe_O$YLjD+UW?K8e0i28UKdH|9%it&BHo95`i z8fxVX6*HI5mP^I`hFTh)Om1m=h^;#UIpaT7jo9G)lc?MauLbG3Kn!(Ddwha0Jg$@B zF@zXxW1LEW;cq3ogFiu&Fz_0E~&b+Wmd|z z2qEiE1w2T08^WY~Ay=a6f~)ylZFif!*{PI2G1qIv$q?m2@@}b6W54J2z>1XX-yqq( zRIU^tsDd8v7NjW?-pmtOMP|-XLFiN60k1)vhQh`4aRH8mllhp3caOw|&Yg>Xf15Ux ztUmdDuh2wtYP0czQ(vonJ9~I55;IFglMRdkyu9l7N1^vY3(+w^**c`?2o1XrE3_K(BtMr*~-ioNf$pfV26ECnSjV4)oGF)6MSQp|fYBcdY(w1-Di6 zvt6^aZrZUfd)p(3L`obX6P6@o9er@JD>jr$MIYYfeFO=bw_wUtuI$@S$6Yxh)@%)% zTsjEQyhmm%iVb|amEo=XjIyB`-$NQCU(O>aVf=d#woV}QHc-jNev zvX!+cC{uRGL_tg$eehB=MsPp;h-bqB1lH_!ywI(p!tw%=04@`zDO<|trwPgmV8UpH zD~fV3GrBsv3c5M}3A{{g!pkI?l26=)m(&OFGQJEiZ)MKCXJe~L|R&UU^+0@L%sWZvtoA0HU7U!?HSuRXYUrbF-Cnshuj!m4Oo|u?Ce_=8S#|++E zT$*3G^U;~~&E;!nl4qvJC&$m+y{kyMFgbC7q>vNPX}H@pIOkEiUs&HdlN?uMzc@3V zBH33n^D7^n$(x&y9deG1r>3XQPfpBCoKMZnOiik6m`P1cP#Zp&hlgB%lSsAQJQU6> zF0H%=rdgK(kbONpyS$>Zc*?SPG7^gn#;^CE&Maoqt8n&tqfmm=HRp_LW|^JAT7cT# zf;&0MZ+^*spm22BD*UCB45jCCm6} zjYr;!Y(mB4K(Y0~BK!C(`2dVD?72KF-fbN|vZ14_gEwQ6v9;r#uj>r*e z$IA`!a6c%}IEwKp$%O7^O=yh5_}I+MdBKGDyW-#|W>pNxCu9P8AgK4EsJ7K8aigT( z`fgO~*r3{JvC+u|ns2_OL^fCP{L5fiq4lC!dv>Cils7qXCVC&L-u`ynR2DJMZPzXehXn5 z{%Sp*|L>@lMPrZv5!B!C2v01`j~ zNB{{S0VIF~kU&Qf!2SP@Y7R682_OL^fCP{L5p` z2_OL^fCP{L5eNB{{S0VIF~kN^@u0!RP}Ac2k|fcyU))f{LH5{1uIsH1dsp{Kmter2_OL^fCP{L z5HRCAy) zNB{{S0VIF~kN^@u0!RP}AOR$R1lmFX_y6016)QyoNB{{S0VIF~kN^@u0!RP}AOR%M zQ3P=RzoVK1jX?rP00|%gB!C2v01`j~NB{{S0VL2C0=WO*7OYq)5`2#O*NLB*JJrn-KZ_rU55_Z# znRMbvbTIlQ`Rz+Q|Ns0oDq4&LkN^@u0!RP}AOR$R1dsp{KmtghQ35#sZIy z2_OL^fCP{L5N5&EB#Y_Ka5@O`?zPj_lxciySpP_bp3YZP59rV-YL=;MJ}ZmnqEE136v>#Un$U~VZryOK6mX5U>%8zlRq6aBF^-@?~qV3rm>R#V8zHR!hfp^3rQ=1A(Rt~xoRzY&#V+S3TEC|nZJ=m4f0>4eSNLn&#W#N&@n9E}al%|#y% zQ!)Ew^-1a%1niR$bhVMCH&RM#!)XNjIXYWjKH}n^GHH08%cC)|DnViOs^Wzvqc=wC zmi8*`VWclMG&2)DIA!}cmCsbW$)kc0)Oo0HsnnDLASE0P$c^tjTy1~HWNw6WOzY)x zC7;a~w#`ztP%bHQR4awN;xgCW!D*1&5cyTD(L#0_w&lcWaJG6H^d4zZayy|d$(2#Q z!Kynj^5lxyin)=k?pKWwnq%~OqIW1eqwDNyuDYS?*rV5Cv^^i+_8G6#gH)f?o~n^9 z^jJY1FpLWTNg85!(R$My9azJ+oS|ao^4W5!xZhAq!;{G^ZI6F-hX-f;r>YSfoPQFP zAKwK`Fo>U!eg zH+o}3latW{2&y1Dk^~O1*q-82T+o>!1Bk3o*?aj$OD6gXbeP?&r4P}eJfYZ zZWIeUJ7zwc+cT?NQgvs`tdwmLLe`xMc#!Ni#4!0nu0+)ZSM#~r?lya~Qz?I9uGff@ zA&STQChPSN83vE*%7D-Xqfq#RxAn;_T|m;(P{byOGX7&!q;q-H!_= zBp!C}rZ=I#b6IQPF~Hqz@5l)-6Utf?lqox8dMc)jK6oh_Be)-a#Is=m0&8|VUg%a) zVR->b0GA1q<1OX$(*$J&Fq^Z&6-7Ciwp^WEU05-OS4*Fk%J)md$pmqiQ#F|GiI18D z^SoF23i9Sgp=9QVNBfWcG?Iw?Rpi*cM6U1u_Wq>rcf0>q??(UEyN4rx1xddJ|9b{M z$9~%Ni@Vc-dl&`k>1JegV(S&C>WAbB9$gxW4NXr+e`AogzNEowmMBem@uZ*xWtL`r 
zG@aWQN>#H04asKqPA_oVZ5)2iji=!ME92vD)Blgbr4K;~w2Ivw7)(L-O;nLLi)Q#L zxM7sU`|Fgd;3VosUV?kJJUwX|aVV}EzHy^~++y_y8^s#I`d4~~?%&c0QZsFUm6~Ec zTPSUm^{Zc8p06IsIvXNh(ow7?|Zs)Xy;}h_`-zP%_D+a6fgHr(l9uDCLSQ z`g6paOjyI9vc}SGv6#J=EAF~sBUi9cszHye=riQYyAX5e(}@ezr>+&tYx>k}Sp2FM zzURz_lL=-Htg5l65qdBqLjVW0xQsyCn7%vY@uX)FSp6r$MCoQqr)YCk>9V zcfKstEJU=rnuUq9S~QI+%cX*-e=0tp+uN>U|rPxq99etWodw8zn$UcFu z=VhVG_~mTY?PH&5uFq(DOr7B&V8`LB7rITI>V@7T>nGIcbNi*rOcyxDT%#4%{p!AmqQ^ zz!ZYSn2OFqE`ROd&Y%^Sz3L}nu3xDU@%?frh={owL#r6anC8Zqd!)l?12eVa1CB3J z)z79UlE43VJo0g*>&yP1_y7LUpB??pzMu5fVt*Q&?)}%j*Lwb4_qQQ2^v}U-Z^TZm zo{K+8*D8fg2VlHlIl>!(Q>+iUAwn%juEXcNdpdlEd~i8%lFNSv7W2? z@cPwnTG>Wqk^nEAxibIN-l0~F68&GdwElV^ty{}IvVa=Wx>W_!`g~?Ny|mJtvh6jf zPE~e$%xjgh8?{b6u)+C-)=2x)*JDG&Gttkx=>|<9zh~{blXYixeDa1ZzWL0R^aqAe zfT&7cC}HiZf8Kk>y4wtQH;Cdsf#YH71%$6F?q(OT?xuUia9*=O$*zCG5vFK93r(7kf7PdCI+!c;G!?2hG;wdf;A1e%@TK%%~|>jwBWr9tL53;COoTW?}G>7N!(qBy)|c- z+I!D&I4M`Dlq=cw9Na=I?ixlTf%Fwvc;}Y!fG1GTR_$A`{Y2-E>{P(&YkmjL)cuTZ zX6O#0&_v_m?5p&^+4uYWrYS@}KDRh6D8wgY=o!gYA)hy$B~+&Y1d`;0A)TobYnz(t z(h8nQXe~StIWR&W$bNcT8x9Lc8n5_EL**15X=oE)d-%;$mNdF;X=t;moi)A1tCBRd zbx>)9*+g(~&~vPMX^_LGDrE#cSLBqJgrO-fu70QQ%zaM0(DaoH&CgtJFDgCOn6Awxy1+`ulzwiFJ)dVy_&T?;7j>%VU3h z^w$$Jp(p&$QNg~SMyF1FbT0l7KCqx(n~;vZhFqRd*TQCP+zN%1vB^aWsWxlH0cIBE z68yJeOe80hQ|>au;tHK0%Z27M#t6+hYE73PeRU*u>h`(#<7pi;NBHEp8SH(NGY!r) z`$QD;Bf140CqGvQ!jg1s~@sM>FQw;;kuM3Ym=R-@s&TP*(~iF+%!M%@}FEs zTh;*On`jO7SuT<5V1~AGHBi0>XQ$t~iCj!t7nG7MZ+GzaH>jkaz)G%qgz)Gqq{q|M z!zA4buh;1El*gvS(r%-MLdYpKpWf1zinmcieeG6j; zjHCgqkM`IHSUnf)LI3^v^liGTCge6|m)TZEohUo$F3FMTfE+J_El&5dNlAI}Y>XbX zon1D_zsUrh8PVg?)=C<^YVfCSlZJDu7Am9XXcJeL7H_yWw!;|Dt?WG;NsLv?g3C(7 z9;D93FbK8xx&j8F4_lTIrIdJIJAE$Qb?Xs1>8%)!}gjO5^Kr zo35%l7#c1B6S6fKdygq7Qe9LbN4I)Qi|X1#90X8Tq&^Pd8ljI^$sZWx<{D~dn@!) zTdNhPRGR6N3+~kw2G*k+kg6;4(Ic}hgd{0Hr#x$IA326IQ~G)gZr&$Or2Qr7u~VDVV1hWecsVBS>nHSB&Jcy%@YVW(o1AT8N>1B&ptx*O?GI!aq_P zTyGYAj&`_qdX5o5c}+MRfAd>}$;tSWRoX|zlQ-F1u_&FO zA(!|C2t_it1d$Wh42;~EvuXgj+!Hu(7{^LsOAIudwW5$4)5}0;IFw$#v=b79dr;Lh@MOz5q$|7ps)>6%4cldq>jfmodOq z@-KM?{tFr4GWV9`R(v;un~D1p2B#+b+ZtFV>#b9h_{9xwCZVPrJT?1FG-Pq!khZcv z6b+a8WgSTiw1pE3>8mTox8dXL@~RDNkMEHzYvgp;h^>^%@UW!WM-SdgS)t=+9+$`l z@o?j_RGPU)d5mV+p7l7*>j(YAkrCDO|Iyw>X|N}sM$d(^YTWX%qU@x1e4>QAA2c3%DBA`@Rc1fo8WqZQ&tqZYJ*Jk2R zg*el*nc_%LbwEp&AUSfSt3fG6Ri&Y%sH#XUMUQdx(su6wkA(kEcvajhQk6yVJ2KID&TWH!=QjSBb18iQAO(mQ~?AW zZmbPvq}II(#hH>_Ra>SnPgyc$J8NEtI%(|`o$^eotev7pHA#bNz9@5? 
zLhz}2{x*;Ju3 zx5sDM_&^pIoYvwFkq2L%P?ctVQWzyvQ}@85gm4!;(bD9Rd3F%}txM7xth3(a@U}W; za5>O*4XHGf1LvZJY|K43r4ZhOs47yg90+!VaLrj+A71)=t!aU)^WhpDk>+&3V#h_czbqZ_={N zytepE*}6J{q}jc`l3cd@Bt)S9o1T1fHUdN;hVqf5dcTD!m0JrO;5E^sU3d@Pcqevh zZYKWl4j~_T9fq=iq}dZ zQt>3)GrQaGT6#~}{#UJ3YfSQalt6m7YGHckCV4&;2Nt#)$@tXPVZv_0<_$ zW@($;G>I_%Ap!EoZtd?C zw0#13@11JvXE*9v*tp2Wd37re;a}?3sgC)aXV!OIpT$#~SC9h%ZntfprCJx`K4tx} zuxUfBvp4RtptAy^`5JWBIAJh!E?XlZIsf0&)f4IeW@01uH;()>_!WQteu#o%;orGw*@x)Txs*XIn2$`({0Nm zX5hdX90)DTd|$x8IebU-x76QL3zoLqOUs7VQ8LZ$_0HrnH~)|R*GOVLws+*u`Y*w+ z`14{BIQTdNpIu7FpR!MN!XLCOC{A5i?v}Yr`x8xYJj4Af_R`SnTMzSA-0169b%=r2 zuR8UZ>)E9b5l?#S2;ijLk*sl}J8>(T(?kf;dY|QmEBG!?GMoG_z{i%;@h6`;yLnXL z?nalr^6m#;t!+;gohfVA%utHvinm$J9kZubSHW%G)H~R^E=~U3r6xzFEPAZ74kSsJ zTeOmDV&WAdNLZ?1*;o`==YD~GH9wWEQ>WUE(8WJyT?lE5CP^MNqsq8Jzc%F!dNU~1}8I3eMdEPSkY z#}K}V+(!`@aF3z(NP>MKT#{J6A)gf1m22koBYzMDbQ#Oz}b#S+DxQ^5#RFdX;vFyqh=q(B<8{$$}txC(<%`U%ds# z(ZQiRA8h92P1fpL;mo;un5?;Il0GBJ(y30I*v7LD7*jgWxhsn8>%Y-t@q)9hYfO)lGh`r=J0A4 zxh-;X{-(A>xy@T@jUckAO-r}7Xzh}1inDoX!lwIJn_jw_T{0``Vglwov1gN`VtOSMApW_{=BJP?4U`nk9W0BhlN;?C_KIB}zLSzYh5$QIJU@eg2mb2|QH(s78jZ8$c- ztxa0K+@tVXVcV3(@6B~}S?2$^0bE&Kh&h$A9-WtPA1ahr}bvfsZD)(|^Z8{XP;{xy?H}w@I~gkJfyK@K8)`iE^8_)EYsk7CEX-OSgPm z+F9eI>jY~p@WWDFI$zAPb*evR_Js(VX|Hg~ABtZ-^r)jn4%(pF_q^IQ-(ODrtLI_zi#B!~frDEg!TRZR{3+X_wWs_{ZsK1^k*-#=`ZWV7 ztG%S5@R*(zF3rLzxmDod-|fPZ9hS-JSk+@m2>ONf%T+>!O#5Db^7sI5-J1p_KL7`-TUsvnIPsfmU z58)VEuHMsktOZu8#6};V%N1vXL(APQcMlGupH{gNl9P2)EdL5Y%*|GAF&vz}D=R*?D{fh66rbCz zZFv;0E*nnqI!HY_-{Rs8<+8crOikew->i*?p!mj3Y>na%j(Qx6#H{=`oavZQfL~zW=WlN&LOoKkpgu|FEmpaWhLa z=mjHiFtY}$taI@v_qkJo;0EadNXPM6y+w1pQQ-~A*Q9DHVM8`IvGS=mgj@8fO+8o( zj~ZgM7Rv`1u3g{Lu&vg4JJv3GW3_HlZLB_0ZNgUZR)GRmv4{|{3Q6{oz~Faba|^V~ z{~=%|J8n~o6jdC{t)pA6f)3TJyyFsZG^ZqK+4gGHy${#+qK*DC6>MyaM6K>5bJT~I z?jw1%Nouimc`GX~ztX^xb=x}2uL@mKZ|YeXYkv8a+$yk(^_f?bdeqLaM+#OKX1e8;V4X389URc!BCwcU zcR?!Ggf?nPJJy8o!*1Dz{BJvK)Oug9jX}I_YfRLu*0oCTwa#VB$6E9M%}D=v;_t?O z-gBXA^97r1;^TAzfrEujI^}=*DszT#u!>HbxU-iX?YzTN`f*-7TgJ@<{k$cQQu3Hwk{y}OY)C6uDkWMG zmF6sJP&sR3Q=fVRYuAnWd}`|u?KWZSp=-AZTidVQ*G(xd<+LlrrCJXm?L+Zyn5#hN zf=QOnalgcxu&$SuZLiyj$}Q>m~wpe1lk^4u3>7Itz5YTkY%%8%HPJ8$g&9&EvGNr*t9Nh ztXqNXX%@(;wwoZ_uWzi6fK|o@^Q~c)PE@A)>6o`p^08c!J^$}dMY>_q-}iCv_i*mt2~*`4 z(uUtS@kwm(^5yt%PLu9>&s?vSE7@Xh->g)*Ka!@qZhOo3=hL?hT^?h0IkB8xNYAYp z!^=ClS|L|l$*mP3(wtp3=$*2){T(xD2uC|1%{}%;(pX=!_sd9O=~5DEtJbpj%t`@1 zG@Q4p=Po3TO8Gt%-!0Whn)4=Un0qy|RLgD`O4&V<#J?9(_odW*H#N5>mL>0nRG}=S z?kBD;E#8nHf~THX&CD-mWO(mGJ9*fV#veposPCcbnt$35`6JVx_xU19??z zh4re0?tS?B>fKy%c`LW$)twufdDwC53oj5mWf1MMJIEDcgQ-;f>1PD9*saOpi1&)# z4bY{a!*c)?GndbnOGVn(oHt2htB}u|CDMe}Z@C|Y=f7m|kj=hq>JbSY_6D+M|rl7HcyH4MYYqw?ln)+~|X*h*FyHV$eS5SiD{??rxW=q*Af> z@zD<7p~UNaAN0gPVlnBKht?OJ`;sV5(KwP8Ne=0fEYI<}O`z)6kx18Hki)CEleJzh zRcn=8p;UVt1_M5Z;dWdaS+`b4l+0e|O(PBr($WPvIZ@K>|ns2_OL^fCP{L5@>+H{q5M$`SbD5U#3gS>7uy} z%e}eE{x!3_ZNhe;Syf$g=Kf17tFsHn%Iv!fX+xFKSY4jaTr=#&4mZchXs#F=dgG1w z=P`O?dyYI$X5IYn${X<+#2fG_Bcp$F_0!nkJMYAQ>#}DhgZ~S`!m0+J;N0A^gwKET z=Hab&lq>l{DOV&@K>Js+!a*j{&iXd@HffZ0i^VLR*U(8e_cm$349MIxD_pX8C23ew z2`+!aI}6y9Eow^jnv+k1 TwrkxLTeM{@s$6g2T<-q^Nn-(5 literal 0 HcmV?d00001 diff --git a/tests/data/spatialite_lt_4.sqlite b/tests/data/spatialite_lt_4.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..08db5c08d408ff89fd1b9f835852f40992d30166 GIT binary patch literal 44032 zcmeG_U2G%AdAp=6Dzq=Q<0$l<9ebnLwPaC@M4e72HwRadXj!;B>OxAs9Ng=cN3Nv3 zqez-dolXVvaCffj!UdYIzBCVMfj%S!TA=7d3$#Vye&07U z%bD3-ik4&hLJx7KxZix=H#6USzccII>UXdBJ3>N7 zefsZM(*QQ3(e(Oby!0UC7-Fmxuc+d*m1s#$B zF-)K+uBFFwv$OV%UhP)XmMcfD56^&8G+dAqbxsyOAXZKu3* 
z<%)ByvUzp2a>Kb?y5X$uY;SFrf!ozmc{`t_0k}9aXS?)0z~?mitR@d?&a+N*)SiFf z1-;rqM{l_6bzLTf*p?2?S#6oQ|F)e$aX&baW#e|eT{k!mxM0L(I!-F05kwMnPY_2w z3o5Z6JAZ13WsuKyJ7aui)jY;pMAvWj;m%Mgqf20R2HK z^3GMA$Of^ z)Ov>pm!Rq5d1toOb{pPquu#N&4SLxDSSh;w9Dqp?!W~s8Zx!y;) zu(*72VR1QMT)9{%E-e>}i%S<4^N^*yxmDTRzVY@{=~{JTDnGS6zc@d2^JYxTg~j3p zY{4w21ifu^J+F1-*Y~IL^D*cbSLPQGeW$#+{q|JD+XHmKEX*$~pI=%mt`wIRR#wh0 z#*JZRp;(L`!+AD_+(+V*u_ot&}}siOU-)3%lVi}ZE)V?IU# zUn~i1rzfDwEiL9=)KYoM_MPCc4n&Zn^_qN7zrduAd7IZtDk^lz(xW_ z0;4Gb6aPnZUCpeF1cs4-iT}e$$>10XjHU!k{2$GAHM24j7)Amn{tqK1gJUExni4Sa ze>B(C%*se$7zvp8Ka7+Nj*-A zj0A>}fQkRZNXg(B35=!$O#C0sbv3gx5*S7TCjJj2C4*xmFq#rD_CK2IYG!35FpLC@ z{SPB0gJUExni4SfKbq@mW@RKWj0DjBQ|W)P(*Frx=3^vaBruW^cw#IynR@CM?D2`& zF?-Vfh#m!q`~NBXLn~$3AKJI=+ao#oM`cnw?n=_*zwi3qQQ$Uf$M}Hi9z1?K==pVc z2zWgFX_yf`3jFmmgPtiKWv4@)eE9t*GP$`q`w!;m!E}1Ne*I{TYC6+w|xXpMn7!z1XFnO_q`rIU;Mytx%Zs)tsQv6xdJ?n0BiSRFag-d zT-z#Fw=1jgz;dT^C$b#MbIYgYspcILA3e7_?`Y5bo5^P-zPmAe-`J{@HaE)jJoM>7 zoSaJOjZ&plUMp4gUIJ%&*Oh5^^=Hb@I9p|By>z7n4}@P^t*))Emu8s#|I|vIwSQ_K z*+-9xsf_+O20DXoFWwo%!SUhl@BhFzq|hpx_kZB~RPg;D^fr>dJI!bD z;ppdkkR(OTA#jiGFfhgcwEYt+{g?KC+dr{A+l$)2fqZfaY$m_?N6(_&1s#HKZlMUj z`S^J5=5kUEp}SgqyU>30JwgON^Kdzv$t^G2@12Qt>Vy=_@QH!$o#=EQ|JG_{?akH7 zbaCmzjFi(yyrh$W2+zlExa}4^fm?r*P(HiwHyU0mra?-%(QVh+$dr(M3a5=CRlW8} zOk{_y3-nYe`{geUPV+EF)H}jLG7kPY=Ko{X&sqMz3qLxaT-OJ4uz&lN75jdgqORY# z=hDD1u*Hw?Yh2wn%j>1@ISK)$Gx0=O7AfzKKah$PCS`hUz?veF;g|7ib-wPb&Lu=P^wgBug18}G7$AbuS2 z&}@uuL}viC;c0Y(Y6B5N7U$(ygC=xxkPx{&b4sUOU&`&FJ7q<20K+8rmotNnIL-ly zZ9-j;UwZgEPWs&94$5nx{_3F!Ts$Ad`zfztMPamum&)p{R(^3VH) zXET$FixWS{;A-71x}C#=4sA`hTVNltAg(iSZI-S(F%-^fHR~v!D(ptCL5DBaa+%!A zFHhXhkfK)E>m)bvO_}oVkbh-+XY~qytEG(K>{K_)8_rwcJ`QV5xn+9hLGf#u$!~pY z;@upMK3qZZcE$)G>@*;C!w));2(pAuw8}t^S8q4GkQy%p zyjcqZf7h?W*%xf#2@Z7y%yB8s3k+smG={)C^18LXK7Rq=bl|-pt!Tn~X`5jO5{zRt z+rR}5;o2PyRd3+$Og;?)Q$vAqW6(|GhTG7I-7n9=v!rL{gxFQ@hpj z>L@R_1(L*rLzthmI`ssE1bVO47(~sQ6BOfW?T&ZFZ+X-4dIRdiMu!6}^!7csSM$Gvt>6p0qRT=Y)rq7%|jQ9jYZg(*h2)&Bq!fd$P4gxpeJ`g75 zw?H3R_<*ZqI(5B21=~N`_qrY`WFbj+oRdvIjy%rtwRxh~AkqZsppUWIZ%Yic__L?Y?EQakF(Si zTbXl2+5@;jNMvY&R))zjCQL1f1%({~F&vSN3NGSWaESM1JR@Tq6HyqCFmT_kH4laN z?1yuL_Az1M98_K$&!C*2mYN9_f09%QCW{c6P^%2gX9-V49cpKi-o-5^{UC}wup-L2I<4Ka|R9JA4_!A)-}LkTAMd2~mFRPa#Gfdb~Z zO4bxgK!GM?EIe^V8vIGhK`-*9oM=pBmQS;sQsg5m1Go!H@~^ zjwB8gwP~84AlFBG*FR7V`W>$jIuD`L2<urxTwR)@O0|YtpH+FnWg+VWyL~=s^&or#JWW}pRNEnRq3BdB&MZ}ju-xDRQlSB%5jM7 zA9W`}&s9q){I%X`(|~A)%>QlsuPyjzKEGZP_@mdJ%S^(V$)Elv&+y6F=)bjWJH!FQ z()5NW_M~=0<=TYF@u9atbI6Sj)GGR)le)W?`J*aIUfILr&H@JP64p827p& zeGZc_VxvHXf7TMSkZCkyqyFLU5Kn9r^SvMnbRk_50`E_0MvEOl9X5-x4lvp<93|s$ zE@3PW3jq?23I=umCf5J9^*s()7J`<&j=sc)w1O_r-{sBE??69I> zkzL0z+p5GtLqi#|2bO>9=NcQ8t(|MmcWy`{j#)am0ZbdhR%N|Z0XhvVIe}O~^5{T%-Me>$a6zfUi&W^TD@Ypmg;|uk+w~(M9)qW!>udUEN}iw zSYmHQLSh>$94B#u7dA^`pQo^X~+9f1TiG|-Rf57xh)nVg%O2$zu( zOR!O~l*+ZXoJw2LlE$naS?S5E%+RF#rk_ehY7!Q2)KzBmVwo8RTW8iH5-_?c1*OYS z6tIYE0vH_Laue{wCY{D_)nS)1yb(t2MQRju%AQt#Tnbp=O2HkL^Ec4s<&*q@Dk6^G zL&b`!2=WqWoEQFo+FjVS3#kNvw=2U8krXJ$RLhqTRR!y~ip7PfveBuB!OG!3v$zBV z-jn8UUA{#V+N(y*nxih9yc9c>#hJk`(q4pwXC4TAG|vC!+g3zCNthE(3@81u_y17% zVv(axfhFq+l`xlJ5|z3nCm#PN)&*i(T0fLzk++T{E+Ru2V*d}PAED>Ub<+MHqfHPF zt0b7Jq5b1MxZ89;z)9OajOO!H&n7RHn`9i#0z@b{D8wrqRnzTX=d;ZMjF;aL5+W!d z#DQCC;Z_Yuocaf9O}r;p1|pu~I#_24NWlrWU7Gy^6n^^JEZnNK;299&F2ttWzeXM+ z{J>n`zz8A2zOYmAJ8En7eJ+|hJE4VZIl3V+w8$X1CmBzf*r7MHvHlPivtNeo8kp#T zVdN}`^`n3ZSMlk5KnjeHNIm)uO@_@tcekQBmX@1KJSA&XY*(D~)e}qy2Mrpe(4c>d zaYoH{|5ny!0s&KACJ6;4zD&~7+V7lkQ=$c~o` z9@=CWxUa(|2!Hndn=fW2mzO7ga8nFi@kNP1eNGW7eP&hYZ3dUS9|K@aJP5#ub$2gD zt;1>L{qPuuKAe3RA$jmaNrW(wnJb7aF-N@E$ik$ihZewA;)ock0%YS%^6Wf8z_AQK 
zVLV`DT4gMkP@?m}Sc5o;0%|ZPfyFo}XHG08hNn(AsigK8YK)7?v|%fw6bT8ARs3cx zNr76-Dyco5T8iYjL}~`lYp!Tgg!ahqbj5BVbdy2C-%qiq()>r5Q!szTr>`N=Kx49q ae!9xXuhCt{|D#KTAulwb0eT$X|NH;)dE8?F literal 0 HcmV?d00001 diff --git a/tests/schema_fixtures.py b/tests/schema_fixtures.py index 41b195f4..cc8bd042 100644 --- a/tests/schema_fixtures.py +++ b/tests/schema_fixtures.py @@ -2,6 +2,8 @@ import pytest from sqlalchemy import Column from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.sql import func from sqlalchemy.types import TypeDecorator @@ -169,3 +171,20 @@ class IndexTestWithoutSchema(base): geom2 = Column(Geometry(geometry_type='POINT', srid=4326, management=True)) return IndexTestWithoutSchema + + +@pytest.fixture +def reflection_tables_metadata(): + metadata = MetaData() + base = declarative_base(metadata=metadata) + + class Lake(base): + __tablename__ = 'lake' + id = Column(Integer, primary_key=True) + geom = Column(Geometry(geometry_type='LINESTRING', srid=4326)) + geom_no_idx = Column(Geometry(geometry_type='LINESTRING', srid=4326, spatial_index=False)) + geom_z = Column(Geometry(geometry_type='LINESTRINGZ', srid=4326, dimension=3)) + geom_m = Column(Geometry(geometry_type='LINESTRINGM', srid=4326, dimension=3)) + geom_zm = Column(Geometry(geometry_type='LINESTRINGZM', srid=4326, dimension=4)) + + return metadata diff --git a/tests/test_functional.py b/tests/test_functional.py index c3020e52..aaf55118 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -601,21 +601,59 @@ def test_insert(self, conn, ConstrainedLake, setup_tables): class TestReflection(): - def test_reflection(self, conn, Lake, setup_tables, schema): + @pytest.fixture + def setup_reflection_tables(self, reflection_tables_metadata, conn): + reflection_tables_metadata.drop_all(conn, checkfirst=True) + reflection_tables_metadata.create_all(conn) + + def test_reflection(self, conn, setup_reflection_tables): skip_pg12_sa1217(conn) t = Table( 'lake', MetaData(), - schema=schema, autoload_with=conn) - type_ = t.c.geom.type - assert isinstance(type_, Geometry) - if get_postgis_version(conn).startswith('1.') or conn.dialect.name == "sqlite": + + if get_postgis_version(conn).startswith('1.'): + type_ = t.c.geom.type + assert isinstance(type_, Geometry) assert type_.geometry_type == 'GEOMETRY' assert type_.srid == -1 else: + type_ = t.c.geom.type + assert isinstance(type_, Geometry) assert type_.geometry_type == 'LINESTRING' assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_no_idx.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRING' + assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_z.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZ' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_m.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGM' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_zm.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZM' + assert type_.srid == 4326 + assert type_.dimension == 4 + + # Drop the table + t.drop(bind=conn) + + # Recreate the table to check that the reflected properties are correct + t.create(bind=conn) def test_raster_reflection(self, conn, Ocean, setup_tables): skip_pg12_sa1217(conn) diff --git a/tests/test_functional_sqlite.py b/tests/test_functional_sqlite.py 
index 885fe795..afdcf2a8 100644 --- a/tests/test_functional_sqlite.py +++ b/tests/test_functional_sqlite.py @@ -1,12 +1,19 @@ +import os import platform +import shutil +from pathlib import Path import pytest from shapely.geometry import LineString from sqlalchemy import CheckConstraint from sqlalchemy import Column from sqlalchemy import Integer +from sqlalchemy import MetaData from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import create_engine from sqlalchemy import text +from sqlalchemy.event import listen from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import func @@ -14,6 +21,7 @@ from geoalchemy2.elements import WKTElement from geoalchemy2.shape import from_shape +from . import load_spatialite from . import select if platform.python_implementation().lower() == 'pypy': @@ -178,3 +186,302 @@ def test_insert(self, conn, ConstrainedLake, setup_tables): conn.execute(ConstrainedLake.__table__.insert(), [ {'a_str': None, 'geom': None, 'checked_str': 'should fail'}, ]) + + +class TestReflection(): + + def _copy_and_connect_db(self, input_db, tmp_db, engine_echo): + if 'SPATIALITE_LIBRARY_PATH' not in os.environ: + pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') + + shutil.copyfile(input_db, tmp_db) + + db_url = f"sqlite:///{tmp_db}" + engine = create_engine(db_url, echo=engine_echo) + listen(engine, 'connect', load_spatialite) + + conn = engine.connect() + + return conn + + @pytest.fixture + def spatialite_3_conn(self, tmpdir, _engine_echo): + return self._copy_and_connect_db( + Path(__file__).parent / "data" / "spatialite_lt_4.sqlite", + tmpdir / "test_geoalchemy2_spatialite_lt_4.sqlite", + _engine_echo + ) + + @pytest.fixture + def spatialite_4_conn(self, tmpdir, _engine_echo): + return self._copy_and_connect_db( + Path(__file__).parent / "data" / "spatialite_ge_4.sqlite", + tmpdir / "test_geoalchemy2_spatialite_ge_4.sqlite", + _engine_echo + ) + + @pytest.fixture + def setup_reflection_tables_lt_4(self, reflection_tables_metadata, spatialite_3_conn): + reflection_tables_metadata.drop_all(spatialite_3_conn, checkfirst=True) + reflection_tables_metadata.create_all(spatialite_3_conn) + + @pytest.fixture + def setup_reflection_tables_ge_4(self, reflection_tables_metadata, spatialite_4_conn): + reflection_tables_metadata.drop_all(spatialite_4_conn, checkfirst=True) + reflection_tables_metadata.create_all(spatialite_4_conn) + + def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_tables_lt_4): + t = Table( + 'lake', + MetaData(), + autoload_with=spatialite_3_conn) + + type_ = t.c.geom.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRING' + assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_no_idx.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRING' + assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_z.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZ' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_m.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGM' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_zm.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZM' + assert type_.srid == 4326 + assert type_.dimension == 4 + + # Drop the table + t.drop(bind=spatialite_3_conn) + + # Query to check the tables + query_tables = 
text( + """SELECT + name + FROM + sqlite_master + WHERE + type ='table' AND + name NOT LIKE 'sqlite_%' + ORDER BY tbl_name;""" + ) + + # Query to check the indices + query_indexes = text( + """SELECT * FROM geometry_columns ORDER BY f_table_name, f_geometry_column;""" + ) + + # Check the indices + geom_cols = spatialite_3_conn.execute(query_indexes).fetchall() + assert geom_cols == [] + + # Check the tables + all_tables = [i[0] for i in spatialite_3_conn.execute(query_tables).fetchall()] + assert all_tables == [ + 'SpatialIndex', + 'geometry_columns', + 'geometry_columns_auth', + 'layer_statistics', + 'spatial_ref_sys', + 'spatialite_history', + 'views_geometry_columns', + 'views_layer_statistics', + 'virts_geometry_columns', + 'virts_layer_statistics', + ] + + # Recreate the table to check that the reflected properties are correct + t.create(bind=spatialite_3_conn) + + # Check the actual properties + geom_cols = spatialite_3_conn.execute(query_indexes).fetchall() + assert geom_cols == [ + ('lake', 'geom', 'LINESTRING', 'XY', 4326, 1), + ('lake', 'geom_m', 'LINESTRING', 'XYM', 4326, 1), + ('lake', 'geom_no_idx', 'LINESTRING', 'XY', 4326, 0), + ('lake', 'geom_z', 'LINESTRING', 'XYZ', 4326, 1), + ('lake', 'geom_zm', 'LINESTRING', 'XYZM', 4326, 1), + ] + + all_tables = [i[0] for i in spatialite_3_conn.execute(query_tables).fetchall()] + assert all_tables == [ + 'SpatialIndex', + 'geometry_columns', + 'geometry_columns_auth', + 'idx_lake_geom', + 'idx_lake_geom_m', + 'idx_lake_geom_m_node', + 'idx_lake_geom_m_parent', + 'idx_lake_geom_m_rowid', + 'idx_lake_geom_node', + 'idx_lake_geom_parent', + 'idx_lake_geom_rowid', + 'idx_lake_geom_z', + 'idx_lake_geom_z_node', + 'idx_lake_geom_z_parent', + 'idx_lake_geom_z_rowid', + 'idx_lake_geom_zm', + 'idx_lake_geom_zm_node', + 'idx_lake_geom_zm_parent', + 'idx_lake_geom_zm_rowid', + 'lake', + 'layer_statistics', + 'spatial_ref_sys', + 'spatialite_history', + 'views_geometry_columns', + 'views_layer_statistics', + 'virts_geometry_columns', + 'virts_layer_statistics', + ] + + def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_tables_ge_4): + t = Table( + 'lake', + MetaData(), + autoload_with=spatialite_4_conn) + + type_ = t.c.geom.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRING' + assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_no_idx.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRING' + assert type_.srid == 4326 + assert type_.dimension == 2 + + type_ = t.c.geom_z.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZ' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_m.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGM' + assert type_.srid == 4326 + assert type_.dimension == 3 + + type_ = t.c.geom_zm.type + assert isinstance(type_, Geometry) + assert type_.geometry_type == 'LINESTRINGZM' + assert type_.srid == 4326 + assert type_.dimension == 4 + + # Drop the table + t.drop(bind=spatialite_4_conn) + + # Query to check the tables + query_tables = text( + """SELECT + name + FROM + sqlite_master + WHERE + type ='table' AND + name NOT LIKE 'sqlite_%' + ORDER BY tbl_name;""" + ) + + # Query to check the indices + query_indexes = text( + """SELECT * FROM geometry_columns ORDER BY f_table_name, f_geometry_column;""" + ) + + # Check the indices + geom_cols = spatialite_4_conn.execute(query_indexes).fetchall() + assert geom_cols == [] + + 
# Check the tables
+        all_tables = [i[0] for i in spatialite_4_conn.execute(query_tables).fetchall()]
+        assert all_tables == [
+            'ElementaryGeometries',
+            'SpatialIndex',
+            'geometry_columns',
+            'geometry_columns_auth',
+            'geometry_columns_field_infos',
+            'geometry_columns_statistics',
+            'geometry_columns_time',
+            'spatial_ref_sys',
+            'spatial_ref_sys_aux',
+            'spatialite_history',
+            'sql_statements_log',
+            'views_geometry_columns',
+            'views_geometry_columns_auth',
+            'views_geometry_columns_field_infos',
+            'views_geometry_columns_statistics',
+            'virts_geometry_columns',
+            'virts_geometry_columns_auth',
+            'virts_geometry_columns_field_infos',
+            'virts_geometry_columns_statistics',
+        ]
+
+        # Recreate the table to check that the reflected properties are correct
+        t.create(bind=spatialite_4_conn)
+
+        # Check the actual properties
+        geom_cols = spatialite_4_conn.execute(query_indexes).fetchall()
+        assert geom_cols == [
+            ('lake', 'geom', 2, 2, 4326, 1),
+            ('lake', 'geom_m', 2002, 3, 4326, 1),
+            ('lake', 'geom_no_idx', 2, 2, 4326, 0),
+            ('lake', 'geom_z', 1002, 3, 4326, 1),
+            ('lake', 'geom_zm', 3002, 4, 4326, 1),
+        ]
+
+        all_tables = [i[0] for i in spatialite_4_conn.execute(query_tables).fetchall()]
+        assert all_tables == [
+            'ElementaryGeometries',
+            'SpatialIndex',
+            'geometry_columns',
+            'geometry_columns_auth',
+            'geometry_columns_field_infos',
+            'geometry_columns_statistics',
+            'geometry_columns_time',
+            'idx_lake_geom',
+            'idx_lake_geom_m',
+            'idx_lake_geom_m_node',
+            'idx_lake_geom_m_parent',
+            'idx_lake_geom_m_rowid',
+            'idx_lake_geom_node',
+            'idx_lake_geom_parent',
+            'idx_lake_geom_rowid',
+            'idx_lake_geom_z',
+            'idx_lake_geom_z_node',
+            'idx_lake_geom_z_parent',
+            'idx_lake_geom_z_rowid',
+            'idx_lake_geom_zm',
+            'idx_lake_geom_zm_node',
+            'idx_lake_geom_zm_parent',
+            'idx_lake_geom_zm_rowid',
+            'lake',
+            'spatial_ref_sys',
+            'spatial_ref_sys_aux',
+            'spatialite_history',
+            'sql_statements_log',
+            'views_geometry_columns',
+            'views_geometry_columns_auth',
+            'views_geometry_columns_field_infos',
+            'views_geometry_columns_statistics',
+            'virts_geometry_columns',
+            'virts_geometry_columns_auth',
+            'virts_geometry_columns_field_infos',
+            'virts_geometry_columns_statistics',
+        ]

From df88590723bc4b1f2dfacf8663d65f14e04aad36 Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Wed, 23 Mar 2022 21:59:49 +0100
Subject: [PATCH 02/18] Set _column_flag to True for spatial indexes

Setting the _column_flag attribute to True marks these indexes as
column-generated, so the to_metadata() method skips them instead of
duplicating them.
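
A minimal sketch of the behaviour this guarantees (assuming a declarative
Lake model with a single spatial column, as in the test suite):

    from sqlalchemy import MetaData

    # GeoAlchemy2 creates the spatial index when the column is attached.
    assert len(Lake.__table__.indexes) == 1

    # to_metadata() fires the column attach events again on the copied
    # table; with _column_flag=True the original index is skipped instead
    # of being copied, so the copy still has exactly one spatial index.
    copied = Lake.__table__.to_metadata(MetaData())
    assert len(copied.indexes) == 1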
--- geoalchemy2/__init__.py | 2 ++ tests/test_functional.py | 12 ++++++++++++ 2 files changed, 14 insertions(+) diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index 0d8145e1..c9541ac6 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -97,12 +97,14 @@ def after_parent_attach(column, table): column, postgresql_using='gist', postgresql_ops=postgresql_ops, + _column_flag=True, ) elif _check_spatial_type(column.type, Raster): Index( _spatial_idx_name(table, column), func.ST_ConvexHull(column), postgresql_using='gist', + _column_flag=True, ) def dispatch(current_event, table, bind): diff --git a/tests/test_functional.py b/tests/test_functional.py index aaf55118..4df9d0c1 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -661,3 +661,15 @@ def test_raster_reflection(self, conn, Ocean, setup_tables): t = Table('ocean', MetaData(), autoload_with=conn) type_ = t.c.rast.type assert isinstance(type_, Raster) + + +class TestToMetadata: + + def test_to_metadata(self, Lake): + new_meta = MetaData() + new_Lake = Lake.__table__.to_metadata(new_meta) + + assert Lake != new_Lake + + # Check that the spatial index was not duplicated + assert len(new_Lake.indexes) == 1 From 84bb0d0cefc1d5c6f3f927aa677260db314b5122 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Fri, 6 May 2022 20:53:31 +0200 Subject: [PATCH 03/18] Add indexes the same way as in SQLAlchemy and add test with alembic --- geoalchemy2/__init__.py | 27 ++-- geoalchemy2/alembic_helpers.py | 3 + setup.cfg | 1 + tests/alembic_config/alembic.ini | 102 +++++++++++++ tests/test_alembic_migrations.py | 254 +++++++++++++++++++++++++++++++ tests/test_functional.py | 5 +- 6 files changed, 379 insertions(+), 13 deletions(-) create mode 100644 tests/alembic_config/alembic.ini create mode 100644 tests/test_alembic_migrations.py diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index c9541ac6..9fddfa66 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -92,19 +92,23 @@ def after_parent_attach(column, table): postgresql_ops = {column.name: "gist_geometry_ops_nd"} else: postgresql_ops = {} - Index( - _spatial_idx_name(table, column), - column, - postgresql_using='gist', - postgresql_ops=postgresql_ops, - _column_flag=True, + table.append_constraint( + Index( + _spatial_idx_name(table, column), + column, + postgresql_using='gist', + postgresql_ops=postgresql_ops, + _column_flag=True, + ) ) elif _check_spatial_type(column.type, Raster): - Index( - _spatial_idx_name(table, column), - func.ST_ConvexHull(column), - postgresql_using='gist', - _column_flag=True, + table.append_constraint( + Index( + _spatial_idx_name(table, column), + func.ST_ConvexHull(column), + postgresql_using='gist', + _column_flag=True, + ) ) def dispatch(current_event, table, bind): @@ -236,6 +240,7 @@ def dispatch(current_event, table, bind): col, postgresql_using='gist', postgresql_ops=postgresql_ops, + _column_flag=True, ) idx.create(bind=bind) diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index 38a73a2a..8a74cf38 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -113,6 +113,7 @@ def add_geospatial_column(operations, operation): geospatial_core_type.srid, geospatial_core_type.geometry_type )) + # TODO: Add Index if geospatial_core_type.spatial_index is True elif "postgresql" in dialect.name: operations.add_column( table_name, @@ -121,6 +122,7 @@ def add_geospatial_column(operations, operation): operation.column ) ) + # TODO: Add Index if 
geospatial_core_type.spatial_index is True
 
 
 @Operations.implementation_for(DropGeospatialColumn)
@@ -149,6 +151,7 @@ def drop_geospatial_column(operations, operation):
         sqlite_version = conn.execute(text("SELECT sqlite_version();")).scalar()
         if parse_version(sqlite_version) >= parse_version("3.35"):
             operations.drop_column(table_name, column_name)
+            # TODO: Drop Index?
     elif "postgresql" in dialect.name:
         operations.drop_column(table_name, column_name)
 
diff --git a/setup.cfg b/setup.cfg
index 472fcb5b..6e566900 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -25,6 +25,7 @@ setenv=
     EXPECTED_COV = 93
     pypy3: EXPECTED_COV = 92
 deps=
+    alembic
     sqla11: SQLAlchemy==1.1.2
     sqlalatest: SQLAlchemy
     !pypy3: psycopg2
diff --git a/tests/alembic_config/alembic.ini b/tests/alembic_config/alembic.ini
new file mode 100644
index 00000000..80741b25
--- /dev/null
+++ b/tests/alembic_config/alembic.ini
@@ -0,0 +1,102 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = alembic
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator"
+# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. Valid values are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # default: use os.pathsep
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+# The URL will be set dynamically during the test initialization
+; sqlalchemy.url = postgresql://gis:gis@localhost/gis
+; sqlalchemy.url = sqlite:///spatialdb
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = DEBUG +handlers = console +qualname = + +[logger_sqlalchemy] +level = DEBUG +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py new file mode 100644 index 00000000..9b8c66aa --- /dev/null +++ b/tests/test_alembic_migrations.py @@ -0,0 +1,254 @@ +"""Test alembic migrations of spatial columns.""" +import re + +import sqlalchemy as sa # noqa (This import is only used in the migration scripts) +from alembic.autogenerate import compare_metadata +from alembic.autogenerate import produce_migrations +from alembic.autogenerate import render_python_code +from alembic.migration import MigrationContext +from alembic.operations import Operations +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import Table +from sqlalchemy import text + +from geoalchemy2 import Geometry +from geoalchemy2 import alembic_helpers + + +def filter_tables(name, type_, parent_names): + """Filter tables that we don't care about.""" + return type_ != "table" or name in ["lake", "alembic_table"] + + +class TestAutogenerate: + def test_no_diff(self, conn, Lake, setup_tables): + """Check that the autogeneration detects spatial types properly.""" + metadata = MetaData() + + Table( + "lake", + metadata, + Column("id", Integer, primary_key=True), + Column( + "geom", + Geometry( + geometry_type="LINESTRING", + srid=4326, + management=Lake.__table__.c.geom.type.management, + ), + ), + schema=Lake.__table__.schema, + ) + + mc = MigrationContext.configure( + conn, + opts={ + "include_object": alembic_helpers.include_object, + "include_name": filter_tables, + }, + ) + + diff = compare_metadata(mc, metadata) + + assert diff == [] + + def test_diff(self, conn, Lake, setup_tables): + """Check that the autogeneration detects spatial types properly.""" + metadata = MetaData() + + Table( + "lake", + metadata, + Column("id", Integer, primary_key=True), + Column("new_col", Integer, primary_key=True), + Column( + "geom", + Geometry( + geometry_type="LINESTRING", + srid=4326, + management=Lake.__table__.c.geom.type.management, + ), + ), + Column( + "new_geom_col", + Geometry( + geometry_type="LINESTRING", + srid=4326, + management=Lake.__table__.c.geom.type.management, + ), + ), + schema=Lake.__table__.schema, + ) + + mc = MigrationContext.configure( + conn, + opts={ + "include_object": alembic_helpers.include_object, + "include_name": filter_tables, + }, + ) + + diff = compare_metadata(mc, metadata) + + # Check column of type Integer + add_new_col = diff[0] + assert add_new_col[0] == "add_column" + assert add_new_col[1] is None + assert add_new_col[2] == "lake" + assert add_new_col[3].name == "new_col" + assert isinstance(add_new_col[3].type, Integer) + assert add_new_col[3].primary_key is True + assert add_new_col[3].nullable is 
False + + # Check column of type Geometry + add_new_geom_col = diff[1] + assert add_new_geom_col[0] == "add_column" + assert add_new_geom_col[1] is None + assert add_new_geom_col[2] == "lake" + assert add_new_geom_col[3].name == "new_geom_col" + assert isinstance(add_new_geom_col[3].type, Geometry) + assert add_new_geom_col[3].primary_key is False + assert add_new_geom_col[3].nullable is True + assert add_new_geom_col[3].type.srid == 4326 + assert add_new_geom_col[3].type.geometry_type == "LINESTRING" + assert add_new_geom_col[3].type.name == "geometry" + assert add_new_geom_col[3].type.dimension == 2 + + +def test_migration(conn, metadata): + """Test the actual migration of spatial types.""" + Table( + "alembic_table", + metadata, + Column("id", Integer, primary_key=True), + Column("int_col", Integer, index=True), + Column( + "geom", + Geometry( + geometry_type="POINT", + srid=4326, + management=False, + ), + ), + # The managed column does not work for now + # Column( + # "managed_geom", + # Geometry( + # geometry_type="POINT", + # srid=4326, + # management=True, + # ), + # ), + Column( + "geom_no_idx", + Geometry( + geometry_type="POINT", + srid=4326, + spatial_index=False, + ), + ), + ) + + mc = MigrationContext.configure( + conn, + opts={ + "include_object": alembic_helpers.include_object, + "include_name": filter_tables, + "user_module_prefix": "geoalchemy2.types.", + }, + ) + + migration_script = produce_migrations(mc, metadata) + upgrade_script = render_python_code( + migration_script.upgrade_ops, render_item=alembic_helpers.render_item + ) + downgrade_script = render_python_code( + migration_script.downgrade_ops, render_item=alembic_helpers.render_item + ) + + op = Operations(mc) # noqa (This variable is only used in the migration scripts) + + # Compile and execute the upgrade part of the migration script + eval(compile(upgrade_script.replace(" ", ""), "upgrade_script.py", "exec")) + + if conn.dialect.name == "postgresql": + # Postgresql dialect + + # Query to check the indexes + index_query = text( + """SELECT indexname, indexdef + FROM pg_indexes + WHERE + tablename = 'alembic_table' + ORDER BY indexname;""" + ) + indexes = conn.execute(index_query).fetchall() + + expected_indices = [ + ( + "alembic_table_pkey", + """CREATE UNIQUE INDEX alembic_table_pkey + ON gis.alembic_table + USING btree (id)""", + ), + ( + "idx_alembic_table_geom", + """CREATE INDEX idx_alembic_table_geom + ON gis.alembic_table + USING gist (geom)""", + ), + ( + "ix_alembic_table_int_col", + """CREATE INDEX ix_alembic_table_int_col + ON gis.alembic_table + USING btree (int_col)""", + ), + ] + + assert len(indexes) == 3 + + for idx, expected_idx in zip(indexes, expected_indices): + assert idx[0] == expected_idx[0] + assert idx[1] == re.sub("\n *", " ", expected_idx[1]) + + elif conn.dialect.name == "sqlite": + # SQLite dialect + + # Query to check the indexes + query_indexes = text( + """SELECT * + FROM geometry_columns + WHERE f_table_name = 'alembic_table' + ORDER BY f_table_name, f_geometry_column;""" + ) + + # Check the actual properties + geom_cols = conn.execute(query_indexes).fetchall() + assert geom_cols == [ + ("alembic_table", "geom", 1, 2, 4326, 1), + ("alembic_table", "geom_no_idx", 1, 2, 4326, 0), + ] + + # Compile and execute the downgrade part of the migration script + eval(compile(downgrade_script.replace(" ", ""), "downgrade_script.py", "exec")) + + if conn.dialect.name == "postgresql": + # Postgresql dialect + # Here the indexes are attached to the table so if the DROP TABLE works it's ok + 
pass + elif conn.dialect.name == "sqlite": + # SQLite dialect + + # Query to check the indexes + query_indexes = text( + """SELECT * + FROM geometry_columns + WHERE f_table_name = 'alembic_table' + ORDER BY f_table_name, f_geometry_column;""" + ) + + # Now the indexes should have been dropped + geom_cols = conn.execute(query_indexes).fetchall() + assert geom_cols == [] diff --git a/tests/test_functional.py b/tests/test_functional.py index 4df9d0c1..54dcaca3 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -28,6 +28,7 @@ from sqlalchemy.exc import OperationalError from sqlalchemy.exc import ProgrammingError from sqlalchemy.sql import func +from sqlalchemy.testing.assertions import ComparesTables from geoalchemy2 import Geometry from geoalchemy2 import Raster @@ -663,13 +664,13 @@ def test_raster_reflection(self, conn, Ocean, setup_tables): assert isinstance(type_, Raster) -class TestToMetadata: +class TestToMetadata(ComparesTables): def test_to_metadata(self, Lake): new_meta = MetaData() new_Lake = Lake.__table__.to_metadata(new_meta) - assert Lake != new_Lake + self.assert_tables_equal(Lake.__table__, new_Lake) # Check that the spatial index was not duplicated assert len(new_Lake.indexes) == 1 From 59b3e3860748ebd860382b8439447a0663daf3bd Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Wed, 25 May 2022 00:22:15 +0200 Subject: [PATCH 04/18] Fix Alembic helpers and related tests --- doc/alembic.rst | 49 +--- geoalchemy2/__init__.py | 68 +++-- geoalchemy2/alembic_helpers.py | 452 +++++++++++++++++++++++++++--- tests/__init__.py | 22 ++ tests/conftest.py | 79 ++++-- tests/data/spatialite_ge_4.sqlite | Bin 221184 -> 217088 bytes tests/data/spatialite_lt_4.sqlite | Bin 44032 -> 46080 bytes tests/test_alembic_migrations.py | 444 ++++++++++++++++++++++------- tests/test_functional_sqlite.py | 87 +++--- 9 files changed, 917 insertions(+), 284 deletions(-) diff --git a/doc/alembic.rst b/doc/alembic.rst index 1850accc..2e67bc46 100644 --- a/doc/alembic.rst +++ b/doc/alembic.rst @@ -173,7 +173,7 @@ in ``my_package.custom_types``, you just have to edit the ``env.py`` file like t if spatial_type: return spatial_type - # For the cumstom type + # For the custom type if obj_type == 'type' and isinstance(obj, TheCustomType): import_name = obj.__class__.__name__ autogen_context.imports.add(f"from my_package.custom_types import {import_name}") @@ -206,54 +206,17 @@ in ``my_package.custom_types``, you just have to edit the ``env.py`` file like t Then the proper imports will be automatically added in the migration scripts. -Add / Drop columns ------------------- +Add / Drop columns or tables +---------------------------- -Some dialects (like SQLite) require some specific management to alter columns of a table. In this +Some dialects (like SQLite) require some specific management to alter columns or tables. In this case, other dedicated helpers are provided to handle this. For example, if one wants to add and drop columns in a SQLite database, the ``env.py`` file should look like the following: .. 
code-block:: python - from alembic.autogenerate import rewriter - - writer = rewriter.Rewriter() - - - @writer.rewrites(ops.AddColumnOp) - def add_geo_column(context, revision, op): - """This function replaces the default AddColumnOp by a geospatial-specific one.""" - col_type = op.column.type - if isinstance(col_type, TypeDecorator): - dialect = context.bind().dialect - col_type = col_type.load_dialect_impl(dialect) - if isinstance(col_type, (Geometry, Geography, Raster)): - new_op = AddGeospatialColumn(op.table_name, op.column, op.schema) - else: - new_op = op - return new_op - - - @writer.rewrites(ops.DropColumnOp) - def drop_geo_column(context, revision, op): - """This function replaces the default DropColumnOp by a geospatial-specific one.""" - col_type = op.to_column().type - if isinstance(col_type, TypeDecorator): - dialect = context.bind.dialect - col_type = col_type.load_dialect_impl(dialect) - if isinstance(col_type, (Geometry, Geography, Raster)): - new_op = DropGeospatialColumn(op.table_name, op.column_name, op.schema) - else: - new_op = op - return new_op - - - def load_spatialite(dbapi_conn, connection_record): - """Load SpatiaLite extension in SQLite DB.""" - dbapi_conn.enable_load_extension(True) - dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) - dbapi_conn.enable_load_extension(False) - dbapi_conn.execute('SELECT InitSpatialMetaData()') + from geoalchemy2.alembic_helpers import load_spatialite + from geoalchemy2.alembic_helpers import writer def run_migrations_offline(): diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index 9fddfa66..08c6b775 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -47,7 +47,37 @@ def _spatial_idx_name(table, column): def check_management(column, dialect): - return getattr(column.type, "management", False) is True or dialect == 'sqlite' + return getattr(column.type, "management", False) is True or dialect.name == 'sqlite' + + +def _get_gis_cols(table, spatial_types, dialect, check_col_management=False): + return [ + col + for col in table.columns + if ( + isinstance(col, Column) + and _check_spatial_type(col.type, spatial_types, dialect) + and ( + not check_col_management + or check_management(col, dialect) + ) + ) + ] + + +def _get_spatialite_attrs(bind, table_name, col_name): + col_attributes = bind.execute( + text("""SELECT * FROM "geometry_columns" + WHERE f_table_name = '{}' and f_geometry_column = '{}' + """.format( + table_name, col_name + )) + ).fetchone() + return col_attributes + + +def _get_spatialite_version(bind): + return bind.execute(text("""SELECT spatialite_version();""")).fetchone()[0] def _setup_ddl_event_listeners(): @@ -115,9 +145,7 @@ def dispatch(current_event, table, bind): if current_event in ('before-create', 'before-drop'): # Filter Geometry columns from the table with management=True # Note: Geography and PostGIS >= 2.0 don't need this - gis_cols = [col for col in table.columns if - _check_spatial_type(col.type, Geometry, bind.dialect) - and check_management(col, bind.dialect.name)] + gis_cols = _get_gis_cols(table, Geometry, bind.dialect, check_col_management=True) # Find all other columns that are not managed Geometries regular_cols = [x for x in table.columns if x not in gis_cols] @@ -137,6 +165,14 @@ def dispatch(current_event, table, bind): for col in gis_cols: if bind.dialect.name == 'sqlite': drop_func = 'DiscardGeometryColumn' + + # Disable spatial indexes if present + stmt = select(*_format_select_args(getattr(func, 'CheckSpatialIndex')(table.name, 
col.name))) + if bind.execute(stmt).fetchone()[0] is not None: + stmt = select(*_format_select_args(getattr(func, 'DisableSpatialIndex')(table.name, col.name))) + stmt = stmt.execution_options(autocommit=True) + bind.execute(stmt) + bind.execute(text("""DROP TABLE IF EXISTS idx_{}_{};""".format(table.name, col.name))) elif bind.dialect.name == 'postgresql': drop_func = 'DropGeometryColumn' else: @@ -157,7 +193,7 @@ def dispatch(current_event, table, bind): for col in table.info["_saved_columns"]: if ( _check_spatial_type(col.type, Geometry, bind.dialect) - and check_management(col, bind.dialect.name) + and check_management(col, bind.dialect) ) and col in idx.columns.values(): table.indexes.remove(idx) if ( @@ -181,7 +217,7 @@ def dispatch(current_event, table, bind): # Add the managed Geometry columns with AddGeometryColumn() if ( _check_spatial_type(col.type, Geometry, bind.dialect) - and check_management(col, bind.dialect.name) + and check_management(col, bind.dialect) ): dimension = col.type.dimension if bind.dialect.name == 'sqlite': @@ -229,7 +265,7 @@ def dispatch(current_event, table, bind): # management=False), define it and create it if ( not [i for i in table.indexes if col in i.columns.values()] - and check_management(col, bind.dialect.name) + and check_management(col, bind.dialect) ): if col.type.use_N_D_index: postgresql_ops = {col.name: "gist_geometry_ops_nd"} @@ -259,16 +295,6 @@ def dispatch(current_event, table, bind): idx.create(bind=bind) elif current_event == 'after-drop': - if bind.dialect.name == 'sqlite': - # Remove spatial index tables - for idx in table.indexes: - if any( - [ - _check_spatial_type(i.type, (Geometry, Geography, Raster)) - for i in idx.columns.values() - ] - ): - bind.execute(text("""DROP TABLE IF EXISTS {};""".format(idx.name))) # Restore original column list including managed Geometry columns table.columns = table.info.pop('_saved_columns') @@ -320,13 +346,7 @@ def _reflect_geometry_column(inspector, table, column_info): # column_info["type"].spatial_index = bool(spatial_index) elif inspector.bind.dialect.name == "sqlite": # Get geometry type, SRID and spatial index from the SpatiaLite metadata - col_attributes = inspector.bind.execute( - text("""SELECT * FROM "geometry_columns" - WHERE f_table_name = '{}' and f_geometry_column = '{}' - """.format( - table.name, column_info["name"] - )) - ).fetchone() + col_attributes = _get_spatialite_attrs(inspector.bind, table.name, column_info["name"]) if col_attributes is not None: _, _, geometry_type, coord_dimension, srid, spatial_index = col_attributes diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index 8a74cf38..a9b3a77f 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -1,7 +1,14 @@ """Some helpers to use with Alembic migration tool.""" +import os + from alembic.autogenerate import renderers +from alembic.autogenerate import rewriter from alembic.autogenerate.render import _add_column +from alembic.autogenerate.render import _add_index +from alembic.autogenerate.render import _add_table from alembic.autogenerate.render import _drop_column +from alembic.autogenerate.render import _drop_index +from alembic.autogenerate.render import _drop_table from alembic.operations import Operations from alembic.operations import ops from packaging.version import parse as parse_version @@ -13,9 +20,15 @@ from geoalchemy2 import Geometry from geoalchemy2 import Raster from geoalchemy2 import _check_spatial_type +from geoalchemy2 import _get_gis_cols +from 
geoalchemy2 import _get_spatialite_version +from geoalchemy2 import check_management from geoalchemy2 import func +writer = rewriter.Rewriter() + + def render_item(obj_type, obj, autogen_context): """Apply custom rendering for selected items.""" if obj_type == 'type' and isinstance(obj, (Geometry, Geography, Raster)): @@ -23,31 +36,12 @@ def render_item(obj_type, obj, autogen_context): autogen_context.imports.add(f"from geoalchemy2 import {import_name}") return "%r" % obj - # default rendering for other objects + # Default rendering for other objects return False -def include_object(obj, name, obj_type, reflected, compare_to): - """Do not include spatial indexes if they are automatically created by GeoAlchemy2.""" - if obj_type == "index": - if len(obj.expressions) == 1: - try: - col = obj.expressions[0] - if ( - _check_spatial_type(col.type, (Geometry, Geography, Raster)) - and col.type.spatial_index - ): - return False - except AttributeError: - pass - # Never include the spatial_ref_sys table - if (obj_type == "table" and name == "spatial_ref_sys"): - return False - return True - - @Operations.register_operation("add_geospatial_column") -class AddGeospatialColumn(ops.AddColumnOp): +class AddGeospatialColumnOp(ops.AddColumnOp): """ Add a Geospatial Column in an Alembic migration context. This methodology originates from: https://alembic.sqlalchemy.org/en/latest/api/operations.html#operation-plugins @@ -61,13 +55,13 @@ def add_geospatial_column(cls, operations, table_name, column, schema=None): def reverse(self): """Used to autogenerate the downgrade function.""" - return DropGeospatialColumn.from_column_and_tablename( + return DropGeospatialColumnOp.from_column_and_tablename( self.schema, self.table_name, self.column.name ) @Operations.register_operation("drop_geospatial_column") -class DropGeospatialColumn(ops.DropColumnOp): +class DropGeospatialColumnOp(ops.DropColumnOp): """Drop a Geospatial Column in an Alembic migration context.""" @classmethod @@ -79,18 +73,18 @@ def drop_geospatial_column(cls, operations, table_name, column_name, schema=None def reverse(self): """Used to autogenerate the downgrade function.""" - return AddGeospatialColumn.from_column_and_tablename( + return AddGeospatialColumnOp.from_column_and_tablename( self.schema, self.table_name, self.column ) -@Operations.implementation_for(AddGeospatialColumn) +@Operations.implementation_for(AddGeospatialColumnOp) def add_geospatial_column(operations, operation): """Handle the actual column addition according to the dialect backend. Args: operations: Operations object from alembic base, defining high level migration operations. - operation: AddGeospatialColumn call, with attributes for table_name, column_name, + operation: AddGeospatialColumnOp call, with attributes for table_name, column_name, column_type, and optional keywords. 
""" @@ -111,27 +105,25 @@ def add_geospatial_column(operations, operation): table_name, column_name, geospatial_core_type.srid, - geospatial_core_type.geometry_type + geospatial_core_type.geometry_type, + geospatial_core_type.dimension, + not geospatial_core_type.nullable, )) - # TODO: Add Index if geospatial_core_type.spatial_index is True elif "postgresql" in dialect.name: operations.add_column( table_name, - Column( - column_name, - operation.column - ) + operation.column, + schema=operation.schema, ) - # TODO: Add Index if geospatial_core_type.spatial_index is True -@Operations.implementation_for(DropGeospatialColumn) +@Operations.implementation_for(DropGeospatialColumnOp) def drop_geospatial_column(operations, operation): """Handle the actual column removal according to the dialect backend. Args: operations: Operations object from alembic base, defining high level migration operations. - operation: AddGeospatialColumn call, with attributes for table_name, column_name, + operation: AddGeospatialColumnOp call, with attributes for table_name, column_name, column_type, and optional keywords. """ @@ -141,8 +133,11 @@ def drop_geospatial_column(operations, operation): dialect = operations.get_bind().dialect if "sqlite" in dialect.name: + # Discard the column and remove associated index + # (the column is not actually dropped here) operations.execute(func.DiscardGeometryColumn(table_name, column_name)) - # This second drop column call is necessary; SpatiaLite was designed for a SQLite that did + + # This second drop column call is necessary: SpatiaLite was designed for a SQLite that did # not support dropping columns from tables at all. DiscardGeometryColumn removes associated # metadata and triggers from the DB associated with a geospatial column, without removing # the column itself. The next call actually removes the geospatial column, IF the underlying @@ -151,20 +146,399 @@ def drop_geospatial_column(operations, operation): sqlite_version = conn.execute(text("SELECT sqlite_version();")).scalar() if parse_version(sqlite_version) >= parse_version("3.35"): operations.drop_column(table_name, column_name) - # TODO: Drop Index? 
elif "postgresql" in dialect.name: operations.drop_column(table_name, column_name) -@renderers.dispatch_for(AddGeospatialColumn) +@renderers.dispatch_for(AddGeospatialColumnOp) def render_add_geo_column(autogen_context, op): """Render the add_geospatial_column operation in migration script.""" col_render = _add_column(autogen_context, op) return col_render.replace(".add_column(", ".add_geospatial_column(") -@renderers.dispatch_for(DropGeospatialColumn) +@renderers.dispatch_for(DropGeospatialColumnOp) def render_drop_geo_column(autogen_context, op): """Render the drop_geospatial_column operation in migration script.""" col_render = _drop_column(autogen_context, op) return col_render.replace(".drop_column(", ".drop_geospatial_column(") + + +@writer.rewrites(ops.AddColumnOp) +def add_geo_column(context, revision, op): + """This function replaces the default AddColumnOp by a geospatial-specific one.""" + col_type = op.column.type + if isinstance(col_type, TypeDecorator): + dialect = context.bind.dialect + col_type = col_type.load_dialect_impl(dialect) + if isinstance(col_type, (Geometry, Geography, Raster)): + op.column.type.spatial_index = False + new_op = AddGeospatialColumnOp(op.table_name, op.column, op.schema) + else: + new_op = op + return new_op + + +@writer.rewrites(ops.DropColumnOp) +def drop_geo_column(context, revision, op): + """This function replaces the default DropColumnOp by a geospatial-specific one.""" + col_type = op.to_column().type + if isinstance(col_type, TypeDecorator): + dialect = context.bind.dialect + col_type = col_type.load_dialect_impl(dialect) + if isinstance(col_type, (Geometry, Geography, Raster)): + new_op = DropGeospatialColumnOp(op.table_name, op.column_name, op.schema) + else: + new_op = op + return new_op + + +@Operations.register_operation("create_geospatial_table") +class CreateGeospatialTableOp(ops.CreateTableOp): + """ + Create a Geospatial Table in an Alembic migration context. 
This methodology originates from:
+    https://alembic.sqlalchemy.org/en/latest/api/operations.html#operation-plugins
+    """
+
+    @classmethod
+    def create_geospatial_table(cls, operations, table_name, *columns, **kw):
+        """Handle the different situations arising from creating a geospatial table in a DB."""
+        op = cls(table_name, columns, **kw)
+        return operations.invoke(op)
+
+    def reverse(self):
+        """Used to autogenerate the downgrade function."""
+        return DropGeospatialTableOp.from_table(
+            self.to_table(),
+            _namespace_metadata=self._namespace_metadata,
+        )
+
+    @classmethod
+    def from_table(
+        cls, table: "Table", _namespace_metadata=None
+    ) -> "CreateGeospatialTableOp":
+        obj = super().from_table(table, _namespace_metadata)
+        return obj
+
+    def to_table(
+        self, migration_context=None
+    ) -> "Table":
+        table = super().to_table(migration_context)
+        for col in table.columns:
+            try:
+                if col.type.spatial_index:
+                    col.type.spatial_index = False
+            except AttributeError:
+                pass
+        return table
+
+
+@Operations.register_operation("drop_geospatial_table")
+class DropGeospatialTableOp(ops.DropTableOp):
+    @classmethod
+    def drop_geospatial_table(cls, operations, table_name, schema=None, **kw):
+        """Handle the different situations arising from dropping a geospatial table from a DB."""
+
+        op = cls(table_name, schema=schema, table_kw=kw)
+        return operations.invoke(op)
+
+    def reverse(self):
+        """Used to autogenerate the downgrade function."""
+        return CreateGeospatialTableOp.from_table(
+            self.to_table(),
+            _namespace_metadata=self._namespace_metadata,
+        )
+
+    @classmethod
+    def from_table(
+        cls, table: "Table", _namespace_metadata=None
+    ) -> "DropGeospatialTableOp":
+        obj = super().from_table(table, _namespace_metadata)
+        return obj
+
+    def to_table(
+        self, migration_context=None
+    ) -> "Table":
+        table = super().to_table(migration_context)
+
+        # Set spatial_index attribute to False so the indexes are created explicitly
+        for col in table.columns:
+            try:
+                if col.type.spatial_index:
+                    col.type.spatial_index = False
+            except AttributeError:
+                pass
+        return table
+
+
+@Operations.implementation_for(CreateGeospatialTableOp)
+def create_geospatial_table(operations, operation):
+    """Handle the actual table creation according to the dialect backend.
+
+    Args:
+        operations: Operations object from alembic base, defining high level migration operations.
+        operation: CreateGeospatialTableOp call, with attributes for table_name, column_name,
+            column_type, and optional keywords.
+    """
+    table_name = operation.table_name
+
+    # For now the default events defined in geoalchemy2 are enough to handle table creation
+    operations.create_table(table_name, *operation.columns, **operation.kw)
+
+
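To make these registered table operations concrete, here is a minimal sketch of a migration script that would use them. It assumes this module has been imported so the operations are registered, that `op` is the usual proxy Alembic exposes inside migration scripts, and that the table and column names are purely illustrative:

    import sqlalchemy as sa
    from geoalchemy2 import Geometry

    def upgrade():
        # Registered above under "create_geospatial_table", so it is
        # available as a method on the Alembic operations proxy.
        op.create_geospatial_table(
            'roads',
            sa.Column('id', sa.Integer(), primary_key=True),
            sa.Column('geom', Geometry(geometry_type='LINESTRING', srid=4326)),
        )

    def downgrade():
        # Mirror operation registered under "drop_geospatial_table".
        op.drop_geospatial_table('roads')

The reverse() methods above are what let autogenerate derive the drop_geospatial_table() call in downgrade() from the create operation, and vice versa.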
+@Operations.implementation_for(DropGeospatialTableOp)
+def drop_geospatial_table(operations, operation):
+    """Handle the actual table removal according to the dialect backend.
+
+    Args:
+        operations: Operations object from alembic base, defining high level migration operations.
+        operation: DropGeospatialTableOp call, with attributes for table_name, column_name,
+            column_type, and optional keywords.
+    """
+    table_name = operation.table_name
+    bind = operations.get_bind()
+    dialect = bind.dialect
+
+    if "sqlite" in dialect.name:
+        spatialite_version = _get_spatialite_version(bind)
+        if parse_version(spatialite_version) >= parse_version("5"):
+            drop_func = func.DropTable
+        else:
+            drop_func = func.DropGeoTable
+        operations.execute(drop_func(table_name))
+    else:
+        operations.drop_table(table_name, operation.schema, **operation.table_kw)
+
+
+@renderers.dispatch_for(CreateGeospatialTableOp)
+def render_create_geo_table(autogen_context, op):
+    """Render the create_geospatial_table operation in migration script."""
+    table_render = _add_table(autogen_context, op)
+    return table_render.replace(".create_table(", ".create_geospatial_table(")
+
+
+@renderers.dispatch_for(DropGeospatialTableOp)
+def render_drop_geo_table(autogen_context, op):
+    """Render the drop_geospatial_table operation in migration script."""
+    table_render = _drop_table(autogen_context, op)
+    return table_render.replace(".drop_table(", ".drop_geospatial_table(")
+
+
+@writer.rewrites(ops.CreateTableOp)
+def create_geo_table(context, revision, op):
+    """This function replaces the default CreateTableOp by a geospatial-specific one."""
+    dialect = context.bind.dialect
+    gis_cols = _get_gis_cols(op, (Geometry, Geography, Raster), dialect, check_col_management=False)
+
+    if gis_cols:
+        new_op = CreateGeospatialTableOp(op.table_name, op.columns, op.schema)
+    else:
+        new_op = op
+
+    return new_op
+
+
+@writer.rewrites(ops.DropTableOp)
+def drop_geo_table(context, revision, op):
+    """This function replaces the default DropTableOp by a geospatial-specific one."""
+    dialect = context.bind.dialect
+    table = op.to_table()
+    gis_cols = _get_gis_cols(table, (Geometry, Geography, Raster), dialect, check_col_management=False)
+
+    if gis_cols:
+        new_op = DropGeospatialTableOp(op.table_name, op.schema)
+    else:
+        new_op = op
+
+    return new_op
+
+
+@Operations.register_operation("create_geospatial_index")
+class CreateGeospatialIndexOp(ops.CreateIndexOp):
+    @classmethod
+    def create_geospatial_index(cls, operations, index_name, table_name, columns, schema=None, unique=False, **kw):
+        """Handle the different situations arising from creating a geospatial index in a DB."""
+        op = cls(index_name, table_name, columns, schema=schema, unique=unique, **kw)
+        return operations.invoke(op)
+
+    def reverse(self):
+        """Used to autogenerate the downgrade function."""
+        return DropGeospatialIndexOp(
+            self.index_name,
+            self.table_name,
+            column_name=self.columns[0].name,
+            schema=self.schema,
+        )
+
+
+@Operations.register_operation("drop_geospatial_index")
+class DropGeospatialIndexOp(ops.DropIndexOp):
+
+    def __init__(self, *args, column_name, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.column_name = column_name
+
+    @classmethod
+    def drop_geospatial_index(cls, operations, index_name, table_name, column_name, schema=None, unique=False, **kw):
+        """Handle the different situations arising from dropping a geospatial index from a DB."""
+        op = cls(index_name, table_name, column_name=column_name, schema=schema, unique=unique, **kw)
+        return operations.invoke(op)
+
+    def reverse(self):
+        """Used to autogenerate the downgrade function."""
+        return CreateGeospatialIndexOp(
+            self.index_name,
+            self.table_name,
+            column_name=self.column_name,
+            schema=self.schema,
+            _reverse=self,
+            **self.kw
+        )
+
+    @classmethod
+    def from_index(cls, index: "Index") -> "DropGeospatialIndexOp":
+        assert index.table is not None
+        assert len(index.columns) == 1, "A spatial index must be set on one column only"
+        return cls(
+            index.name,
+            index.table.name,
+            column_name=index.columns[0].name,
+            schema=index.table.schema,
+            _reverse=CreateGeospatialIndexOp.from_index(index),
+            **index.kwargs,
+        )
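The index operations surface in a migration script the same way; a rough sketch under the same assumptions as above (the index, table, and column names are illustrative):

    def upgrade():
        # Dispatched by the implementations below to CreateSpatialIndex()
        # on SQLite and to a regular CREATE INDEX ... USING gist on PostgreSQL.
        op.create_geospatial_index(
            'idx_roads_geom', 'roads', ['geom'], postgresql_using='gist'
        )

    def downgrade():
        # column_name is required so the SQLite backend can call
        # DisableSpatialIndex() on the right geometry column.
        op.drop_geospatial_index('idx_roads_geom', 'roads', column_name='geom')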
+
+
+@Operations.implementation_for(CreateGeospatialIndexOp)
+def create_geospatial_index(operations, operation):
+    """Handle the actual index creation according to the dialect backend.
+
+    Args:
+        operations: Operations object from alembic base, defining high level migration operations.
+        operation: CreateGeospatialIndexOp call, with attributes for table_name, column_name,
+            column_type, and optional keywords.
+    """
+    # return  # Do nothing and rely on the
+    bind = operations.get_bind()
+    dialect = bind.dialect
+
+    if "sqlite" in dialect.name:
+        assert len(operation.columns) == 1, "A spatial index must be set on one column only"
+        operations.execute(func.CreateSpatialIndex(operation.table_name, operation.columns[0]))
+    else:
+        operations.create_index(
+            operation.index_name,
+            operation.table_name,
+            operation.columns,
+            operation.schema,
+            operation.unique,
+            **operation.kw
+        )
+
+
+@Operations.implementation_for(DropGeospatialIndexOp)
+def drop_geospatial_index(operations, operation):
+    """Handle the actual index drop according to the dialect backend.
+
+    Args:
+        operations: Operations object from alembic base, defining high level migration operations.
+        operation: DropGeospatialIndexOp call, with attributes for table_name, column_name,
+            column_type, and optional keywords.
+    """
+    bind = operations.get_bind()
+    dialect = bind.dialect
+
+    if "sqlite" in dialect.name:
+        operations.execute(func.DisableSpatialIndex(operation.table_name, operation.column_name))
+    else:
+        operations.drop_index(
+            operation.index_name,
+            operation.table_name,
+            operation.schema,
+            **operation.kw
+        )
+
+
+@renderers.dispatch_for(CreateGeospatialIndexOp)
+def render_create_geo_index(autogen_context, op):
+    """Render the create_geospatial_index operation in migration script."""
+    idx_render = _add_index(autogen_context, op)
+    return idx_render.replace(".create_index(", ".create_geospatial_index(")
+
+
+@renderers.dispatch_for(DropGeospatialIndexOp)
+def render_drop_geo_index(autogen_context, op):
+    """Render the drop_geospatial_index operation in migration script."""
+    idx_render = _drop_index(autogen_context, op)
+
+    # Replace function name
+    text = idx_render.replace(".drop_index(", ".drop_geospatial_index(")
+
+    # Add column name as keyword argument
+    text = text[:-1] + ", column_name='%s')" % (op.column_name,)
+
+    return text
+
+
+@writer.rewrites(ops.CreateIndexOp)
+def create_geo_index(context, revision, op):
+    """This function replaces the default CreateIndexOp by a geospatial-specific one."""
+    dialect = context.bind.dialect
+
+    if len(op.columns) == 1:
+        col = op.columns[0]
+        if (
+            isinstance(col, Column)
+            and _check_spatial_type(col.type, (Geometry, Geography, Raster), dialect)
+        ):
+            # Fix index properties
+            op.kw["postgresql_using"] = op.kw.get("postgresql_using", "gist")
+            if col.type.use_N_D_index:
+                postgresql_ops = {col.name: "gist_geometry_ops_nd"}
+            else:
+                postgresql_ops = {}
+            op.kw["postgresql_ops"] = op.kw.get("postgresql_ops", postgresql_ops)
+
+            return CreateGeospatialIndexOp(
+                op.index_name,
+                op.table_name,
+                op.columns,
+                op.schema,
+                op.unique,
+                **op.kw
+            )
+
+    return op
+
+
+@writer.rewrites(ops.DropIndexOp)
+def drop_geo_index(context, revision, op):
+    """This function replaces the default DropIndexOp by a geospatial-specific one."""
+    dialect =
context.bind.dialect + idx = op.to_index() + + if len(idx.columns) == 1: + col = idx.columns[0] + if ( + isinstance(col, Column) + and _check_spatial_type(col.type, (Geometry, Geography, Raster), dialect) + ): + return DropGeospatialIndexOp( + op.index_name, + op.table_name, + column_name=col.name, + schema=op.schema, + **op.kw + ) + + return op + + +def load_spatialite(dbapi_conn, connection_record): + """Load SpatiaLite extension in SQLite DB.""" + if "SPATIALITE_LIBRARY_PATH" not in os.environ: + raise RuntimeError("The SPATIALITE_LIBRARY_PATH environment variable is not set.") + dbapi_conn.enable_load_extension(True) + dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) + dbapi_conn.enable_load_extension(False) + dbapi_conn.execute('SELECT InitSpatialMetaData()') diff --git a/tests/__init__.py b/tests/__init__.py index f2b7eee7..f700af0b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,12 +1,15 @@ import os import re +import shutil import pytest from packaging import version from pkg_resources import parse_version from sqlalchemy import __version__ as SA_VERSION +from sqlalchemy import create_engine from sqlalchemy import select as raw_select from sqlalchemy import text +from sqlalchemy.event import listen from sqlalchemy.exc import OperationalError from sqlalchemy.sql import func @@ -84,3 +87,22 @@ def load_spatialite(dbapi_conn, connection_record): dbapi_conn.enable_load_extension(True) dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) dbapi_conn.enable_load_extension(False) + +def copy_and_connect_sqlite_db(input_db, tmp_db, engine_echo): + if 'SPATIALITE_LIBRARY_PATH' not in os.environ: + pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') + + shutil.copyfile(input_db, tmp_db) + + db_url = f"sqlite:///{tmp_db}" + engine = create_engine(db_url, echo=engine_echo) + listen(engine, 'connect', load_spatialite) + + if input_db.endswith("spatialite_lt_4.sqlite"): + engine._spatialite_version = 3 + elif input_db.endswith("spatialite_ge_4.sqlite"): + engine._spatialite_version = 4 + else: + engine._spatialite_version = None + + return engine diff --git a/tests/conftest.py b/tests/conftest.py index ee185773..af4e4775 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import pytest from sqlalchemy import MetaData @@ -8,9 +9,9 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker +from . import copy_and_connect_sqlite_db from . import get_postgis_version from . import get_postgres_major_version -from . 
import load_spatialite from .schema_fixtures import * # noqa @@ -21,9 +22,14 @@ def pytest_addoption(parser): help='PostgreSQL DB URL used for tests (`postgresql://user:password@host:port/dbname`).', ) parser.addoption( - '--sqlite_dburl', + '--sqlite_spatialite3_dburl', action='store', - help='SQLite DB URL used for tests (`sqlite:///path_to_db_file`).', + help='SQLite DB URL used for tests with SpatiaLite3 (`sqlite:///path_to_db_file`).', + ) + parser.addoption( + '--sqlite_spatialite4_dburl', + action='store', + help='SQLite DB URL used for tests with SpatiaLite4 (`sqlite:///path_to_db_file`).', ) parser.addoption( '--engine-echo', @@ -35,16 +41,25 @@ def pytest_addoption(parser): def pytest_generate_tests(metafunc): if "db_url" in metafunc.fixturenames: + sqlite_dialects = ["sqlite-spatialite3", "sqlite-spatialite4"] + dialects = None + if metafunc.module.__name__ == "tests.test_functional_postgresql": dialects = ["postgresql"] elif metafunc.module.__name__ == "tests.test_functional_sqlite": - dialects = ["sqlite"] - elif getattr(metafunc.function, "tested_dialects", False): + dialects = sqlite_dialects + + if getattr(metafunc.function, "tested_dialects", False): dialects = metafunc.function.tested_dialects elif getattr(metafunc.cls, "tested_dialects", False): dialects = metafunc.cls.tested_dialects - else: - dialects = ["postgresql", "sqlite"] + + if dialects is None: + dialects = ["postgresql", "sqlite-spatialite3", "sqlite-spatialite4"] + + if "sqlite" in dialects: + dialects = [i for i in dialects if i != "sqlite"] + sqlite_dialects + metafunc.parametrize("db_url", dialects, indirect=True) @@ -58,20 +73,41 @@ def db_url_postgresql(request): @pytest.fixture(scope='session') -def db_url_sqlite(request): +def db_url_sqlite(request, tmpdir_factory): return ( - request.config.getoption('--sqlite_dburl') + request.config.getoption('--sqlite_spatialite4_dburl') or os.getenv('PYTEST_SQLITE_DB_URL') - or 'sqlite:///spatialdb' + # or f"sqlite:///{tmpdir_factory.getbasetemp() / 'spatialdb'}" + or "sqlite-auto" ) @pytest.fixture(scope='session') -def db_url(request, db_url_postgresql, db_url_sqlite): +def db_url_sqlite_spatialite3(request, tmpdir_factory): + return ( + request.config.getoption('--sqlite_spatialite3_dburl') + or os.getenv('PYTEST_SPATIALITE3_DB_URL') + or f"sqlite:///{Path(__file__).parent / 'data' / 'spatialite_lt_4.sqlite'}" + ) + + +@pytest.fixture(scope='session') +def db_url_sqlite_spatialite4(request, tmpdir_factory): + return ( + request.config.getoption('--sqlite_spatialite4_dburl') + or os.getenv('PYTEST_SPATIALITE4_DB_URL') + or f"sqlite:///{Path(__file__).parent / 'data' / 'spatialite_ge_4.sqlite'}" + ) + + +@pytest.fixture(scope='session') +def db_url(request, db_url_postgresql, db_url_sqlite_spatialite3, db_url_sqlite_spatialite4): if request.param == "postgresql": return db_url_postgresql - elif request.param == "sqlite": - return db_url_sqlite + elif request.param == "sqlite-spatialite3": + return db_url_sqlite_spatialite3 + elif request.param == "sqlite-spatialite4": + return db_url_sqlite_spatialite4 return None @@ -82,13 +118,20 @@ def _engine_echo(request): @pytest.fixture -def engine(db_url, _engine_echo): +def engine(tmpdir, db_url, _engine_echo): """Provide an engine to test database.""" + if db_url.startswith("sqlite:///"): + # Copy the input SQLite DB to a temporary file and return an engine to it + input_url = str(db_url)[10:] + return copy_and_connect_sqlite_db( + input_url, + tmpdir / "test_spatial_db", + _engine_echo + ) + + # For other dialects the 
engine is directly returned engine = create_engine(db_url, echo=_engine_echo) - if engine.dialect.name == "sqlite": - if 'SPATIALITE_LIBRARY_PATH' not in os.environ: - pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') - listen(engine, 'connect', load_spatialite) + engine._spatialite_version = None return engine diff --git a/tests/data/spatialite_ge_4.sqlite b/tests/data/spatialite_ge_4.sqlite index 836094330559d43c2c6548c4aab56d7b0e40aeba..b3df6b6c25ca6c9d9fe9b82989fc40879154a294 100644 GIT binary patch delta 1879 zcmbVNOK{Uh81`;#;yfJLfC;Y~D^l_1xzS~e*u zB}|$2LZ=f@JA2^}+EYuXO{-~jg1A`dGy=lP0vxR33f$_uo(J zfA?E`d+wU!+=63%N7n<3#lqpcaY;C^HX4^)dp90=VDTl6+i=QBESeqM#c~mJpxYVP zgXkY&o8AF_1Ass;Ctw7$8w&~e4bZmOUIY%UVCw^|$TbUh>o;v>0NOO6xewUQcX#WT zn>!$A(XO^|pjp4u@&N!ggkSE`ue5y&zy_`6@B^#%kz*1xX?I#S>zy3{0Eok#8b_Q> z2XeR;|1<{n1O_IJfu6w7o50{3!?14z12u%f+mGS7FotJY49`#)cJ0BisT0FShXbRv zbdMJ_XFZJv*OkvA7}gCUHTK%j&c}7mp6}eowFGh*UTsSE~DX zzqec|<%E1Dlcdz8(J8m4=+p zmsKGePlR#LT(vYat%d^vO|ANQytSfIRfU4bTP%rLc}k7N$43VfDRO;nBo&Q}M<4Z( z)X+fk*ip=sRavTuQMr_rRpr&1EDl$sVn$L_k>PniNhM;3BJrN_k)afci}5ggm?o*9 zKNM#CA&TY0K9&oyEX&Z0_PNi!li|s9no5qp6itK!sa3^NbQ%*jS%XZ7(eC;_o5N3& zYL*-sOpc6?rO1_~6bDPisZymN<;_URh-@N8V&*`6;AkWniKj@lDiyO*B`cPtij>dG zd66Nh0RIFIi#dS?XKr~Qtdml$Wo5B6C6+46v{J+ztWblsOv=c`D*hX>AXk*EB4LXE zAN^BOUX?{zQmaaFnxyJ-K88^&RjMbKDOlyrrp{^V*w{!iMHb|0MJ6e82{-NKX?l5O z1|~+ZloKQs8Av44RJ}P#Qe)%sMYIs0wKS)y|1<|%V-OIHRq}kOm0xybD+AG8VqS4A5hn2In5``tjqefuO5?Sw1WpL zYq^|XyZ)FgqYd>(*Kz|P?Q;L{x-6&v+V7(6kQlWP?-QfC>-mF(_T}8+S5D6792UF1 ztNZX^C*dYK8@o7Kk>%oyk~#T=PvAc`<+5xO5iI=Vn5pd-F{|f8YwXdjTVsE8BM^fZ zY_1JITW?>ycoci)QVgD0W1W{)ymFO&+Ts5+P1^=+V|MMdqxWBo)8IsS6@V^u=Qtby z+fBROg%;gSRwE+8VFBgi=mo#=iBx{cTwc*UImFK<8|>|B}G8Q8295H4R8+rwe&atJ_S?& delta 1362 zcmb_cO-K}B7@lW###Ptdnbpi%vsPEF_Gfly|8y10z%|ke!-#&tgph(tGKo4=5S@xh zzr%O&V09`Zq@m`jhfWq0!bWPD9Xy0k^y8_&chbN%Iv5Rn&wTUD^F8yuGxNL`2b~v3 zoYzas=LJDX()e|B(nMOksG>8NTdgSEJ}+qP9Wq_3%CL1e+1Gsn3V-cGC6Bxi9fk{^ z0G01ALK;qUun14UR?2(eV#norIJkQPKI5_64HU2jdp+{3wLW9O?h=vDhVl}~HD>Z| z0y&%(ml-!4|#-oP_^t}X; zc7pIu0&N>XsEHuBnIKS0;Exh)3=(XxYFuAJQ0)Yvuq?Q+EcO2u%K{Y%>!bWnFx-(Z zuKo@w-{#T;<*~wD8c|-CG3+@CgRjeRxMLwbzFUviMPqe|q!y3WuzUUkOIcmlBFS3j z3G`;Oi4+?Q^r}l7ENz@s1y5&7<2t(&?8#Qs@1@s$eLE<1(<(Z%!q>l;wj}|~ z75_1IvYmdZTz1wO{!8@C#5xQCYxs;8Td>*+>{>o7+axnxj%E?7_*Ea;QEXDg--=kt z$EvXvMGLyNS+T%uSdS4AEBLpKR7oK}?8h#+tsb$^P0Tlb%)_qIK3Wge@Q6y5W#b6K z!z)ANT(TgwthibU$-EoFUeT1EB5i}mY#GO>$iHUDxh#W&WO^=xOYre;85~0eKl&V( zQEt9_j!D4F$6g@K15;2`xlyP0iMxM9`$;U!WiU94e>TGMG;gL&;0 LdSIek{675yJ`ox0 diff --git a/tests/data/spatialite_lt_4.sqlite b/tests/data/spatialite_lt_4.sqlite index 08db5c08d408ff89fd1b9f835852f40992d30166..9a1e8bd2593b6ee4c6af0d7c9179bd702415578c 100644 GIT binary patch delta 1762 zcmeHI&rj1(9Pf{ba|4Cw5)BHq{1Ds&+pcVDOcNIvV+f3FBVy)K>(O1)R+ZGGA z4GvMCWdE;;c^Okh2z$=*y;Z;Oapm~;SA% z%}yp#!O+wWkaAj&PRg8;!HR` z7>iD&NZ1TPb3uv(eqSI+`vQRBg5%6YfMFP#qU#~|`6~g6=E!s!Bx83Xi6EOg(k`It zx7;3_4$$>wx8_}r!bayrQ#Q+4i3AI=cycBULanf-=G*abD3OG97b+|)v}U%xF+7=! z#ult{TYF(rQ*)YL6cr1}8G%VGl8_mShvvh{a6CmCrl@8`Ju8$%T~riF5oi*y+&?%B z7wt_H=*2>$|D0u1*K$!JLKGMsMz=US346HrLG+ zyt4~dVu*;~KM=82TRJ%yCkNv{Iv67zD7J-QTktpV32VM}-9#HJ(=K$d8Cm;?qGaRQ vfg2raR4EsFy-C0NilQTp*EP3$Y(3q7+7k#ktLggY#-;j)jak(D=AGjw7(na^ delta 264 zcmZp8!PIbrX@a!iUj_ySZ6JmM4ylPc#y~;6-~?8n5I6Hlkc8I8f*582bvYnYRR&0? 
zNCOGQ$#$)7zF)1nn1#oOgNgYC1M^qr7n=n|?l4cj9nHJ>YxE=uM)}PhB^At@ z8r--Tl{UL~?O PB`0rP@5QLOXafrXu<<&& diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py index 9b8c66aa..201dff32 100644 --- a/tests/test_alembic_migrations.py +++ b/tests/test_alembic_migrations.py @@ -1,10 +1,14 @@ """Test alembic migrations of spatial columns.""" import re +import pytest import sqlalchemy as sa # noqa (This import is only used in the migration scripts) +from alembic import command +from alembic import script from alembic.autogenerate import compare_metadata from alembic.autogenerate import produce_migrations from alembic.autogenerate import render_python_code +from alembic.config import Config from alembic.migration import MigrationContext from alembic.operations import Operations from sqlalchemy import Column @@ -16,6 +20,12 @@ from geoalchemy2 import Geometry from geoalchemy2 import alembic_helpers +from alembic.operations import ops +from alembic.operations.ops import ModifyTableOps +from alembic import context + +from . import test_only_with_dialects + def filter_tables(name, type_, parent_names): """Filter tables that we don't care about.""" @@ -47,6 +57,7 @@ def test_no_diff(self, conn, Lake, setup_tables): opts={ "include_object": alembic_helpers.include_object, "include_name": filter_tables, + "process_revision_directives": alembic_helpers.writer, }, ) @@ -87,6 +98,7 @@ def test_diff(self, conn, Lake, setup_tables): opts={ "include_object": alembic_helpers.include_object, "include_name": filter_tables, + "process_revision_directives": alembic_helpers.writer, }, ) @@ -117,138 +129,360 @@ def test_diff(self, conn, Lake, setup_tables): assert add_new_geom_col[3].type.dimension == 2 -def test_migration(conn, metadata): - """Test the actual migration of spatial types.""" - Table( - "alembic_table", - metadata, - Column("id", Integer, primary_key=True), - Column("int_col", Integer, index=True), - Column( - "geom", - Geometry( - geometry_type="POINT", - srid=4326, - management=False, - ), - ), - # The managed column does not work for now - # Column( - # "managed_geom", - # Geometry( - # geometry_type="POINT", - # srid=4326, - # management=True, - # ), - # ), - Column( - "geom_no_idx", - Geometry( - geometry_type="POINT", - srid=4326, - spatial_index=False, - ), - ), - ) +@pytest.fixture +def alembic_dir(tmpdir): + return (tmpdir / "alembic_files") - mc = MigrationContext.configure( - conn, - opts={ - "include_object": alembic_helpers.include_object, - "include_name": filter_tables, - "user_module_prefix": "geoalchemy2.types.", - }, - ) - migration_script = produce_migrations(mc, metadata) - upgrade_script = render_python_code( - migration_script.upgrade_ops, render_item=alembic_helpers.render_item - ) - downgrade_script = render_python_code( - migration_script.downgrade_ops, render_item=alembic_helpers.render_item - ) +@pytest.fixture +def alembic_config_path(alembic_dir): + return (alembic_dir / "test_alembic.ini") - op = Operations(mc) # noqa (This variable is only used in the migration scripts) - # Compile and execute the upgrade part of the migration script - eval(compile(upgrade_script.replace(" ", ""), "upgrade_script.py", "exec")) +@pytest.fixture +def alembic_env_path(alembic_dir): + return (alembic_dir / "env.py") - if conn.dialect.name == "postgresql": - # Postgresql dialect +@pytest.fixture +def test_script_path(alembic_dir): + return (alembic_dir / "test_script.py") + + +@pytest.fixture +def alembic_env(engine, alembic_dir, alembic_config_path, alembic_env_path, 
test_script_path): + cfg_tmp = Config(alembic_config_path) + engine.execute("DROP TABLE IF EXISTS alembic_version;") + command.init(cfg_tmp, str(alembic_dir), template="generic") + with alembic_env_path.open(mode="w", encoding="utf8") as f: + f.write( + """ +import importlib + +from alembic import context +from sqlalchemy import MetaData, engine_from_config +from sqlalchemy.event import listen +from geoalchemy2 import alembic_helpers + +config = context.config + +engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + echo=True) + +if engine.dialect.name == "sqlite": + listen(engine, 'connect', alembic_helpers.load_spatialite) + +spec = importlib.util.spec_from_file_location("test_script", "{}") +module = importlib.util.module_from_spec(spec) +spec.loader.exec_module(module) + +target_metadata = module.metadata + +connection = engine.connect() + +def include_object(obj, name, obj_type, reflected, compare_to): + if obj_type == "table" and name.startswith("idx_"): + # Ignore SQLite tables used for spatial indexes + # TODO: Improve this condition!!! + return False + if ( + obj_type == "table" + and name not in [ + "new_spatial_table" + ] + ): + return False + return True + +context.configure( + connection=connection, + target_metadata=target_metadata, + version_table_pk=True, + process_revision_directives=alembic_helpers.writer, + render_item=alembic_helpers.render_item, + include_object=include_object, +) + +try: + with context.begin_transaction(): + context.run_migrations() +finally: + connection.close() + engine.dispose() + +""".format(str(test_script_path)) + ) + with test_script_path.open(mode="w", encoding="utf8") as f: + f.write( + """ +from sqlalchemy import MetaData + +metadata = MetaData() + +""" + ) + sc = script.ScriptDirectory.from_config(cfg_tmp) + return sc + + +@pytest.fixture +def alembic_config(engine, alembic_dir, alembic_config_path, alembic_env): + cfg = Config(str(alembic_config_path)) + with alembic_config_path.open(mode="w", encoding="utf8") as f: + f.write( + """ +[alembic] +script_location = {} +sqlalchemy.url = {} + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + +""".format(alembic_dir, engine.url) + ) + return cfg + + +def _check_indexes(conn, expected): + if conn.dialect.name == "postgresql": # Query to check the indexes index_query = text( """SELECT indexname, indexdef FROM pg_indexes WHERE - tablename = 'alembic_table' + tablename = 'new_spatial_table' ORDER BY indexname;""" ) indexes = conn.execute(index_query).fetchall() - expected_indices = [ - ( - "alembic_table_pkey", - """CREATE UNIQUE INDEX alembic_table_pkey - ON gis.alembic_table - USING btree (id)""", - ), - ( - "idx_alembic_table_geom", - """CREATE INDEX idx_alembic_table_geom - ON gis.alembic_table - USING gist (geom)""", - ), - ( - "ix_alembic_table_int_col", - """CREATE INDEX ix_alembic_table_int_col - ON gis.alembic_table - USING btree (int_col)""", - ), + expected = [ + (i[0], re.sub("\n *", " ", i[1])) + for i in expected["postgresql"] ] - assert len(indexes) == 3 - - for idx, expected_idx in zip(indexes, expected_indices): - assert idx[0] == expected_idx[0] - assert idx[1] == re.sub("\n *", " ", expected_idx[1]) + assert indexes == expected elif 
conn.dialect.name == "sqlite": - # SQLite dialect - # Query to check the indexes - query_indexes = text( + index_query = text( """SELECT * FROM geometry_columns - WHERE f_table_name = 'alembic_table' + WHERE f_table_name = 'new_spatial_table' ORDER BY f_table_name, f_geometry_column;""" ) - # Check the actual properties - geom_cols = conn.execute(query_indexes).fetchall() - assert geom_cols == [ - ("alembic_table", "geom", 1, 2, 4326, 1), - ("alembic_table", "geom_no_idx", 1, 2, 4326, 0), - ] + indexes = conn.execute(index_query).fetchall() + assert indexes == expected["sqlite"] + +@test_only_with_dialects("postgresql", "sqlite-spatialite4") +def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, test_script_path): + initial_rev = command.revision( + alembic_config, + "Initial state", + autogenerate=True, + rev_id="initial", + ) + command.upgrade(alembic_config, initial_rev.revision) - # Compile and execute the downgrade part of the migration script - eval(compile(downgrade_script.replace(" ", ""), "downgrade_script.py", "exec")) + # Add a new table in metadata + with test_script_path.open(mode="w", encoding="utf8") as f: + f.write( + """ +from sqlalchemy import MetaData +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import Table +from geoalchemy2 import Geometry - if conn.dialect.name == "postgresql": - # Postgresql dialect - # Here the indexes are attached to the table so if the DROP TABLE works it's ok - pass - elif conn.dialect.name == "sqlite": - # SQLite dialect +metadata = MetaData() + +new_table = Table( + "new_spatial_table", + metadata, + Column("id", Integer, primary_key=True), + Column( + "geom_with_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + ), + ), + Column( + "geom_without_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + spatial_index=False, + ), + ), +) - # Query to check the indexes - query_indexes = text( - """SELECT * - FROM geometry_columns - WHERE f_table_name = 'alembic_table' - ORDER BY f_table_name, f_geometry_column;""" +""" + ) + + # Auto-generate a new migration script + rev_table = command.revision( + alembic_config, + "Add a new table", + autogenerate=True, + rev_id="table", + ) + + # Apply the upgrade script + command.upgrade(alembic_config, rev_table.revision) + + _check_indexes( + conn, + { + "postgresql": [ + ( + "idx_new_spatial_table_geom_with_idx", + """CREATE INDEX idx_new_spatial_table_geom_with_idx + ON gis.new_spatial_table + USING gist (geom_with_idx)""", + ), + ( + "new_spatial_table_pkey", + """CREATE UNIQUE INDEX new_spatial_table_pkey + ON gis.new_spatial_table + USING btree (id)""", + ), + ], + "sqlite": [ + ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), + ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), + ], + } + ) + + # Remove spatial columns and add new ones + with test_script_path.open(mode="w", encoding="utf8") as f: + f.write( + """ +from sqlalchemy import MetaData +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import Table +from geoalchemy2 import Geometry + +metadata = MetaData() + +new_table = Table( + "new_spatial_table", + metadata, + Column("id", Integer, primary_key=True), + Column( + "new_geom_with_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + ), + ), + Column( + "new_geom_without_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + spatial_index=False, + ), + ), +) + +""" ) - # Now the indexes should have been dropped - geom_cols = 
conn.execute(query_indexes).fetchall() - assert geom_cols == [] + # Auto-generate a new migration script + rev_cols = command.revision( + alembic_config, + "Add and remove spatial columns", + autogenerate=True, + rev_id="columns", + ) + + # Apply the upgrade script + command.upgrade(alembic_config, rev_cols.revision) + + _check_indexes( + conn, + { + "postgresql": [ + ( + "idx_new_spatial_table_new_geom_with_idx", + """CREATE INDEX idx_new_spatial_table_new_geom_with_idx + ON gis.new_spatial_table + USING gist (new_geom_with_idx)""", + ), + ( + "new_spatial_table_pkey", + """CREATE UNIQUE INDEX new_spatial_table_pkey + ON gis.new_spatial_table + USING btree (id)""", + ), + ], + "sqlite": [ + ("new_spatial_table", "new_geom_with_idx", 2, 2, 4326, 1), + ("new_spatial_table", "new_geom_without_idx", 2, 2, 4326, 0), + ], + } + ) + + # Apply the downgrade script for columns + command.downgrade(alembic_config, rev_table.revision) + + _check_indexes( + conn, + { + "postgresql": [ + ( + "idx_new_spatial_table_geom_with_idx", + """CREATE INDEX idx_new_spatial_table_geom_with_idx + ON gis.new_spatial_table + USING gist (geom_with_idx)""", + ), + ( + "new_spatial_table_pkey", + """CREATE UNIQUE INDEX new_spatial_table_pkey + ON gis.new_spatial_table + USING btree (id)""", + ), + ], + "sqlite": [ + ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), + ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), + ], + } + ) + + # Apply the downgrade script for tables + command.downgrade(alembic_config, initial_rev.revision) + + _check_indexes( + conn, + { + "postgresql": [], + "sqlite": [], + } + ) diff --git a/tests/test_functional_sqlite.py b/tests/test_functional_sqlite.py index afdcf2a8..04ed4d3c 100644 --- a/tests/test_functional_sqlite.py +++ b/tests/test_functional_sqlite.py @@ -21,7 +21,7 @@ from geoalchemy2.elements import WKTElement from geoalchemy2.shape import from_shape -from . import load_spatialite +from . import test_only_with_dialects from . 
import select if platform.python_implementation().lower() == 'pypy': @@ -90,6 +90,16 @@ def test_all_indexes(self, conn, TableWithIndexes, setup_tables): for expected_idx in expected_indices: assert self.check_spatial_idx(conn, expected_idx) + TableWithIndexes.__table__.drop(bind=conn) + + indexes_after_drop = conn.execute(text("""SELECT * FROM "geometry_columns";""")).fetchall() + tables_after_drop = conn.execute( + text("SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';") + ).fetchall() + + assert indexes_after_drop == [] + assert [table for table in tables_after_drop if 'table_with_indexes' in table.name] == [] + class TestInsertionORM(): pass @@ -190,51 +200,17 @@ def test_insert(self, conn, ConstrainedLake, setup_tables): class TestReflection(): - def _copy_and_connect_db(self, input_db, tmp_db, engine_echo): - if 'SPATIALITE_LIBRARY_PATH' not in os.environ: - pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') - - shutil.copyfile(input_db, tmp_db) - - db_url = f"sqlite:///{tmp_db}" - engine = create_engine(db_url, echo=engine_echo) - listen(engine, 'connect', load_spatialite) - - conn = engine.connect() - - return conn - - @pytest.fixture - def spatialite_3_conn(self, tmpdir, _engine_echo): - return self._copy_and_connect_db( - Path(__file__).parent / "data" / "spatialite_lt_4.sqlite", - tmpdir / "test_geoalchemy2_spatialite_lt_4.sqlite", - _engine_echo - ) - - @pytest.fixture - def spatialite_4_conn(self, tmpdir, _engine_echo): - return self._copy_and_connect_db( - Path(__file__).parent / "data" / "spatialite_ge_4.sqlite", - tmpdir / "test_geoalchemy2_spatialite_ge_4.sqlite", - _engine_echo - ) - - @pytest.fixture - def setup_reflection_tables_lt_4(self, reflection_tables_metadata, spatialite_3_conn): - reflection_tables_metadata.drop_all(spatialite_3_conn, checkfirst=True) - reflection_tables_metadata.create_all(spatialite_3_conn) - @pytest.fixture - def setup_reflection_tables_ge_4(self, reflection_tables_metadata, spatialite_4_conn): - reflection_tables_metadata.drop_all(spatialite_4_conn, checkfirst=True) - reflection_tables_metadata.create_all(spatialite_4_conn) + def setup_reflection_tables(self, reflection_tables_metadata, conn): + reflection_tables_metadata.drop_all(conn, checkfirst=True) + reflection_tables_metadata.create_all(conn) - def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_tables_lt_4): + @test_only_with_dialects("sqlite-spatialite3") + def test_reflection_spatialite_lt_4(self, conn, setup_reflection_tables): t = Table( 'lake', MetaData(), - autoload_with=spatialite_3_conn) + autoload_with=conn) type_ = t.c.geom.type assert isinstance(type_, Geometry) @@ -267,7 +243,7 @@ def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_ta assert type_.dimension == 4 # Drop the table - t.drop(bind=spatialite_3_conn) + t.drop(bind=conn) # Query to check the tables query_tables = text( @@ -287,11 +263,11 @@ def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_ta ) # Check the indices - geom_cols = spatialite_3_conn.execute(query_indexes).fetchall() + geom_cols = conn.execute(query_indexes).fetchall() assert geom_cols == [] # Check the tables - all_tables = [i[0] for i in spatialite_3_conn.execute(query_tables).fetchall()] + all_tables = [i[0] for i in conn.execute(query_tables).fetchall()] assert all_tables == [ 'SpatialIndex', 'geometry_columns', @@ -306,10 +282,10 @@ def test_reflection_spatialite_lt_4(self, spatialite_3_conn, 
setup_reflection_ta ] # Recreate the table to check that the reflected properties are correct - t.create(bind=spatialite_3_conn) + t.create(bind=conn) # Check the actual properties - geom_cols = spatialite_3_conn.execute(query_indexes).fetchall() + geom_cols = conn.execute(query_indexes).fetchall() assert geom_cols == [ ('lake', 'geom', 'LINESTRING', 'XY', 4326, 1), ('lake', 'geom_m', 'LINESTRING', 'XYM', 4326, 1), @@ -318,7 +294,7 @@ def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_ta ('lake', 'geom_zm', 'LINESTRING', 'XYZM', 4326, 1), ] - all_tables = [i[0] for i in spatialite_3_conn.execute(query_tables).fetchall()] + all_tables = [i[0] for i in conn.execute(query_tables).fetchall()] assert all_tables == [ 'SpatialIndex', 'geometry_columns', @@ -349,11 +325,12 @@ def test_reflection_spatialite_lt_4(self, spatialite_3_conn, setup_reflection_ta 'virts_layer_statistics', ] - def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_tables_ge_4): + @test_only_with_dialects("sqlite-spatialite4") + def test_reflection_spatialite_ge_4(self, conn, setup_reflection_tables): t = Table( 'lake', MetaData(), - autoload_with=spatialite_4_conn) + autoload_with=conn) type_ = t.c.geom.type assert isinstance(type_, Geometry) @@ -386,7 +363,7 @@ def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_ta assert type_.dimension == 4 # Drop the table - t.drop(bind=spatialite_4_conn) + t.drop(bind=conn) # Query to check the tables query_tables = text( @@ -406,11 +383,11 @@ def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_ta ) # Check the indices - geom_cols = spatialite_4_conn.execute(query_indexes).fetchall() + geom_cols = conn.execute(query_indexes).fetchall() assert geom_cols == [] # Check the tables - all_tables = [i[0] for i in spatialite_4_conn.execute(query_tables).fetchall()] + all_tables = [i[0] for i in conn.execute(query_tables).fetchall()] assert all_tables == [ 'ElementaryGeometries', 'SpatialIndex', @@ -434,10 +411,10 @@ def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_ta ] # Recreate the table to check that the reflected properties are correct - t.create(bind=spatialite_4_conn) + t.create(bind=conn) # Check the actual properties - geom_cols = spatialite_4_conn.execute(query_indexes).fetchall() + geom_cols = conn.execute(query_indexes).fetchall() assert geom_cols == [ ('lake', 'geom', 2, 2, 4326, 1), ('lake', 'geom_m', 2002, 3, 4326, 1), @@ -446,7 +423,7 @@ def test_reflection_spatialite_ge_4(self, spatialite_4_conn, setup_reflection_ta ('lake', 'geom_zm', 3002, 4, 4326, 1), ] - all_tables = [i[0] for i in spatialite_4_conn.execute(query_tables).fetchall()] + all_tables = [i[0] for i in conn.execute(query_tables).fetchall()] assert all_tables == [ 'ElementaryGeometries', 'SpatialIndex', From 6a5382876cbb82996a56a2c188aa93513c6819b4 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Thu, 26 May 2022 14:42:08 +0200 Subject: [PATCH 05/18] Use batch operation to drop spatial columns with SQLite --- geoalchemy2/alembic_helpers.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index a9b3a77f..71c0826f 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -137,15 +137,9 @@ def drop_geospatial_column(operations, operation): # (the column is not actually dropped here) operations.execute(func.DiscardGeometryColumn(table_name, column_name)) - # This 
second drop column call is necessary: SpatiaLite was designed for a SQLite that did
-    # not support dropping columns from tables at all. DiscardGeometryColumn removes associated
-    # metadata and triggers from the DB associated with a geospatial column, without removing
-    # the column itself. The next call actually removes the geospatial column, IF the underlying
-    # SQLite package version >= 3.35
-    conn = operations.get_bind()
-    sqlite_version = conn.execute(text("SELECT sqlite_version();")).scalar()
-    if parse_version(sqlite_version) >= parse_version("3.35"):
-        operations.drop_column(table_name, column_name)
+        # Actually drop the column
+        with operations.batch_alter_table(table_name) as batch_op:
+            batch_op.drop_column(column_name)
     elif "postgresql" in dialect.name:
         operations.drop_column(table_name, column_name)

From 7e6e66a376b1d065e37a9c1c7fbbd487eb9536b3 Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Mon, 30 May 2022 18:00:35 +0200
Subject: [PATCH 06/18] Fix reflection for postgresql and alembic helpers

---
 .github/workflows/test_and_publish.yml |   2 +-
 .gitignore                             |   9 +
 geoalchemy2/__init__.py                |  74 +++--
 geoalchemy2/alembic_helpers.py         | 435 +++++++++++++++++++------
 geoalchemy2/types.py                   |   3 +-
 tests/__init__.py                      |  32 ++
 tests/test_alembic_migrations.py       |  89 ++---
 tests/test_functional.py               |  50 +++
 8 files changed, 510 insertions(+), 184 deletions(-)

diff --git a/.github/workflows/test_and_publish.yml b/.github/workflows/test_and_publish.yml
index 9bd18fb3..5645890b 100644
--- a/.github/workflows/test_and_publish.yml
+++ b/.github/workflows/test_and_publish.yml
@@ -23,7 +23,7 @@ jobs:
         python-version: [3.6, 3.7, 3.8, 3.9, pypy3]
 
     # The type of runner that the job will run on
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-22.04
 
     services:
       postgres:
diff --git a/.gitignore b/.gitignore
index 742f0443..d83ad220 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,9 +29,12 @@ pip-delete-this-directory.txt
 # Unit test / coverage reports
 .tox/
 .coverage
+.coverage-*
 .cache
 nosetests.xml
 coverage.xml
+.pytest_cache/
+reports/
 
 # Translations
 *.mo
@@ -51,3 +54,9 @@ coverage.xml
 # Sphinx documentation
 doc/_build/
 doc/gallery/
+
+# Miscellaneous
+*.csv
+*.sh
+*.txt
+issues
diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py
index 08c6b775..f8876c5f 100644
--- a/geoalchemy2/__init__.py
+++ b/geoalchemy2/__init__.py
@@ -42,8 +42,8 @@ def _check_spatial_type(tested_type, spatial_types, dialect=None):
     )
 
 
-def _spatial_idx_name(table, column):
-    return 'idx_{}_{}'.format(table.name, column.name)
+def _spatial_idx_name(table_name, column_name):
+    return 'idx_{}_{}'.format(table_name, column_name)
 
 
 def check_management(column, dialect):
@@ -117,6 +117,12 @@ def after_parent_attach(column, table):
         # If the column is managed, the indexes are created after the table
         return
 
+    try:
+        if column.type._spatial_index_reflected:
+            return
+    except AttributeError:
+        pass
+
     if _check_spatial_type(column.type, (Geometry, Geography)):
         if column.type.use_N_D_index:
             postgresql_ops = {column.name: "gist_geometry_ops_nd"}
@@ -124,7 +130,7 @@ def after_parent_attach(column, table):
             postgresql_ops = {}
         table.append_constraint(
             Index(
-                _spatial_idx_name(table, column),
+                _spatial_idx_name(table.name, column.name),
                 column,
                 postgresql_using='gist',
                 postgresql_ops=postgresql_ops,
@@ -134,7 +140,7 @@ def after_parent_attach(column, table):
     elif _check_spatial_type(column.type, Raster):
         table.append_constraint(
            Index(
-                _spatial_idx_name(table, column),
+                _spatial_idx_name(table.name, column.name),
                func.ST_ConvexHull(column),
postgresql_using='gist', _column_flag=True, @@ -197,7 +203,7 @@ def dispatch(current_event, table, bind): ) and col in idx.columns.values(): table.indexes.remove(idx) if ( - idx.name != _spatial_idx_name(table, col) + idx.name != _spatial_idx_name(table.name, col.name) or not getattr(col.type, "spatial_index", False) ): table.info["_after_create_indexes"].append(idx) @@ -272,7 +278,7 @@ def dispatch(current_event, table, bind): else: postgresql_ops = {} idx = Index( - _spatial_idx_name(table, col), + _spatial_idx_name(table.name, col.name), col, postgresql_using='gist', postgresql_ops=postgresql_ops, @@ -313,37 +319,36 @@ def _reflect_geometry_column(inspector, table, column_info): coord_dimension = 3 # Query to check a given column has spatial index - # has_index_query = """SELECT (indexrelid IS NOT NULL) AS has_index - # FROM ( - # SELECT - # n.nspname, - # c.relname, - # c.oid AS relid, - # a.attname, - # a.attnum - # FROM pg_attribute a - # INNER JOIN pg_class c ON (a.attrelid=c.oid) - # INNER JOIN pg_type t ON (a.atttypid=t.oid) - # INNER JOIN pg_namespace n ON (c.relnamespace=n.oid) - # WHERE t.typname='geometry' - # AND c.relkind='r' - # ) g - # LEFT JOIN pg_index i ON (g.relid = i.indrelid AND g.attnum = ANY(i.indkey)) - # WHERE relname = '{}' AND attname = '{}'""".format( - # table.name, column_info["name"] - # ) - # if table.schema is not None: - # has_index_query += " AND nspname = '{}'".format(table.schema) - # spatial_index = inspector.bind.execute(text(has_index_query)).scalar() - - # NOTE: For now we just set the spatial_index attribute to False because the indexes - # are already retrieved by the reflection process. + has_index_query = """SELECT (indexrelid IS NOT NULL) AS has_index + FROM ( + SELECT + n.nspname, + c.relname, + c.oid AS relid, + a.attname, + a.attnum + FROM pg_attribute a + INNER JOIN pg_class c ON (a.attrelid=c.oid) + INNER JOIN pg_type t ON (a.atttypid=t.oid) + INNER JOIN pg_namespace n ON (c.relnamespace=n.oid) + WHERE t.typname='geometry' + AND c.relkind='r' + ) g + LEFT JOIN pg_index i ON (g.relid = i.indrelid AND g.attnum = ANY(i.indkey)) + WHERE relname = '{}' AND attname = '{}'""".format( + table.name, column_info["name"] + ) + if table.schema is not None: + has_index_query += " AND nspname = '{}'".format(table.schema) + spatial_index = inspector.bind.execute(text(has_index_query)).scalar() # Set attributes column_info["type"].geometry_type = geometry_type column_info["type"].dimension = coord_dimension - column_info["type"].spatial_index = False - # column_info["type"].spatial_index = bool(spatial_index) + column_info["type"].spatial_index = bool(spatial_index) + + # Spatial indexes are automatically reflected with PostgreSQL dialect + column_info["type"]._spatial_index_reflected = True elif inspector.bind.dialect.name == "sqlite": # Get geometry type, SRID and spatial index from the SpatiaLite metadata col_attributes = _get_spatialite_attrs(inspector.bind, table.name, column_info["name"]) @@ -390,6 +395,9 @@ def _reflect_geometry_column(inspector, table, column_info): column_info["type"].srid = srid column_info["type"].spatial_index = bool(spatial_index) + # Spatial indexes are not automatically reflected with SQLite dialect + column_info["type"]._spatial_index_reflected = False + _setup_ddl_event_listeners() diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index 71c0826f..c0afbb21 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -1,5 +1,6 @@ """Some helpers to use with Alembic 
migration tool.""" import os +from collections import defaultdict from alembic.autogenerate import renderers from alembic.autogenerate import rewriter @@ -9,10 +10,22 @@ from alembic.autogenerate.render import _drop_column from alembic.autogenerate.render import _drop_index from alembic.autogenerate.render import _drop_table +from alembic.ddl.base import RenameTable +from alembic.ddl.base import format_table_name +from alembic.ddl.base import visit_rename_table +from alembic.operations import BatchOperations from alembic.operations import Operations from alembic.operations import ops +from alembic.operations.batch import ApplyBatchImpl from packaging.version import parse as parse_version +from sqlalchemy import Index +from sqlalchemy import MetaData +from sqlalchemy import Table from sqlalchemy import text +from sqlalchemy.dialects.sqlite.base import SQLiteDialect +from sqlalchemy.engine import reflection +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import DropTable from sqlalchemy.types import TypeDecorator from geoalchemy2 import Column @@ -21,17 +34,105 @@ from geoalchemy2 import Raster from geoalchemy2 import _check_spatial_type from geoalchemy2 import _get_gis_cols +from geoalchemy2 import _get_spatialite_attrs from geoalchemy2 import _get_spatialite_version +from geoalchemy2 import _spatial_idx_name from geoalchemy2 import check_management from geoalchemy2 import func - writer = rewriter.Rewriter() +_SPATIAL_TABLES = set() + + +def _monkey_patch_get_indexes_for_sqlite(): + normal_behavior = SQLiteDialect.get_indexes + + def spatial_behavior(self, connection, table_name, schema=None, **kw): + indexes = self._get_indexes_normal_behavior( + connection, table_name, schema=None, **kw + ) + spatial_index_query = text( + """SELECT * + FROM geometry_columns + WHERE f_table_name = '{}' + ORDER BY f_table_name, f_geometry_column;""".format( + table_name + ) + ) + spatial_indexes = connection.execute(spatial_index_query).fetchall() + if spatial_indexes: + reflected_names = set([i["name"] for i in indexes]) + for idx in spatial_indexes: + idx_col = idx[1] + idx_name = _spatial_idx_name(table_name, idx_col) + if not bool(idx[-1]) or idx_name in reflected_names: + continue + indexes.append( + { + "name": idx_name, + "column_names": [idx_col], + "unique": 0, + } + ) + reflected_names.add(idx_name) + return indexes + + SQLiteDialect.get_indexes = spatial_behavior + SQLiteDialect._get_indexes_normal_behavior = normal_behavior + + +def _monkey_patch_transfer_elements_to_new_table_for_sqlite(): + normal_behavior = ApplyBatchImpl._transfer_elements_to_new_table + + def remove_spatial_indexes(table, new_table, dialect): + new_indexes = set() + input_col_names = set([col.name for col in table.columns]) + remove_index_cols = [] + for idx in table.indexes: + if len(idx.columns) == 1: + col = idx.columns[0] + if not isinstance(col, Column) or not _check_spatial_type( + col.type, + (Geometry, Geography, Raster), + dialect, + ): + new_indexes.add(idx) + else: + new_indexes.add(idx) + table.indexes = new_indexes + + new_indexes = set() + for idx in new_table.indexes: + if len(idx.columns) == 1: + col = idx.columns[0] + if not isinstance(col, Column) or not _check_spatial_type( + col.type, + (Geometry, Geography, Raster), + dialect, + ): + new_indexes.add(idx) + if col.name not in input_col_names: + for i in idx.columns: + i.spatial_index = False + else: + new_indexes.add(idx) + new_table.indexes = new_indexes + + def spatial_behavior(self): + 
self._transfer_elements_to_new_table_normal_behavior() + remove_spatial_indexes(self.table, self.new_table, self.impl.dialect) + + ApplyBatchImpl._transfer_elements_to_new_table = spatial_behavior + ApplyBatchImpl._transfer_elements_to_new_table_normal_behavior = normal_behavior + + +_monkey_patch_get_indexes_for_sqlite() +_monkey_patch_transfer_elements_to_new_table_for_sqlite() def render_item(obj_type, obj, autogen_context): """Apply custom rendering for selected items.""" - if obj_type == 'type' and isinstance(obj, (Geometry, Geography, Raster)): + if obj_type == "type" and isinstance(obj, (Geometry, Geography, Raster)): import_name = obj.__class__.__name__ autogen_context.imports.add(f"from geoalchemy2 import {import_name}") return "%r" % obj @@ -41,6 +142,9 @@ def render_item(obj_type, obj, autogen_context): @Operations.register_operation("add_geospatial_column") +@BatchOperations.register_operation( + "add_geospatial_column", "batch_add_geospatial_column" +) class AddGeospatialColumnOp(ops.AddColumnOp): """ Add a Geospatial Column in an Alembic migration context. This methodology originates from: @@ -59,13 +163,49 @@ def reverse(self): self.schema, self.table_name, self.column.name ) + @classmethod + def batch_add_geospatial_column( + cls, + operations, + column, + insert_before=None, + insert_after=None, + ): + """Issue an "add column" instruction using the current + batch migration context. + + .. seealso:: + + :meth:`.Operations.add_column` + + """ + + kw = {} + if insert_before: + kw["insert_before"] = insert_before + if insert_after: + kw["insert_after"] = insert_after + + op = cls( + operations.impl.table_name, + column, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + @Operations.register_operation("drop_geospatial_column") +@BatchOperations.register_operation( + "drop_geospatial_column", "batch_drop_geospatial_column" +) class DropGeospatialColumnOp(ops.DropColumnOp): """Drop a Geospatial Column in an Alembic migration context.""" @classmethod - def drop_geospatial_column(cls, operations, table_name, column_name, schema=None, **kw): + def drop_geospatial_column( + cls, operations, table_name, column_name, schema=None, **kw + ): """Handle the different situations arising from dropping geospatial column from a DB.""" op = cls(table_name, column_name, schema=schema, **kw) @@ -77,6 +217,24 @@ def reverse(self): self.schema, self.table_name, self.column ) + @classmethod + def batch_drop_geospatial_column(cls, operations, column_name, **kw): + """Issue a "drop column" instruction using the current + batch migration context. + + .. 
seealso:: + + :meth:`.Operations.drop_column` + + """ + op = cls( + operations.impl.table_name, + column_name, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + @Operations.implementation_for(AddGeospatialColumnOp) def add_geospatial_column(operations, operation): @@ -93,24 +251,25 @@ def add_geospatial_column(operations, operation): dialect = operations.get_bind().dialect - if isinstance(operation.column, TypeDecorator): - # Will be either geoalchemy2.types.Geography or geoalchemy2.types.Geometry, if using a - # custom type - geospatial_core_type = operation.column.type.load_dialect_impl(dialect) - else: - geospatial_core_type = operation.column.type - - if "sqlite" in dialect.name: - operations.execute(func.AddGeometryColumn( - table_name, - column_name, - geospatial_core_type.srid, - geospatial_core_type.geometry_type, - geospatial_core_type.dimension, - not geospatial_core_type.nullable, - )) + if dialect.name == "sqlite": + if isinstance(operation.column, TypeDecorator): + # Will be either geoalchemy2.types.Geography or geoalchemy2.types.Geometry, if using a + # custom type + geospatial_core_type = operation.column.type.load_dialect_impl(dialect) + else: + geospatial_core_type = operation.column.type + operations.execute( + func.AddGeometryColumn( + table_name, + column_name, + geospatial_core_type.srid, + geospatial_core_type.geometry_type, + geospatial_core_type.dimension, + not geospatial_core_type.nullable, + ) + ) elif "postgresql" in dialect.name: - operations.add_column( + operations.impl.add_column( table_name, operation.column, schema=operation.schema, @@ -129,19 +288,50 @@ def drop_geospatial_column(operations, operation): table_name = operation.table_name column_name = operation.column_name + column = operation.to_column(operations.migration_context) dialect = operations.get_bind().dialect - if "sqlite" in dialect.name: - # Discard the column and remove associated index - # (the column is not actually dropped here) - operations.execute(func.DiscardGeometryColumn(table_name, column_name)) + if dialect.name == "sqlite": + _SPATIAL_TABLES.add(table_name) + operations.impl.drop_column( + table_name, column, schema=operation.schema, **operation.kw + ) + + +@compiles(RenameTable, "sqlite") +def visit_rename_geospatial_table( + element: "RenameTable", compiler: "DDLCompiler", **kw +) -> str: + + table_is_spatial = element.table_name in _SPATIAL_TABLES + new_table_is_spatial = element.new_table_name in _SPATIAL_TABLES + + if table_is_spatial or new_table_is_spatial: + # Here we suppose there is only one DB attached to the current engine, so the prefix + # is set to NULL + return "SELECT RenameTable(NULL, '%s', '%s')" % ( + format_table_name(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, element.schema), + ) + else: + return visit_rename_table(element, compiler, **kw) - # Actually drop the column - with operations.batch_alter_table(table_name) as batch_op: - batch_op.drop_column(column_name) - elif "postgresql" in dialect.name: - operations.drop_column(table_name, column_name) + +@compiles(DropTable, "sqlite") +def visit_drop_geospatial_table( + element: "DropTable", compiler: "DDLCompiler", **kw +) -> str: + + table_is_spatial = element.element.name in _SPATIAL_TABLES + + if table_is_spatial: + # Here we suppose there is only one DB attached to the current engine + return "SELECT DropTable(NULL, '%s')" % ( + format_table_name(compiler, element.element.name, None), + ) + else: + return 
compiler.visit_drop_table(element, **kw)
 
 
 @renderers.dispatch_for(AddGeospatialColumnOp)
@@ -167,6 +357,7 @@ def add_geo_column(context, revision, op):
         col_type = col_type.load_dialect_impl(dialect)
     if isinstance(col_type, (Geometry, Geography, Raster)):
         op.column.type.spatial_index = False
+        op.column.type._spatial_index_reflected = None
         new_op = AddGeospatialColumnOp(op.table_name, op.column, op.schema)
     else:
         new_op = op
@@ -214,10 +405,10 @@ def from_table(
         obj = super().from_table(table, _namespace_metadata)
         return obj
 
-    def to_table(
-        self, migration_context=None
-    ) -> "Table":
+    def to_table(self, migration_context=None) -> "Table":
         table = super().to_table(migration_context)
+
+        # Set spatial_index attribute to False so the indexes are created explicitly
         for col in table.columns:
             try:
                 if col.type.spatial_index:
@@ -250,18 +441,8 @@ def from_table(
         obj = super().from_table(table, _namespace_metadata)
         return obj
 
-    def to_table(
-        self, migration_context=None
-    ) -> "Table":
+    def to_table(self, migration_context=None) -> "Table":
         table = super().to_table(migration_context)
-
-        # Set spatial_index attribute to False so the indexes are created explicitely
-        for col in table.columns:
-            try:
-                if col.type.spatial_index:
-                    col.type.spatial_index = False
-            except AttributeError:
-                pass
         return table
 
@@ -275,10 +456,14 @@ def create_geospatial_table(operations, operation):
         column_type, and optional keywords.
     """
     table_name = operation.table_name
+    bind = operations.get_bind()
 
     # For now the default events defined in geoalchemy2 are enough to handle table creation
     operations.create_table(table_name, *operation.columns, **operation.kw)
 
+    if bind.dialect.name == "sqlite":
+        _SPATIAL_TABLES.add(table_name)
+
 
 @Operations.implementation_for(DropGeospatialTableOp)
 def drop_geospatial_table(operations, operation):
@@ -293,15 +478,9 @@ def drop_geospatial_table(operations, operation):
     bind = operations.get_bind()
     dialect = bind.dialect
 
-    if "sqlite" in dialect.name:
-        spatialite_version = _get_spatialite_version(bind)
-        if parse_version(spatialite_version) >= parse_version("5"):
-            drop_func = func.DropTable
-        else:
-            drop_func = func.DropGeoTable
-        operations.execute(drop_func(table_name))
-    else:
-        operations.drop_table(table_name, operation.schema, **operation.table_kw)
+    if dialect.name == "sqlite":
+        _SPATIAL_TABLES.add(table_name)
+    operations.drop_table(table_name, operation.schema, **operation.table_kw)
 
 
 @renderers.dispatch_for(CreateGeospatialTableOp)
@@ -322,7 +501,9 @@ def render_drop_geo_table(autogen_context, op):
 def create_geo_table(context, revision, op):
     """This function replaces the default CreateTableOp by a geospatial-specific one."""
     dialect = context.bind.dialect
-    gis_cols = _get_gis_cols(op, (Geometry, Geography, Raster), dialect, check_col_management=False)
+    gis_cols = _get_gis_cols(
+        op, (Geometry, Geography, Raster), dialect, check_col_management=False
+    )
 
     if gis_cols:
         new_op = CreateGeospatialTableOp(op.table_name, op.columns, op.schema)
@@ -337,7 +518,9 @@ def drop_geo_table(context, revision, op):
     """This function replaces the default DropTableOp by a geospatial-specific one."""
     dialect = context.bind.dialect
     table = op.to_table()
-    gis_cols = _get_gis_cols(table, (Geometry, Geography, Raster), dialect, check_col_management=False)
+    gis_cols = _get_gis_cols(
+        table, (Geometry, Geography, Raster), dialect, check_col_management=False
+    )
 
     if gis_cols:
         new_op = DropGeospatialTableOp(op.table_name, op.schema)
@@ -348,9 +531,21 @@ 
@Operations.register_operation("create_geospatial_index") +@BatchOperations.register_operation( + "create_geospatial_index", "batch_create_geospatial_index" +) class CreateGeospatialIndexOp(ops.CreateIndexOp): @classmethod - def create_geospatial_index(cls, operations, index_name, table_name, columns, schema=None, unique=False, **kw): + def create_geospatial_index( + cls, + operations, + index_name, + table_name, + columns, + schema=None, + unique=False, + **kw, + ): """Handle the different situations arising from dropping geospatial table from a DB.""" op = cls(index_name, table_name, columns, schema=schema, unique=unique, **kw) return operations.invoke(op) @@ -364,18 +559,61 @@ def reverse(self): schema=self.schema, ) + @classmethod + def batch_create_geospatial_index( + cls, + operations, + index_name, + columns, + **kw, + ): + """Issue a "create index" instruction using the + current batch migration context. + + .. seealso:: + + :meth:`.Operations.create_index` + + """ + op = cls( + index_name, + operations.impl.table_name, + columns, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + @Operations.register_operation("drop_geospatial_index") +@BatchOperations.register_operation( + "drop_geospatial_index", "batch_drop_geospatial_index" +) class DropGeospatialIndexOp(ops.DropIndexOp): - def __init__(self, *args, column_name, **kwargs): super().__init__(*args, **kwargs) self.column_name = column_name @classmethod - def drop_geospatial_index(cls, operations, index_name, table_name, column_name, schema=None, unique=False, **kw): + def drop_geospatial_index( + cls, + operations, + index_name, + table_name, + column_name, + schema=None, + unique=False, + **kw, + ): """Handle the different situations arising from dropping geospatial table from a DB.""" - op = cls(index_name, table_name, column_name=column_name, schema=schema, unique=unique, **kw) + op = cls( + index_name, + table_name, + column_name=column_name, + schema=schema, + unique=unique, + **kw, + ) return operations.invoke(op) def reverse(self): @@ -386,7 +624,7 @@ def reverse(self): column_name=self.column_name, schema=self.schema, _reverse=self, - **self.kw + **self.kw, ) @classmethod @@ -402,6 +640,24 @@ def from_index(cls, index: "Index") -> "DropGeospatialIndexOp": **index.kwargs, ) + @classmethod + def batch_drop_geospatial_index(cls, operations, index_name, **kw): + """Issue a "drop index" instruction using the + current batch migration context. + + .. seealso:: + + :meth:`.Operations.drop_index` + + """ + op = cls( + index_name, + table_name=operations.impl.table_name, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + @Operations.implementation_for(CreateGeospatialIndexOp) def create_geospatial_index(operations, operation): @@ -412,22 +668,19 @@ def create_geospatial_index(operations, operation): operation: CreateGeospatialIndexOp call, with attributes for table_name, column_name, column_type, and optional keywords. 
""" - # return # Do nothing and rely on the bind = operations.get_bind() dialect = bind.dialect - if "sqlite" in dialect.name: - assert len(operation.columns) == 1, "A spatial index must be set on one column only" - operations.execute(func.CreateSpatialIndex(operation.table_name, operation.columns[0])) - else: - operations.create_index( - operation.index_name, - operation.table_name, - operation.columns, - operation.schema, - operation.unique, - **operation.kw + if dialect.name == "sqlite": + assert ( + len(operation.columns) == 1 + ), "A spatial index must be set on one column only" + operations.execute( + func.CreateSpatialIndex(operation.table_name, operation.columns[0]) ) + else: + idx = operation.to_index(operations.migration_context) + operations.impl.create_index(idx) @Operations.implementation_for(DropGeospatialIndexOp) @@ -442,15 +695,12 @@ def drop_geospatial_index(operations, operation): bind = operations.get_bind() dialect = bind.dialect - if "sqlite" in dialect.name: - operations.execute(func.DisableSpatialIndex(operation.table_name, operation.column_name)) - else: - operations.drop_index( - operation.index_name, - operation.table_name, - operation.schema, - **operation.kw + if dialect.name == "sqlite": + operations.execute( + func.DisableSpatialIndex(operation.table_name, operation.column_name) ) + else: + operations.impl.drop_index(operation.to_index(operations.migration_context)) @renderers.dispatch_for(CreateGeospatialIndexOp) @@ -481,9 +731,8 @@ def create_geo_index(context, revision, op): if len(op.columns) == 1: col = op.columns[0] - if ( - isinstance(col, Column) - and _check_spatial_type(col.type, (Geometry, Geography, Raster), dialect) + if isinstance(col, Column) and _check_spatial_type( + col.type, (Geometry, Geography, Raster), dialect ): # Fix index properties op.kw["postgresql_using"] = op.kw.get("postgresql_using", "gist") @@ -494,12 +743,7 @@ def create_geo_index(context, revision, op): op.kw["postgresql_ops"] = op.kw.get("postgresql_ops", postgresql_ops) return CreateGeospatialIndexOp( - op.index_name, - op.table_name, - op.columns, - op.schema, - op.unique, - **op.kw + op.index_name, op.table_name, op.columns, op.schema, op.unique, **op.kw ) return op @@ -513,16 +757,15 @@ def drop_geo_index(context, revision, op): if len(idx.columns) == 1: col = idx.columns[0] - if ( - isinstance(col, Column) - and _check_spatial_type(col.type, (Geometry, Geography, Raster), dialect) + if isinstance(col, Column) and _check_spatial_type( + col.type, (Geometry, Geography, Raster), dialect ): return DropGeospatialIndexOp( op.index_name, op.table_name, column_name=col.name, schema=op.schema, - **op.kw + **op.kw, ) return op @@ -531,8 +774,10 @@ def drop_geo_index(context, revision, op): def load_spatialite(dbapi_conn, connection_record): """Load SpatiaLite extension in SQLite DB.""" if "SPATIALITE_LIBRARY_PATH" not in os.environ: - raise RuntimeError("The SPATIALITE_LIBRARY_PATH environment variable is not set.") + raise RuntimeError( + "The SPATIALITE_LIBRARY_PATH environment variable is not set." 
+ ) dbapi_conn.enable_load_extension(True) - dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) + dbapi_conn.load_extension(os.environ["SPATIALITE_LIBRARY_PATH"]) dbapi_conn.enable_load_extension(False) - dbapi_conn.execute('SELECT InitSpatialMetaData()') + dbapi_conn.execute("SELECT InitSpatialMetaData()") diff --git a/geoalchemy2/types.py b/geoalchemy2/types.py index 976be062..9dc00357 100644 --- a/geoalchemy2/types.py +++ b/geoalchemy2/types.py @@ -125,7 +125,7 @@ class _GISType(UserDefinedType): def __init__(self, geometry_type='GEOMETRY', srid=-1, dimension=2, spatial_index=True, use_N_D_index=False, management=False, use_typmod=None, - from_text=None, name=None, nullable=True): + from_text=None, name=None, nullable=True, _spatial_index_reflected=None): geometry_type, srid = self.check_ctor_args( geometry_type, srid, dimension, management, use_typmod, nullable) self.geometry_type = geometry_type @@ -141,6 +141,7 @@ def __init__(self, geometry_type='GEOMETRY', srid=-1, dimension=2, self.use_typmod = use_typmod self.extended = self.as_binary == 'ST_AsEWKB' self.nullable = nullable + self._spatial_index_reflected = _spatial_index_reflected def get_col_spec(self): if not self.geometry_type: diff --git a/tests/__init__.py b/tests/__init__.py index f700af0b..2bdc23b6 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -106,3 +106,35 @@ def copy_and_connect_sqlite_db(input_db, tmp_db, engine_echo): engine._spatialite_version = None return engine + + +def check_indexes(conn, expected, table_name): + if conn.dialect.name == "postgresql": + # Query to check the indexes + index_query = text( + """SELECT indexname, indexdef + FROM pg_indexes + WHERE + tablename = '{}' + ORDER BY indexname;""".format(table_name) + ) + indexes = conn.execute(index_query).fetchall() + + expected = [ + (i[0], re.sub("\n *", " ", i[1])) + for i in expected["postgresql"] + ] + + assert indexes == expected + + elif conn.dialect.name == "sqlite": + # Query to check the indexes + index_query = text( + """SELECT * + FROM geometry_columns + WHERE f_table_name = '{}' + ORDER BY f_table_name, f_geometry_column;""".format(table_name) + ) + + indexes = conn.execute(index_query).fetchall() + assert indexes == expected["sqlite"] diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py index 201dff32..f635480f 100644 --- a/tests/test_alembic_migrations.py +++ b/tests/test_alembic_migrations.py @@ -4,6 +4,7 @@ import pytest import sqlalchemy as sa # noqa (This import is only used in the migration scripts) from alembic import command +from alembic import context from alembic import script from alembic.autogenerate import compare_metadata from alembic.autogenerate import produce_migrations @@ -11,6 +12,8 @@ from alembic.config import Config from alembic.migration import MigrationContext from alembic.operations import Operations +from alembic.operations import ops +from alembic.operations.ops import ModifyTableOps from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData @@ -20,10 +23,7 @@ from geoalchemy2 import Geometry from geoalchemy2 import alembic_helpers -from alembic.operations import ops -from alembic.operations.ops import ModifyTableOps -from alembic import context - +from . import check_indexes from . 
import test_only_with_dialects @@ -55,7 +55,6 @@ def test_no_diff(self, conn, Lake, setup_tables): mc = MigrationContext.configure( conn, opts={ - "include_object": alembic_helpers.include_object, "include_name": filter_tables, "process_revision_directives": alembic_helpers.writer, }, @@ -96,7 +95,6 @@ def test_diff(self, conn, Lake, setup_tables): mc = MigrationContext.configure( conn, opts={ - "include_object": alembic_helpers.include_object, "include_name": filter_tables, "process_revision_directives": alembic_helpers.writer, }, @@ -131,26 +129,28 @@ def test_diff(self, conn, Lake, setup_tables): @pytest.fixture def alembic_dir(tmpdir): - return (tmpdir / "alembic_files") + return tmpdir / "alembic_files" @pytest.fixture def alembic_config_path(alembic_dir): - return (alembic_dir / "test_alembic.ini") + return alembic_dir / "test_alembic.ini" @pytest.fixture def alembic_env_path(alembic_dir): - return (alembic_dir / "env.py") + return alembic_dir / "env.py" @pytest.fixture def test_script_path(alembic_dir): - return (alembic_dir / "test_script.py") + return alembic_dir / "test_script.py" @pytest.fixture -def alembic_env(engine, alembic_dir, alembic_config_path, alembic_env_path, test_script_path): +def alembic_env( + engine, alembic_dir, alembic_config_path, alembic_env_path, test_script_path +): cfg_tmp = Config(alembic_config_path) engine.execute("DROP TABLE IF EXISTS alembic_version;") command.init(cfg_tmp, str(alembic_dir), template="generic") @@ -169,7 +169,8 @@ def alembic_env(engine, alembic_dir, alembic_config_path, alembic_env_path, test engine = engine_from_config( config.get_section(config.config_ini_section), prefix='sqlalchemy.', - echo=True) + echo=True, +) if engine.dialect.name == "sqlite": listen(engine, 'connect', alembic_helpers.load_spatialite) @@ -203,6 +204,7 @@ def include_object(obj, name, obj_type, reflected, compare_to): process_revision_directives=alembic_helpers.writer, render_item=alembic_helpers.render_item, include_object=include_object, + render_as_batch=True ) try: @@ -212,7 +214,9 @@ def include_object(obj, name, obj_type, reflected, compare_to): connection.close() engine.dispose() -""".format(str(test_script_path)) +""".format( + str(test_script_path) + ) ) with test_script_path.open(mode="w", encoding="utf8") as f: f.write( @@ -261,44 +265,17 @@ class = StreamHandler format = %%(levelname)-5.5s [%%(name)s] %%(message)s datefmt = %%H:%%M:%%S -""".format(alembic_dir, engine.url) +""".format( + alembic_dir, engine.url + ) ) return cfg -def _check_indexes(conn, expected): - if conn.dialect.name == "postgresql": - # Query to check the indexes - index_query = text( - """SELECT indexname, indexdef - FROM pg_indexes - WHERE - tablename = 'new_spatial_table' - ORDER BY indexname;""" - ) - indexes = conn.execute(index_query).fetchall() - - expected = [ - (i[0], re.sub("\n *", " ", i[1])) - for i in expected["postgresql"] - ] - - assert indexes == expected - - elif conn.dialect.name == "sqlite": - # Query to check the indexes - index_query = text( - """SELECT * - FROM geometry_columns - WHERE f_table_name = 'new_spatial_table' - ORDER BY f_table_name, f_geometry_column;""" - ) - - indexes = conn.execute(index_query).fetchall() - assert indexes == expected["sqlite"] - @test_only_with_dialects("postgresql", "sqlite-spatialite4") -def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, test_script_path): +def test_migration_revision( + conn, metadata, alembic_config, alembic_env_path, test_script_path +): initial_rev = command.revision( 
alembic_config, "Initial state", @@ -354,7 +331,7 @@ def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, te # Apply the upgrade script command.upgrade(alembic_config, rev_table.revision) - _check_indexes( + check_indexes( conn, { "postgresql": [ @@ -375,7 +352,8 @@ def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, te ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), ], - } + }, + table_name="new_spatial_table", ) # Remove spatial columns and add new ones @@ -425,7 +403,7 @@ def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, te # Apply the upgrade script command.upgrade(alembic_config, rev_cols.revision) - _check_indexes( + check_indexes( conn, { "postgresql": [ @@ -446,13 +424,14 @@ def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, te ("new_spatial_table", "new_geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "new_geom_without_idx", 2, 2, 4326, 0), ], - } + }, + table_name="new_spatial_table", ) # Apply the downgrade script for columns command.downgrade(alembic_config, rev_table.revision) - _check_indexes( + check_indexes( conn, { "postgresql": [ @@ -473,16 +452,18 @@ def test_migration_revision(conn, metadata, alembic_config, alembic_env_path, te ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), ], - } + }, + table_name="new_spatial_table", ) # Apply the downgrade script for tables command.downgrade(alembic_config, initial_rev.revision) - _check_indexes( + check_indexes( conn, { "postgresql": [], "sqlite": [], - } + }, + table_name="new_spatial_table", ) diff --git a/tests/test_functional.py b/tests/test_functional.py index 54dcaca3..06e9a4d3 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -30,6 +30,7 @@ from sqlalchemy.sql import func from sqlalchemy.testing.assertions import ComparesTables +from geoalchemy2 import _get_spatialite_attrs from geoalchemy2 import Geometry from geoalchemy2 import Raster from geoalchemy2.elements import WKBElement @@ -37,6 +38,7 @@ from geoalchemy2.shape import from_shape from geoalchemy2.shape import to_shape +from . import check_indexes from . import format_wkt from . import get_postgis_version from . 
import select @@ -653,9 +655,57 @@ def test_reflection(self, conn, setup_reflection_tables): # Drop the table t.drop(bind=conn) + # Query to check the indexes + query_indexes = text( + """SELECT * FROM geometry_columns ORDER BY f_table_name, f_geometry_column;""" + ) + + # Check the indexes + check_indexes( + conn, + { + "postgresql": [], + "sqlite": [], + }, + table_name=t.name, + ) + # Recreate the table to check that the reflected properties are correct t.create(bind=conn) + # Check the indexes + col_attributes = _get_spatialite_attrs(conn, t.name, "geom") + if isinstance(col_attributes[2], int): + sqlite_indexes = [ + ('lake', 'geom', 2, 2, 4326, 1), + ('lake', 'geom_m', 2002, 3, 4326, 1), + ('lake', 'geom_no_idx', 2, 2, 4326, 0), + ('lake', 'geom_z', 1002, 3, 4326, 1), + ('lake', 'geom_zm', 3002, 4, 4326, 1), + ] + else: + sqlite_indexes = [ + ('lake', 'geom', 'LINESTRING', 'XY', 4326, 1), + ('lake', 'geom_m', 'LINESTRING', 'XYM', 4326, 1), + ('lake', 'geom_no_idx', 'LINESTRING', 'XY', 4326, 0), + ('lake', 'geom_z', 'LINESTRING', 'XYZ', 4326, 1), + ('lake', 'geom_zm', 'LINESTRING', 'XYZM', 4326, 1), + ] + check_indexes( + conn, + { + "postgresql": [ + ('idx_lake_geom', 'CREATE INDEX idx_lake_geom ON gis.lake USING gist (geom)'), + ('idx_lake_geom_m', 'CREATE INDEX idx_lake_geom_m ON gis.lake USING gist (geom_m)'), + ('idx_lake_geom_z', 'CREATE INDEX idx_lake_geom_z ON gis.lake USING gist (geom_z)'), + ('idx_lake_geom_zm', 'CREATE INDEX idx_lake_geom_zm ON gis.lake USING gist (geom_zm)'), + ('lake_pkey', 'CREATE UNIQUE INDEX lake_pkey ON gis.lake USING btree (id)'), + ], + "sqlite": sqlite_indexes, + }, + table_name=t.name, + ) + def test_raster_reflection(self, conn, Ocean, setup_tables): skip_pg12_sa1217(conn) skip_postgis1(conn) From 139c3230ac08ec2f620d3ace75e623f5852ffa98 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Mon, 30 May 2022 18:09:42 +0200 Subject: [PATCH 07/18] Lint and python version --- .github/workflows/test_and_publish.yml | 2 +- geoalchemy2/__init__.py | 28 ++++++++++++++++++------ geoalchemy2/alembic_helpers.py | 17 ++------------- setup.py | 3 +-- tests/__init__.py | 1 + tests/conftest.py | 1 - tests/test_alembic_migrations.py | 9 -------- tests/test_functional.py | 30 +++++++++++++++++--------- tests/test_functional_sqlite.py | 5 ----- 9 files changed, 47 insertions(+), 49 deletions(-) diff --git a/.github/workflows/test_and_publish.yml b/.github/workflows/test_and_publish.yml index 5645890b..71087f31 100644 --- a/.github/workflows/test_and_publish.yml +++ b/.github/workflows/test_and_publish.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.6, 3.7, 3.8, 3.9, pypy3] + python-version: [3.7, 3.8, 3.9, pypy3] # The type of runner that the job will run on runs-on: ubuntu-22.04 diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index f8876c5f..347e2857 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -173,12 +173,27 @@ def dispatch(current_event, table, bind): drop_func = 'DiscardGeometryColumn' # Disable spatial indexes if present - stmt = select(*_format_select_args(getattr(func, 'CheckSpatialIndex')(table.name, col.name))) + stmt = select( + *_format_select_args( + getattr(func, 'CheckSpatialIndex')(table.name, col.name) + ) + ) if bind.execute(stmt).fetchone()[0] is not None: - stmt = select(*_format_select_args(getattr(func, 'DisableSpatialIndex')(table.name, col.name))) + stmt = select( + *_format_select_args( + getattr(func, 'DisableSpatialIndex')(table.name, col.name) + ) + ) stmt = 
stmt.execution_options(autocommit=True)
                 bind.execute(stmt)
-                bind.execute(text("""DROP TABLE IF EXISTS idx_{}_{};""".format(table.name, col.name)))
+                bind.execute(
+                    text(
+                        """DROP TABLE IF EXISTS idx_{}_{};""".format(
+                            table.name,
+                            col.name,
+                        )
+                    )
+                )
         elif bind.dialect.name == 'postgresql':
             drop_func = 'DropGeometryColumn'
         else:
@@ -335,9 +350,10 @@ def _reflect_geometry_column(inspector, table, column_info):
                 AND c.relkind='r'
             ) g
             LEFT JOIN pg_index i ON (g.relid = i.indrelid AND g.attnum = ANY(i.indkey))
-            WHERE relname = '{}' AND attname = '{}'""".format(
-                table.name, column_info["name"]
-            )
+            WHERE relname = '{}' AND attname = '{}'
+            """.format(
+                table.name, column_info["name"]
+            )
             if table.schema is not None:
                 has_index_query += " AND nspname = '{}'".format(table.schema)
             spatial_index = inspector.bind.execute(text(has_index_query)).scalar()
diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py
index c0afbb21..a2711b91 100644
--- a/geoalchemy2/alembic_helpers.py
+++ b/geoalchemy2/alembic_helpers.py
@@ -1,6 +1,5 @@
 """Some helpers to use with Alembic migration tool."""
 import os
-from collections import defaultdict
 
 from alembic.autogenerate import renderers
 from alembic.autogenerate import rewriter
@@ -17,13 +16,10 @@
 from alembic.operations import Operations
 from alembic.operations import ops
 from alembic.operations.batch import ApplyBatchImpl
-from packaging.version import parse as parse_version
 from sqlalchemy import Index
-from sqlalchemy import MetaData
 from sqlalchemy import Table
 from sqlalchemy import text
 from sqlalchemy.dialects.sqlite.base import SQLiteDialect
-from sqlalchemy.engine import reflection
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.schema import DropTable
 from sqlalchemy.types import TypeDecorator
@@ -34,10 +30,7 @@
 from geoalchemy2 import Raster
 from geoalchemy2 import _check_spatial_type
 from geoalchemy2 import _get_gis_cols
-from geoalchemy2 import _get_spatialite_attrs
-from geoalchemy2 import _get_spatialite_version
 from geoalchemy2 import _spatial_idx_name
-from geoalchemy2 import check_management
 from geoalchemy2 import func
 
 writer = rewriter.Rewriter()
@@ -87,7 +80,6 @@ def _monkey_patch_transfer_elements_to_new_table_for_sqlite():
     def remove_spatial_indexes(table, new_table, dialect):
         new_indexes = set()
         input_col_names = set([col.name for col in table.columns])
-        remove_index_cols = []
         for idx in table.indexes:
             if len(idx.columns) == 1:
                 col = idx.columns[0]
@@ -287,7 +279,6 @@ def drop_geospatial_column(operations, operation):
     """
     table_name = operation.table_name
-    column_name = operation.column_name
     column = operation.to_column(operations.migration_context)
 
     dialect = operations.get_bind().dialect
@@ -300,9 +291,7 @@
 
 @compiles(RenameTable, "sqlite")
-def visit_rename_geospatial_table(
-    element: "RenameTable", compiler: "DDLCompiler", **kw
-) -> str:
+def visit_rename_geospatial_table(element, compiler, **kw):
 
     table_is_spatial = element.table_name in _SPATIAL_TABLES
     new_table_is_spatial = element.new_table_name in _SPATIAL_TABLES
@@ -319,9 +308,7 @@ def visit_rename_geospatial_table(
 
 @compiles(DropTable, "sqlite")
-def visit_drop_geospatial_table(
-    element: "DropTable", compiler: "DDLCompiler", **kw
-) -> str:
+def visit_drop_geospatial_table(element, compiler, **kw):
 
     table_is_spatial = element.element.name in _SPATIAL_TABLES
 
diff --git a/setup.py b/setup.py
index 0f332dd0..1bfd43d9 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,6 @@
     "Environment :: 
Plugins", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -27,7 +26,7 @@ 'Source': 'https://github.com/geoalchemy/geoalchemy2', }, license='MIT', - python_requires=">=3.6", + python_requires=">=3.7", packages=find_packages(exclude=['ez_setup', 'examples', 'tests', 'doc']), include_package_data=True, zip_safe=False, diff --git a/tests/__init__.py b/tests/__init__.py index 2bdc23b6..7176a5e4 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -88,6 +88,7 @@ def load_spatialite(dbapi_conn, connection_record): dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) dbapi_conn.enable_load_extension(False) + def copy_and_connect_sqlite_db(input_db, tmp_db, engine_echo): if 'SPATIALITE_LIBRARY_PATH' not in os.environ: pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') diff --git a/tests/conftest.py b/tests/conftest.py index af4e4775..e55eb687 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ from sqlalchemy import MetaData from sqlalchemy import create_engine from sqlalchemy import text -from sqlalchemy.event import listen from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py index f635480f..45f54968 100644 --- a/tests/test_alembic_migrations.py +++ b/tests/test_alembic_migrations.py @@ -1,24 +1,15 @@ """Test alembic migrations of spatial columns.""" -import re - import pytest import sqlalchemy as sa # noqa (This import is only used in the migration scripts) from alembic import command -from alembic import context from alembic import script from alembic.autogenerate import compare_metadata -from alembic.autogenerate import produce_migrations -from alembic.autogenerate import render_python_code from alembic.config import Config from alembic.migration import MigrationContext -from alembic.operations import Operations -from alembic.operations import ops -from alembic.operations.ops import ModifyTableOps from sqlalchemy import Column from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import Table -from sqlalchemy import text from geoalchemy2 import Geometry from geoalchemy2 import alembic_helpers diff --git a/tests/test_functional.py b/tests/test_functional.py index 06e9a4d3..87c2ae49 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -655,11 +655,6 @@ def test_reflection(self, conn, setup_reflection_tables): # Drop the table t.drop(bind=conn) - # Query to check the indexes - query_indexes = text( - """SELECT * FROM geometry_columns ORDER BY f_table_name, f_geometry_column;""" - ) - # Check the indexes check_indexes( conn, @@ -695,11 +690,26 @@ def test_reflection(self, conn, setup_reflection_tables): conn, { "postgresql": [ - ('idx_lake_geom', 'CREATE INDEX idx_lake_geom ON gis.lake USING gist (geom)'), - ('idx_lake_geom_m', 'CREATE INDEX idx_lake_geom_m ON gis.lake USING gist (geom_m)'), - ('idx_lake_geom_z', 'CREATE INDEX idx_lake_geom_z ON gis.lake USING gist (geom_z)'), - ('idx_lake_geom_zm', 'CREATE INDEX idx_lake_geom_zm ON gis.lake USING gist (geom_zm)'), - ('lake_pkey', 'CREATE UNIQUE INDEX lake_pkey ON gis.lake USING btree (id)'), + ( + 'idx_lake_geom', + 'CREATE INDEX idx_lake_geom ON gis.lake USING gist (geom)', + ), + ( + 'idx_lake_geom_m', + 'CREATE INDEX 
idx_lake_geom_m ON gis.lake USING gist (geom_m)', + ), + ( + 'idx_lake_geom_z', + 'CREATE INDEX idx_lake_geom_z ON gis.lake USING gist (geom_z)', + ), + ( + 'idx_lake_geom_zm', + 'CREATE INDEX idx_lake_geom_zm ON gis.lake USING gist (geom_zm)', + ), + ( + 'lake_pkey', + 'CREATE UNIQUE INDEX lake_pkey ON gis.lake USING btree (id)', + ), ], "sqlite": sqlite_indexes, }, diff --git a/tests/test_functional_sqlite.py b/tests/test_functional_sqlite.py index 04ed4d3c..2f9e5b3d 100644 --- a/tests/test_functional_sqlite.py +++ b/tests/test_functional_sqlite.py @@ -1,7 +1,4 @@ -import os import platform -import shutil -from pathlib import Path import pytest from shapely.geometry import LineString @@ -11,9 +8,7 @@ from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Table -from sqlalchemy import create_engine from sqlalchemy import text -from sqlalchemy.event import listen from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import func From 6af353ea25713ea5dde7a2898b9e70fbb5e70c1f Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Tue, 31 May 2022 09:51:04 +0200 Subject: [PATCH 08/18] Fix spatial index reflection in Alembic --- geoalchemy2/alembic_helpers.py | 47 ++--------------------------- tests/test_functional.py | 4 ++- tests/test_functional_postgresql.py | 2 +- 3 files changed, 6 insertions(+), 47 deletions(-) diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index a2711b91..18005200 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -65,61 +65,18 @@ def spatial_behavior(self, connection, table_name, schema=None, **kw): "name": idx_name, "column_names": [idx_col], "unique": 0, + "dialect_options": {"_column_flag": True}, } ) reflected_names.add(idx_name) return indexes + spatial_behavior.__doc__ = normal_behavior.__doc__ SQLiteDialect.get_indexes = spatial_behavior SQLiteDialect._get_indexes_normal_behavior = normal_behavior -def _monkey_patch_transfer_elements_to_new_table_for_sqlite(): - normal_behavior = ApplyBatchImpl._transfer_elements_to_new_table - - def remove_spatial_indexes(table, new_table, dialect): - new_indexes = set() - input_col_names = set([col.name for col in table.columns]) - for idx in table.indexes: - if len(idx.columns) == 1: - col = idx.columns[0] - if not isinstance(col, Column) or not _check_spatial_type( - col.type, - (Geometry, Geography, Raster), - dialect, - ): - new_indexes.add(idx) - else: - new_indexes.add(idx) - table.indexes = new_indexes - - new_indexes = set() - for idx in new_table.indexes: - if len(idx.columns) == 1: - col = idx.columns[0] - if not isinstance(col, Column) or not _check_spatial_type( - col.type, - (Geometry, Geography, Raster), - dialect, - ): - new_indexes.add(idx) - if col.name not in input_col_names: - for i in idx.columns: - i.spatial_index = False - else: - new_indexes.add(idx) - new_table.indexes = new_indexes - - def spatial_behavior(self): - self._transfer_elements_to_new_table_normal_behavior() - remove_spatial_indexes(self.table, self.new_table, self.impl.dialect) - - ApplyBatchImpl._transfer_elements_to_new_table = spatial_behavior - ApplyBatchImpl._transfer_elements_to_new_table_normal_behavior = normal_behavior - - _monkey_patch_get_indexes_for_sqlite() -_monkey_patch_transfer_elements_to_new_table_for_sqlite() def render_item(obj_type, obj, autogen_context): diff --git a/tests/test_functional.py b/tests/test_functional.py index 87c2ae49..39ccf03e 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -27,6 
+27,7 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.exc import OperationalError from sqlalchemy.exc import ProgrammingError +from sqlalchemy.exc import SAWarning from sqlalchemy.sql import func from sqlalchemy.testing.assertions import ComparesTables @@ -719,7 +720,8 @@ def test_reflection(self, conn, setup_reflection_tables): def test_raster_reflection(self, conn, Ocean, setup_tables): skip_pg12_sa1217(conn) skip_postgis1(conn) - t = Table('ocean', MetaData(), autoload_with=conn) + with pytest.warns(SAWarning): + t = Table('ocean', MetaData(), autoload_with=conn) type_ = t.c.rast.type assert isinstance(type_, Raster) diff --git a/tests/test_functional_postgresql.py b/tests/test_functional_postgresql.py index e744a0c8..93a1ae41 100644 --- a/tests/test_functional_postgresql.py +++ b/tests/test_functional_postgresql.py @@ -40,7 +40,7 @@ from . import skip_postgis1 from . import skip_postgis2 -SQLA_LT_2 = parse_version(SA_VERSION) <= parse_version("1.999") +SQLA_LT_2 = parse_version(SA_VERSION) <= parse_version("1.4") if SQLA_LT_2: from sqlalchemy.engine.reflection import Inspector get_inspector = Inspector.from_engine From 747408ca7553562131b487f05ca3ea5b846ec5a0 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Tue, 31 May 2022 10:05:53 +0200 Subject: [PATCH 09/18] Move load_spatialite and improve the doc for Alembic --- doc/alembic.rst | 33 ++++++------ geoalchemy2/__init__.py | 19 +++++++ geoalchemy2/alembic_helpers.py | 87 ++++++++------------------------ tests/__init__.py | 9 +--- tests/test_alembic_migrations.py | 3 +- 5 files changed, 62 insertions(+), 89 deletions(-) diff --git a/doc/alembic.rst b/doc/alembic.rst index 2e67bc46..932ed668 100644 --- a/doc/alembic.rst +++ b/doc/alembic.rst @@ -41,9 +41,9 @@ following migration script: """Create new table - Revision ID: 01b69e67a408 - Revises: 2371af7aed3f - Create Date: 2022-01-27 15:53:05.268929 + Revision ID: + Revises: + Create Date: """ from alembic import op @@ -51,8 +51,8 @@ following migration script: # revision identifiers, used by Alembic. - revision = "01b69e67a408" - down_revision = "2371af7aed3f" + revision = "" + down_revision = "" branch_labels = None depends_on = None @@ -122,8 +122,8 @@ file used by Alembic, like in the following example: # ... context.configure( # ... - render_item=render_item, - include_object=include_object, + process_revision_directives=alembic_helpers.writer, + render_item=alembic_helpers.render_item, ) # ... @@ -132,12 +132,12 @@ file used by Alembic, like in the following example: # ... context.configure( # ... - render_item=render_item, - include_object=include_object, + process_revision_directives=alembic_helpers.writer, + render_item=alembic_helpers.render_item, ) # ... -After running the ``alembic`` command, the migration script will be properly generated and should +After running the ``alembic`` command, the migration script should be properly generated and should not need to be manually edited. @@ -188,8 +188,8 @@ in ``my_package.custom_types``, you just have to edit the ``env.py`` file like t # ... context.configure( # ... + process_revision_directives=alembic_helpers.writer, render_item=render_item, - include_object=include_object, ) # ... @@ -198,20 +198,21 @@ in ``my_package.custom_types``, you just have to edit the ``env.py`` file like t # ... context.configure( # ... + process_revision_directives=alembic_helpers.writer, render_item=render_item, - include_object=include_object, ) # ... Then the proper imports will be automatically added in the migration scripts. 
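To make the configuration above concrete, a minimal ``env.py`` along these lines could look like the following sketch (``target_metadata`` and the ``my_package`` import are placeholders for the application's own objects, not part of this patch):

.. code-block:: python

    from alembic import context
    from sqlalchemy import engine_from_config

    from geoalchemy2 import alembic_helpers

    # Placeholder import: the application's own declarative metadata.
    from my_package.model import target_metadata


    def run_migrations_online():
        engine = engine_from_config(
            context.config.get_section(context.config.config_ini_section),
            prefix="sqlalchemy.",
        )
        with engine.connect() as connection:
            # Wire the GeoAlchemy2 helpers into the Alembic context so that
            # spatial types are rendered with the proper imports and the
            # autogenerated operations are rewritten to their spatial variants.
            context.configure(
                connection=connection,
                target_metadata=target_metadata,
                process_revision_directives=alembic_helpers.writer,
                render_item=alembic_helpers.render_item,
            )
            with context.begin_transaction():
                context.run_migrations()


    run_migrations_online()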
-Add / Drop columns or tables ----------------------------- +Dialects +-------- Some dialects (like SQLite) require some specific management to alter columns or tables. In this case, other dedicated helpers are provided to handle this. For example, if one wants to add and drop -columns in a SQLite database, the ``env.py`` file should look like the following: +columns in a SQLite database, the SpatiaLite extension should be loaded when the engine connects, +thus the ``env.py`` file should look like the following: .. code-block:: python @@ -224,6 +225,7 @@ columns in a SQLite database, the ``env.py`` file should look like the following context.configure( # ... process_revision_directives=writer, + render_item=alembic_helpers.render_item, ) # ... @@ -239,5 +241,6 @@ columns in a SQLite database, the ``env.py`` file should look like the following context.configure( # ... process_revision_directives=writer, + render_item=alembic_helpers.render_item, ) # ... diff --git a/geoalchemy2/__init__.py b/geoalchemy2/__init__.py index 347e2857..6cb39bf2 100644 --- a/geoalchemy2/__init__.py +++ b/geoalchemy2/__init__.py @@ -1,4 +1,6 @@ """GeoAlchemy2 package.""" +import os + import sqlalchemy from packaging import version from sqlalchemy import Column @@ -417,6 +419,23 @@ def _reflect_geometry_column(inspector, table, column_info): _setup_ddl_event_listeners() + +def load_spatialite(dbapi_conn, connection_record): + """Load SpatiaLite extension in SQLite DB. + + The path to the SpatiaLite module should be set in the `SPATIALITE_LIBRARY_PATH` environment + variable. + """ + if "SPATIALITE_LIBRARY_PATH" not in os.environ: + raise RuntimeError( + "The SPATIALITE_LIBRARY_PATH environment variable is not set." + ) + dbapi_conn.enable_load_extension(True) + dbapi_conn.load_extension(os.environ["SPATIALITE_LIBRARY_PATH"]) + dbapi_conn.enable_load_extension(False) + dbapi_conn.execute("SELECT InitSpatialMetaData();") + + # Get version number __version__ = "UNKNOWN VERSION" try: diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index 18005200..196ad8c8 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -38,6 +38,7 @@ def _monkey_patch_get_indexes_for_sqlite(): + """Monkey patch SQLAlchemy to fix spatial index reflection.""" normal_behavior = SQLiteDialect.get_indexes def spatial_behavior(self, connection, table_name, schema=None, **kw): @@ -80,7 +81,7 @@ def spatial_behavior(self, connection, table_name, schema=None, **kw): def render_item(obj_type, obj, autogen_context): - """Apply custom rendering for selected items.""" + """Add proper imports for spatial types.""" if obj_type == "type" and isinstance(obj, (Geometry, Geography, Raster)): import_name = obj.__class__.__name__ autogen_context.imports.add(f"from geoalchemy2 import {import_name}") @@ -120,15 +121,7 @@ def batch_add_geospatial_column( insert_before=None, insert_after=None, ): - """Issue an "add column" instruction using the current - batch migration context. - - .. 
seealso:: - - :meth:`.Operations.add_column` - - """ - + """Issue an "add column" instruction using the current batch migration context.""" kw = {} if insert_before: kw["insert_before"] = insert_before @@ -156,7 +149,6 @@ def drop_geospatial_column( cls, operations, table_name, column_name, schema=None, **kw ): """Handle the different situations arising from dropping geospatial column from a DB.""" - op = cls(table_name, column_name, schema=schema, **kw) return operations.invoke(op) @@ -168,14 +160,7 @@ def reverse(self): @classmethod def batch_drop_geospatial_column(cls, operations, column_name, **kw): - """Issue a "drop column" instruction using the current - batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_column` - - """ + """Issue a "drop column" instruction using the current batch migration context.""" op = cls( operations.impl.table_name, column_name, @@ -249,7 +234,7 @@ def drop_geospatial_column(operations, operation): @compiles(RenameTable, "sqlite") def visit_rename_geospatial_table(element, compiler, **kw): - + """Specific compilation rule to rename spatial tables with SQLite dialect.""" table_is_spatial = element.table_name in _SPATIAL_TABLES new_table_is_spatial = element.new_table_name in _SPATIAL_TABLES @@ -266,7 +251,7 @@ def visit_rename_geospatial_table(element, compiler, **kw): @compiles(DropTable, "sqlite") def visit_drop_geospatial_table(element, compiler, **kw): - + """Specific compilation rule to drop spatial tables with SQLite dialect.""" table_is_spatial = element.element.name in _SPATIAL_TABLES if table_is_spatial: @@ -294,7 +279,7 @@ def render_drop_geo_column(autogen_context, op): @writer.rewrites(ops.AddColumnOp) def add_geo_column(context, revision, op): - """This function replaces the default AddColumnOp by a geospatial-specific one.""" + """Replace the default AddColumnOp by a geospatial-specific one.""" col_type = op.column.type if isinstance(col_type, TypeDecorator): dialect = context.bind.dialect @@ -310,7 +295,7 @@ def add_geo_column(context, revision, op): @writer.rewrites(ops.DropColumnOp) def drop_geo_column(context, revision, op): - """This function replaces the default DropColumnOp by a geospatial-specific one.""" + """Replace the default DropColumnOp by a geospatial-specific one.""" col_type = op.to_column().type if isinstance(col_type, TypeDecorator): dialect = context.bind.dialect @@ -343,13 +328,11 @@ def reverse(self): ) @classmethod - def from_table( - cls, table: "Table", _namespace_metadata=None - ) -> "CreateGeospatialTableOp": + def from_table(cls, table, _namespace_metadata=None): obj = super().from_table(table, _namespace_metadata) return obj - def to_table(self, migration_context=None) -> "Table": + def to_table(self, migration_context=None): table = super().to_table(migration_context) # Set spatial_index attribute to False so the indexes are created explicitly @@ -379,13 +362,11 @@ def reverse(self): ) @classmethod - def from_table( - cls, table: "Table", _namespace_metadata=None - ) -> "DropGeospatialTableOp": + def from_table(cls, table, _namespace_metadata=None): obj = super().from_table(table, _namespace_metadata) return obj - def to_table(self, migration_context=None) -> "Table": + def to_table(self, migration_context=None): table = super().to_table(migration_context) return table @@ -443,7 +424,7 @@ def render_drop_geo_table(autogen_context, op): @writer.rewrites(ops.CreateTableOp) def create_geo_table(context, revision, op): - """This function replaces the default CreateTableOp by a geospatial-specific 
one.""" + """Replace the default CreateTableOp by a geospatial-specific one.""" dialect = context.bind.dialect gis_cols = _get_gis_cols( op, (Geometry, Geography, Raster), dialect, check_col_management=False @@ -459,7 +440,7 @@ def create_geo_table(context, revision, op): @writer.rewrites(ops.DropTableOp) def drop_geo_table(context, revision, op): - """This function replaces the default DropTableOp by a geospatial-specific one.""" + """Replace the default DropTableOp by a geospatial-specific one.""" dialect = context.bind.dialect table = op.to_table() gis_cols = _get_gis_cols( @@ -490,7 +471,7 @@ def create_geospatial_index( unique=False, **kw, ): - """Handle the different situations arising from dropping geospatial table from a DB.""" + """Handle the different situations arising from creating geospatial index into a DB.""" op = cls(index_name, table_name, columns, schema=schema, unique=unique, **kw) return operations.invoke(op) @@ -511,14 +492,7 @@ def batch_create_geospatial_index( columns, **kw, ): - """Issue a "create index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.create_index` - - """ + """Issue a "create index" instruction using the current batch migration context.""" op = cls( index_name, operations.impl.table_name, @@ -549,7 +523,7 @@ def drop_geospatial_index( unique=False, **kw, ): - """Handle the different situations arising from dropping geospatial table from a DB.""" + """Handle the different situations arising from dropping geospatial index from a DB.""" op = cls( index_name, table_name, @@ -572,7 +546,7 @@ def reverse(self): ) @classmethod - def from_index(cls, index: "Index") -> "DropGeospatialIndexOp": + def from_index(cls, index): assert index.table is not None assert len(index.columns) == 1, "A spatial index must be set on one column only" return cls( @@ -586,14 +560,7 @@ def from_index(cls, index: "Index") -> "DropGeospatialIndexOp": @classmethod def batch_drop_geospatial_index(cls, operations, index_name, **kw): - """Issue a "drop index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_index` - - """ + """Issue a "drop index" instruction using the current batch migration context.""" op = cls( index_name, table_name=operations.impl.table_name, @@ -670,7 +637,7 @@ def render_drop_geo_index(autogen_context, op): @writer.rewrites(ops.CreateIndexOp) def create_geo_index(context, revision, op): - """This function replaces the default CreateIndexOp by a geospatial-specific one.""" + """Replace the default CreateIndexOp by a geospatial-specific one.""" dialect = context.bind.dialect if len(op.columns) == 1: @@ -695,7 +662,7 @@ def create_geo_index(context, revision, op): @writer.rewrites(ops.DropIndexOp) def drop_geo_index(context, revision, op): - """This function replaces the default DropIndexOp by a geospatial-specific one.""" + """Replace the default DropIndexOp by a geospatial-specific one.""" dialect = context.bind.dialect idx = op.to_index() @@ -713,15 +680,3 @@ def drop_geo_index(context, revision, op): ) return op - - -def load_spatialite(dbapi_conn, connection_record): - """Load SpatiaLite extension in SQLite DB.""" - if "SPATIALITE_LIBRARY_PATH" not in os.environ: - raise RuntimeError( - "The SPATIALITE_LIBRARY_PATH environment variable is not set." 
- ) - dbapi_conn.enable_load_extension(True) - dbapi_conn.load_extension(os.environ["SPATIALITE_LIBRARY_PATH"]) - dbapi_conn.enable_load_extension(False) - dbapi_conn.execute("SELECT InitSpatialMetaData()") diff --git a/tests/__init__.py b/tests/__init__.py index 7176a5e4..8948512a 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -13,6 +13,8 @@ from sqlalchemy.exc import OperationalError from sqlalchemy.sql import func +from geoalchemy2 import load_spatialite + class test_only_with_dialects: def __init__(self, *dialects): @@ -82,13 +84,6 @@ def format_wkt(wkt): return wkt.replace(", ", ",") -def load_spatialite(dbapi_conn, connection_record): - """Load SpatiaLite extension in SQLite DB.""" - dbapi_conn.enable_load_extension(True) - dbapi_conn.load_extension(os.environ['SPATIALITE_LIBRARY_PATH']) - dbapi_conn.enable_load_extension(False) - - def copy_and_connect_sqlite_db(input_db, tmp_db, engine_echo): if 'SPATIALITE_LIBRARY_PATH' not in os.environ: pytest.skip('SPATIALITE_LIBRARY_PATH is not defined, skip SpatiaLite tests') diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py index 45f54968..9f65d704 100644 --- a/tests/test_alembic_migrations.py +++ b/tests/test_alembic_migrations.py @@ -154,6 +154,7 @@ def alembic_env( from sqlalchemy import MetaData, engine_from_config from sqlalchemy.event import listen from geoalchemy2 import alembic_helpers +from geoalchemy2 import load_spatialite config = context.config @@ -164,7 +165,7 @@ def alembic_env( ) if engine.dialect.name == "sqlite": - listen(engine, 'connect', alembic_helpers.load_spatialite) + listen(engine, 'connect', load_spatialite) spec = importlib.util.spec_from_file_location("test_script", "{}") module = importlib.util.module_from_spec(spec) From 4a00bcad2a16e82a4e250ec746dcc763f988613f Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Tue, 31 May 2022 10:33:09 +0200 Subject: [PATCH 10/18] Check that spatialite is loaded before reflecting spatial indexes --- geoalchemy2/alembic_helpers.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py index 196ad8c8..ed28eb0d 100644 --- a/geoalchemy2/alembic_helpers.py +++ b/geoalchemy2/alembic_helpers.py @@ -45,6 +45,15 @@ def spatial_behavior(self, connection, table_name, schema=None, **kw): indexes = self._get_indexes_normal_behavior( connection, table_name, schema=None, **kw ) + + # Check that SpatiaLite was loaded into the DB + is_spatial_db = connection.exec_driver_sql( + """PRAGMA main.table_info(geometry_columns)""" + ).fetchall() + if not is_spatial_db: + return indexes + + # Get spatial indexes spatial_index_query = text( """SELECT * FROM geometry_columns @@ -70,6 +79,7 @@ def spatial_behavior(self, connection, table_name, schema=None, **kw): } ) reflected_names.add(idx_name) + return indexes spatial_behavior.__doc__ = normal_behavior.__doc__ From 75dfba91fcebe800f540e1379c9eb6a4abe91bdc Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Tue, 31 May 2022 10:58:32 +0200 Subject: [PATCH 11/18] Bump python version --- .coveragerc | 2 -- .github/workflows/test_and_publish.yml | 6 +++--- requirements.txt | 6 +++--- setup.cfg | 10 +++++----- 4 files changed, 11 insertions(+), 13 deletions(-) diff --git a/.coveragerc b/.coveragerc index 456a5545..37cdd3d5 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,5 +1,3 @@ [run] source = geoalchemy2 relative_files = True -omit = - geoalchemy2/alembic_helpers.py diff --git a/.github/workflows/test_and_publish.yml 
b/.github/workflows/test_and_publish.yml index 71087f31..b45c623b 100644 --- a/.github/workflows/test_and_publish.yml +++ b/.github/workflows/test_and_publish.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.7, 3.8, 3.9, pypy3] + python-version: ["3.7", "3.8", "3.9", "3.10", "pypy3"] # The type of runner that the job will run on runs-on: ubuntu-22.04 @@ -123,10 +123,10 @@ jobs: - uses: actions/checkout@v2 - - name: Set up Python 3.6 + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: 3.7 # Build distribution and deploy to Pypi - name: Build and deploy package diff --git a/requirements.txt b/requirements.txt index 37d81942..4dbc8e11 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # Additional requirements for running the testsuite and development -flake8==3.7.9 -pytest==3.7.4 -pytest-cov==2.5.1 +flake8 +pytest +pytest-cov pytest-html diff --git a/setup.cfg b/setup.cfg index 6e566900..2c4e55af 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,16 +3,16 @@ universal = 1 [tox:tox] -envlist = py{36,37,38,39}-sqla{11, latest}, pypy3-sqla{11, latest}, lint, coverage +envlist = py{37,38,39,310}-sqla{11, latest}, pypy3-sqla{11, latest}, lint, coverage requires= setuptools>42 [gh-actions] python = - 3.6: py36-sqla{11, latest}, lint - 3.7: py37-sqla{11, latest} + 3.7: py37-sqla{11, latest}, lint 3.8: py38-sqla{11, latest} 3.9: py39-sqla{11, latest} + 3.10: py310-sqla{11, latest} pypy-3: pypy3-sqla{11, latest} [testenv] @@ -57,7 +57,7 @@ commands = coverage report -m [testenv:lint] -basepython = python3.6 +basepython = python3.7 skip_install = true deps = isort @@ -67,7 +67,7 @@ commands = flake8 --ignore=W503,W504 geoalchemy2 tests [testenv:format] -basepython = python3.6 +basepython = python3.7 skip_install = true deps = isort From 9ae870a760e2795a18fb1325508648b8d7f0a4bd Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Tue, 31 May 2022 12:20:17 +0200 Subject: [PATCH 12/18] Improve migration test --- tests/test_alembic_migrations.py | 63 +++++++++++++++++++++++++++----- 1 file changed, 54 insertions(+), 9 deletions(-) diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py index 9f65d704..2b8a5841 100644 --- a/tests/test_alembic_migrations.py +++ b/tests/test_alembic_migrations.py @@ -307,6 +307,14 @@ def test_migration_revision( spatial_index=False, ), ), + Column( + "geom_without_idx_2", + Geometry( + geometry_type="LINESTRING", + srid=4326, + spatial_index=False, + ), + ), ) """ @@ -343,11 +351,28 @@ def test_migration_revision( "sqlite": [ ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), + ('new_spatial_table', 'geom_without_idx_2', 2, 2, 4326, 0) ], }, table_name="new_spatial_table", ) + # Insert data in new table to check that everything works when Alembic copies the tables + from_text = "GeomFromEWKT" if conn.dialect.name == "sqlite" else "ST_GeomFromEWKT" + conn.execute( + """INSERT INTO new_spatial_table ( + geom_with_idx, + geom_without_idx, + geom_without_idx_2 + ) VALUES ( + {from_text}('SRID=4326;LINESTRING(0 0, 1 1)'), + {from_text}('SRID=4326;LINESTRING(0 0, 1 1)'), + {from_text}('SRID=4326;LINESTRING(0 0, 1 1)') + ) + """.format(from_text=from_text) + ) + conn.execute("COMMIT") + # Remove spatial columns and add new ones with test_script_path.open(mode="w", encoding="utf8") as f: f.write( @@ -364,6 +389,23 @@ def test_migration_revision( "new_spatial_table", metadata, Column("id", Integer, 
primary_key=True), + Column( + "geom_with_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + ), + nullable=False, + ), + Column( + "geom_without_idx", + Geometry( + geometry_type="LINESTRING", + srid=4326, + spatial_index=False, + ), + nullable=False, + ), Column( "new_geom_with_idx", Geometry( @@ -387,7 +429,7 @@ def test_migration_revision( # Auto-generate a new migration script rev_cols = command.revision( alembic_config, - "Add and remove spatial columns", + "Add, alter and remove spatial columns", autogenerate=True, rev_id="columns", ) @@ -400,19 +442,21 @@ def test_migration_revision( { "postgresql": [ ( - "idx_new_spatial_table_new_geom_with_idx", - """CREATE INDEX idx_new_spatial_table_new_geom_with_idx - ON gis.new_spatial_table - USING gist (new_geom_with_idx)""", + 'idx_new_spatial_table_geom_with_idx', + 'CREATE INDEX idx_new_spatial_table_geom_with_idx ON gis.new_spatial_table USING gist (geom_with_idx)' ), ( - "new_spatial_table_pkey", - """CREATE UNIQUE INDEX new_spatial_table_pkey - ON gis.new_spatial_table - USING btree (id)""", + 'idx_new_spatial_table_new_geom_with_idx', + 'CREATE INDEX idx_new_spatial_table_new_geom_with_idx ON gis.new_spatial_table USING gist (new_geom_with_idx)', + ), + ( + 'new_spatial_table_pkey', + 'CREATE UNIQUE INDEX new_spatial_table_pkey ON gis.new_spatial_table USING btree (id)', ), ], "sqlite": [ + ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), + ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), ("new_spatial_table", "new_geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "new_geom_without_idx", 2, 2, 4326, 0), ], @@ -443,6 +487,7 @@ def test_migration_revision( "sqlite": [ ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1), ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0), + ('new_spatial_table', 'geom_without_idx_2', 2, 2, 4326, 0), ], }, table_name="new_spatial_table", From 54fd8171cb7fa297cd987ceffbf3c4d5d2ea1263 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Wed, 1 Jun 2022 22:39:23 +0200 Subject: [PATCH 13/18] Coverage --- .github/workflows/test_and_publish.yml | 2 +- setup.cfg | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_and_publish.yml b/.github/workflows/test_and_publish.yml index b45c623b..f6939ef8 100644 --- a/.github/workflows/test_and_publish.yml +++ b/.github/workflows/test_and_publish.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "pypy3"] + python-version: ["3.7", "3.8", "3.9", "3.10", "pypy-3"] # The type of runner that the job will run on runs-on: ubuntu-22.04 diff --git a/setup.cfg b/setup.cfg index 2c4e55af..53032c12 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,8 +22,8 @@ passenv= SPATIALITE_DB_PATH setenv= COVERAGE_FILE = {env:COVERAGE_FILE:.coverage-{envname}} - EXPECTED_COV = 93 - pypy3: EXPECTED_COV = 92 + EXPECTED_COV = 91 + pypy3: EXPECTED_COV = 90 deps= alembic sqla11: SQLAlchemy==1.1.2 From 219f25958fd07bc7269b0ef13272d46a71dba8f7 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Wed, 1 Jun 2022 22:43:07 +0200 Subject: [PATCH 14/18] Fix pypy --- .github/workflows/test_and_publish.yml | 4 ++-- doc/index.rst | 10 +++++----- setup.cfg | 14 +++++++------- setup.py | 2 +- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/test_and_publish.yml b/.github/workflows/test_and_publish.yml index f6939ef8..71799232 100644 --- a/.github/workflows/test_and_publish.yml +++ b/.github/workflows/test_and_publish.yml @@ -20,7 +20,7 @@ jobs: 
From adf947033d0fd341e062080fbfad421ebff43851 Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Thu, 2 Jun 2022 09:11:13 +0200
Subject: [PATCH 15/18] lint

---
 tests/test_functional.py        | 2 +-
 tests/test_functional_sqlite.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test_functional.py b/tests/test_functional.py
index 39ccf03e..860022ad 100644
--- a/tests/test_functional.py
+++ b/tests/test_functional.py
@@ -31,9 +31,9 @@
 from sqlalchemy.sql import func
 from sqlalchemy.testing.assertions import ComparesTables

-from geoalchemy2 import _get_spatialite_attrs
 from geoalchemy2 import Geometry
 from geoalchemy2 import Raster
+from geoalchemy2 import _get_spatialite_attrs
 from geoalchemy2.elements import WKBElement
 from geoalchemy2.elements import WKTElement
 from geoalchemy2.shape import from_shape

diff --git a/tests/test_functional_sqlite.py b/tests/test_functional_sqlite.py
index 2f9e5b3d..57f2e141 100644
--- a/tests/test_functional_sqlite.py
+++ b/tests/test_functional_sqlite.py
@@ -16,8 +16,8 @@
 from geoalchemy2.elements import WKTElement
 from geoalchemy2.shape import from_shape

-from . import test_only_with_dialects
 from . import select
+from . import test_only_with_dialects

 if platform.python_implementation().lower() == 'pypy':
     pytest.skip('skip SpatiaLite tests on PyPy', allow_module_level=True)
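The context lines above show the module-level guard that keeps the SpatiaLite suite off
PyPy; patch 18 below adds the same check inside test_migration_revision. The guard can also
be expressed as a reusable skipif marker when only some tests in a module are affected (the
marker and test names here are illustrative):

    import platform

    import pytest

    # Per-test equivalent of pytest.skip(..., allow_module_level=True):
    # the marked test is skipped on PyPy, the rest of the module still runs.
    skip_pypy = pytest.mark.skipif(
        platform.python_implementation().lower() == 'pypy',
        reason='skip SpatiaLite tests on PyPy',
    )

    @skip_pypy
    def test_reflection_spatialite():
        ...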
From 33840b1310bceeefa9b820a0d3fdd318bd8b2ac4 Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Thu, 2 Jun 2022 09:17:51 +0200
Subject: [PATCH 16/18] Fix pypy

---
 .github/workflows/test_and_publish.yml | 2 +-
 setup.cfg                              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test_and_publish.yml b/.github/workflows/test_and_publish.yml
index 71799232..801aec03 100644
--- a/.github/workflows/test_and_publish.yml
+++ b/.github/workflows/test_and_publish.yml
@@ -20,7 +20,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "pypy3.8"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "pypy-3.8"]

     # The type of runner that the job will run on
     runs-on: ubuntu-22.04

diff --git a/setup.cfg b/setup.cfg
index 124c6577..f42cbfa9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,7 +13,7 @@ python =
     3.8: py38-sqla{14, latest}
     3.9: py39-sqla{14, latest}
     3.10: py310-sqla{14, latest}
-    pypy3.8: pypy3-sqla{14, latest}
+    pypy-3.8: pypy3-sqla{14, latest}

 [testenv]
 passenv=

From 0c2416281b330795ee7fcb15bdf09af50fb00d6a Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Thu, 2 Jun 2022 09:50:39 +0200
Subject: [PATCH 17/18] Black and lint

---
 geoalchemy2/alembic_helpers.py   |  5 -----
 tests/test_alembic_migrations.py | 23 ++++++++++++++---------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/geoalchemy2/alembic_helpers.py b/geoalchemy2/alembic_helpers.py
index ed28eb0d..0a8d0157 100644
--- a/geoalchemy2/alembic_helpers.py
+++ b/geoalchemy2/alembic_helpers.py
@@ -1,6 +1,4 @@
 """Some helpers to use with Alembic migration tool."""
-import os
-
 from alembic.autogenerate import renderers
 from alembic.autogenerate import rewriter
 from alembic.autogenerate.render import _add_column
@@ -15,9 +13,6 @@
 from alembic.operations import BatchOperations
 from alembic.operations import Operations
 from alembic.operations import ops
-from alembic.operations.batch import ApplyBatchImpl
-from sqlalchemy import Index
-from sqlalchemy import Table
 from sqlalchemy import text
 from sqlalchemy.dialects.sqlite.base import SQLiteDialect
 from sqlalchemy.ext.compiler import compiles

diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py
index 2b8a5841..39fb5599 100644
--- a/tests/test_alembic_migrations.py
+++ b/tests/test_alembic_migrations.py
@@ -351,7 +351,7 @@ def test_migration_revision(
             "sqlite": [
                 ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1),
                 ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0),
-                ('new_spatial_table', 'geom_without_idx_2', 2, 2, 4326, 0)
+                ("new_spatial_table", "geom_without_idx_2", 2, 2, 4326, 0),
             ],
         },
         table_name="new_spatial_table",
@@ -369,7 +369,9 @@ def test_migration_revision(
             {from_text}('SRID=4326;LINESTRING(0 0, 1 1)'),
             {from_text}('SRID=4326;LINESTRING(0 0, 1 1)')
         )
-        """.format(from_text=from_text)
+        """.format(
+            from_text=from_text
+        )
     )
     conn.execute("COMMIT")
@@ -442,16 +444,19 @@ def test_migration_revision(
         {
             "postgresql": [
                 (
-                    'idx_new_spatial_table_geom_with_idx',
-                    'CREATE INDEX idx_new_spatial_table_geom_with_idx ON gis.new_spatial_table USING gist (geom_with_idx)'
+                    "idx_new_spatial_table_geom_with_idx",
+                    """CREATE INDEX idx_new_spatial_table_geom_with_idx ON gis.new_spatial_table
+                    USING gist (geom_with_idx)""",
                 ),
                 (
-                    'idx_new_spatial_table_new_geom_with_idx',
-                    'CREATE INDEX idx_new_spatial_table_new_geom_with_idx ON gis.new_spatial_table USING gist (new_geom_with_idx)',
+                    "idx_new_spatial_table_new_geom_with_idx",
+                    """CREATE INDEX idx_new_spatial_table_new_geom_with_idx ON gis.new_spatial_table
+                    USING gist (new_geom_with_idx)""",
                 ),
                 (
-                    'new_spatial_table_pkey',
-                    'CREATE UNIQUE INDEX new_spatial_table_pkey ON gis.new_spatial_table USING btree (id)',
+                    "new_spatial_table_pkey",
+                    """CREATE UNIQUE INDEX new_spatial_table_pkey ON gis.new_spatial_table
+                    USING btree (id)""",
                 ),
             ],
             "sqlite": [
@@ -487,7 +492,7 @@ def test_migration_revision(
             "sqlite": [
                 ("new_spatial_table", "geom_with_idx", 2, 2, 4326, 1),
                 ("new_spatial_table", "geom_without_idx", 2, 2, 4326, 0),
-                ('new_spatial_table', 'geom_without_idx_2', 2, 2, 4326, 0),
+                ("new_spatial_table", "geom_without_idx_2", 2, 2, 4326, 0),
             ],
         },
         table_name="new_spatial_table",
From 5deed2a59088553107b4a8397a3e7eb588b47f00 Mon Sep 17 00:00:00 2001
From: Adrien Berchet
Date: Thu, 2 Jun 2022 10:07:50 +0200
Subject: [PATCH 18/18] Fix pypy

---
 setup.cfg                        | 2 +-
 tests/test_alembic_migrations.py | 5 +++++
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index f42cbfa9..4d058642 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -23,7 +23,7 @@ passenv=
 setenv=
     COVERAGE_FILE = {env:COVERAGE_FILE:.coverage-{envname}}
     EXPECTED_COV = 91
-    pypy3: EXPECTED_COV = 90
+    pypy3: EXPECTED_COV = 78
 deps=
     alembic
     sqla14: SQLAlchemy==1.4.*

diff --git a/tests/test_alembic_migrations.py b/tests/test_alembic_migrations.py
index 39fb5599..b34db18b 100644
--- a/tests/test_alembic_migrations.py
+++ b/tests/test_alembic_migrations.py
@@ -1,4 +1,6 @@
 """Test alembic migrations of spatial columns."""
+import platform
+
 import pytest
 import sqlalchemy as sa  # noqa (This import is only used in the migration scripts)
 from alembic import command
@@ -268,6 +270,9 @@ class = StreamHandler
 def test_migration_revision(
     conn, metadata, alembic_config, alembic_env_path, test_script_path
 ):
+    if platform.python_implementation().lower() == 'pypy':
+        pytest.skip('skip SpatiaLite tests on PyPy', allow_module_level=True)
+
     initial_rev = command.revision(
         alembic_config,
         "Initial state",