Mercurial repository dive4elements/river, changeset 5128:a020100ee6a1
SCHEMA CHANGE: Merge branch dami into default.
A summary of the schema changes:
The HWS and Lines tables are dropped and replaced by HWS_Lines and
HWS_Points.
The Catchment table is removed; it will be replaced by a WMS service.
Hydr_boundaries has added references to boundary_kind, sectie_kind
and sobek_kind objects.
Dem has a new column srid.
author   | Andre Heinecke <aheinecke@intevation.de>
date     | Thu, 28 Feb 2013 11:48:17 +0100
parents  | e37b25628dd4 (current diff) 3d8000616ed5 (diff)
children | b11f3563a117
files    | flys-artifacts/doc/conf/meta-data.xml flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSCatchmentArtifact.java flys-backend/contrib/hws_schema.diff flys-backend/contrib/shpimporter/catchments.py flys-backend/contrib/shpimporter/lines.py flys-backend/doc/schema/import-dems.sql flys-backend/src/main/java/de/intevation/flys/model/Catchment.java flys-backend/src/main/java/de/intevation/flys/model/Hws.java flys-backend/src/main/java/de/intevation/flys/model/Line.java
diffstat | 63 files changed, 2867 insertions(+), 1490 deletions(-)
--- a/contrib/make_flys_release/README Thu Feb 28 11:06:20 2013 +0100 +++ b/contrib/make_flys_release/README Thu Feb 28 11:48:17 2013 +0100 @@ -38,3 +38,21 @@ `server` und `client`. Im Server sind alle Konfigurationen sowie notwendige Bibliotheken zum Starten des FLYS Servers enthalten. Im Client ist lediglich das WAR Archiv für einen Servlet Container (z.B. Tomcat) enthalten. + +Importer: +========= +Das script um den Importer zu bauen und zu paketieren liegt unter +bin/make-importer-package.sh dies muss man anpassen in dem man Pfade angibt +unter welchem sich weitere pakete befinden. +Um das Paket zu bauen muss rpm2cpio installiert sein. + +Benötigt werden in den einzustellenden Verzeichnissen: +EXTRAS: + - libgdal1-1.9.0-intevation1.x86\_64.rpm + - gdal-1.9.0-intevation1.x86\_64.rpm + - python-gdal-1.9.0-intevation1.x86\_64.rpm +ORACLE: + - cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm + - instantclient-basic-linux-x86-64-11.2.0.2.0.zip + - instantclient-sdk-linux-x86-64-11.2.0.2.0.zip + - instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/make_flys_release/bin/make-importer-package.sh Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,70 @@ +#!/bin/bash +set -e + +# See README for more information + +# The working directory. Resulting tarball will be placed in the directory above. +PKG_DIR=/tmp/flys-importer +# Path to oracle zip archives and an oracle_cx rpm +ORACLE=/path/to/oracle/archives +# Default conf +CONF_DIR=/path/to/conf/dir +# Path to the flys checkout +FLYS_DIR=/path/to/flys/root +# Extra packages +EXTRAS=/path/to/gdal + +rm -fr $PKG_DIR +mkdir -p $PKG_DIR/hydr_morph +mkdir -p $PKG_DIR/geodaesie +mkdir -p $PKG_DIR/opt/lib64 +mkdir -p $PKG_DIR/schema + +cd ${FLYS_DIR}/flys-backend +mvn -f pom-oracle.xml clean compile assembly:single +cp target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar \ + $PKG_DIR/hydr_morph/importer.jar +cp ${FLYS_DIR}/flys-backend/contrib/shpimporter/*.py $PKG_DIR/geodaesie +cp ${FLYS_DIR}/flys-backend/contrib/run_geo.sh \ + ${FLYS_DIR}/flys-backend/contrib/run_hydr_morph.sh \ + $PKG_DIR +cp -r ${CONF_DIR} $PKG_DIR +ln -s /usr/lib64/libproj.so.0.6.6 $PKG_DIR/opt/lib64/libproj.so # workaround for bad packaging +rm -rf /tmp/other_rpms +mkdir /tmp/other_rpms +cd /tmp/other_rpms + +rpm2cpio ${EXTRAS}/libgdal1-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories +rpm2cpio ${EXTRAS}/gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories +rpm2cpio ${EXTRAS}/python-gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories +rpm2cpio ${ORACLE}/cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm | cpio -i --make-directories +cp -r /tmp/other_rpms/usr/* $PKG_DIR/opt +rm -rf /tmp/other_rpms + +cp ${FLYS_DIR}/flys-backend/doc/schema/*.sql $PKG_DIR/schema +cp ${FLYS_DIR}/flys-backend/doc/documentation/de/importer-manual.pdf $PKG_DIR + +# Oracle (Do not distribute) +unzip ${ORACLE}/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt +unzip ${ORACLE}/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt +unzip ${ORACLE}/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt + +mkdir $PKG_DIR//opt/instantclient_11_2/lib +cd $PKG_DIR//opt/instantclient_11_2/lib +ln -s ../libclntsh.so.11.1 . +ln -s ../libclntsh.so.11.1 libclntsh.so +ln -s ../libnnz11.so . +ln -s ../libocci.so.11.1 . +ln -s ../libocci.so.11.1 libocci.so +ln -s ../libociei.so . +ln -s ../libocijdbc11.so . +ln -s ../libsqlplusic.so . +ln -s ../libsqlplus.so . +# End Oracle + +sed -i 's/shpimporter\/shp/geodaesie\/shp/' $PKG_DIR/run_geo.sh + +cd $PKG_DIR/.. +DATE=$(date +%Y%m%d%H%m) +tar -czf flys-importer${DATE}.tar.gz flys-importer +sha1sum flys-importer${DATE}.tar.gz > flys-importer${DATE}.tar.gz.sha1
--- a/flys-artifacts/doc/conf/conf.xml Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-artifacts/doc/conf/conf.xml Thu Feb 28 11:48:17 2013 +0100 @@ -39,24 +39,15 @@ <artifact-factory name="wmsqpsfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." ttl="3600000" artifact="de.intevation.flys.artifacts.WMSQPSArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> - <artifact-factory name="wmshwsfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." - ttl="3600000" - artifact="de.intevation.flys.artifacts.WMSHwsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> <artifact-factory name="wmshydrboundariesfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." ttl="3600000" artifact="de.intevation.flys.artifacts.WMSHydrBoundaryArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> <artifact-factory name="wmshydrboundariespolyfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." ttl="3600000" artifact="de.intevation.flys.artifacts.WMSHydrBoundaryPolyArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> - <artifact-factory name="wmscatchmentfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." - ttl="3600000" - artifact="de.intevation.flys.artifacts.WMSCatchmentArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> <artifact-factory name="wmsfloodplainfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks." ttl="3600000" artifact="de.intevation.flys.artifacts.WMSFloodplainArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> - <artifact-factory name="wmslinefactory" description="Factory to create an artifact to be used in WINFO" - ttl="3600000" - artifact="de.intevation.flys.artifacts.WMSLineArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory> <artifact-factory name="wmsbuildingsfactory" description="Factory to create an artifact to be used in WINFO" ttl="3600000" artifact="de.intevation.flys.artifacts.WMSBuildingsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
--- a/flys-artifacts/doc/conf/meta-data.xml Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-artifacts/doc/conf/meta-data.xml Thu Feb 28 11:48:17 2013 +0100 @@ -932,23 +932,6 @@ </dc:elements> </dc:context> </dc:macro> - <dc:macro name="flood-map-hws"> - <dc:context> - <dc:statement> - SELECT count(*) as km_exists, name as name - FROM hws WHERE river_id = ${river_id} GROUP BY name - </dc:statement> - <dc:elements> - <dc:if test="$km_exists>0"> - <hws> - <dc:attribute name="factory" value="wmshwsfactory"/> - <dc:attribute name="ids" value="${river_id};${name}"/> - <dc:attribute name="name" value="${name}"/> - </hws> - </dc:if> - </dc:elements> - </dc:context> - </dc:macro> <dc:macro name="flood-map-hydr-boundaries"> <hydr_boundaries_lines> <dc:call-macro name="flood-map-hydr-boundaries-lines"/> @@ -1029,23 +1012,6 @@ </dc:context> </land> </dc:macro> - <dc:macro name="flood-map-catchments"> - <dc:context> - <dc:statement> - SELECT count(*) as km_exists, name as name - FROM catchment WHERE river_id = ${river_id} GROUP BY name - </dc:statement> - <dc:elements> - <dc:if test="$km_exists>0"> - <catchment> - <dc:attribute name="factory" value="wmscatchmentfactory"/> - <dc:attribute name="ids" value="${river_id};${name}"/> - <dc:attribute name="name" value="${name}"/> - </catchment> - </dc:if> - </dc:elements> - </dc:context> - </dc:macro> <dc:macro name="flood-map-floodplain"> <dc:context> <dc:statement> @@ -1062,11 +1028,11 @@ </dc:elements> </dc:context> </dc:macro> - <dc:macro name="flood-map-lines"> + <dc:macro name="flood-map-hwslines"> <dc:context> <dc:statement> SELECT count(*) as km_exists, name as name - FROM lines WHERE river_id = ${river_id} GROUP BY name + FROM hws_lines WHERE river_id = ${river_id} GROUP BY name </dc:statement> <dc:elements> <dc:if test="$km_exists>0"> @@ -1226,9 +1192,6 @@ <fixpoints> <dc:call-macro name="flood-map-fixpoints"/> </fixpoints> - <hws> - <dc:call-macro name="flood-map-hws"/> - </hws> <hydrboundaries> <dc:call-macro name="flood-map-hydr-boundaries"/> <dc:call-macro name="flood-map-floodplain"/> @@ -1241,9 +1204,9 @@ <dc:call-macro name="flood-map-km"/> <dc:call-macro name="flood-map-qps"/> </kilometrage> - <lines> - <dc:call-macro name="flood-map-lines"/> - </lines> + <hwslines> + <dc:call-macro name="flood-map-hwslines"/> + </hwslines> <dc:call-macro name="flood-map-uesk"/> <gaugelocations> <dc:call-macro name="flood-map-gaugelocations"/>
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSCatchmentArtifact.java Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,166 +0,0 @@ -package de.intevation.flys.artifacts; - -import java.util.List; - -import org.w3c.dom.Document; - -import org.apache.log4j.Logger; - -import com.vividsolutions.jts.geom.Envelope; - -import de.intevation.artifacts.ArtifactFactory; -import de.intevation.artifacts.CallMeta; - -import de.intevation.artifactdatabase.state.DefaultOutput; -import de.intevation.artifactdatabase.state.Facet; -import de.intevation.artifactdatabase.state.State; - -import de.intevation.flys.model.Catchment; -import de.intevation.flys.model.River; - -import de.intevation.flys.artifacts.model.FacetTypes; -import de.intevation.flys.artifacts.model.RiverFactory; -import de.intevation.flys.utils.FLYSUtils; -import de.intevation.flys.utils.GeometryUtils; - - -public class WMSCatchmentArtifact extends WMSDBArtifact { - - public static final String NAME = "catchment"; - - - private static final Logger logger = - Logger.getLogger(WMSCatchmentArtifact.class); - - - @Override - public void setup( - String identifier, - ArtifactFactory factory, - Object context, - CallMeta callMeta, - Document data) - { - logger.debug("WMSCatchmentArtifact.setup"); - - super.setup(identifier, factory, context, callMeta, data); - } - - - @Override - public String getName() { - return NAME; - } - - - @Override - public State getCurrentState(Object cc) { - State s = new CatchmentState(this); - - List<Facet> fs = getFacets(getCurrentStateId()); - - DefaultOutput o = new DefaultOutput( - "floodmap", - "floodmap", - "image/png", - fs, - "map"); - - s.getOutputs().add(o); - - return s; - } - - - public static class CatchmentState extends WMSDBState implements FacetTypes - { - private static final Logger logger = - Logger.getLogger(CatchmentState.class); - - protected int riverId; - - public CatchmentState(WMSDBArtifact artifact) { - super(artifact); - riverId = 0; - } - - public int getRiverId() { - if (riverId == 0) { - String ids = artifact.getDataAsString("ids"); - String[] parts = ids.split(";"); - - try { - riverId = Integer.parseInt(parts[0]); - } - catch (NumberFormatException nfe) { - logger.error("Cannot parse river id from '" + ids + "'"); - } - } - - return riverId; - } - - @Override - protected String getFacetType() { - return FLOODMAP_CATCHMENT; - } - - @Override - protected String getUrl() { - return FLYSUtils.getUserWMSUrl(artifact.identifier()); - } - - @Override - protected String getSrid() { - River river = RiverFactory.getRiver(getRiverId()); - return FLYSUtils.getRiverSrid(river.getName()); - } - - @Override - protected Envelope getExtent(boolean reproject) { - List<Catchment> catchments = - Catchment.getCatchments(getRiverId(), getName()); - - Envelope max = null; - - for (Catchment c: catchments) { - Envelope env = c.getGeom().getEnvelopeInternal(); - - if (max == null) { - max = env; - continue; - } - - max.expandToInclude(env); - } - - return max != null && reproject - ? 
GeometryUtils.transform(max, getSrid()) - : max; - } - - @Override - protected String getFilter() { - return "river_id=" + String.valueOf(getRiverId()) - + " AND name='" + getName() + "'"; - } - - @Override - protected String getDataString() { - String srid = getSrid(); - - if (FLYSUtils.isUsingOracle()) { - return "geom FROM catchment USING SRID " + srid; - } - else { - return "geom FROM catchment USING UNIQUE id USING SRID " + srid; - } - } - - @Override - protected String getGeometryType() { - return "POLYGON"; - } - } // end of WMSKmState -} -// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHwsArtifact.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHwsArtifact.java Thu Feb 28 11:48:17 2013 +0100 @@ -15,8 +15,8 @@ import de.intevation.artifactdatabase.state.Facet; import de.intevation.artifactdatabase.state.State; +import de.intevation.flys.model.HWSLine; import de.intevation.flys.model.River; -import de.intevation.flys.model.Hws; import de.intevation.flys.artifacts.model.FacetTypes; import de.intevation.flys.artifacts.model.RiverFactory; @@ -118,11 +118,11 @@ @Override protected Envelope getExtent(boolean reproject) { - List<Hws> hws = Hws.getHws(getRiverId(), getName()); + List<HWSLine> hws = HWSLine.getLines(getRiverId(), getName()); Envelope max = null; - for (Hws h: hws) { + for (HWSLine h: hws) { Envelope env = h.getGeom().getEnvelopeInternal(); if (max == null) {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSLineArtifact.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSLineArtifact.java Thu Feb 28 11:48:17 2013 +0100 @@ -16,7 +16,7 @@ import de.intevation.artifactdatabase.state.State; import de.intevation.flys.model.River; -import de.intevation.flys.model.Line; +import de.intevation.flys.model.HWSLine; import de.intevation.flys.artifacts.model.FacetTypes; import de.intevation.flys.artifacts.model.RiverFactory; @@ -118,11 +118,11 @@ @Override protected Envelope getExtent(boolean reproject) { - List<Line> lines = Line.getLines(getRiverId(), getName()); + List<HWSLine> lines = HWSLine.getLines(getRiverId(), getName()); Envelope max = null; - for (Line l: lines) { + for (HWSLine l: lines) { Envelope env = l.getGeom().getEnvelopeInternal(); if (max == null) {
--- a/flys-backend/README Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/README Thu Feb 28 11:48:17 2013 +0100 @@ -9,6 +9,12 @@ $ createuser --no-createrole --no-superuser --pwprompt --no-createdb flys $ createdb --encoding=UTF-8 --owner flys flystest1 + +Build an importer package: +mvn -f pom.xml clean compile assembly:single +Alternatively with oracle: +mvn -f pom-oracle.xml clean compile assembly:single + Standalone DateGuesser testing example: mvn -e -Dexec.mainClass=de.intevation.flys.utils.DateGuesser -Dexec.args="110803" exec:java <<EOF
--- a/flys-backend/contrib/hws_schema.diff Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,128 +0,0 @@ -diff -r 0bb0525bca52 flys-backend/doc/schema/postgresql-drop-spatial.sql ---- a/flys-backend/doc/schema/postgresql-drop-spatial.sql Fri Jan 25 15:38:34 2013 +0100 -+++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql Fri Jan 25 15:42:05 2013 +0100 -@@ -27,8 +27,14 @@ - DROP TABLE catchment; - DROP SEQUENCE CATCHMENT_ID_SEQ; - --DROP TABLE hws; --DROP SEQUENCE HWS_ID_SEQ; -+--DROP TABLE hws; -+--DROP SEQUENCE HWS_ID_SEQ; -+ -+DROP TABLE hws_points; -+DROP SEQUENCE HWS_POINTS_ID_SEQ; -+ -+DROP TABLE hws_lines; -+DROP SEQUENCE HWS_LINES_ID_SEQ; - - DROP TABLE floodmaps; - DROP SEQUENCE FLOODMAPS_ID_SEQ; -@@ -42,4 +48,7 @@ - DROP TABLE gauge_location; - DROP SEQUENCE GAUGE_LOCATION_ID_SEQ; - -+DROP TABLE fed_states; -+DROP TABLE hws_kinds; -+ - COMMIT; -diff -r 0bb0525bca52 flys-backend/doc/schema/postgresql-spatial.sql ---- a/flys-backend/doc/schema/postgresql-spatial.sql Fri Jan 25 15:38:34 2013 +0100 -+++ b/flys-backend/doc/schema/postgresql-spatial.sql Fri Jan 25 15:42:05 2013 +0100 -@@ -132,20 +132,87 @@ - SELECT AddGeometryColumn('catchment','geom',31467,'POLYGON',2); - ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ'); - -+-- Static lookup tables for Hochwasserschutzanlagen -+CREATE TABLE hws_kinds ( -+ id int PRIMARY KEY NOT NULL, -+ kind VARCHAR(64) NOT NULL -+); -+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass'); -+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm'); -+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben'); - ----Hydrologie/HW-Schutzanlagen/hws.shp --CREATE SEQUENCE HWS_ID_SEQ; --CREATE TABLE hws ( -+CREATE TABLE fed_states ( - id int PRIMARY KEY NOT NULL, -+ name VARCHAR(23) NOT NULL -+); -+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern'); -+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen'); -+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen'); -+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen'); -+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz'); -+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland'); -+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein'); -+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg'); -+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern'); -+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen'); -+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg'); -+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt'); -+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen'); -+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin'); -+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen'); -+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg'); -+ -+--Hydrologie/HW-Schutzanlagen/*Linien.shp -+CREATE SEQUENCE HWS_LINES_ID_SEQ; -+CREATE TABLE hws_lines ( -+ id int PRIMARY KEY NOT NULL, -+ ogr_fid int, -+ kind_id int REFERENCES hws_kinds(id) DEFAULT 2, -+ fed_state_id int REFERENCES fed_states(id), - river_id int REFERENCES rivers(id), -- hws_facility VARCHAR(256), -- type VARCHAR(256), -- name VARCHAR(64), -- path VARCHAR(256) -+ name VARCHAR(256), -+ path VARCHAR(256), -+ offical INT DEFAULT 0, -+ agency VARCHAR(256), -+ range VARCHAR(256), -+ shore_side INT DEFAULT 0, -+ source VARCHAR(256), -+ status_date TIMESTAMP, -+ description VARCHAR(256) - ); --SELECT AddGeometryColumn('hws','geom',31467,'LINESTRING',2); --ALTER TABLE hws ALTER COLUMN id 
SET DEFAULT NEXTVAL('HWS_ID_SEQ'); -+SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 2); -+SELECT AddGeometryColumn('hws_lines', 'geom_target', 31467, 'LINESTRING', 2); -- ? -+SELECT AddGeometryColumn('hws_lines', 'geom_rated_level', 31467, 'LINESTRING', 2); -- ? -+-- TODO: dike_km_from dike_km_to, are they geometries? - -+ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ'); -+ -+--Hydrologie/HW-Schutzanlagen/*Punkte.shp -+CREATE SEQUENCE HWS_POINTS_ID_SEQ; -+CREATE TABLE hws_points ( -+ id int PRIMARY KEY NOT NULL, -+ ogr_fid int, -+ kind_id int REFERENCES hws_kinds(id) DEFAULT 2, -+ fed_state_id int REFERENCES fed_states(id), -+ river_id int REFERENCES rivers(id), -+ name VARCHAR, -+ path VARCHAR, -+ offical INT DEFAULT 0, -+ agency VARCHAR, -+ range VARCHAR, -+ shore_side INT DEFAULT 0, -+ source VARCHAR, -+ status_date VARCHAR, -+ description VARCHAR, -+ freeboard FLOAT8, -+ dike_km FLOAT8, -+ z FLOAT8, -+ z_target FLOAT8, -+ rated_level FLOAT8 -+); -+SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2); -+ -+ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ'); - - -- - --Hydrologie/UeSG
--- a/flys-backend/contrib/run_geo.sh Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/run_geo.sh Thu Feb 28 11:48:17 2013 +0100 @@ -2,8 +2,7 @@ # Required RIVER_PATH="/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar" -RIVER_ID=1 -TARGET_SRS=31467 +RIVER_NAME="Saar" # Set this to your target database for Oracle HOST=localhost @@ -17,7 +16,6 @@ SKIP_AXIS=0 SKIP_KMS=0 SKIP_CROSSSECTIONS=0 -SKIP_LINES=0 SKIP_FIXPOINTS=0 SKIP_BUILDINGS=0 SKIP_FLOODPLAINS=0 @@ -27,15 +25,26 @@ SKIP_GAUGE_LOCATION=0 SKIP_CATCHMENTS=0 SKIP_UESG=0 +SKIP_DGM=0 +# There should be no need to change anything below this line DIR=`dirname $0` DIR=`readlink -f "$DIR"` +OPTIONAL_LIBS="${DIR}"/opt +if [ -d "$OPTIONAL_LIBS" ]; then + export PATH="$OPTIONAL_LIBS/bin:$PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH" + export PYTHONPATH="$OPTIONAL_LIBS/lib/python2.6/site-packages:$PYTHONPATH" + export PYTHONPATH="$OPTIONAL_LIBS/lib64/python2.6/site-packages:$PYTHONPATH" + export GDAL_DATA="$OPTIONAL_LIBS/share/gdal" +fi + exec python $DIR/shpimporter/shpimporter.py \ --directory $RIVER_PATH \ - --river_id $RIVER_ID \ - --target_srs $TARGET_SRS \ + --river_name $RIVER_NAME \ --ogr_connection "$OGR_CONNECTION" \ --host $HOST \ --user $USER \ @@ -44,13 +53,12 @@ --skip_axis $SKIP_AXIS \ --skip_kms $SKIP_KMS \ --skip_crosssections $SKIP_CROSSSECTIONS \ - --skip_lines $SKIP_LINES \ --skip_fixpoints $SKIP_FIXPOINTS \ --skip_buildings $SKIP_BUILDINGS \ --skip_floodplains $SKIP_FLOODPLAINS \ --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \ --skip_gauge_locations $SKIP_GAUGE_LOCATION \ - --skip_catchments $SKIP_CATCHMENTS \ --skip_uesgs $SKIP_UESG \ --skip_hws_lines $SKIP_HWS_LINES \ - --skip_hws_points $SKIP_HWS_POINTS + --skip_hws_points $SKIP_HWS_POINTS \ + --skip_dgm $SKIP_DGM
--- a/flys-backend/contrib/run_hydr_morph.sh Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/run_hydr_morph.sh Thu Feb 28 11:48:17 2013 +0100 @@ -8,6 +8,7 @@ BACKEND_PORT="1521" BACKEND_NAME="XE" LOG4J_CONFIG="conf/log4j.properties" +JAR="hydr_morph/importer.jar" ##################################################################### @@ -55,25 +56,17 @@ MIN_MEMORY="1024m" -########################## Importer Settings ######################## -APP="de.intevation.flys.importer.Importer" -DIR=`dirname $0` -DIR=`readlink -f "$DIR/.."` -##################################################################### - +######################### Run Importer ############################## +OPTIONAL_LIBS="${DIR}"/../opt +if [ -d "$OPTIONAL_LIBS" ]; then + export PATH="$OPTIONAL_LIBS/bin:$PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH" +fi -########################## Collect required libraries ############### -CLASSPATH= -for l in `find "$DIR/lib" -name \*.jar -print`; do - CLASSPATH=$CLASSPATH:$l -done +export LC_ALL=de_DE@euro # Workaround encoding problem -export CLASSPATH -##################################################################### - - -######################### Run Importer ############################## -exec java \ +exec java -jar \ -Xmx$MIN_MEMORY \ -server \ -Dlog4j.configuration=file://`readlink -f $LOG4J_CONFIG` \ @@ -110,4 +103,4 @@ -Dflys.backend.url=$BACKEND_URL \ -Dflys.backend.driver=$BACKEND_DB_DRIVER \ -Dflys.backend.dialect=$BACKEND_DB_DIALECT \ - $APP + $JAR
--- a/flys-backend/contrib/shpimporter/axis.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/axis.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,7 +1,9 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer -import shpimporter NAME="Axis" TABLE_NAME="river_axes" @@ -27,7 +29,7 @@ def isShapeRelevant(self, name, path): - return name == "achse" or name.find("achse") >= 0 + return "achse" in name.lower() def createNewFeature(self, featureDef, feat, **args):
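Note: this changeset applies the same import fallback to every shpimporter module (axis.py above and the files that follow). Newer GDAL bindings live in the osgeo package, while older standalone installations only provide a top-level ogr module; the pattern, reproduced here for reference, is simply:

    # Import fallback used throughout the shpimporter modules in this change:
    # prefer the osgeo package (current GDAL bindings), fall back to the
    # legacy top-level ogr module of older GDAL installations.
    try:
        from osgeo import ogr
    except ImportError:
        import ogr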
--- a/flys-backend/contrib/shpimporter/boundaries.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/boundaries.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,10 +1,13 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer TABLE_NAME="hydr_boundaries" TABLE_NAME_POLY="hydr_boundaries_poly" -PATH="Hydrologie/Hydr.Grenzen/Linien" +PATH="Hydrologie/Hydr.Grenzen" NAME="Hydr. Boundaries" @@ -13,29 +16,74 @@ def getPath(self, base): return "%s/%s" % (base, PATH) - def getTablename(self): return TABLE_NAME - def getName(self): return NAME - def isGeometryValid(self, geomType): - return geomType == 2 - + return geomType == ogr.wkbLineString def isShapeRelevant(self, name, path): - return True - + shp = ogr.Open(path) + if self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) and \ + self.getKind(path) > 0: + return True + else: + return False def getKind(self, path): - if path.find("BfG") > 0: + if "linien/bfg" in path.lower(): return 1 + elif "linien/land" in path.lower(): + return 2 + elif "/sonstige/" in path.lower(): + return 3 else: - return 2 + return 0 + def createNewFeature(self, featureDef, feat, **args): + kind = self.getKind(args['path']) + + newFeat = ogr.Feature(featureDef) + geometry = feat.GetGeometryRef() + geometry.SetCoordinateDimension(2) + + newFeat.SetGeometry(geometry) + newFeat.SetField("name", args['name']) + newFeat.SetField("kind", kind) + if self.IsFieldSet(feat, "SECTIE"): + newFeat.SetField("sectie", feat.GetField("SECTIE")) + + if self.IsFieldSet(feat, "SOBEK"): + newFeat.SetField("sobek", feat.GetField("SOBEK")) + + if self.IsFieldSet(feat, "river_id"): + newFeat.SetField("river_id", feat.GetField("river_id")) + else: + newFeat.SetField("river_id", self.river_id) + + return newFeat + +class HydrBoundaryPoly(HydrBoundary): + + def getTablename(self): + return TABLE_NAME_POLY + + def getName(self): + return "%s (Polygons)" % NAME + + def isGeometryValid(self, geomType): + return geomType == ogr.wkbPolygon or geomType == ogr.wkbMultiPolygon + + def isShapeRelevant(self, name, path): + shp = ogr.Open(path) + if self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) and \ + self.getKind(path) > 0: + return True + else: + return False def createNewFeature(self, featureDef, feat, **args): kind = self.getKind(args['path']) @@ -48,39 +96,12 @@ newFeat.SetField("name", args['name']) newFeat.SetField("kind", kind) - if self.IsFieldSet(feat, "river_id"): - newFeat.SetField("river_id", feat.GetField("river_id")) - else: - newFeat.SetField("river_id", self.river_id) - - return newFeat - - - -class HydrBoundaryPoly(HydrBoundary): - - def getTablename(self): - return TABLE_NAME_POLY - + if self.IsFieldSet(feat, "SECTIE"): + newFeat.SetField("sectie", feat.GetField("SECTIE")) - def getName(self): - return "%s (Polygons)" % NAME - - - def isGeometryValid(self, geomType): - return geomType == 3 or geomType == 6 - + if self.IsFieldSet(feat, "SOBEK"): + newFeat.SetField("sobek", feat.GetField("SOBEK")) - def createNewFeature(self, featureDef, feat, **args): - kind = self.getKind(args['path']) - - newFeat = ogr.Feature(featureDef) - geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(2) - - newFeat.SetGeometry(geometry) - newFeat.SetField("name", args['name']) - newFeat.SetField("kind", kind) if self.IsFieldSet(feat, "river_id"): newFeat.SetField("river_id", feat.GetField("river_id"))
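The rewritten HydrBoundary.getKind() above classifies a shapefile purely by its path: BfG line directories map to kind 1, federal state (Land) lines to kind 2, "Sonstige" to kind 3, and everything else to 0, which makes isShapeRelevant() skip the file. A minimal, self-contained sketch of that classification (the example paths are made up):

    # Standalone sketch of the path-based classification in
    # HydrBoundary.getKind(); the example paths below are hypothetical.
    def get_kind(path):
        p = path.lower()
        if "linien/bfg" in p:
            return 1   # BfG boundary lines
        elif "linien/land" in p:
            return 2   # federal state (Land) boundary lines
        elif "/sonstige/" in p:
            return 3   # other boundaries
        return 0       # unknown kind -> isShapeRelevant() skips the shapefile

    print(get_kind("Hydrologie/Hydr.Grenzen/Linien/BfG/grenzen.shp"))  # 1
    print(get_kind("Hydrologie/Hydr.Grenzen/Sonstige/gebiet.shp"))     # 3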
--- a/flys-backend/contrib/shpimporter/buildings.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/buildings.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- a/flys-backend/contrib/shpimporter/catchments.py Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -import ogr - -from importer import Importer - -TABLE_NAME="catchment" -PATH="Hydrologie/Einzugsgebiet" -NAME="Catchments" - - -class Catchment(Importer): - - def getPath(self, base): - return "%s/%s" % (base, PATH) - - - def getTablename(self): - return TABLE_NAME - - - def getName(self): - return NAME - - - def isGeometryValid(self, geomType): - return geomType == 3 or geomType == 6 - - - def isShapeRelevant(self, name, path): - return True - - - def createNewFeature(self, featureDef, feat, **args): - newFeat = ogr.Feature(featureDef) - geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(2) - - newFeat.SetGeometry(geometry) - - if self.IsFieldSet(feat, "river_id"): - newFeat.SetField("river_id", feat.GetField("river_id")) - else: - newFeat.SetField("river_id", self.river_id) - - if self.IsFieldSet(feat, "Name"): - newFeat.SetField("name", feat.GetField("name")) - else: - newFeat.SetField("name", args['name']) - - if self.IsFieldSet(feat, "AREA"): - newFeat.SetField("area", feat.GetField("area")) - - return newFeat -
--- a/flys-backend/contrib/shpimporter/crosssectiontracks.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/crosssectiontracks.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/contrib/shpimporter/dgm.py Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + +import codecs +import utils + +def latin(string): + return unicode(string, "latin1") + +# <dbfield> : (<csvfield>, conversion function) +DGM_MAP = { + "lower" : ("km_von", lambda x: int(x)), + "upper" : ("km_bis", lambda x: int(x)), + "year_from" : "Jahr_von", + "year_to" : "Jahr_bis", + "projection" : "Projektion", + "elevation_state" : latin("Höhenstatus"), + "format" : "Format", + "border_break" : ("Bruchkanten", + lambda x: True if x.lower() == "Ja" else False), + "resolution" : (latin("Auflösung"), lambda x: x), +# "description" : + "srid" : "SRID", + "path" : ("Pfad_Bestand", lambda x: x), + } + +SQL_INSERT_DGT = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \ + ") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" +SQL_INSERT_DGT_ORA = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \ + ") VALUES (:s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s)" + +def insertRiverDgm(dbconn, dgmfile, river_name, dry_run, oracle): + with codecs.open(dgmfile, "r", "latin1") as csvfile: + firstline = csvfile.readline() + names = firstline.split(";") + namedict = {} + field_nr = 0 + for name in names: + namedict[name] = field_nr + field_nr += 1 + + river_id = utils.getRiverId(dbconn, river_name, oracle) + for line in csvfile: + fields = line.split(";") + if not fields: continue + if fields[namedict[latin("Gewässer")]] != river_name: + continue + else: + values=[] + for key, val in DGM_MAP.items(): + if isinstance(val, tuple): + values.append(val[1](fields[namedict[val[0]]])) + else: + values.append(unicode.encode( + fields[namedict[val]], "UTF-8")) + name = "%s KM %s - %s" % (river_name, fields[namedict["km_von"]], + fields[namedict["km_bis"]]) + cur = dbconn.cursor() + if oracle: + stmt = SQL_INSERT_DGT_ORA + else: + stmt = SQL_INSERT_DGT + + cur.execute(stmt, [river_id, name] + values) + + if not dry_run: + dbconn.commit() +
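In DGM_MAP above, each dem column maps either to a plain CSV column name or to a (column, conversion) tuple; insertRiverDgm() resolves the mapping per row and reuses the same DGM_MAP.keys() order for the column list of the INSERT statement. A simplified sketch of the per-row resolution (column names and the sample row are illustrative; the real code additionally handles latin-1 decoding and skips rows belonging to other rivers):

    # Simplified sketch of how a DGM_MAP entry is resolved for one CSV row.
    # Column names and the sample row are illustrative only.
    DGM_MAP = {
        "lower":      ("km_von", int),    # tuple: CSV column plus conversion
        "upper":      ("km_bis", int),
        "projection": "Projektion",       # plain string: value copied verbatim
    }

    def row_to_values(fields, namedict, mapping):
        values = []
        for dbcol in mapping:
            spec = mapping[dbcol]
            if isinstance(spec, tuple):
                csvcol, convert = spec
                values.append(convert(fields[namedict[csvcol]]))
            else:
                values.append(fields[namedict[spec]])
        return values

    namedict = {"km_von": 0, "km_bis": 1, "Projektion": 2}  # header -> index
    print(row_to_values(["12", "17", "DHDN_3_GK_Z3"], namedict, DGM_MAP))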
--- a/flys-backend/contrib/shpimporter/fixpoints.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/fixpoints.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr, osr +try: + from osgeo import ogr, osr +except ImportError: + import ogr, osr from importer import Importer
--- a/flys-backend/contrib/shpimporter/floodplains.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/floodplains.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- a/flys-backend/contrib/shpimporter/gauges.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/gauges.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- a/flys-backend/contrib/shpimporter/hws.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/hws.py Thu Feb 28 11:48:17 2013 +0100 @@ -3,97 +3,66 @@ try: from osgeo import ogr -except ImportErrror: +except ImportError: import ogr from importer import Importer import utils +import logging +logger = logging.getLogger("Hochwasserschutzanlagen") + PATH="Hydrologie/HW-Schutzanlagen" NAME="HWS" # Keep in sync with hws_kinds table: HWS_KIND = { - "Durchlass" : 1, - "Damm" : 2, - "Deich" : 2, - "Graben" : 3, + "durchlass" : 1, + "damm" : 2, + "deich" : 2, + "hochufer" : 2, + "graben" : 3, + "rohr1" : 1, } # Keep in sync with fed_states table: FED_STATES = { - "Bayern" : 1, - "Hessen" : 2, - "Niedersachsen" : 3, - "Nordrhein-Westfalen" : 4, - "Rheinland-Pfalz" : 5, - "Saarland" : 6, - "Schleswig-Holstein" : 7, - "Brandenburg" : 8, - "Mecklenburg-Vorpommern" : 9, - "Thüringen" : 10, - "Baden-Württemberg" : 11, - "Sachsen-Anhalt" : 12, - "Sachsen" : 13, - "Berlin" : 14, - "Bremen" : 15, - "Hamburg" : 16, + "bayern" : 1, + "hessen" : 2, + "niedersachsen" : 3, + "nordrhein-westfalen" : 4, + "nordrhein westfalen" : 4, + "rheinland-pfalz" : 5, + "rheinland pfalz" : 5, + "saarland" : 6, + "schleswig-holstein" : 7, + "schleswig holstein" : 7, + "brandenburg" : 8, + "mecklenburg-vorpommern" : 9, + "mecklenburg vorpommern" : 9, + "thüringen" : 10, + "baden-württemberg" : 11, + "baden württemberg" : 11, + "sachsen-anhalt" : 12, + "sachsen anhalt" : 12, + "sachsen" : 13, + "berlin" : 14, + "bremen" : 15, + "hamburg" : 16, } -class HWSLines(Importer): - - def getPath(self, base): - return "%s/%s" % (base, PATH) - - def getTablename(self): - return "hws_lines" - - def getName(self): - return "HWS_LINES" - - def isGeometryValid(self, geomType): - return geomType == 2 - - def isShapeRelevant(self, name, path): - return True - - def createNewFeature(self, featureDef, feat, **args): - newFeat = ogr.Feature(featureDef) - geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(2) - - newFeat.SetGeometry(geometry) - - if self.IsFieldSet(feat, "river_id"): - newFeat.SetField("river_id", feat.GetField("river_id")) - else: - newFeat.SetField("river_id", self.river_id) - - if self.IsFieldSet(feat, "TYP"): - newFeat.SetField("type", feat.GetField("TYP")) - - if self.IsFieldSet(feat, "Bauart"): - newFeat.SetField("hws_facility", feat.GetField("Bauart")) - - if self.IsFieldSet(feat, "Name"): - newFeat.SetField("name", feat.GetField("name")) - else: - newFeat.SetField("name", args['name']) - - return newFeat - class HWSPoints(Importer): fieldmap = { - "Name" : "name", - "Quelle" : "source", - "Anmerkung" : "description", - "Stand" : "status_date", - "Verband" : "agency", - "Deich_KM" : "dike_km", - "Bereich" : "range", - "Höhe_SOLL" : "z_target", - "WSP_BfG100" : "rated_level", - "Hoehe_IST" : "z", + "name$" : "name", + "quelle$" : "source", + "anmerkung$" : "description", + "stand$" : "status_date", + "verband$" : "agency", + "Deich_{0,1}KM$" : "dike_km", + "Bereich$" : "range", + "H[oeö]{0,2}he_{0,1}SOLL$" : "z_target", + "(WSP_){0,1}BfG_{0,1}100$" : "rated_level", + "H[oeö]{0,2}he_{0,1}IST$" : "z", } def getPath(self, base): @@ -106,13 +75,11 @@ return "HWS_POINTS" def isGeometryValid(self, geomType): - return geomType == 1 + return geomType == ogr.wkbPoint or geomType == ogr.wkbPoint25D def isShapeRelevant(self, name, path): - if "punkte" in os.path.basename(path).lower(): - return True - else: - return False + shp = ogr.Open(path) + return 
self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) def createNewFeature(self, featureDef, feat, **args): newFeat = ogr.Feature(featureDef) @@ -123,65 +90,108 @@ newFeat.SetGeometry(geometry) - newFeat.SetFID(feat.GetFID()) - - newFeat.SetField("ogr_fid", feat.GetFID()) - - if self.IsFieldSet(feat, "Art"): - self.handled("Art") - kind_id = HWS_KIND.get(feat.GetField("Art")) + artname = self.searchField("art$") + if self.IsFieldSet(feat, artname): + self.handled(artname) + kind_id = HWS_KIND.get(feat.GetField(artname).lower()) if not kind_id: - print ("Unbekannte Art: %s" % \ - feat.GetField("Art")) + logger.warn("Unknown Art: %s" % \ + feat.GetField(artname)) else: newFeat.SetField("kind_id", kind_id) - if self.IsFieldSet(feat, "Bundesland"): - self.handled("Bundesland") - fed_id = FED_STATES.get(feat.GetField("Bundesland")) + fname = self.searchField("Bundesland$") + if self.IsFieldSet(feat, fname): + self.handled(fname) + fed_id = FED_STATES.get(feat.GetField(fname).lower()) if not fed_id: - print ("Unbekanntes Bundesland: %s" % \ + logger.warn("Unknown Bundesland: %s" % \ feat.GetField("Bundesland")) else: newFeat.SetField("fed_state_id", fed_id) - if self.IsFieldSet(feat, "river_id"): - self.handled("river_id") - if feat.GetField("river_id") != self.river_id: - print ("River_id mismatch between shapefile and" - " importer parameter.") - newFeat.SetField("river_id", feat.GetField("river_id")) - else: - newFeat.SetField("river_id", self.river_id) - - if self.IsFieldSet(feat, "Ufer"): - self.handled("Ufer") - shoreString = feat.GetField("Ufer") + fname = self.searchField("(ufer$)|(flussseite$)") + if self.IsFieldSet(feat, fname): + self.handled(fname) + shoreString = feat.GetField(fname) if "links" in shoreString.lower(): newFeat.SetField("shore_side", True) elif "rechts" in shoreString.lower(): newFeat.SetField("shore_side", False) - if not self.IsFieldSet(feat, "Name"): - self.handled("Name") + + fname = self.searchField("river_{0,1}id$") + if self.IsFieldSet(feat, fname): + self.handled(fname) + if feat.GetField(fname) != self.river_id: + logger.warn("River_id mismatch between shapefile and" + " importer parameter.") + newFeat.SetField("river_id", feat.GetField(fname)) + else: + newFeat.SetField("river_id", self.river_id) + + fname = self.searchField("name$") + if not self.IsFieldSet(feat, fname): newFeat.SetField("name", args['name']) - if self.IsFieldSet(feat, "offiziell"): - self.handled("offiziell") - offiziell = feat.GetField("offiziell") + fname = self.searchField("offiziell$") + if self.IsFieldSet(feat, fname): + self.handled(fname) + offiziell = feat.GetField(fname) if offiziell == "1" or offiziell == 1: - newFeat.SetField("offiziell", True) + newFeat.SetField("official", True) else: - newFeat.SetField("offiziell", False) + newFeat.SetField("official", False) + # Set the official value based on the file name as a fallback + elif args.get("name", "").lower() == "rohre_und_sperren" or \ + args.get("name", "").lower() == "rohre-und-sperren": + newFeat.SetField("official", True) if self.IsFieldSet(newFeat, "z") and \ self.IsFieldSet(newFeat, "rated_level"): - self.handled("Freibord_m") + fname = self.searchField("freibord(_m){0,1}$") + self.handled(fname) z = newFeat.GetFieldAsDouble("z") rl = newFeat.GetFieldAsDouble("rated_level") newFeat.SetField("freeboard", z - rl) return newFeat +class HWSLines(HWSPoints): + # TODO: GEOM_target, GEOM_rated_level, dike_km_from, dike_km_to + fieldmap = { + "name$" : "name", + "quelle$" : "source", + "anmerkung$" : "description", + 
"stand$" : "status_date", + "verband$" : "agency", + "Bereich$" : "range", + } + + def getPath(self, base): + return "%s/%s" % (base, PATH) + + def getTablename(self): + return "hws_lines" + + def getName(self): + return "HWS_LINES" + + def isGeometryValid(self, geomType): + return geomType == ogr.wkbLineString or geomType == ogr.wkbLineString25D + + def isShapeRelevant(self, name, path): + shp = ogr.Open(path) + return self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) + + def createNewFeature(self, featureDef, feat, **args): + newFeat = HWSPoints.createNewFeature(self, featureDef, feat, **args) + geometry = feat.GetGeometryRef() + geometry.SetCoordinateDimension(3) + newFeat.SetGeometry(geometry) + + return newFeat + +
--- a/flys-backend/contrib/shpimporter/importer.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/importer.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,21 +1,24 @@ try: - from osgeo import ogr -except ImportErrror: - import ogr -import osr -import shpimporter + from osgeo import ogr, osr +except ImportError: + import ogr, osr import utils +import re +import logging + +logger = logging.getLogger("importer") class Importer: - def __init__(self, config, dbconn): - self.config = config + def __init__(self, river_id, dbconn, dry_run): + self.river_id = river_id self.dbconn = dbconn - self.river_id = config.river_id + self.dry_run = dry_run self.dest_srs = osr.SpatialReference() - self.dest_srs.ImportFromEPSG(config.target_srs) + self.dest_srs.ImportFromEPSG(31467) self.handled_fields = [] self.tracking_import = False + self.srcLayer = None def getKind(self, path): raise NotImplementedError("Importer.getKind is abstract!") @@ -30,10 +33,39 @@ raise NotImplementedError("Importer.getTablename is abstract!") def IsFieldSet(self, feat, name): + if not name: + return False if feat.GetFieldIndex(name) == -1: return False # Avoids an Error in IsFieldSet return feat.IsFieldSet(feat.GetFieldIndex(name)) + def searchField(self, regex): + """ + Searches for a field in the current src layer that matches + the expression regex. + Throws an exception if more then one field matches + @param feat: The feature to search for attributes + @param regex: The regex to look for + + @returns: The field name as a string + """ + + if not hasattr(self.srcLayer, "fieldnames"): + self.srcLayer.fieldnames = [] + for i in range(0, self.srcLayer.GetLayerDefn().GetFieldCount()): + self.srcLayer.fieldnames.append( + self.srcLayer.GetLayerDefn().GetFieldDefn(i).GetNameRef()) + + result = None + for name in self.srcLayer.fieldnames: + match = re.match(regex, name, re.IGNORECASE) + if match: + if result: + raise Exception("More then one field matches: %s" % regex) + else: + result = match.group(0) + return result + def IsDoubleFieldSet(self, feat, name): try: isset = feat.GetFieldAsDouble(name) @@ -46,20 +78,23 @@ def walkOverShapes(self, shape): (name, path) = shape - if not self.isShapeRelevant(name, path): - shpimporter.INFO("Skip shapefile '%s'" % path) - return shp = ogr.Open(shape[1]) if shp is None: - shpimporter.ERROR("Shapefile '%s' could not be opened!" % path) + logger.error("Shapefile '%s' could not be opened!" % path) return - shpimporter.INFO("Processing shapefile '%s'" % path) + if not self.isShapeRelevant(name, path): + logger.info("Skip shapefile: '%s' of Type: %s" % (path, + utils.getWkbString(shp.GetLayerByName(name).GetGeomType()))) + return + + + logger.info("Processing shapefile '%s'" % path) srcLayer = shp.GetLayerByName(name) if srcLayer is None: - shpimporter.ERROR("Layer '%s' was not found!" % name) + logger.error("Layer '%s' was not found!" % name) return return self.shape2Database(srcLayer, name, path) @@ -69,11 +104,12 @@ src_srs = geometry.GetSpatialReference() if src_srs is None: - shpimporter.ERROR("No source SRS given! No transformation possible!") + logger.error("No source SRS given! No transformation possible!") return feat transformer = osr.CoordinateTransformation(src_srs, self.dest_srs) - geometry.Transform(transformer) + if geometry.Transform(transformer): + return None return feat @@ -90,15 +126,19 @@ """ Checks the mapping dictonary for key value pairs to copy from the source to the destination feature. 
+ The keys can be reguar expressions that are matched + agains the source fieldnames The Key is the attribute of the source feature to be copied into the target attribute named by the dict's value. """ self.tracking_import = True - self.handled_fields.extend(mapping.keys()) for key, value in mapping.items(): - if src.GetFieldIndex(key) == -1: + realname = self.searchField(key) + if realname == None: continue + if not realname in self.handled_fields: + self.handled_fields.append(realname) # 0 OFTInteger, Simple 32bit integer # 1 OFTIntegerList, List of 32bit integers # 2 OFTReal, Double Precision floating point @@ -111,31 +151,32 @@ # 9 OFTDate, Date # 10 OFTTime, Time # 11 OFTDateTime, Date and Time - if src.IsFieldSet(src.GetFieldIndex(key)): - if src.GetFieldType(key) == 2: - target.SetField(value, src.GetFieldAsDouble(key)) + if src.IsFieldSet(src.GetFieldIndex(realname)): + if src.GetFieldType(realname) == 2: + target.SetField(value, src.GetFieldAsDouble(realname)) else: - target.SetField(value, src.GetField(key)) + target.SetField(value, utils.getUTF8(src.GetField(realname))) def shape2Database(self, srcLayer, name, path): destLayer = self.dbconn.GetLayerByName(self.getTablename()) if srcLayer is None: - shpimporter.ERROR("Shapefile is None!") + logger.error("Shapefile is None!") return -1 if destLayer is None: - shpimporter.ERROR("No destination layer given!") + logger.error("No destination layer given!") return -1 count = srcLayer.GetFeatureCount() - shpimporter.DEBUG("Try to add %i features to database." % count) + logger.debug("Try to add %i features to database." % count) srcLayer.ResetReading() + self.srcLayer = srcLayer geomType = -1 success = 0 - unsupported = 0 + unsupported = {} creationFailed = 0 featureDef = destLayer.GetLayerDefn() @@ -143,7 +184,7 @@ geom = feat.GetGeometryRef() if geom is None: - shpimporter.DEBUG("Unkown Geometry reference for feature") + logger.debug("Unkown Geometry reference for feature") continue geomType = geom.GetGeometryType() @@ -151,25 +192,31 @@ if self.isGeometryValid(geomType): newFeat = self.createNewFeature(featureDef, feat, - name=name, + name=utils.getUTF8(name), path=path) if newFeat is not None: newFeat.SetField("path", utils.getUTF8Path(path)) newFeat = self.transform(newFeat) - res = destLayer.CreateFeature(newFeat) - if res is None or res > 0: - shpimporter.ERROR("Unable to insert feature. Error: %r" % res) + if newFeat: + res = destLayer.CreateFeature(newFeat) + if res is None or res > 0: + logger.error("Unable to insert feature. 
Error: %r" % res) + else: + success = success + 1 else: - success = success + 1 + logger.error("Could not transform feature: %s " % feat.GetFID()) + creationFailed += 1 else: creationFailed = creationFailed + 1 else: - unsupported = unsupported + 1 + unsupported[utils.getWkbString(geomType)] = \ + unsupported.get(utils.getWkbString(geomType), 0) + 1 - shpimporter.INFO("Inserted %i features" % success) - shpimporter.INFO("Failed to create %i features" % creationFailed) - shpimporter.INFO("Found %i unsupported features" % unsupported) + logger.info("Inserted %i features" % success) + logger.info("Failed to create %i features" % creationFailed) + for key, value in unsupported.items(): + logger.info("Found %i unsupported features of type: %s" % (value, key)) if self.tracking_import: unhandled = [] @@ -179,14 +226,14 @@ unhandled.append(act_field) if len(unhandled): - shpimporter.INFO("Did not import values from fields: %s " % \ + logger.info("Did not import values from fields: %s " % \ " ".join(unhandled)) try: - if self.config.dry_run > 0: + if self.dry_run: return geomType destLayer.CommitTransaction() except e: - shpimporter.ERROR("Exception while committing transaction.") + logger.error("Exception while committing transaction.") return geomType
--- a/flys-backend/contrib/shpimporter/km.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/km.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- a/flys-backend/contrib/shpimporter/lines.py Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,56 +0,0 @@ -import ogr - -from importer import Importer - -TABLE_NAME="lines" -PATH="Geodaesie/Linien" -NAME="Lines" - - -class Line(Importer): - - def getPath(self, base): - return "%s/%s" % (base, PATH) - - - def getTablename(self): - return TABLE_NAME - - - def getName(self): - return NAME - - - def isGeometryValid(self, geomType): - return geomType == 2 or geomType == -2147483646 - - - def isShapeRelevant(self, name, path): - return True - - - def createNewFeature(self, featureDef, feat, **args): - newFeat = ogr.Feature(featureDef) - geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(2) - - newFeat.SetGeometry(geometry) - newFeat.SetField("name", args['name']) - - if self.IsFieldSet(feat, "river_id"): - newFeat.SetField("river_id", feat.GetField("river_id")) - else: - newFeat.SetField("river_id", self.river_id) - - if self.IsFieldSet(feat, "TYP"): - newFeat.SetField("kind", feat.GetFieldAsDouble("TYP")) - else: - newFeat.SetField("kind", "DAMM") - - if self.IsFieldSet(feat, "Z"): - newFeat.SetField("z", feat.GetFieldAsDouble("Z")) - else: - newFeat.SetField("z", 9999) - - return newFeat -
--- a/flys-backend/contrib/shpimporter/run.sh Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/run.sh Thu Feb 28 11:48:17 2013 +0100 @@ -20,7 +20,6 @@ SKIP_HYDR_BOUNDARIES=0 SKIP_HWS=0 SKIP_GAUGE_LOCATION=0 -SKIP_CATCHMENTS=0 SKIP_UESG=0 exec python shpimporter.py \ @@ -41,6 +40,5 @@ --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \ --skip_hws $SKIP_HWS \ --skip_gauge_locations $SKIP_GAUGE_LOCATION \ - --skip_catchments $SKIP_CATCHMENTS \ --skip_uesgs $SKIP_UESG
--- a/flys-backend/contrib/shpimporter/shpimporter.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/shpimporter.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,14 +1,16 @@ try: from osgeo import ogr -except ImportErrror: +except ImportError: import ogr import utils, optparse +import sys +import os +import logging from uesg import UESG from axis import Axis from km import KM -from lines import Line from fixpoints import Fixpoint from buildings import Building from crosssectiontracks import CrosssectionTrack @@ -16,44 +18,33 @@ from boundaries import HydrBoundary, HydrBoundaryPoly from hws import HWSLines, HWSPoints from gauges import GaugeLocation -from catchments import Catchment - - -VERBOSE_DEBUG=2 -VERBOSE_INFO=1 - - -def DEBUG(msg): - config = getConfig() - if config.verbose >= VERBOSE_DEBUG: - print "DEBUG: %s" % msg - -def INFO(msg): - config = getConfig() - if config.verbose >= VERBOSE_INFO: - print "INFO: %s" % msg +from dgm import insertRiverDgm -def ERROR(msg): - config = getConfig() - print "ERROR: %s" % msg +logger = logging.getLogger("shpimporter") +def initialize_logging(level): + """Initializes the logging system""" + root = logging.getLogger() + root.setLevel(level) + hdlr = logging.StreamHandler() + fmt = logging.Formatter("%(levelname)s %(name)s: %(message)s") + hdlr.setFormatter(fmt) + root.addHandler(hdlr) -def getImporters(config, dbconn): +def getImporters(river_id, dbconn, dry_run): return [ - Axis(config, dbconn), - KM(config, dbconn), - CrosssectionTrack(config, dbconn), - Line(config, dbconn), - Fixpoint(config, dbconn), - Building(config, dbconn), - Floodplain(config, dbconn), - HydrBoundary(config, dbconn), - HydrBoundaryPoly(config, dbconn), - HWSLines(config, dbconn), - HWSPoints(config, dbconn), - GaugeLocation(config, dbconn), - Catchment(config, dbconn), - UESG(config, dbconn) + Axis(river_id, dbconn, dry_run), + KM(river_id, dbconn, dry_run), + CrosssectionTrack(river_id, dbconn, dry_run), + Fixpoint(river_id, dbconn, dry_run), + Building(river_id, dbconn, dry_run), + Floodplain(river_id, dbconn, dry_run), + HydrBoundary(river_id, dbconn, dry_run), + HydrBoundaryPoly(river_id, dbconn, dry_run), + HWSLines(river_id, dbconn, dry_run), + HWSPoints(river_id, dbconn, dry_run), + GaugeLocation(river_id, dbconn, dry_run), + UESG(river_id, dbconn, dry_run) ] @@ -64,7 +55,7 @@ parser.add_option("--host", type="string") parser.add_option("--user", type="string") parser.add_option("--password", type="string") - parser.add_option("--river_id", type="int") + parser.add_option("--river_name", type="string") parser.add_option("--verbose", type="int", default=1) parser.add_option("--dry_run", type="int", default=0) parser.add_option("--ogr_connection", type="string") @@ -72,33 +63,36 @@ parser.add_option("--skip_hydr_boundaries", type="int") parser.add_option("--skip_buildings", type="int") parser.add_option("--skip_crosssections", type="int") - parser.add_option("--skip_lines", type="int") parser.add_option("--skip_fixpoints", type="int") parser.add_option("--skip_floodplains", type="int") parser.add_option("--skip_hws_lines", type="int") parser.add_option("--skip_hws_points", type="int") parser.add_option("--skip_gauge_locations", type="int") - parser.add_option("--skip_catchments", type="int") parser.add_option("--skip_kms", type="int") parser.add_option("--skip_uesgs", type="int") + parser.add_option("--skip_dgm", type="int") (config, args) = parser.parse_args() + if config.verbose > 1: + initialize_logging(logging.DEBUG) + elif config.verbose == 1: + 
initialize_logging(logging.INFO) + else: + initialize_logging(logging.WARN) + if config.directory == None: - ERROR("No river directory specified!") + logger.error("No river directory specified!") raise Exception("Invalid config") if not config.ogr_connection: if not config.host: - ERROR("No database host specified!") + logger.error("No database host specified!") raise Exception("Invalid config") if not config.user: - ERROR("No databaser user specified!") + logger.error("No databaser user specified!") raise Exception("Invalid config") if not config.password: - ERROR("No password specified!") + logger.error("No password specified!") raise Exception("Invalid config") - if config.river_id == None: - ERROR("No river id specified!") - raise Exception("Invalid config") return config @@ -114,20 +108,17 @@ return True elif config.skip_crosssections == 1 and isinstance(importer, CrosssectionTrack): return True - elif config.skip_lines == 1 and isinstance(importer, Line): - return True elif config.skip_fixpoints == 1 and isinstance(importer, Fixpoint): return True elif config.skip_floodplains == 1 and isinstance(importer, Floodplain): return True - elif config.skip_hws_points == 1 and isinstance(importer, HWSPoints): - return True elif config.skip_hws_lines == 1 and isinstance(importer, HWSLines): return True + elif config.skip_hws_points == 1 and isinstance(importer, HWSPoints) and \ + not isinstance(importer, HWSLines): + return True elif config.skip_gauge_locations == 1 and isinstance(importer, GaugeLocation): return True - elif config.skip_catchments == 1 and isinstance(importer, Catchment): - return True elif config.skip_kms == 1 and isinstance(importer, KM): return True elif config.skip_uesgs == 1 and isinstance(importer, UESG): @@ -135,7 +126,6 @@ return False - def main(): config=None try: @@ -144,48 +134,109 @@ return -1 if config == None: - ERROR("Unable to read config from command line!") + logger.error("Unable to read config from command line!") return if config.dry_run > 0: - INFO("You enable 'dry_run'. No database transaction will take place!") + logger.info("You enable 'dry_run'. No database transaction will take place!") if config.ogr_connection: connstr = config.ogr_connection else: connstr = 'OCI:%s/%s@%s' % (config.user, config.password, config.host) + oracle = False # Marker if oracle is used. + if 'OCI:' in connstr: + oracle = True + try: + import cx_Oracle as dbapi + raw_connstr=connstr.replace("OCI:", "") + except ImportError: + logger.error("Module cx_Oracle not found in: %s\n" + "Neccessary to connect to a Oracle Database.\n" + "Please refer to the installation " + "documentation." % sys.path) + return -1 + + else: # Currently only support for oracle and postgres + try: + import psycopg2 as dbapi + raw_connstr=connstr.replace("PG:", "") + except ImportError: + logger.error("Module psycopg2 not found in: %s\n" + "Neccessary to connect to a Posgresql Database.\n" + "Please refer to the installation " + "documentation." 
% sys.path) + return -1 + + dbconn_raw = dbapi.connect(raw_connstr) dbconn = ogr.Open(connstr) if dbconn == None: - ERROR("Could not connect to database %s" % connstr) + logger.error("Could not connect to database %s" % connstr) return -1 - importers = getImporters(config, dbconn) types = {} - for importer in importers: - if skip_importer(config, importer): - INFO("Skip import of '%s'" % importer.getName()) - continue - - INFO("Start import of '%s'" % importer.getName()) - - shapes = utils.findShapefiles(importer.getPath(config.directory)) - DEBUG("Found %i Shapefiles" % len(shapes)) + directories = [] + if not config.river_name: + for file in [os.path.join(config.directory, d) for d in \ + os.listdir(config.directory)]: + if os.path.isdir(file): + directories.append(file) + else: + directories.append(config.directory) - for shpTuple in shapes: - geomType = importer.walkOverShapes(shpTuple) - try: - if geomType is not None: - num = types[geomType] - types[geomType] = num+1 - except: - types[geomType] = 1 + for directory in directories: + if not config.river_name: + river_name = utils.getUTF8Path( + os.path.basename(os.path.normpath(directory))) + else: + river_name = config.river_name + river_id = utils.getRiverId(dbconn_raw, river_name, oracle) - for key in types: - DEBUG("%i x geometry type %s" % (types[key], key)) + if not river_id: + logger.info("Could not find river in database. Skipping: %s" + % river_name) + continue + else: + logger.info("Importing River: %s" % river_name) + for importer in getImporters(river_id, dbconn, config.dry_run): + if skip_importer(config, importer): + logger.info("Skip import of '%s'" % importer.getName()) + continue + + logger.info("Start import of '%s'" % importer.getName()) + + shapes = utils.findShapefiles(importer.getPath(config.directory)) + logger.debug("Found %i Shapefiles" % len(shapes)) + + for shpTuple in shapes: + geomType = importer.walkOverShapes(shpTuple) + try: + if geomType is not None: + num = types[geomType] + types[geomType] = num+1 + except: + types[geomType] = 1 + + for key in types: + logger.debug("%i x geometry type %s" % (types[key], key)) + + if not config.skip_dgm: + dgmfilename = os.path.join( + config.directory, "..", "DGMs.csv") + if not os.access(dgmfilename, os.R_OK) or not \ + os.path.isfile(dgmfilename): + logger.info("Could not find or access DGM file: %s \n" + "Skipping DGM import." % dgmfilename) + else: + logger.info("Inserting DGM meta information in 'dem' table.") + insertRiverDgm(dbconn_raw, dgmfilename, river_name, + config.dry_run, oracle) + else: + logger.info("Skip import of DGM.") if __name__ == '__main__': main()
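main() above now opens a second, raw DB-API connection next to the OGR one and picks the driver from the connection string: an OCI: prefix selects cx_Oracle, otherwise psycopg2 is used for the PG: strings built for PostgreSQL, and the prefix is stripped before the rest is handed to the driver. A hedged sketch of that dispatch (the connection strings are placeholders, not real credentials):

    # Hedged sketch of the DB-API driver dispatch in main(); the connection
    # strings in the comments are placeholders.
    def open_raw_connection(connstr):
        if "OCI:" in connstr:
            import cx_Oracle as dbapi              # Oracle path
            raw = connstr.replace("OCI:", "")      # e.g. "flys/secret@localhost"
        else:
            import psycopg2 as dbapi               # PostgreSQL path
            raw = connstr.replace("PG:", "")       # e.g. "dbname=flys host=localhost"
        return dbapi.connect(raw)

    # dbconn_raw = open_raw_connection("PG:dbname=flys host=localhost user=flys")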
--- a/flys-backend/contrib/shpimporter/uesg.py Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/contrib/shpimporter/uesg.py Thu Feb 28 11:48:17 2013 +0100 @@ -1,4 +1,7 @@ -import ogr +try: + from osgeo import ogr +except ImportError: + import ogr from importer import Importer
--- a/flys-backend/contrib/shpimporter/utils.py	Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-backend/contrib/shpimporter/utils.py	Thu Feb 28 11:48:17 2013 +0100
@@ -1,8 +1,17 @@
 import os
 import sys
-from shpimporter import DEBUG, INFO, ERROR
+import logging
+
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
+
+logger = logging.getLogger("utils")

 SHP='.shp'
+SQL_SELECT_RIVER_ID="SELECT id FROM rivers WHERE name = %s"
+SQL_SELECT_RIVER_ID_ORA="SELECT id FROM rivers WHERE name = :s"

 def findShapefiles(path):
     shapes = []
@@ -11,7 +20,7 @@
         if len(files) == 0:
             continue

-        DEBUG("Processing directory '%s' with %i files " % (root, len(files)))
+        logger.debug("Processing directory '%s' with %i files " % (root, len(files)))

         for f in files:
             idx = f.find(SHP)
@@ -20,6 +29,36 @@

     return shapes

+def getRiverId(dbconn, name, oracle):
+    """
+    Returns the id of the river "name".
+    dbconn must be a Python DB-API compliant database connection object.
+    """
+    cur = dbconn.cursor()
+    if oracle:
+        # This is stupid and should not be necessary. But I don't
+        # know how to make it work both ways. aheinecke - 02/2013
+        stmt = SQL_SELECT_RIVER_ID_ORA
+    else:
+        stmt = SQL_SELECT_RIVER_ID
+    cur.execute(stmt, (name,))
+    row = cur.fetchone()
+    if row:
+        return row[0]
+    else:
+        return 0
+
+def getUTF8(string):
+    """
+    Tries to convert the string to a UTF-8 encoding by first checking if it
+    is UTF-8 and then trying cp1252
+    """
+    try:
+        return unicode.encode(unicode(string, "UTF-8"), "UTF-8")
+    except UnicodeDecodeError:
+        # Probably European Windows names so lets try again
+        return unicode.encode(unicode(string, "cp1252"), "UTF-8")
+
 def getUTF8Path(path):
     """
     Tries to convert path to utf-8 by first checking the filesystemencoding
@@ -31,3 +70,30 @@
     except UnicodeDecodeError:
         # Probably European Windows names so lets try again
         return unicode.encode(unicode(path, "cp1252"), "UTF-8")
+
+WKB_MAP = {
+    ogr.wkb25Bit : 'wkb25Bit',
+    ogr.wkbGeometryCollection : 'wkbGeometryCollection',
+    ogr.wkbGeometryCollection25D : 'wkbGeometryCollection25D',
+    ogr.wkbLineString : 'wkbLineString',
+    ogr.wkbLineString25D : 'wkbLineString25D',
+    ogr.wkbLinearRing : 'wkbLinearRing',
+    ogr.wkbMultiLineString : 'wkbMultiLineString',
+    ogr.wkbMultiLineString25D : 'wkbMultiLineString25D',
+    ogr.wkbMultiPoint : 'wkbMultiPoint',
+    ogr.wkbMultiPoint25D : 'wkbMultiPoint25D',
+    ogr.wkbMultiPolygon : 'wkbMultiPolygon',
+    ogr.wkbMultiPolygon25D : 'wkbMultiPolygon25D',
+    ogr.wkbNDR : 'wkbNDR',
+    ogr.wkbNone : 'wkbNone',
+    ogr.wkbPoint : 'wkbPoint',
+    ogr.wkbPoint25D : 'wkbPoint25D',
+    ogr.wkbPolygon : 'wkbPolygon',
+    ogr.wkbPolygon25D : 'wkbPolygon25D',
+    ogr.wkbUnknown : 'wkbUnknown',
+    ogr.wkbXDR : 'wkbXDR'
+}
+
+def getWkbString(type):
+    return WKB_MAP.get(type) or "Unknown"
+
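As an illustration: the Oracle/PostgreSQL branch in getRiverId() exists because cx_Oracle expects named ':name' placeholders while psycopg2 expects '%s'. A minimal sketch of one possible way to avoid the duplicated statements, assuming the imported DB-API module object is passed along (hypothetical helper, not part of the importer):

    def get_river_id(dbconn, dbapi, name):
        """Look up a river id, picking the placeholder from the driver.

        dbapi is the imported DB-API module (cx_Oracle or psycopg2);
        per PEP 249 it announces its style in dbapi.paramstyle.
        """
        if dbapi.paramstyle == "named":          # cx_Oracle
            stmt = "SELECT id FROM rivers WHERE name = :name"
            params = {"name": name}
        else:                                    # psycopg2 uses 'pyformat'
            stmt = "SELECT id FROM rivers WHERE name = %s"
            params = (name,)
        cur = dbconn.cursor()
        cur.execute(stmt, params)
        row = cur.fetchone()
        return row[0] if row else 0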
--- a/flys-backend/doc/documentation/de/importer-geodaesie.tex	Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-backend/doc/documentation/de/importer-geodaesie.tex	Thu Feb 28 11:48:17 2013 +0100
@@ -1,10 +1,14 @@
 \section{Geodatenimport}

-Der Geodaten Importer ist ein in Python geschriebenes Kommandozeilen Tool zum
-Import von Shapefiles in eine Datenbank. Zum Lesen der Shapefiles und zum
-Schreiben der Geodaten in die Datenbank wird GDAL verwendet. Der Import in eine
-Oracle Datenbank erfordert, dass GDAL und GDAL Python Bindungs mit
-Oracle Unterstützung installiert sind. Weitere Details hierzu befinden sich im
+Der Geodaten Importer ist ein in der Programmiersprache Python
+geschriebenes Kommandozeilen Werkzeug zum Import von Shapefiles in
+eine Datenbank.
+Zum Lesen der Shapefiles und zum Schreiben der Geodaten
+in die Datenbank wird die GDAL Bibliothek verwendet.
+Um Daten in eine Oracle Datenbank zu importieren, ist es nötig, dass
+GDAL und die GDAL Python Bindings mit Oracle Unterstützung installiert
+sind. Bei der Verwendung von PostgreSQL entfällt dieser Schritt.
+Weitere Details hierzu befinden sich im
 Kapitel \ref{Systemanforderungen} und \ref{Installationsanleitung}.

 Der Importer kann mit einem Shellscript von der Kommandozeile gestartet werden
@@ -13,23 +17,32 @@
 importiert werden sollen. Für jede Klasse gibt es einen speziellen Parser, der
 die speziellen Attribute eines Shapefiles liest und in die entsprechende
 Relation der Datenbank schreibt. Die Parser sind speziell auf das
-Dateisystem der BfG ausgerichtet. So wird z.B. erwartet, dass die Shapefiles der
+Dateisystem der BfG ausgerichtet. So wird beispielsweise erwartet, dass die Shapefiles der
 Gewässerachse im Ordner $Geodaesie/Flussachse+km$ liegen. Weitere Informationen
 zu den einzelnen Parsern sind dem nächsten Kapitel \ref{Beschreibung der Parser} zu
 entnehmen.

 Der Erfolg oder Misserfolg eines Shape-Imports wird je nach Konfiguration im
 Logfile vermerkt. Folgende Einträge können dem Logfile entnommen werden:

+%TODO etwas zum srs schreiben.
+
 \textbf{INFO: Inserted 4 features}
 \\Gibt die Anzahl der erfolgreich importierten Features an.\\

 \textbf{INFO: Failed to create 2 features}
 \\Gibt die Anzahl der Features an, die nicht importiert werden konnten.\\

-\textbf{INFO: Found 3 unsupported features}
+\textbf{INFO: Found 3 unsupported features of type: wkbMultiLineString}
 \\Gibt die Anzahl der Features an, die aufgrund ihres Datentyps nicht importiert
-werden konnten. Z.B: es werden Linien erwartet, im Shapefile sind jedoch
-Polygone enthalten.\\
+werden konnten. Wenn etwa Punkte erwartet wurden, sich im Shapefile aber
+Polygone befanden.\\
+
+\textbf{INFO: Did not import values from fields: TYP ID GRUENDUNG BHW}
+\\Manche Importer versuchen, neben der geographischen Information weitere
+Felder in die Datenbank einzulesen. Um festzustellen, ob ein Feld aufgrund
+von Tippfehlern oder unterschiedlicher Schreibweise nicht importiert wurde,
+gibt diese Information Auskunft darüber, welche Felder aus der Shape-Datei
+nicht verwendet wurden.\\

 \textbf{ERROR: No source SRS given!
No transformation possible!} \\Das Shapefile enthält keine Information, in welcher Projektion die Geometrien @@ -88,7 +101,7 @@ \begin{tabular}[t]{ll} Pfad & Hydrologie/Hydr.Grenzen/Linien \\ Geometrie & LINESTRING, POLYGON \\ -Attribute & name, kind \\ +Attribute & name, kind, sectie, sobek \\ \end{tabular} \subsubsection{Bauwerke} @@ -149,8 +162,9 @@ \hspace{1cm} \begin{tabular}[t]{ll} Pfad & Hydrologie/HW-Schutzanlagen \\ -Geometrie & LINESTRING \\ -Attribute & TYP, Bauart, Name, name \\ +Geometrie & LINESTRING, POINT \\ +Attribute & name, source, description, status\_date, agency, + dike\_km, range, z\_target, rated\_level, z \\ \end{tabular} @@ -163,19 +177,6 @@ \end{tabular} -\subsubsection{Linien} -\hspace{1cm} -\begin{tabular}[t]{ll} -Pfad & Geodaesie/Linien \\ -Geometrie & LINESTRING, MULTILINESTRING \\ -Attribute & name, TYP, Z \\ - -Anmerkung & Wenn kein Attribut 'TYP' definiert ist, wird standardmäßig der Wert \\ - & 'DAMM' angenommen. Fehlt ein Attribut 'Z' wird '9999' als Höhe \\ - & angenommen. \\ -\end{tabular} - - \subsubsection{Überschwemmungsfläche} \hspace{1cm} \begin{tabular}[t]{ll} @@ -184,79 +185,6 @@ Attribut & name, diff, count, area, perimeter \\ \end{tabular} - -\subsection{Systemanforderungen} -\label{Systemanforderungen} -\begin{itemize} - \item Oracle Datenbank inkl. Schema für FLYS - \item GDAL Binding für Python mit Oracle Support - \item ogr2ogr - \item Python $>=$ 2.6 -\end{itemize} - - -\subsection{Installationsanleitung} -\label{Installationsanleitung} -\begin{itemize} - - \item Python\\ - Zum Starten des Importers ist es notwendig Python zu installieren. Dies können - Sie mit folgendem Befehl auf der Kommandozeile erledigen: - - \begin{lstlisting} - zypper in python - \end{lstlisting} - - \item Oracle Instantclient\\ - Der Oracle Instantclient 11.2 wird benötigt, damit der Importer mittels Python - und GDAL in die bestehende Oracle Datenbank schreiben kann. Dazu ist es - erforderlich, folgende Archive von Oracle herunterzuladen. Zu finden sind die - folgenden Pakete unter\\ - \href{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html} - - \begin{itemize} - \item instantclient-basic-linux-x86-64-11.2.0.2.0.zip - \item instantclient-sdk-linux-x86-64-11.2.0.2.0.zip - \item instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip - \end{itemize} - - Anschließend führen Sie folgende Befehle auf der Kommandozeile aus: - - \begin{lstlisting} - - mkdir /opt - - unzip ~/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d /opt - unzip ~/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d /opt - unzip ~/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d /opt - - mkdir /opt/instantclient_11_2/lib - cd /opt/instantclient_11_2/lib - ln -s ../libclntsh.so.11.1 . - ln -s ../libclntsh.so.11.1 libclntsh.so - ln -s ../libnnz11.so . - ln -s ../libocci.so.11.1 . - ln -s ../libocci.so.11.1 libocci.so - ln -s ../libociei.so . - ln -s ../libocijdbc11.so . - ln -s ../libsqlplusic.so . - ln -s ../libsqlplus.so . - - rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal1180-1.8.0-intevation1.x86_64.rpm - rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal180-devel-1.8.0-intevation1.x86_64.rpm - rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/gdal180-1.8.0-intevation1.x86_64.rpm - - \end{lstlisting} - - Sollten keine Fehler aufgetreten sein, haben Sie den \textit{Oracle - Instantclient 11.2} erfolgreich entpackt und im Dateisystem unter - \textit{/opt/instantclient\_11\_2} abgelegt. 
Mit den Befehlen $rpm -i --nodeps$
- haben Sie anschließend die notwendigen Bindings installiert, damit der Importer
- die Geodaten in die Oracle Datenbank schreiben kann.
-
-\end{itemize}
-
-
 \subsection{Konfiguration}
 \label{Konfiguration}
 Der Geodaten Importer kann über die Datei \textit{contrib/run\_geo.sh}
@@ -267,12 +195,11 @@

 \textbf{RIVER\_PATH}
 \\Der Pfad zum Gewässer im Dateisystem.

-\textbf{RIVER\_ID}
-\\Die Datenbank ID des zu importierenden Gewässers.
-
-\textbf{TARGET\_SRS}
-\\Das EPSG Referenzsystem in das die Geodaten beim Import projeziert werden
-sollen.
+\textbf{RIVER\_NAME}
+\\Der Datenbankname des zu importierenden Gewässers. Wird dieser Parameter
+nicht übergeben, werden die Ordnernamen in dem mit dem Parameter RIVER\_PATH
+angegebenen Verzeichnis als Flussnamen interpretiert und es wird versucht,
+diese zu importieren.

 \textbf{HOST}
 \\Der Host der Datenbank.
@@ -312,9 +239,6 @@
 \textbf{SKIP\_CROSSSECTIONS}
 \\Bei gesetztem Wert `1` werden keine Querprofilespuren importiert.

-\textbf{SKIP\_LINES}
-\\Bei gesetztem Wert `1` werden keine Linien importiert.
-
 \textbf{SKIP\_FIXPOINTS}
 \\Bei gesetztem Wert `1` werden keine Festpunkte importiert.

@@ -342,6 +266,9 @@
 \textbf{SKIP\_UESG}
 \\Bei gesetztem Wert `1` werden keine Überschwemmungsflächen importiert.

+\textbf{SKIP\_DGM}
+\\Bei gesetztem Wert `1` werden keine Informationen über Digitale Geländemodelle importiert.
+

 \subsection{Starten des Geodaten Importers}
 \label{Starten des Geodaten Importers}
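For illustration: the geometry type named in the new "unsupported features of type: ..." log entry is the readable form of OGR's numeric WKB code, as produced by getWkbString() from utils.py above. A minimal sketch, assuming the GDAL Python bindings are installed; 'hws.shp' is a hypothetical input file:

    from osgeo import ogr
    from utils import getWkbString   # WKB code -> readable name (utils.py)

    ds = ogr.Open("hws.shp")                 # hypothetical shapefile
    layer = ds.GetLayer(0)
    for feature in layer:
        geom_type = feature.GetGeometryRef().GetGeometryType()
        if geom_type != ogr.wkbPoint:
            # corresponds to the log line documented above
            print("Found unsupported feature of type: %s"
                  % getWkbString(geom_type))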
--- a/flys-backend/doc/documentation/de/importer-hydr-morph.tex Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/documentation/de/importer-hydr-morph.tex Thu Feb 28 11:48:17 2013 +0100 @@ -107,7 +107,7 @@ ausgeschlossen. \subsubsection{Profilspuren (*.w80-Dateien)} -Der Import von W80-Profilspuren kann mit \textbf{-Dflys.backend.importer.skip.w80s=true} +Der Import von W80-Profilspuren kann mit \textbf{-Dflys.backend.importer.skip.w80=true} unterdrückt werden. Es werden rekursiv alle *.w80-Dateien aus \textit{../../..} relativ zur gewaesser.wst-Datei betrachtet. Vor dem Import werden mit Hilfe eines Längen- und eines MD5-Summen-Vergleichs inhaltliche Duplikate @@ -760,7 +760,7 @@ gestartet. Dazu führen folgenden Befehl aus:\\ \begin{lstlisting} - contrib/run_hydr_morph.sh + ./run_hydr_morph.sh \end{lstlisting} Nachdem der Prompt der Konsole zurückkehrt, ist der Import abgeschlossen oder es
--- a/flys-backend/doc/documentation/de/importer-manual.tex Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/documentation/de/importer-manual.tex Thu Feb 28 11:48:17 2013 +0100 @@ -26,9 +26,9 @@ % Document DATE and VERSION % set these values when releasing a new version -\newcommand{\documentdate}{30. August 2012} -\newcommand{\documentversion}{1.0} -\newcommand{\documentrevision}{rev5303} +\newcommand{\documentdate}{19. Februar 2013} +\newcommand{\documentversion}{1.1} +\newcommand{\documentrevision}{rev5062} \newcommand{\documentID}{importer-manual.tex} %----------------------------------------------
--- a/flys-backend/doc/documentation/de/overview.tex	Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-backend/doc/documentation/de/overview.tex	Thu Feb 28 11:48:17 2013 +0100
@@ -49,8 +49,15 @@
 Bitte beachten Sie, dass diese Werkzeuge nicht zur Installtion und zum Betrieb
 der Software selbst notwendig sind!

+\subsection{Systemanforderungen}
+\label{Systemanforderungen}
+\begin{itemize}
+  \item Oracle oder PostgreSQL Datenbank inkl. Schema für FLYS
+  \item SUSE Enterprise Linux 11.2 SP 1
+\end{itemize}

-\subsubsection{Vorbereiten der Datenbank}
+\subsection{Installationsanleitung}
+\label{Installationsanleitung}

 Nachdem Sie das Paket nun in das Heimatverzeichnis des Nutzers auf das
 Zielsystem kopiert haben, entpacken Sie es mit folgenden Befehlen:
@@ -61,6 +68,95 @@
     cd flys-importer
 \end{lstlisting}

+\subsubsection{Java}
+Der flys-importer benötigt Java Version 6. Um diese zu installieren, laden Sie
+bitte von \url{http://www.oracle.com/technetwork/java/javase/downloads/jdk6downloads-1902814.html}
+eine aktuelle Java Version als -rpm.bin herunter. Zum Beispiel: jdk-6u41-linux-x64-rpm.bin
+
+Nach dem Herunterladen öffnen Sie eine Konsole und wechseln in das Downloadverzeichnis.
+Führen Sie nun folgende Befehle aus:
+
+ \begin{lstlisting}
+    su - # login als root
+    sh jdk-6u41-linux-x64-rpm.bin
+    <bestaetigen mit enter>
+    update-alternatives --install /usr/bin/java java /usr/java/jdk1.6.0_41/bin/java 5
+    update-alternatives --install /etc/alternatives/jre jre /usr/java/jdk1.6.0_41/jre 5
+    update-alternatives --config java
+ \end{lstlisting}
+
+\subsubsection{Python und GDAL}
+Installieren Sie nun die restlichen benötigten Pakete.
+Dazu installieren Sie zuerst einige Abhängigkeiten und anschließend die
+von der Intevation GmbH bereitgestellten speziellen Versionen von proj und libgeos.
+
+Um die Abhängigkeiten zu installieren, führen Sie bitte folgende Befehle aus:
+
+ \begin{lstlisting}
+    zypper ar http://download.opensuse.org/repositories/home:/intevation:/bfg/SLE_11/ "intevation:bfg"
+    rpm --import http://download.opensuse.org/repositories/home:/intevation:/bfg/SLE_11/repodata/repomd.xml.key
+    zypper ref # Paketliste neu laden
+    zypper in python libgeos0 libproj0 proj netcdf libnetcdf4 \
+        xerces-c libxerces-c-3_0 unixODBC postgresql-libs
+    zypper mr -d "intevation:bfg"
+    zypper ref # Paketliste neu laden
+ \end{lstlisting}
+
+%\subsubsection Oracle Instantclient\\
+%Der Oracle Instantclient 11.2.0.2.0 wird benötigt, damit der Importer mittels Python
+%und GDAL in die bestehende Oracle Datenbank schreiben kann. Wenn Sie
+%eine PostgreSQL Datenbank verwenden, können Sie diesen Schritt überspringen.
+%
+%Zur Anbindung von Oracle ist es erforderlich, folgende Archive von
+%Oracle herunterzuladen (Sie benötigen dafür ein Oracle Benutzerkonto):
+%
+%Der Oracle Instantclient 11.2 wird benötigt, damit der Importer mittels Python
+%und GDAL in die bestehende Oracle Datenbank schreiben kann. Dazu ist es
+%erforderlich, folgende Archive von Oracle herunterzuladen.
+%
+% \begin{itemize}
+% \item instantclient-basic-linux-x86-64-11.2.0.2.0.zip
+% \item instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
+% \item instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
+% \end{itemize}
+%
+%Zu finden sind die
+% Pakete unter:\\
+%\href{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
+%{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
+%
+%
+%Um diese Pakete zu installieren, öffnen Sie eine Konsole und wechseln
+%in das Verzeichnis in welches Sie die heruntergeladenen Pakete
+%gespeichert haben. (z.B.: cd /home/benutzername/Downloads )
+% Anschließend führen Sie folgende Befehle auf der Kommandozeile aus:
+%
+% \begin{lstlisting}
+%    unzip instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d /opt
+%    unzip instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d /opt
+%    unzip instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d /opt
+%
+%    mkdir /opt/instantclient_11_2/lib
+%    cd /opt/instantclient_11_2/lib
+%    ln -s ../libclntsh.so.11.1 .
+%    ln -s ../libclntsh.so.11.1 libclntsh.so
+%    ln -s ../libnnz11.so .
+%    ln -s ../libocci.so.11.1 .
+%    ln -s ../libocci.so.11.1 libocci.so
+%    ln -s ../libociei.so .
+%    ln -s ../libocijdbc11.so .
+%    ln -s ../libsqlplusic.so .
+%    ln -s ../libsqlplus.so .
+%
+%    echo "/opt/instantclient_11_2/lib/" > /etc/ld.so.conf.d/oci.conf
+%    ldconfig
+% \end{lstlisting}
+%
+%Sollten keine Fehler aufgetreten sein, haben Sie den \textit{Oracle
+% Instantclient 11.2} erfolgreich entpackt und im Dateisystem unter
+% \textit{/opt/instantclient\_11\_2} abgelegt.
+%
+\subsubsection{Vorbereiten der Datenbank}
 Bevor die Importer verwendet werden können, ist es notwendig, dass eine leere
 Oracle Datenbank vorhanden ist. Anschließend müssen folgende SQL Skripte in
 diese Datenbank eingespielt werden:
@@ -82,24 +178,27 @@
 Mittels diesem SQL Skript werden die Indizes zum geodätischen Datenbankschema\\
 hinzugefügt.

-\item import-dems.sql \\
-In diesem Skript sind Befehle zum Einfügen der digitalen Geländemodelle
-enthalten. Die Dateipfade in diesem Skript sind so anzupassen, dass sie auf die
-entsprechenden Geländemodelle im Dateisystem verweisen. Es ist notwendig die
-Pfade absolut anzugeben.
-
 \end{enumerate}

 Zum Einspielen dieser Schemata setzen Sie folgende Befehle auf der Kommandozeile
 ab. Beachten Sie, dass $sqlplus$ im Pfad liegen muss, und der Linux-Nutzer dieses
 Kommando ausführen können muss. Außerdem sind $benutzername$ und $passwort$
 entsprechend Ihres Datenbank-Zugangs anzupassen.
+SQLPlus befindet sich in /opt/instantclient\_11\_2. Um es verfügbar zu machen,
+führen Sie im Importer Verzeichnis folgende Befehle aus:
+
+\begin{lstlisting}
+export LD_LIBRARY_PATH=opt/instantclient_11_2/lib:$LD_LIBRARY_PATH
+export PATH=opt/instantclient_11_2:$PATH
+\end{lstlisting}
+
+Nun erstellen Sie das Schema:

 \begin{lstlisting}
     sqlplus benutzername/passwort @schema/oracle.sql
     sqlplus benutzername/passwort @schema/oracle-minfo.sql
     sqlplus benutzername/passwort @schema/oracle-spatial.sql
     sqlplus benutzername/passwort @schema/oracle-spatial_idx.sql
-    sqlplus benutzername/passwort @schema/import-dems.sql
 \end{lstlisting}
+
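A short sketch for illustration: after the schema scripts above have been loaded it can be worth verifying that the importer will actually reach the rivers table before starting a long import run. A minimal, hypothetical smoke test, assuming cx_Oracle is installed and the credentials match the importer configuration:

    import cx_Oracle

    # same USER/PASSWORD/HOST values the importer uses (assumed credentials)
    conn = cx_Oracle.connect("benutzername/passwort@host")
    cur = conn.cursor()
    cur.execute("SELECT count(*) FROM rivers")
    print("rivers present in schema: %d" % cur.fetchone()[0])
    conn.close()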
--- a/flys-backend/doc/schema/import-dems.sql Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,24 +0,0 @@ -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_00000_01010', 0.0, 101.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_00000_10110.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_00992_02030', 99.0, 203.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_09920_20300.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_02020_02998', 202.0, 300.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_20200_29980.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_02981_04010', 298.0, 401.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_29810_40100.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_04000_05009', 400.0, 501.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_40000_50090.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_05001_05830', 500.0, 583.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_50010_58330.grd'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00000_00058', 0.0, 6.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0000-0580.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00058_00153', 6.0, 15.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0058-0153.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 
'GRD_00153_00416', 15.0, 42.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0153-0416.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00414_01012_O', 41.0, 101.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', 'muss überarbeitet werden', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012O.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00414_01012_W', 41.0, 101.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', 'muss überarbeitet werden', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012W.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01012_01488', 101.0, 145.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1012-1488.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01488_01666', 145.0, 167.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1488-1666.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01666_01960', 167.0, 196.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1666-1960.xyz'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01960_02044', 196.0, 204.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1960-2044.XYZ'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_02044_02184', 204.0, 218.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/2044-2184.XYZ'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_02184_02420', 218.0, 242.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/525480MO.XYZ'); -INSERT INTO dem 
(river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00000_00079', 0.0, 8.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0000-0079_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00080_00204', 8.0, 20.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0080-0204_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00205_00314', 20.0, 31.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0205-0314_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00315_00541', 31.0, 54.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0315-0541_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00542_00655', 54.0, 65.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0542-0655_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00656_00828', 65.0, 83.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0656-0828_long.txt'); -INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00829_00931', 83.0, 93.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0829-0931_erweitert.txt');
--- a/flys-backend/doc/schema/oracle-drop-spatial.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/oracle-drop-spatial.sql Thu Feb 28 11:48:17 2013 +0100 @@ -13,11 +13,6 @@ DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CROSS_SECTION_TRACKS'; DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ; -DROP TRIGGER lines_trigger; -DROP TABLE lines; -DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'LINES'; -DROP SEQUENCE LINES_ID_SEQ; - DROP TRIGGER buildings_trigger; DROP TABLE buildings; DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'BUILDINGS'; @@ -42,10 +37,15 @@ DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CATCHMENT'; DROP SEQUENCE CATCHMENT_ID_SEQ; -DROP TRIGGER hws_trigger; -DROP TABLE hws; -DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS'; -DROP SEQUENCE HWS_ID_SEQ; +DROP TRIGGER hws_lines_trigger; +DROP TABLE hws_lines; +DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS_LINES'; +DROP SEQUENCE HWS_LINES_ID_SEQ; + +DROP TRIGGER hws_points_trigger; +DROP TABLE hws_points; +DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS_POINTS'; +DROP SEQUENCE HWS_POINTS_ID_SEQ; DROP TRIGGER floodmaps_trigger; DROP TABLE floodmaps; @@ -66,3 +66,9 @@ DROP TABLE gauge_location; DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'GAUGE_LOCATION'; DROP SEQUENCE GAUGE_LOCATION_ID_SEQ; + +DROP TABLE hws_kinds; +DROP TABLE sectie_kinds; +DROP TABLE sobek_kinds; +DROP TABLE fed_states; +DROP TABLE boundary_kinds;
--- a/flys-backend/doc/schema/oracle-drop.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/oracle-drop.sql Thu Feb 28 11:48:17 2013 +0100 @@ -88,3 +88,7 @@ DROP VIEW wst_value_table; DROP VIEW wst_w_values ; DROP VIEW wst_q_values; +DROP VIEW official_lines; +DROP VIEW q_main_values; +DROP VIEW official_q_values; +DROP VIEW wst_ranges;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/doc/schema/oracle-spatial-migrate-dami.sql Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,122 @@ +DROP TRIGGER hws_trigger; +DROP TABLE hws; +DROP SEQUENCE HWS_ID_SEQ; + +--Static lookup tables for Hochwasserschutzanlagen +CREATE TABLE hws_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + kind VARCHAR(64) NOT NULL +); +INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass'); +INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm'); +INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben'); + +CREATE TABLE fed_states ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(23) NOT NULL +); +INSERT INTO fed_states (id, name) VALUES (1, 'Bayern'); +INSERT INTO fed_states (id, name) VALUES (2, 'Hessen'); +INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen'); +INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen'); +INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz'); +INSERT INTO fed_states (id, name) VALUES (6, 'Saarland'); +INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein'); +INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg'); +INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern'); +INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen'); +INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg'); +INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt'); +INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen'); +INSERT INTO fed_states (id, name) VALUES (14, 'Berlin'); +INSERT INTO fed_states (id, name) VALUES (15, 'Bremen'); +INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg'); + +-- HWS-Lines +CREATE SEQUENCE HWS_LINES_ID_SEQ; +CREATE TABLE hws_lines ( + OGR_FID NUMBER(38), + GEOM MDSYS.SDO_GEOMETRY, + kind_id NUMBER(2) DEFAULT 2 REFERENCES hws_kinds(id), + fed_state_id NUMBER(2) REFERENCES fed_states(id), + river_id NUMBER(38) REFERENCES rivers(id), + name VARCHAR(256), + path VARCHAR(256), + official NUMBER DEFAULT 0, + agency VARCHAR(256), + range VARCHAR(256), + shore_side NUMBER DEFAULT 0, + source VARCHAR(256), + status_date TIMESTAMP, + description VARCHAR(256), + id NUMBER PRIMARY KEY NOT NULL +); +INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); +CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + +CREATE OR REPLACE TRIGGER hws_lines_trigger BEFORE INSERT ON hws_lines FOR each ROW + BEGIN + SELECT HWS_LINES_ID_SEQ.nextval INTO :new.id FROM dual; + END; + +-- HWS Points lookup tables +CREATE TABLE sectie_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch'); +INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank'); +INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich'); + +CREATE TABLE sobek_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend'); +INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd'); + +CREATE TABLE boundary_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT 
INTO boundary_kinds (id, name) VALUES (1, 'BfG');
+INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land');
+INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige');
+
+-- HWS Points
+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
+CREATE TABLE hws_points (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    kind_id NUMBER DEFAULT 2 REFERENCES hws_kinds(id),
+    fed_state_id NUMBER REFERENCES fed_states(id),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official NUMBER DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side NUMBER DEFAULT 0,
+    source VARCHAR(256),
+    status_date VARCHAR(256),
+    description VARCHAR(256),
+    freeboard NUMBER(19,5),
+    dike_km NUMBER(19,5),
+    z NUMBER(19,5),
+    z_target NUMBER(19,5),
+    rated_level NUMBER(19,5),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+
+-- Alterations
+ALTER TABLE dem ADD srid NUMBER NOT NULL;
+ALTER TABLE hydr_boundaries_poly ADD sectie NUMBER REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD sobek NUMBER REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries ADD sectie NUMBER REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries ADD sobek NUMBER REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries ADD kind NUMBER REFERENCES boundary_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD kind NUMBER REFERENCES boundary_kinds(id);
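For illustration: with the free-text attributes replaced by references into the new lookup tables, anything writing hws_lines or hws_points rows first has to resolve names to ids. A minimal, hypothetical helper against the Oracle schema defined above (not part of the importer):

    def get_fed_state_id(dbconn, name):
        """Resolve a fed_states.name (e.g. 'Hessen') to its id, or None."""
        cur = dbconn.cursor()
        cur.execute("SELECT id FROM fed_states WHERE name = :name",
                    {"name": name})
        row = cur.fetchone()
        return row[0] if row else None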
--- a/flys-backend/doc/schema/oracle-spatial.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/oracle-spatial.sql Thu Feb 28 11:48:17 2013 +0100 @@ -1,9 +1,10 @@ +WHENEVER SQLERROR EXIT; -- Geodaesie/Flussachse+km/achse CREATE SEQUENCE RIVER_AXES_ID_SEQ; CREATE TABLE river_axes( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, kind NUMBER(38) DEFAULT 0 NOT NULL, name VARCHAR(64), path VARCHAR(256), @@ -23,7 +24,7 @@ CREATE TABLE river_axes_km( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, km NUMBER(6,3), name VARCHAR(64), path VARCHAR(256), @@ -43,7 +44,7 @@ CREATE TABLE cross_section_tracks ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, km NUMBER(38,12) NOT NULL, z NUMBER(38,12) DEFAULT 0 NOT NULL, name VARCHAR(64), @@ -59,39 +60,12 @@ --CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); --- TODO: TestMe. Fix Importer-Script. Fix oracle_spatial_idx.sql script. --- Geodaesie/Linien/rohre-und-speeren -CREATE SEQUENCE LINES_ID_SEQ; -CREATE TABLE lines ( - OGR_FID NUMBER(38), - GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), - kind VARCHAR2(16) NOT NULL, - z NUMBER(38,12) DEFAULT 0, - name VARCHAR(64), - path VARCHAR(256), - ID NUMBER PRIMARY KEY NOT NULL -); -INSERT INTO USER_SDO_GEOM_METADATA VALUES ('lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); -CREATE OR REPLACE TRIGGER lines_trigger BEFORE INSERT ON lines FOR each ROW - BEGIN - SELECT LINES_ID_SEQ.nextval INTO :new.id FROM dual; - END; -/ --- NOTE: Should lines should be 3D. --- TODO: Test index. ---CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); --- 'kind': --- 0: ROHR1 --- 1: DAMM - - -- Geodaesie/Bauwerke/Wehre.shp CREATE SEQUENCE BUILDINGS_ID_SEQ; CREATE TABLE buildings( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR2(255), path VARCHAR(256), ID NUMBER PRIMARY KEY NOT NULL @@ -110,7 +84,7 @@ CREATE TABLE fixpoints ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, x NUMBER(38,11), y NUMBER(38,11), km NUMBER(38,11) NOT NULL, @@ -133,7 +107,7 @@ CREATE TABLE floodplain( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(64), path VARCHAR(256), ID NUMBER PRIMARY KEY NOT NULL @@ -153,20 +127,21 @@ CREATE SEQUENCE DEM_ID_SEQ; CREATE TABLE dem ( ID NUMBER PRIMARY KEY NOT NULL, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, -- XXX Should we use the ranges table instead? 
name VARCHAR(64), lower NUMBER(19,5), upper NUMBER(19,5), - year_from VARCHAR(32) NOT NULL, - year_to VARCHAR(32) NOT NULL, - projection VARCHAR(32) NOT NULL, + year_from VARCHAR(32), + year_to VARCHAR(32), + projection VARCHAR(32), elevation_state VARCHAR(32), + srid NUMBER NOT NULL, format VARCHAR(32), - border_break BOOLEAN NOT NULL DEFAULT FALSE, + border_break NUMBER(1) DEFAULT 0 NOT NULL, resolution VARCHAR(16), description VARCHAR(256), - path VARCHAR(256) + path VARCHAR(256) NOT NULL ); CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW BEGIN @@ -174,54 +149,128 @@ END; / +--Static lookup tables for Hochwasserschutzanlagen +CREATE TABLE hws_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + kind VARCHAR(64) NOT NULL +); +INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass'); +INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm'); +INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben'); --- Hydrologie/Einzugsgebiete/EZG.shp -CREATE SEQUENCE CATCHMENT_ID_SEQ; -CREATE TABLE catchment( +CREATE TABLE fed_states ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(23) NOT NULL +); +INSERT INTO fed_states (id, name) VALUES (1, 'Bayern'); +INSERT INTO fed_states (id, name) VALUES (2, 'Hessen'); +INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen'); +INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen'); +INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz'); +INSERT INTO fed_states (id, name) VALUES (6, 'Saarland'); +INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein'); +INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg'); +INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern'); +INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen'); +INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg'); +INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt'); +INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen'); +INSERT INTO fed_states (id, name) VALUES (14, 'Berlin'); +INSERT INTO fed_states (id, name) VALUES (15, 'Bremen'); +INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg'); + +--Hydrologie/HW-Schutzanlagen/hws.shp +-- HWS-Lines +CREATE SEQUENCE HWS_LINES_ID_SEQ; +CREATE TABLE hws_lines ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), - area NUMBER(19,5), - name VARCHAR2(255), - path VARCHAR(256), - ID NUMBER PRIMARY KEY NOT NULL + kind_id NUMBER(2) DEFAULT 2 REFERENCES hws_kinds(id), + fed_state_id NUMBER(2) REFERENCES fed_states(id), + river_id NUMBER(38) REFERENCES rivers(id), + name VARCHAR(256), + path VARCHAR(256), + official NUMBER DEFAULT 0, + agency VARCHAR(256), + range VARCHAR(256), + shore_side NUMBER DEFAULT 0, + source VARCHAR(256), + status_date TIMESTAMP, + description VARCHAR(256), + id NUMBER PRIMARY KEY NOT NULL ); -INSERT INTO USER_SDO_GEOM_METADATA VALUES ('CATCHMENT', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); - -CREATE TRIGGER catchment_trigger BEFORE INSERT ON catchment FOR each ROW +INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); +CREATE OR REPLACE TRIGGER hws_lines_trigger BEFORE INSERT ON hws_lines FOR each ROW BEGIN - SELECT CATCHMENT_ID_SEQ.nextval INTO :new.id FROM dual; + SELECT HWS_LINES_ID_SEQ.nextval 
INTO :new.id FROM dual; END; / ---CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=polygon'); +-- HWS Points lookup tables +CREATE TABLE sectie_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch'); +INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank'); +INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich'); ---Hydrologie/HW-Schutzanlagen/hws.shp -CREATE SEQUENCE HWS_ID_SEQ; -CREATE TABLE hws( +CREATE TABLE sobek_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend'); +INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd'); + +CREATE TABLE boundary_kinds ( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG'); +INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land'); +INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige'); + +-- HWS Points +CREATE SEQUENCE HWS_POINTS_ID_SEQ; +CREATE TABLE hws_points ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), - hws_facility VARCHAR2(255), - type VARCHAR2(255), - name VARCHAR(64), - path VARCHAR(256), - ID NUMBER PRIMARY KEY NOT NULL + kind_id NUMBER DEFAULT 2 REFERENCES hws_kinds(id), + fed_state_id NUMBER REFERENCES fed_states(id), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, + name VARCHAR(256), + path VARCHAR(256), + official NUMBER DEFAULT 0, + agency VARCHAR(256), + range VARCHAR(256), + shore_side NUMBER DEFAULT 0, + source VARCHAR(256), + status_date VARCHAR(256), + description VARCHAR(256), + freeboard NUMBER(19,5), + dike_km NUMBER(19,5), + z NUMBER(19,5), + z_target NUMBER(19,5), + rated_level NUMBER(19,5), + id NUMBER PRIMARY KEY NOT NULL ); -INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); -CREATE OR REPLACE TRIGGER hws_trigger BEFORE INSERT ON hws FOR each ROW + +INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_points', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467); + +CREATE OR REPLACE TRIGGER hws_points_trigger BEFORE INSERT ON hws_points FOR each ROW BEGIN - SELECT HWS_ID_SEQ.nextval INTO :new.id FROM dual; + SELECT HWS_POINTS_ID_SEQ.nextval INTO :new.id FROM dual; END; / ---CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); - --Hydrologie/UeSG CREATE SEQUENCE FLOODMAPS_ID_SEQ; CREATE TABLE floodmaps ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), kind NUMBER(38), diff NUMBER(19,5), @@ -237,17 +286,17 @@ SELECT FLOODMAPS_ID_SEQ.nextval INTO :new.id FROM dual; END; / -CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON'); - --Hydrologie/Hydr.Grenzen/Linien CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ; CREATE TABLE hydr_boundaries ( OGR_FID 
NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), - kind NUMBER(38), + kind NUMBER(38) REFERENCES boundary_kinds(id), + sectie NUMBER(38) REFERENCES sectie_kinds(id), + sobek NUMBER(38) REFERENCES sobek_kinds(id), path VARCHAR(256), id NUMBER PRIMARY KEY NOT NULL ); @@ -257,15 +306,16 @@ SELECT HYDR_BOUNDARIES_ID_SEQ.nextval INTO :new.id FROM dual; END; / -CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ; CREATE TABLE hydr_boundaries_poly ( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, - river_id NUMBER(38), + river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), - kind NUMBER(38), + kind NUMBER(38) REFERENCES boundary_kinds(id), + sectie NUMBER(38) REFERENCES sectie_kinds(id), + sobek NUMBER(38) REFERENCES sobek_kinds(id), path VARCHAR(256), id NUMBER PRIMARY KEY NOT NULL ); @@ -275,8 +325,6 @@ SELECT HYDR_BOUNDARIES_POLY_ID_SEQ.nextval INTO :new.id FROM dual; END; / -CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON'); - -- Hydrologie/Streckendaten/ CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ; @@ -294,4 +342,3 @@ SELECT GAUGE_LOCATION_ID_SEQ.nextval INTO :new.id FROM dual; END; / -CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
--- a/flys-backend/doc/schema/oracle-spatial_idx.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/oracle-spatial_idx.sql Thu Feb 28 11:48:17 2013 +0100 @@ -1,9 +1,32 @@ -CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=multipolygon'); -CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point'); -CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); -CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); -CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); -CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); -CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); +-- TODO: index prevents `DELETE FROM rivers' on 11g +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point'); + +-- TODO: index prevents importing on 11g. +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + +-- TODO: index prevents `DELETE FROM rivers' on 11g +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); + +-- TODO: index prevents importing on 11g. +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + +-- TODO: index prevents importing on 11g. +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON'); -CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + +-- TODO: index prevents importing on 11g. +-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" +-- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); + +CREATE INDEX hws_points_spatial_idx ON hws_points(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); +CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); +CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON'); +CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); +CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON'); +
--- a/flys-backend/doc/schema/oracle.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/oracle.sql Thu Feb 28 11:48:17 2013 +0100 @@ -340,38 +340,40 @@ -- ADD CONSTRAINTs -ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges; +ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes; ALTER TABLE annotations ADD CONSTRAINT cAnnotationsEdges FOREIGN KEY (edge_id) REFERENCES edges; ALTER TABLE annotations ADD CONSTRAINT cAnnotationsPositions FOREIGN KEY (position_id) REFERENCES positions; -ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes; ALTER TABLE annotations ADD CONSTRAINT cAnnotationsTypes FOREIGN KEY (type_id) REFERENCES annotation_types; -ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections; -ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines; -ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers; ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals; -ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables foreign key (table_id) REFERENCES discharge_tables; ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals; -ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges; -ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers; -ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges; -ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks; -ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations; ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFlowZoneTypes FOREIGN KEY (type_id) REFERENCES hyk_flow_zone_types; -ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers; -ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries; +ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values; ALTER TABLE main_values ADD CONSTRAINT cMainValuesTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals; -ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges; -ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values; ALTER TABLE named_main_values ADD CONSTRAINT cNamedMainValuesMainValueTypes FOREIGN KEY (type_id) REFERENCES main_value_types; -ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers; ALTER TABLE rivers ADD CONSTRAINT cRiversUnits FOREIGN KEY (wst_unit_id) REFERENCES units; -ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns; -ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges; -ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns 
FOREIGN KEY (wst_column_id) REFERENCES wst_columns; ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals; -ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts; -ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES; -ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers; + +-- Cascading references +ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges ON DELETE CASCADE; +ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections ON DELETE CASCADE; +ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines ON DELETE CASCADE; +ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE; +ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges ON DELETE CASCADE; +ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables FOREIGN KEY (table_id) REFERENCES discharge_tables ON DELETE CASCADE; +ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges ON DELETE CASCADE; +ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE; +ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks ON DELETE CASCADE; +ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations ON DELETE CASCADE; +ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries ON DELETE CASCADE; +ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE; +ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges ON DELETE CASCADE; +ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE; +ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE; +ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges ON DELETE CASCADE; +ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts ON DELETE CASCADE; +ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE; +ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES ON DELETE CASCADE; +ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE; -- VIEWS
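For illustration: with the river-related constraints switched to ON DELETE CASCADE, a whole river including its dependent rows (ranges, gauges, wsts, ...) can be removed with a single statement. A minimal, hypothetical example, assuming cx_Oracle and valid credentials:

    import cx_Oracle

    conn = cx_Oracle.connect("benutzername/passwort@host")  # assumed credentials
    cur = conn.cursor()
    # dependent rows are removed by the cascading foreign keys above
    cur.execute("DELETE FROM rivers WHERE name = :name", {"name": "Elbe"})
    conn.commit()
    conn.close()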
--- a/flys-backend/doc/schema/postgresql-drop-spatial.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql Thu Feb 28 11:48:17 2013 +0100 @@ -9,9 +9,6 @@ DROP TABLE cross_section_tracks; DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ; -DROP TABLE lines; -DROP SEQUENCE LINES_ID_SEQ; - DROP TABLE buildings; DROP SEQUENCE BUILDINGS_ID_SEQ; @@ -24,11 +21,11 @@ DROP TABLE dem; DROP SEQUENCE DEM_ID_SEQ; -DROP TABLE catchment; -DROP SEQUENCE CATCHMENT_ID_SEQ; +DROP TABLE hws_points; +DROP SEQUENCE HWS_POINTS_ID_SEQ; -DROP TABLE hws; -DROP SEQUENCE HWS_ID_SEQ; +DROP TABLE hws_lines; +DROP SEQUENCE HWS_LINES_ID_SEQ; DROP TABLE floodmaps; DROP SEQUENCE FLOODMAPS_ID_SEQ; @@ -42,4 +39,10 @@ DROP TABLE gauge_location; DROP SEQUENCE GAUGE_LOCATION_ID_SEQ; +DROP TABLE fed_states; +DROP TABLE hws_kinds; +DROP TABLE sobek_kinds; +DROP TABLE sectie_kinds; +DROP TABLE boundary_kinds; + COMMIT;
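The new lookup tables (hws_kinds, fed_states, sobek_kinds, sectie_kinds, boundary_kinds) are dropped after the geometry tables because hws_lines, hws_points and the hydr_boundaries tables reference them. Running the drop script is a plain psql call; database name and role are deployment-specific and only examples here:

    psql -U flys -d flys -f flys-backend/doc/schema/postgresql-drop-spatial.sql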
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/doc/schema/postgresql-migrate-dami.sql Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,127 @@ +DROP table hws; +DROP sequence HWS_ID_SEQ; +DROP table lines; +DROP sequence LINES_ID_SEQ; +DROP table catchment; +DROP sequence CATCHMENT_ID_SEQ; + +-- Static lookup tables for Hochwasserschutzanlagen +CREATE TABLE hws_kinds ( + id int PRIMARY KEY NOT NULL, + kind VARCHAR(64) NOT NULL +); +INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass'); +INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm'); +INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben'); + +CREATE TABLE fed_states ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(23) NOT NULL +); +INSERT INTO fed_states (id, name) VALUES (1, 'Bayern'); +INSERT INTO fed_states (id, name) VALUES (2, 'Hessen'); +INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen'); +INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen'); +INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz'); +INSERT INTO fed_states (id, name) VALUES (6, 'Saarland'); +INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein'); +INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg'); +INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern'); +INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen'); +INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg'); +INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt'); +INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen'); +INSERT INTO fed_states (id, name) VALUES (14, 'Berlin'); +INSERT INTO fed_states (id, name) VALUES (15, 'Bremen'); +INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg'); + +CREATE TABLE sectie_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch'); +INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank'); +INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich'); + +CREATE TABLE sobek_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend'); +INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd'); + +CREATE TABLE boundary_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG'); +INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land'); +INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige'); + +--Hydrologie/HW-Schutzanlagen/*Linien.shp +CREATE SEQUENCE HWS_LINES_ID_SEQ; +CREATE TABLE hws_lines ( + id int PRIMARY KEY NOT NULL, + ogr_fid int, + kind_id int REFERENCES hws_kinds(id) DEFAULT 2, + fed_state_id int REFERENCES fed_states(id), + river_id int REFERENCES rivers(id), + name VARCHAR(256), + path VARCHAR(256), + offical INT DEFAULT 0, + agency VARCHAR(256), + range VARCHAR(256), + shore_side INT DEFAULT 0, + source VARCHAR(256), + status_date TIMESTAMP, + description VARCHAR(256) +); +SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 3); +-- TODO: dike_km_from dike_km_to, are they geometries? 
+ +ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ'); + +--Hydrologie/HW-Schutzanlagen/*Punkte.shp +CREATE SEQUENCE HWS_POINTS_ID_SEQ; +CREATE TABLE hws_points ( + id int PRIMARY KEY NOT NULL, + ogr_fid int, + kind_id int REFERENCES hws_kinds(id) DEFAULT 2, + fed_state_id int REFERENCES fed_states(id), + river_id int REFERENCES rivers(id), + name VARCHAR, + path VARCHAR, + offical INT DEFAULT 0, + agency VARCHAR, + range VARCHAR, + shore_side INT DEFAULT 0, + source VARCHAR, + status_date VARCHAR, + description VARCHAR, + freeboard FLOAT8, + dike_km FLOAT8, + z FLOAT8, + z_target FLOAT8, + rated_level FLOAT8 +); +SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2); + +ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ'); + +ALTER TABLE hydr_boundaries_poly ADD COLUMN sectie INT REFERENCES sectie_kinds(id); +ALTER TABLE hydr_boundaries_poly ADD COLUMN sobek INT REFERENCES sobek_kinds(id); +ALTER TABLE hydr_boundaries_poly ADD FOREIGN KEY (kind) REFERENCES boundary_kinds(id); +ALTER TABLE hydr_boundaries ADD COLUMN sectie INT REFERENCES sectie_kinds(id); +ALTER TABLE hydr_boundaries ADD COLUMN sobek INT REFERENCES sobek_kinds(id); +ALTER TABLE hydr_boundaries ADD FOREIGN KEY (kind) REFERENCES boundary_kinds(id); +ALTER TABLE dem ADD COLUMN srid INT NOT NULL; +ALTER TABLE dem ALTER COLUMN year_from DROP NOT NULL; +ALTER TABLE dem ALTER COLUMN year_to DROP NOT NULL; +ALTER TABLE dem ALTER COLUMN projection DROP NOT NULL; +ALTER TABLE dem ALTER COLUMN path SET NOT NULL; + +COMMIT; +
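After the migration, the classification of a flood protection structure lives in foreign keys to the new lookup tables rather than in free-text columns. A hedged example of reading the data back; the river name and join layout are illustrative only:

    SELECT l.name, k.kind, f.name AS fed_state
      FROM hws_lines l
      JOIN hws_kinds k       ON k.id = l.kind_id
      LEFT JOIN fed_states f ON f.id = l.fed_state_id
     WHERE l.river_id = (SELECT id FROM rivers WHERE name = 'Mosel');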
--- a/flys-backend/doc/schema/postgresql-minfo.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/postgresql-minfo.sql Thu Feb 28 11:48:17 2013 +0100 @@ -46,12 +46,12 @@ evaluation_by VARCHAR(255), description VARCHAR(255), PRIMARY KEY(id), - CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), + CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id), CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id), CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id), CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id), - CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id) + CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE ); @@ -72,7 +72,7 @@ CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id), CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id), CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id), - CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id) + CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE ); @@ -88,7 +88,7 @@ sounding_width NUMERIC, width NUMERIC, PRIMARY KEY(id), - CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id) + CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id) ON DELETE CASCADE ); @@ -100,7 +100,7 @@ station NUMERIC NOT NULL, height NUMERIC, PRIMARY KEY(id), - CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id) + CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id) ON DELETE CASCADE ); @@ -125,7 +125,7 @@ unit_id int NOT NULL, description VARCHAR(256), PRIMARY KEY(id), - CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), + CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id), CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id) ); @@ -141,7 +141,7 @@ description VARCHAR(256), year int, PRIMARY KEY(id), - CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id) + CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id) ON DELETE CASCADE ); @@ -152,7 +152,7 @@ river_id int NOT NULL, unit_id int NOT NULL, PRIMARY KEY(id), - CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id), + CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id) ); @@ -166,7 +166,7 @@ width NUMERIC NOT NULL, description VARCHAR(256), PRIMARY KEY(id), - CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id) + CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id) ON DELETE CASCADE ); @@ -180,7 +180,7 @@ lower_discharge VARCHAR(16) NOT NULL, upper_discharge VARCHAR(16), PRIMARY KEY(id), - CONSTRAINT 
fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) + CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE ); @@ -192,8 +192,8 @@ discharge_zone_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), - CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id) + CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, + CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id) ON DELETE CASCADE ); @@ -208,7 +208,7 @@ main_channel NUMERIC NOT NULL, shear_stress NUMERIC NOT NULL, PRIMARY KEY(id), - CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id) + CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id) ON DELETE CASCADE ); @@ -220,7 +220,7 @@ river_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id) + CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE ); CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ; @@ -235,7 +235,7 @@ v NUMERIC NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id) + CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id) ON DELETE CASCADE ); @@ -262,7 +262,7 @@ time_interval_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), + CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id), CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id), CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id) @@ -277,7 +277,7 @@ station NUMERIC NOT NULL, value NUMERIC NOT NULL, PRIMARY KEY (id), - CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id) + CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id) ON DELETE CASCADE ); @@ -289,7 +289,7 @@ unit_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), + CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id) ); @@ -301,7 +301,7 @@ waterlevel_id int NOT NULL, q NUMERIC NOT NULL, PRIMARY KEY (id), - CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id) + CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id) ON DELETE CASCADE ); @@ -313,7 +313,7 @@ station NUMERIC NOT NULL, w NUMERIC NOT NULL, PRIMARY KEY (id), - CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id) + CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id) ON DELETE CASCADE ); @@ -325,7 +325,7 @@ unit_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id), + CONSTRAINT 
fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id) ON DELETE CASCADE, CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id) ); @@ -337,7 +337,7 @@ difference_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id) + CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id) ON DELETE CASCADE ); @@ -349,7 +349,7 @@ station NUMERIC NOT NULL, value NUMERIC NOT NULL, PRIMARY KEY (id), - CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id) + CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id) ON DELETE CASCADE ); @@ -367,9 +367,9 @@ operator VARCHAR(64), comment VARCHAR(512), PRIMARY KEY (id), - CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), - CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id), - CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id), + CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, + CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE, + CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE, CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id), UNIQUE (river_id, station) ); @@ -383,7 +383,7 @@ time_interval_id int NOT NULL, description VARCHAR(256), PRIMARY KEY (id), - CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id), + CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE, CONSTRAINT fk_sqr_tinterval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id) ); @@ -400,6 +400,6 @@ a NUMERIC NOT NULL, b NUMERIC NOT NULL, PRIMARY KEY (id), - CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id) + CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id) ON DELETE CASCADE ); COMMIT;
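Whether the cascading delete rules are actually in place after running the script can be checked against the PostgreSQL catalogue; this is a generic catalogue query, nothing FLYS-specific:

    -- confdeltype 'c' marks ON DELETE CASCADE
    SELECT conrelid::regclass  AS referencing_table,
           confrelid::regclass AS referenced_table,
           conname
      FROM pg_constraint
     WHERE contype = 'f'
       AND confdeltype = 'c'
     ORDER BY 1, 2;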
--- a/flys-backend/doc/schema/postgresql-spatial.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/postgresql-spatial.sql Thu Feb 28 11:48:17 2013 +0100 @@ -4,7 +4,7 @@ CREATE SEQUENCE RIVER_AXES_ID_SEQ; CREATE TABLE river_axes ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, kind int NOT NULL DEFAULT 0, name VARCHAR(64), path VARCHAR(256) @@ -18,8 +18,8 @@ CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ; CREATE TABLE river_axes_km ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - km NUMERIC NOT NULL, + river_id int REFERENCES rivers(id) ON DELETE CASCADE, + km FLOAT8 NOT NULL, name VARCHAR(64), path VARCHAR(256) ); @@ -31,9 +31,9 @@ CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ; CREATE TABLE cross_section_tracks ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - km NUMERIC NOT NULL, - z NUMERIC NOT NULL DEFAULT 0, + river_id int REFERENCES rivers(id) ON DELETE CASCADE, + km FLOAT8 NOT NULL, + z FLOAT8 NOT NULL DEFAULT 0, name VARCHAR(64), path VARCHAR(256) ); @@ -41,28 +41,11 @@ ALTER TABLE cross_section_tracks ALTER COLUMN id SET DEFAULT NEXTVAL('CROSS_SECTION_TRACKS_ID_SEQ'); --- Geodaesie/Linien/rohre-und-spreen -CREATE SEQUENCE LINES_ID_SEQ; -CREATE TABLE lines ( - id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - kind VARCHAR(16) NOT NULL, - z NUMERIC DEFAULT 0, - name VARCHAR(64), - path VARCHAR(256) -); -SELECT AddGeometryColumn('lines', 'geom', 31467, 'LINESTRING', 3); -ALTER TABLE lines ALTER COLUMN id SET DEFAULT NEXTVAL('LINES_ID_SEQ'); --- 'kind': --- 0: ROHR1 --- 1: DAMM - - -- Geodaesie/Bauwerke/Wehre.shp CREATE SEQUENCE BUILDINGS_ID_SEQ; CREATE TABLE buildings ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(256), path VARCHAR(256) ); @@ -74,10 +57,10 @@ CREATE SEQUENCE FIXPOINTS_ID_SEQ; CREATE TABLE fixpoints ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - x int, - y int, - km NUMERIC NOT NULL, + river_id int REFERENCES rivers(id) ON DELETE CASCADE, + x FLOAT8, + y FLOAT8, + km FLOAT8 NOT NULL, HPGP VARCHAR(2), name VARCHAR(64), path VARCHAR(256) @@ -90,7 +73,7 @@ CREATE SEQUENCE FLOODPLAIN_ID_SEQ; CREATE TABLE floodplain ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(64), path VARCHAR(256) ); @@ -102,50 +85,104 @@ CREATE SEQUENCE DEM_ID_SEQ; CREATE TABLE dem ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, -- XXX Should we use the ranges table instead? 
name VARCHAR(64), - lower NUMERIC, - upper NUMERIC, - year_from VARCHAR(32) NOT NULL, - year_to VARCHAR(32) NOT NULL, - projection VARCHAR(32) NOT NULL, + lower FLOAT8, + upper FLOAT8, + year_from VARCHAR(32), + year_to VARCHAR(32), + projection VARCHAR(32), + srid int NOT NULL, elevation_state VARCHAR(32), format VARCHAR(32), border_break BOOLEAN NOT NULL DEFAULT FALSE, resolution VARCHAR(16), description VARCHAR(256), - path VARCHAR(256) + path VARCHAR(256) NOT NULL ); ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ'); --- Hydrologie/Einzugsgebiete/EZG.shp -CREATE SEQUENCE CATCHMENT_ID_SEQ; -CREATE TABLE catchment ( +-- Static lookup tables for Hochwasserschutzanlagen +CREATE TABLE hws_kinds ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - area NUMERIC, - name VARCHAR(256), - path VARCHAR(256) + kind VARCHAR(64) NOT NULL ); -SELECT AddGeometryColumn('catchment','geom',31467,'POLYGON',2); -ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ'); - +INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass'); +INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm'); +INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben'); ---Hydrologie/HW-Schutzanlagen/hws.shp -CREATE SEQUENCE HWS_ID_SEQ; -CREATE TABLE hws ( +CREATE TABLE fed_states ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), - hws_facility VARCHAR(256), - type VARCHAR(256), - name VARCHAR(64), - path VARCHAR(256) + name VARCHAR(23) NOT NULL ); -SELECT AddGeometryColumn('hws','geom',31467,'LINESTRING',2); -ALTER TABLE hws ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_ID_SEQ'); +INSERT INTO fed_states (id, name) VALUES (1, 'Bayern'); +INSERT INTO fed_states (id, name) VALUES (2, 'Hessen'); +INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen'); +INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen'); +INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz'); +INSERT INTO fed_states (id, name) VALUES (6, 'Saarland'); +INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein'); +INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg'); +INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern'); +INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen'); +INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg'); +INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt'); +INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen'); +INSERT INTO fed_states (id, name) VALUES (14, 'Berlin'); +INSERT INTO fed_states (id, name) VALUES (15, 'Bremen'); +INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg'); +--Hydrologie/HW-Schutzanlagen/*Linien.shp +CREATE SEQUENCE HWS_LINES_ID_SEQ; +CREATE TABLE hws_lines ( + id int PRIMARY KEY NOT NULL, + ogr_fid int, + kind_id int REFERENCES hws_kinds(id) DEFAULT 2, + fed_state_id int REFERENCES fed_states(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, + name VARCHAR(256), + path VARCHAR(256), + official INT DEFAULT 0, + agency VARCHAR(256), + range VARCHAR(256), + shore_side INT DEFAULT 0, + source VARCHAR(256), + status_date TIMESTAMP, + description VARCHAR(256) +); +SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 3); +-- TODO: dike_km_from dike_km_to, are they geometries? 
+ +ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ'); + +--Hydrologie/HW-Schutzanlagen/*Punkte.shp +CREATE SEQUENCE HWS_POINTS_ID_SEQ; +CREATE TABLE hws_points ( + id int PRIMARY KEY NOT NULL, + ogr_fid int, + kind_id int REFERENCES hws_kinds(id) DEFAULT 2, + fed_state_id int REFERENCES fed_states(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, + name VARCHAR, + path VARCHAR, + official INT DEFAULT 0, + agency VARCHAR, + range VARCHAR, + shore_side INT DEFAULT 0, + source VARCHAR, + status_date VARCHAR, + description VARCHAR, + freeboard FLOAT8, + dike_km FLOAT8, + z FLOAT8, + z_target FLOAT8, + rated_level FLOAT8 +); +SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2); + +ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ'); -- --Hydrologie/UeSG @@ -160,13 +197,13 @@ CREATE SEQUENCE FLOODMAPS_ID_SEQ; CREATE TABLE floodmaps ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name varchar(64) NOT NULL, kind int NOT NULL, - diff real, + diff FLOAT8, count int, - area real, - perimeter real, + area FLOAT8, + perimeter FLOAT8, path VARCHAR(256) ); SELECT AddGeometryColumn('floodmaps', 'geom', 31467, 'MULTIPOLYGON', 2); @@ -174,13 +211,40 @@ ALTER TABLE floodmaps ADD CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POLYGON'::text OR geometrytype(geom) = 'MULTIPOLYGON'::text); ALTER TABLE floodmaps ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODMAPS_ID_SEQ'); +CREATE TABLE sectie_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch'); +INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank'); +INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich'); + +CREATE TABLE sobek_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend'); +INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd'); + +CREATE TABLE boundary_kinds ( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) NOT NULL +); +INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt'); +INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG'); +INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land'); +INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige'); CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ; CREATE TABLE hydr_boundaries ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), - kind int, + kind int REFERENCES boundary_kinds(id), + sectie int REFERENCES sectie_kinds(id), + sobek int REFERENCES sobek_kinds(id), path VARCHAR(256) ); SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'LINESTRING',3); @@ -190,9 +254,11 @@ CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ; CREATE TABLE hydr_boundaries_poly ( id int PRIMARY KEY NOT NULL, - river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), - kind int, + kind int REFERENCES boundary_kinds(id), + sectie int REFERENCES sectie_kinds(id), + sobek int REFERENCES sobek_kinds(id), path VARCHAR(256) ); SELECT AddGeometryColumn('hydr_boundaries_poly','geom',31467,'POLYGON',3); @@ -202,7 +268,7 @@ CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ; CREATE TABLE gauge_location ( id int PRIMARY KEY NOT NULL, 
- river_id int REFERENCES rivers(id), + river_id int REFERENCES rivers(id) ON DELETE CASCADE, name VARCHAR(255), path VARCHAR(256) );
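hws_lines now stores a 3D line string in Gauss-Krüger zone 3 (SRID 31467) together with a kind taken from the lookup table, and its id is filled from HWS_LINES_ID_SEQ. A hedged INSERT sketch with made-up coordinates and names:

    INSERT INTO hws_lines (river_id, kind_id, name, geom)
    VALUES (
        (SELECT id FROM rivers WHERE name = 'Mosel'),  -- illustrative river
        2,                                             -- 'Damm' in hws_kinds
        'Beispieldeich',
        ST_GeomFromText(
            'LINESTRING(3551000 5945000 4.2, 3551100 5945120 4.3)', 31467)
    );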
--- a/flys-backend/doc/schema/postgresql.sql Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/doc/schema/postgresql.sql Thu Feb 28 11:48:17 2013 +0100 @@ -31,7 +31,7 @@ CREATE TABLE ranges ( id int PRIMARY KEY NOT NULL, - river_id int NOT NULL REFERENCES rivers(id), + river_id int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE, a NUMERIC NOT NULL, b NUMERIC, UNIQUE (river_id, a, b) @@ -68,7 +68,7 @@ CREATE TABLE annotations ( id int PRIMARY KEY NOT NULL, - range_id int NOT NULL REFERENCES ranges(id), + range_id int NOT NULL REFERENCES ranges(id) ON DELETE CASCADE, attribute_id int NOT NULL REFERENCES attributes(id), position_id int REFERENCES positions(id), edge_id int REFERENCES edges(id), @@ -81,7 +81,7 @@ CREATE TABLE gauges ( id int PRIMARY KEY NOT NULL, name VARCHAR(256) NOT NULL, - river_id int NOT NULL REFERENCES rivers(id), + river_id int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE, station NUMERIC NOT NULL UNIQUE, aeo NUMERIC NOT NULL, official_number int8 UNIQUE, @@ -89,7 +89,7 @@ -- Pegelnullpunkt datum NUMERIC NOT NULL, -- Streckengueltigkeit - range_id int REFERENCES ranges (id), + range_id int REFERENCES ranges (id) ON DELETE CASCADE, UNIQUE (name, river_id), UNIQUE (river_id, station) @@ -128,7 +128,7 @@ CREATE TABLE main_values ( id int PRIMARY KEY NOT NULL, - gauge_id int NOT NULL REFERENCES gauges(id), + gauge_id int NOT NULL REFERENCES gauges(id) ON DELETE CASCADE, named_value_id int NOT NULL REFERENCES named_main_values(id), value NUMERIC NOT NULL, @@ -143,7 +143,7 @@ CREATE TABLE discharge_tables ( id int PRIMARY KEY NOT NULL, - gauge_id int NOT NULL REFERENCES gauges(id), + gauge_id int NOT NULL REFERENCES gauges(id) ON DELETE CASCADE, description VARCHAR(256) NOT NULL, bfg_id VARCHAR(50), kind int NOT NULL DEFAULT 0, @@ -158,7 +158,7 @@ CREATE TABLE discharge_table_values ( id int PRIMARY KEY NOT NULL, - table_id int NOT NULL REFERENCES discharge_tables(id), + table_id int NOT NULL REFERENCES discharge_tables(id) ON DELETE CASCADE, q NUMERIC NOT NULL, w NUMERIC NOT NULL, @@ -170,7 +170,7 @@ CREATE TABLE wsts ( id int PRIMARY KEY NOT NULL, - river_id int NOT NULL REFERENCES rivers(id), + river_id int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE, description VARCHAR(256) NOT NULL, kind int NOT NULL DEFAULT 0, -- TODO: more meta infos @@ -182,7 +182,7 @@ CREATE TABLE wst_columns ( id int PRIMARY KEY NOT NULL, - wst_id int NOT NULL REFERENCES wsts(id), + wst_id int NOT NULL REFERENCES wsts(id) ON DELETE CASCADE, name VARCHAR(256) NOT NULL, description VARCHAR(256), position int NOT NULL DEFAULT 0, @@ -198,7 +198,7 @@ CREATE TABLE wst_column_values ( id int PRIMARY KEY NOT NULL, - wst_column_id int NOT NULL REFERENCES wst_columns(id), + wst_column_id int NOT NULL REFERENCES wst_columns(id) ON DELETE CASCADE, position NUMERIC NOT NULL, w NUMERIC NOT NULL, @@ -211,7 +211,7 @@ CREATE TABLE wst_q_ranges ( id int PRIMARY KEY NOT NULL, - range_id int NOT NULL REFERENCES ranges(id), + range_id int NOT NULL REFERENCES ranges(id) ON DELETE CASCADE, q NUMERIC NOT NULL ); @@ -220,8 +220,8 @@ CREATE TABLE wst_column_q_ranges ( id int PRIMARY KEY NOT NULL, - wst_column_id int NOT NULL REFERENCES wst_columns(id), - wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id), + wst_column_id int NOT NULL REFERENCES wst_columns(id) ON DELETE CASCADE, + wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id) ON DELETE CASCADE, UNIQUE (wst_column_id, wst_q_range_id) ); @@ -277,7 +277,7 @@ CREATE TABLE cross_sections ( id int PRIMARY KEY NOT NULL, - river_id int NOT NULL REFERENCES 
rivers(id), + river_id int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE, time_interval_id int REFERENCES time_intervals(id), description VARCHAR(256) ); @@ -287,7 +287,7 @@ CREATE TABLE cross_section_lines ( id int PRIMARY KEY NOT NULL, km NUMERIC NOT NULL, - cross_section_id int NOT NULL REFERENCES cross_sections(id), + cross_section_id int NOT NULL REFERENCES cross_sections(id) ON DELETE CASCADE, UNIQUE (km, cross_section_id) ); @@ -295,7 +295,7 @@ CREATE TABLE cross_section_points ( id int PRIMARY KEY NOT NULL, - cross_section_line_id int NOT NULL REFERENCES cross_section_lines(id), + cross_section_line_id int NOT NULL REFERENCES cross_section_lines(id) ON DELETE CASCADE, col_pos int NOT NULL, x NUMERIC NOT NULL, y NUMERIC NOT NULL, @@ -314,7 +314,7 @@ CREATE TABLE hyks ( id int PRIMARY KEY NOT NULL, - river_id int NOT NULL REFERENCES rivers(id), + river_id int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE, description VARCHAR(256) NOT NULL ); @@ -322,7 +322,7 @@ CREATE TABLE hyk_entries ( id int PRIMARY KEY NOT NULL, - hyk_id int NOT NULL REFERENCES hyks(id), + hyk_id int NOT NULL REFERENCES hyks(id) ON DELETE CASCADE, km NUMERIC NOT NULL, measure TIMESTAMP, UNIQUE (hyk_id, km) @@ -333,7 +333,7 @@ CREATE TABLE hyk_formations ( id int PRIMARY KEY NOT NULL, formation_num int NOT NULL DEFAULT 0, - hyk_entry_id int NOT NULL REFERENCES hyk_entries(id), + hyk_entry_id int NOT NULL REFERENCES hyk_entries(id) ON DELETE CASCADE, top NUMERIC NOT NULL, bottom NUMERIC NOT NULL, distance_vl NUMERIC NOT NULL, @@ -354,7 +354,7 @@ CREATE TABLE hyk_flow_zones ( id int PRIMARY KEY NOT NULL, - formation_id int NOT NULL REFERENCES hyk_formations(id), + formation_id int NOT NULL REFERENCES hyk_formations(id) ON DELETE CASCADE, type_id int NOT NULL REFERENCES hyk_flow_zone_types(id), a NUMERIC NOT NULL, b NUMERIC NOT NULL,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/pom-oracle.xml Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,142 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + + <groupId>de.intevation.flys</groupId> + <artifactId>flys-backend</artifactId> + <version>1.0-SNAPSHOT</version> + <packaging>jar</packaging> + + <name>flys-backend</name> + <url>http://maven.apache.org</url> + + <properties> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + </properties> + + <build> + <plugins> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>hibernate3-maven-plugin</artifactId> + <version>2.2</version> + <!-- + <configuration> + <componentProperties> + <propertyfile>src/main/config/hbm.properties</propertyfile> + </componentProperties> + </configuration> + --> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>2.0.2</version> + <configuration> + <source>1.6</source> + <target>1.6</target> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <configuration> + <archive> + <manifest> + <mainClass>de.intevation.flys.importer.Importer</mainClass> + <packageName>de.intevation.flys.importer</packageName> + </manifest> + </archive> + </configuration> + </plugin> + <plugin> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <archive> + <manifest> + <mainClass>de.intevation.flys.importer.Importer</mainClass> + </manifest> + </archive> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + </configuration> + </plugin> + </plugins> + </build> + + <dependencies> + <dependency> + <groupId>de.intevation.artifacts.common</groupId> + <artifactId>artifacts-common</artifactId> + <version>1.0-SNAPSHOT</version> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>3.8.1</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>net.sf.opencsv</groupId> + <artifactId>opencsv</artifactId> + <version>2.0</version> + </dependency> + <dependency> + <groupId>org.hibernate</groupId> + <artifactId>hibernate-core</artifactId> + <version>3.6.5.Final</version> + </dependency> + <dependency> + <groupId>org.hibernate</groupId> + <artifactId>hibernate-entitymanager</artifactId> + <version>3.6.5.Final</version> + </dependency> + <dependency> + <groupId>log4j</groupId> + <artifactId>log4j</artifactId> + <version>1.2.14</version> + </dependency> + <dependency> + <groupId>commons-dbcp</groupId> + <artifactId>commons-dbcp</artifactId> + <version>1.4</version> + </dependency> + <dependency> + <groupId>org.hibernatespatial</groupId> + <artifactId>hibernate-spatial-postgis</artifactId> + <version>1.1</version> + </dependency> + <dependency> + <groupId>org.hibernatespatial</groupId> + <artifactId>hibernate-spatial-oracle</artifactId> + <version>1.1</version> + </dependency> + <dependency> + <groupId>org.postgis</groupId> + <artifactId>postgis-jdbc</artifactId> + <version>1.3.3</version> + </dependency> + <dependency> + <groupId>ojdbc5.jar</groupId> + <artifactId>ojdbc5</artifactId> + <version>0</version> + </dependency> + </dependencies> + + <repositories> + <repository> + <id>repository.jboss.org/nexus</id> + <name>JBoss Repository 
- Nexus</name> + <url>http://repository.jboss.org/nexus/content/groups/public/</url> + </repository> + <repository> + <id>OSGEO GeoTools repo</id> + <url>http://download.osgeo.org/webdav/geotools</url> + </repository> + <repository> + <id>Hibernate Spatial repo</id> + <url>http://www.hibernatespatial.org/repository</url> + </repository> + </repositories> +</project>
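The Oracle POM only differs from the default one in its JDBC and Hibernate Spatial dependencies, so the importer is assembled the same way. The proprietary ojdbc5 driver is not available from a public repository and has to be put into the local Maven repository first; the coordinates below are the ones declared in the POM, the jar location is an assumption:

    # make the Oracle JDBC driver available to Maven (path is an example)
    mvn install:install-file -Dfile=ojdbc5.jar -DgroupId=ojdbc5.jar \
        -DartifactId=ojdbc5 -Dversion=0 -Dpackaging=jar

    # build the self-contained importer against Oracle
    cd flys-backend
    mvn -f pom-oracle.xml clean compile assembly:single
    ls target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar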
--- a/flys-backend/pom.xml Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/pom.xml Thu Feb 28 11:48:17 2013 +0100 @@ -37,6 +37,31 @@ <target>1.6</target> </configuration> </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <configuration> + <archive> + <manifest> + <mainClass>de.intevation.flys.importer.Importer</mainClass> + <packageName>de.intevation.flys.importer</packageName> + </manifest> + </archive> + </configuration> + </plugin> + <plugin> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <archive> + <manifest> + <mainClass>de.intevation.flys.importer.Importer</mainClass> + </manifest> + </archive> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + </configuration> + </plugin> </plugins> </build> @@ -89,11 +114,6 @@ <version>1.1</version> </dependency> <dependency> - <groupId>org.hibernatespatial</groupId> - <artifactId>hibernate-spatial-oracle</artifactId> - <version>1.1</version> - </dependency> - <dependency> <groupId>org.postgis</groupId> <artifactId>postgis-jdbc</artifactId> <version>1.3.3</version>
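With the jar and assembly plugins now also in the default PostgreSQL/PostGIS POM, both flavours yield an executable jar whose manifest points at de.intevation.flys.importer.Importer. A minimal sketch; the importer's actual configuration (Hibernate connection settings, river directories) is passed at runtime and not shown here:

    cd flys-backend
    mvn clean compile assembly:single
    java -jar target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar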
--- a/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Thu Feb 28 11:48:17 2013 +0100 @@ -11,7 +11,7 @@ import de.intevation.flys.model.BedHeightSingleValue; import de.intevation.flys.model.BedHeightType; import de.intevation.flys.model.Building; -import de.intevation.flys.model.Catchment; +import de.intevation.flys.model.BoundaryKind; import de.intevation.flys.model.CrossSection; import de.intevation.flys.model.CrossSectionLine; import de.intevation.flys.model.CrossSectionPoint; @@ -23,6 +23,7 @@ import de.intevation.flys.model.DischargeZone; import de.intevation.flys.model.Edge; import de.intevation.flys.model.ElevationModel; +import de.intevation.flys.model.FedState; import de.intevation.flys.model.Fixpoint; import de.intevation.flys.model.Floodmaps; import de.intevation.flys.model.Floodplain; @@ -33,15 +34,15 @@ import de.intevation.flys.model.Gauge; import de.intevation.flys.model.GaugeLocation; import de.intevation.flys.model.GrainFraction; +import de.intevation.flys.model.HWSKind; +import de.intevation.flys.model.HWSLine; import de.intevation.flys.model.HYK; import de.intevation.flys.model.HYKEntry; import de.intevation.flys.model.HYKFlowZone; import de.intevation.flys.model.HYKFlowZoneType; import de.intevation.flys.model.HYKFormation; -import de.intevation.flys.model.Hws; import de.intevation.flys.model.HydrBoundary; import de.intevation.flys.model.HydrBoundaryPoly; -import de.intevation.flys.model.Line; import de.intevation.flys.model.LocationSystem; import de.intevation.flys.model.MainValue; import de.intevation.flys.model.MainValueType; @@ -56,6 +57,8 @@ import de.intevation.flys.model.RiverAxisKm; import de.intevation.flys.model.SQRelation; import de.intevation.flys.model.SQRelationValue; +import de.intevation.flys.model.SectieKind; +import de.intevation.flys.model.SobekKind; import de.intevation.flys.model.SedimentDensity; import de.intevation.flys.model.SedimentDensityValue; import de.intevation.flys.model.SedimentYield; @@ -123,7 +126,7 @@ BedHeightSingleValue.class, BedHeightType.class, Building.class, - Catchment.class, + BoundaryKind.class, CrossSection.class, CrossSectionLine.class, CrossSectionPoint.class, @@ -135,6 +138,7 @@ DischargeZone.class, Edge.class, ElevationModel.class, + FedState.class, Fixpoint.class, Floodplain.class, Floodmaps.class, @@ -145,7 +149,8 @@ Gauge.class, GaugeLocation.class, GrainFraction.class, - Hws.class, + HWSKind.class, + HWSLine.class, HydrBoundary.class, HydrBoundaryPoly.class, HYK.class, @@ -153,7 +158,6 @@ HYKFormation.class, HYKFlowZoneType.class, HYKFlowZone.class, - Line.class, LocationSystem.class, MainValueType.class, MeasurementStation.class, @@ -166,6 +170,8 @@ River.class, RiverAxis.class, RiverAxisKm.class, + SectieKind.class, + SobekKind.class, SedimentDensity.class, SedimentDensityValue.class, SedimentYield.class,
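FLYSCredentials carries the list of annotated model classes that the backend registers with Hibernate, which is why every new entity (BoundaryKind, FedState, HWSKind, HWSLine, SectieKind, SobekKind) appears here while Catchment, Hws and Line are gone. Conceptually this corresponds to the plain Hibernate 3 calls sketched below; this is a generic illustration, not the FLYS wiring itself:

    import org.hibernate.SessionFactory;
    import org.hibernate.cfg.AnnotationConfiguration;

    public class AnnotatedClassesSketch {
        public static void main(String[] args) {
            // The backend does the equivalent of this for every class
            // listed in FLYSCredentials.
            AnnotationConfiguration cfg = new AnnotationConfiguration();
            cfg.addAnnotatedClass(de.intevation.flys.model.HWSLine.class);
            cfg.addAnnotatedClass(de.intevation.flys.model.BoundaryKind.class);
            SessionFactory factory = cfg.buildSessionFactory();
            factory.close();
        }
    }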
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java Thu Feb 28 11:48:17 2013 +0100 @@ -10,9 +10,9 @@ import de.intevation.flys.model.Building; import de.intevation.flys.model.CrossSectionTrack; import de.intevation.flys.model.Fixpoint; -import de.intevation.flys.model.Line; import de.intevation.flys.model.River; import de.intevation.flys.model.RiverAxis; +import de.intevation.flys.model.HWSLine; public class SpatialInfo { @@ -42,7 +42,6 @@ logger.info("Spatial information of River '" + RIVERNAME + "'"); spatial.doRiverAxisInfo(river); spatial.doCrossSectionTracksInfo(river); - spatial.doLinesInfo(river); spatial.doBuildingsInfo(river); spatial.doFixpointsInfo(river); } @@ -112,23 +111,6 @@ } - protected void doLinesInfo(River river) { - Query query = session.createQuery( - "from Line where river =:river"); - query.setParameter("river", river); - - List<Line> list = query.list(); - - if (list == null || list.size() == 0) { - logger.warn("No Lines for '" + river.getName() + "' found!"); - return; - } - else { - logger.info("River contains " + list.size() + " Lines."); - } - } - - protected void doBuildingsInfo(River river) { Query query = session.createQuery( "from Building where river =:river"); @@ -161,5 +143,26 @@ logger.info("River contains " + list.size() + " Fixpoints."); } } + + @Deprecated + protected void doLinesInfo(River river) { + doHWSLinesInfo(river); + } + + protected void doHWSLinesInfo(River river) { + Query query = session.createQuery( + "from HWSLine where river =:river"); + query.setParameter("river", river); + + List<HWSLine> list = query.list(); + + if (list == null || list.size() == 0) { + logger.warn("No Lines for '" + river.getName() + "' found!"); + return; + } + else { + logger.info("River contains " + list.size() + " Lines."); + } + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java Thu Feb 28 11:48:17 2013 +0100 @@ -141,7 +141,8 @@ infoGewParser.parse(gewFile); } catch (IOException ioe) { - log.error("error while parsing gew: " + gew); + log.error("error while parsing gew: " + gew, ioe); + System.exit(1); } } @@ -156,7 +157,8 @@ infoGewParser.parse(gewFile); } catch (IOException ioe) { - log.error("error while parsing gew: " + gew); + log.error("error while parsing gew: " + gew, ioe); + System.exit(1); } }
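Logging the IOException with its stack trace and exiting with a non-zero status makes a failed GEW parse visible to the calling shell instead of letting the import continue silently. A sketch of how a wrapper script might react; the jar name is illustrative:

    if ! java -jar importer.jar; then
        echo "importer failed while parsing the GEW file" >&2
        exit 1
    fi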
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/src/main/java/de/intevation/flys/model/BoundaryKind.java Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,45 @@ +package de.intevation.flys.model; + +import java.io.Serializable; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name = "boundary_kinds") +public class BoundaryKind implements Serializable { + + private Integer id; + private String name; + + @Id + @Column(name = "id") + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + /** + * Get name. + * + * @return name of the kind of boundary as String. + */ + @Column(name = "name") + public String getName() { + return name; + } + + /** + * Set name. + * + * @param name the value to set. + */ + public void setName(String name) { + this.name = name; + } +}
--- a/flys-backend/src/main/java/de/intevation/flys/model/Catchment.java Thu Feb 28 11:06:20 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,107 +0,0 @@ -package de.intevation.flys.model; - -import java.io.Serializable; -import java.math.BigDecimal; -import java.util.List; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.OneToOne; -import javax.persistence.Table; - -import org.hibernate.Session; -import org.hibernate.Query; -import org.hibernate.annotations.Type; - -import com.vividsolutions.jts.geom.Geometry; - -import de.intevation.flys.backend.SessionHolder; - - -@Entity -@Table(name = "catchment") -public class Catchment -implements Serializable -{ - private Integer id; - private BigDecimal area; - private String name; - private River river; - private Geometry geom; - - public Catchment() { - } - - - @Id - @Column(name = "id") - public Integer getId() { - return id; - } - - - public void setId(Integer id) { - this.id = id; - } - - - @OneToOne - @JoinColumn(name = "river_id") - public River getRiver() { - return river; - } - - - public void setRiver(River river) { - this.river = river; - } - - - @Column(name = "name") - public String getName() { - return name; - } - - - public void setName(String name) { - this.name = name; - } - - - @Column(name = "area") - public BigDecimal getArea() { - return area; - } - - - public void setArea(BigDecimal area) { - this.area = area; - } - - - @Column(name = "geom") - @Type(type = "org.hibernatespatial.GeometryUserType") - public Geometry getGeom() { - return geom; - } - - - public void setGeom(Geometry geom) { - this.geom = geom; - } - - - public static List<Catchment> getCatchments(int riverId, String name) { - Session session = SessionHolder.HOLDER.get(); - - Query query = session.createQuery( - "from Catchment where river.id =:river_id AND name=:name"); - query.setParameter("river_id", riverId); - query.setParameter("name", name); - - return query.list(); - } -} -// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/DGM.java Thu Feb 28 11:06:20 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/model/DGM.java Thu Feb 28 11:48:17 2013 +0100 @@ -6,10 +6,13 @@ import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.OneToOne; import javax.persistence.Table; +import javax.persistence.SequenceGenerator; import org.hibernate.Session; import org.hibernate.Query; @@ -22,6 +25,7 @@ public class DGM implements Serializable { private Integer id; + private Integer srid; private River river; @@ -40,6 +44,13 @@ } @Id + @SequenceGenerator( + name = "SEQUENCE_DEM_ID_SEQ", + sequenceName = "DEM_ID_SEQ", + allocationSize = 1) + @GeneratedValue( + strategy = GenerationType.SEQUENCE, + generator = "SEQUENCE_DEM_ID_SEQ") @Column(name = "id") public Integer getId() { return id; @@ -82,6 +93,14 @@ return path; } + public void setSrid(int srid) { + this.srid = srid; + } + + @Column(name = "srid") + public int getSrid() { + return srid; + } public static DGM getDGM(int id) { Session session = SessionHolder.HOLDER.get();
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/src/main/java/de/intevation/flys/model/FedState.java Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,45 @@ +package de.intevation.flys.model; + +import java.io.Serializable; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name = "fed_states") +public class FedState implements Serializable { + + private Integer id; + private String name; + + @Id + @Column(name = "id") + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + /** + * Get name. + * + * @return name of the Federal State as String. + */ + @Column(name = "name") + public String getName() { + return name; + } + + /** + * Set name. + * + * @param name the value to set. + */ + public void setName(String name) { + this.name = name; + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSKind.java Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,44 @@ +package de.intevation.flys.model; + +import java.io.Serializable; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name = "hws_kinds") +public class HWSKind implements Serializable { + private Integer id; + private String name; + + @Id + @Column(name = "id") + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + /** + * Get name. + * + * @return the kind of flood protection structure (Hochwasserschutzanlage) as String. + */ + @Column(name = "kind") + public String getName() { + return name; + } + + /** + * Set name. + * + * @param name the value to set. + */ + public void setName(String name) { + this.name = name; + } +}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java Thu Feb 28 11:48:17 2013 +0100 @@ -0,0 +1,336 @@ +package de.intevation.flys.model; + +import com.vividsolutions.jts.geom.Geometry; + +import de.intevation.flys.model.HWSKind; + +import java.io.Serializable; +import java.util.List; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.OneToOne; +import javax.persistence.Table; +import javax.persistence.SequenceGenerator; + +import org.hibernate.Session; +import org.hibernate.Query; +import org.hibernate.annotations.Type; + +import de.intevation.flys.backend.SessionHolder; + +@Entity +@Table(name = "hws_lines") +public class HWSLine implements Serializable { + + private Integer id; + + private Integer ogrFid; + private HWSKind kind; + private FedState fedState; + private River river; + private Integer offical; + private Integer shoreSide; + private String name; + private String path; + private String agency; + private String range; + private String source; + private String status_date; + private String description; + private Geometry geom; + + @Id + @SequenceGenerator( + name = "SEQUENCE_HWS_LINES_ID_SEQ", + sequenceName = "HWS_LINES_ID_SEQ", + allocationSize = 1) + @GeneratedValue( + strategy = GenerationType.SEQUENCE, + generator = "SEQUENCE_HWS_LINES_ID_SEQ") + @Column(name = "id") + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + @Column(name = "geom") + @Type(type = "org.hibernatespatial.GeometryUserType") + public Geometry getGeom() { + return geom; + } + + + public void setGeom(Geometry geom) { + this.geom = geom; + } + + /** + * Get ogrFid. + * + * @return ogrFid as Integer. + */ + @Column(name = "ogr_fid") + public Integer getOgrFid() { + return ogrFid; + } + + /** + * Set ogrFid. + * + * @param ogrFid the value to set. + */ + public void setOgrFid(Integer ogrFid) { + this.ogrFid = ogrFid; + } + + + /** + * Get offical. + * + * @return offical as Integer. + */ + @Column(name = "offical") + public Integer getOffical() { + return offical; + } + + /** + * Set offical. + * + * @param offical the value to set. + */ + public void setOffical(Integer offical) { + this.offical = offical; + } + + /** + * Get shoreSide. + * + * @return shoreSide as Integer. + */ + @Column(name = "shore_side") + public Integer getShoreSide() { + return shoreSide; + } + + /** + * Set shoreSide. + * + * @param shoreSide the value to set. + */ + public void setShoreSide(Integer shoreSide) { + this.shoreSide = shoreSide; + } + + /** + * Get name. + * + * @return name as String. + */ + @Column(name = "name") + public String getName() { + return name; + } + + /** + * Set name. + * + * @param name the value to set. + */ + public void setName(String name) { + this.name = name; + } + + /** + * Get path. + * + * @return path as String. + */ + @Column(name = "path") + public String getPath() { + return path; + } + + /** + * Set path. + * + * @param path the value to set. + */ + public void setPath(String path) { + this.path = path; + } + + /** + * Get agency. + * + * @return agency as String. + */ + @Column(name = "agency") + public String getAgency() { + return agency; + } + + /** + * Set agency. + * + * @param agency the value to set. 
+ */ + public void setAgency(String agency) { + this.agency = agency; + } + + /** + * Get range. + * + * @return range as String. + */ + @Column(name = "range") + public String getRange() { + return range; + } + + /** + * Set range. + * + * @param range the value to set. + */ + public void setRange(String range) { + this.range = range; + } + + /** + * Get source. + * + * @return source as String. + */ + @Column(name = "source") + public String getSource() { + return source; + } + + /** + * Set source. + * + * @param source the value to set. + */ + public void setSource(String source) { + this.source = source; + } + + /** + * Get status_date. + * + * @return status_date as String. + */ + @Column(name = "status_date") + public String getStatusDate() { + return status_date; + } + + /** + * Set status_date. + * + * @param status_date the value to set. + */ + public void setStatusDate(String status_date) { + this.status_date = status_date; + } + + /** + * Get description. + * + * @return description as String. + */ + @Column(name = "description") + public String getDescription() { + return description; + } + + /** + * Set description. + * + * @param description the value to set. + */ + public void setDescription(String description) { + this.description = description; + } + + /** + * Get kind. + * + * @return kind as HWSKind. + */ + @OneToOne + @JoinColumn(name = "kind_id") + public HWSKind getKind() { + return kind; + } + + /** + * Set kind. + * + * @param kind the value to set. + */ + public void setKind(HWSKind kind) { + this.kind = kind; + } + + /** + * Get fedState. + * + * @return fedState as FedState. + */ + @OneToOne + @JoinColumn(name = "fed_state_id") + public FedState getFedState() { + return fedState; + } + + /** + * Set fedState. + * + * @param fedState the value to set. + */ + public void setFedState(FedState fedState) { + this.fedState = fedState; + } + + /** + * Get river. + * + * @return river as River. + */ + @OneToOne + @JoinColumn(name = "river_id") + public River getRiver() { + return river; + } + + /** + * Set river. + * + * @param river the value to set. + */ + public void setRiver(River river) { + this.river = river; + } + + public static List<HWSLine> getLines(int riverId, String name) { + Session session = SessionHolder.HOLDER.get(); + + Query query = session.createQuery( + "from HWSLine where river.id =:river_id and name=:name"); + query.setParameter("river_id", riverId); + query.setParameter("name", name); + + return query.list(); + } +}
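HWSLine maps the new hws_lines table and, like the removed Line model, offers a small finder that relies on the session bound to SessionHolder. A hedged usage sketch; river id and name are placeholders:

    import java.util.List;

    import de.intevation.flys.model.HWSLine;

    public class HWSLineSketch {
        // Assumes the FLYS backend has already bound a Hibernate session
        // to SessionHolder, as HWSLine.getLines() relies on it.
        public static void logLines(int riverId) {
            List<HWSLine> lines = HWSLine.getLines(riverId, "Beispieldeich");
            for (HWSLine line : lines) {
                System.out.println(line.getName() + " -> " + line.getPath());
            }
        }
    }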
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java Thu Feb 28 11:48:17 2013 +0100
@@ -0,0 +1,441 @@
+package de.intevation.flys.model;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
+
+import org.hibernate.annotations.Type;
+
+@Entity
+@Table(name = "hws_points")
+public class HWSPoint implements Serializable {
+
+    private Integer id;
+
+    private Integer ogrFid;
+    private HWSKind kind;
+    private FedState fedState;
+    private River river;
+    private Integer offical;
+    private Integer shoreSide;
+    private String name;
+    private String path;
+    private String agency;
+    private String range;
+    private String source;
+    private String statusDate;
+    private String description;
+    private BigDecimal freeboard;
+    private BigDecimal dikeKm;
+    private BigDecimal z;
+    private BigDecimal zTarget;
+    private BigDecimal ratedLevel;
+    private Geometry geom;
+
+    @Id
+    @SequenceGenerator(
+        name = "SEQUENCE_HWS_POINTS_ID_SEQ",
+        sequenceName = "HWS_POINTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HWS_POINTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+    /**
+     * Get ogrFid.
+     *
+     * @return ogrFid as Integer.
+     */
+    @Column(name = "ogr_fid")
+    public Integer getOgrFid() {
+        return ogrFid;
+    }
+
+    /**
+     * Set ogrFid.
+     *
+     * @param ogrFid the value to set.
+     */
+    public void setOgrFid(Integer ogrFid) {
+        this.ogrFid = ogrFid;
+    }
+
+
+    /**
+     * Get offical.
+     *
+     * @return offical as Integer.
+     */
+    @Column(name = "offical")
+    public Integer getOffical() {
+        return offical;
+    }
+
+    /**
+     * Set offical.
+     *
+     * @param offical the value to set.
+     */
+    public void setOffical(Integer offical) {
+        this.offical = offical;
+    }
+
+    /**
+     * Get shoreSide.
+     *
+     * @return shoreSide as Integer.
+     */
+    @Column(name = "shore_side")
+    public Integer getShoreSide() {
+        return shoreSide;
+    }
+
+    /**
+     * Set shoreSide.
+     *
+     * @param shoreSide the value to set.
+     */
+    public void setShoreSide(Integer shoreSide) {
+        this.shoreSide = shoreSide;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    /**
+     * Get path.
+     *
+     * @return path as String.
+     */
+    @Column(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * Set path.
+     *
+     * @param path the value to set.
+     */
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    /**
+     * Get agency.
+     *
+     * @return agency as String.
+     */
+    @Column(name = "agency")
+    public String getAgency() {
+        return agency;
+    }
+
+    /**
+     * Set agency.
+     *
+     * @param agency the value to set.
+     */
+    public void setAgency(String agency) {
+        this.agency = agency;
+    }
+
+    /**
+     * Get range.
+     *
+     * @return range as String.
+     */
+    @Column(name = "range")
+    public String getRange() {
+        return range;
+    }
+
+    /**
+     * Set range.
+     *
+     * @param range the value to set.
+     */
+    public void setRange(String range) {
+        this.range = range;
+    }
+
+    /**
+     * Get source.
+     *
+     * @return source as String.
+     */
+    @Column(name = "source")
+    public String getSource() {
+        return source;
+    }
+
+    /**
+     * Set source.
+     *
+     * @param source the value to set.
+     */
+    public void setSource(String source) {
+        this.source = source;
+    }
+
+    /**
+     * Get statusDate.
+     *
+     * @return statusDate as String.
+     */
+    @Column(name = "status_date")
+    public String getStatusDate() {
+        return statusDate;
+    }
+
+    /**
+     * Set statusDate.
+     *
+     * @param statusDate the value to set.
+     */
+    public void setStatusDate(String statusDate)
+    {
+        this.statusDate = statusDate;
+    }
+
+    /**
+     * Get description.
+     *
+     * @return description as String.
+     */
+    @Column(name = "description")
+    public String getDescription()
+    {
+        return description;
+    }
+
+    /**
+     * Set description.
+     *
+     * @param description the value to set.
+     */
+    public void setDescription(String description)
+    {
+        this.description = description;
+    }
+
+    /**
+     * Get freeboard.
+     *
+     * @return freeboard as BigDecimal.
+     */
+    @Column(name = "freeboard")
+    public BigDecimal getFreeboard()
+    {
+        return freeboard;
+    }
+
+    /**
+     * Set freeboard.
+     *
+     * @param freeboard the value to set.
+     */
+    public void setFreeboard(BigDecimal freeboard)
+    {
+        this.freeboard = freeboard;
+    }
+
+    /**
+     * Get dikeKm.
+     *
+     * @return dikeKm as BigDecimal.
+     */
+    @Column(name = "dike_km")
+    public BigDecimal getDikeKm()
+    {
+        return dikeKm;
+    }
+
+    /**
+     * Set dikeKm.
+     *
+     * @param dikeKm the value to set.
+     */
+    public void setDikeKm(BigDecimal dikeKm)
+    {
+        this.dikeKm = dikeKm;
+    }
+
+    /**
+     * Get z.
+     *
+     * @return z as BigDecimal.
+     */
+    @Column(name = "z")
+    public BigDecimal getZ()
+    {
+        return z;
+    }
+
+    /**
+     * Set z.
+     *
+     * @param z the value to set.
+     */
+    public void setZ(BigDecimal z)
+    {
+        this.z = z;
+    }
+
+    /**
+     * Get zTarget.
+     *
+     * @return zTarget as BigDecimal.
+     */
+    @Column(name = "z_target")
+    public BigDecimal getZTarget()
+    {
+        return zTarget;
+    }
+
+    /**
+     * Set zTarget.
+     *
+     * @param zTarget the value to set.
+     */
+    public void setZTarget(BigDecimal zTarget)
+    {
+        this.zTarget = zTarget;
+    }
+
+    /**
+     * Get ratedLevel.
+     *
+     * @return ratedLevel as BigDecimal.
+     */
+    @Column(name = "rated_level")
+    public BigDecimal getRatedLevel()
+    {
+        return ratedLevel;
+    }
+
+    /**
+     * Set ratedLevel.
+     *
+     * @param ratedLevel the value to set.
+     */
+    public void setRatedLevel(BigDecimal ratedLevel)
+    {
+        this.ratedLevel = ratedLevel;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as HWSKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind_id")
+    public HWSKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(HWSKind kind)
+    {
+        this.kind = kind;
+    }
+
+    /**
+     * Get fedState.
+     *
+     * @return fedState as FedState.
+     */
+    @OneToOne
+    @JoinColumn(name = "fed_state_id")
+    public FedState getFedState()
+    {
+        return fedState;
+    }
+
+    /**
+     * Set fedState.
+     *
+     * @param fedState the value to set.
+     */
+    public void setFedState(FedState fedState)
+    {
+        this.fedState = fedState;
+    }
+
+    /**
+     * Get river.
+     *
+     * @return river as River.
+     */
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver()
+    {
+        return river;
+    }
+
+    /**
+     * Set river.
+     *
+     * @param river the value to set.
+     */
+    public void setRiver(River river)
+    {
+        this.river = river;
+    }
+}
+
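Usage sketch (not part of the changeset): the new HWSPoint entity can be queried in the same SessionHolder/HQL style as the other model classes in flys-backend. The helper class and method name below are hypothetical and only illustrate the mapping; the HQL mirrors the getHws()/getLines() helpers that this commit removes.

    import java.util.List;

    import org.hibernate.Query;
    import org.hibernate.Session;

    import de.intevation.flys.backend.SessionHolder;
    import de.intevation.flys.model.HWSPoint;

    public class HWSPointQueries {

        /** Fetch all HWS points of a river with a given name (illustrative only). */
        public static List<HWSPoint> getPoints(int riverId, String name) {
            Session session = SessionHolder.HOLDER.get();

            Query query = session.createQuery(
                "from HWSPoint where river.id = :river_id and name = :name");
            query.setParameter("river_id", riverId);
            query.setParameter("name", name);

            return query.list();
        }
    }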
--- a/flys-backend/src/main/java/de/intevation/flys/model/Hws.java Thu Feb 28 11:06:20 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,106 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.List;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-import org.hibernate.annotations.Type;
-
-import com.vividsolutions.jts.geom.LineString;
-
-import de.intevation.flys.backend.SessionHolder;
-
-
-@Entity
-@Table(name = "hws")
-public class Hws
-implements Serializable
-{
-    private Integer id;
-    private String facility;
-    private String type;
-    private River river;
-    private LineString geom;
-
-    public Hws() {
-    }
-
-
-    @Id
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-
-    @OneToOne
-    @JoinColumn(name = "river_id")
-    public River getRiver() {
-        return river;
-    }
-
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-
-    @Column(name = "hws_facility")
-    public String getFacility() {
-        return facility;
-    }
-
-
-    public void setFacility(String facility) {
-        this.facility = facility;
-    }
-
-
-    @Column(name = "type")
-    public String getType() {
-        return type;
-    }
-
-
-    public void setType(String type) {
-        this.type = type;
-    }
-
-
-    @Column(name = "geom")
-    @Type(type = "org.hibernatespatial.GeometryUserType")
-    public LineString getGeom() {
-        return geom;
-    }
-
-
-    public void setGeom(LineString geom) {
-        this.geom = geom;
-    }
-
-
-    public static List<Hws> getHws(int riverId, String name) {
-        Session session = SessionHolder.HOLDER.get();
-
-        Query query = session.createQuery(
-            "from Hws where river.id =:river_id and name=:name");
-        query.setParameter("river_id", riverId);
-        query.setParameter("name", name);
-
-        return query.list();
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Thu Feb 28 11:48:17 2013 +0100
@@ -5,10 +5,13 @@
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
 
 import org.hibernate.Session;
 import org.hibernate.Query;
 
@@ -25,15 +28,25 @@
 implements Serializable
 {
     private Integer id;
+    private SectieKind sectie;
+    private SobekKind sobek;
     private String name;
     private River river;
    private LineString geom;
+    private BoundaryKind kind;
 
     public HydrBoundary() {
     }
 
 
     @Id
+    @SequenceGenerator(
+        name = "SEQUENCE_HYDR_BOUNDARIES_ID_SEQ",
+        sequenceName = "HYDR_BOUNDARIES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYDR_BOUNDARIES_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
         return id;
@@ -44,7 +57,6 @@
         this.id = id;
     }
 
-
     @OneToOne
     @JoinColumn(name = "river_id")
     public River getRiver() {
@@ -90,5 +102,71 @@
         return query.list();
     }
 
+
+    /**
+     * Get sectie.
+     *
+     * @return sectie as SectieKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sectie")
+    public SectieKind getSectie()
+    {
+        return sectie;
+    }
+
+    /**
+     * Set sectie.
+     *
+     * @param sectie the value to set.
+     */
+    public void setSectie(SectieKind sectie)
+    {
+        this.sectie = sectie;
+    }
+
+    /**
+     * Get sobek.
+     *
+     * @return sobek as SobekKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sobek")
+    public SobekKind getSobek()
+    {
+        return sobek;
+    }
+
+    /**
+     * Set sobek.
+     *
+     * @param sobek the value to set.
+     */
+    public void setSobek(SobekKind sobek)
+    {
+        this.sobek = sobek;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as BoundaryKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind")
+    public BoundaryKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(BoundaryKind kind)
+    {
+        this.kind = kind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java Thu Feb 28 11:48:17 2013 +0100
@@ -5,10 +5,13 @@
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
 
 import org.hibernate.Session;
 import org.hibernate.Query;
 
@@ -28,12 +31,22 @@
     private String name;
     private River river;
     private Geometry geom;
+    private SectieKind sectie;
+    private SobekKind sobek;
+    private BoundaryKind kind;
 
     public HydrBoundaryPoly() {
     }
 
 
     @Id
+    @SequenceGenerator(
+        name = "SEQUENCE_HYDR_BOUNDARIES_POLY_ID_SEQ",
+        sequenceName = "HYDR_BOUNDARIES_POLY_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYDR_BOUNDARIES_POLY_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
         return id;
@@ -90,5 +103,71 @@
         return query.list();
     }
 
+
+    /**
+     * Get sectie.
+     *
+     * @return sectie as SectieKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sectie")
+    public SectieKind getSectie()
+    {
+        return sectie;
+    }
+
+    /**
+     * Set sectie.
+     *
+     * @param sectie the value to set.
+     */
+    public void setSectie(SectieKind sectie)
+    {
+        this.sectie = sectie;
+    }
+
+    /**
+     * Get sobek.
+     *
+     * @return sobek as SobekKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sobek")
+    public SobekKind getSobek()
+    {
+        return sobek;
+    }
+
+    /**
+     * Set sobek.
+     *
+     * @param sobek the value to set.
+     */
+    public void setSobek(SobekKind sobek)
+    {
+        this.sobek = sobek;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as BoundaryKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind")
+    public BoundaryKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(BoundaryKind kind)
+    {
+        this.kind = kind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
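For orientation only: with the new sectie, sobek and kind references on HydrBoundary and HydrBoundaryPoly, a caller could filter boundaries by their BoundaryKind in the same HQL style as the existing helpers of these classes. The helper class below is a sketch and not part of this changeset; the property-path join (kind.id) is standard HQL for the mapped one-to-one reference.

    import java.util.List;

    import org.hibernate.Query;
    import org.hibernate.Session;

    import de.intevation.flys.backend.SessionHolder;
    import de.intevation.flys.model.HydrBoundary;

    public class BoundaryQueries {

        /** All boundaries of a river with the given boundary kind id (illustrative only). */
        public static List<HydrBoundary> getByKind(int riverId, int kindId) {
            Session session = SessionHolder.HOLDER.get();

            Query query = session.createQuery(
                "from HydrBoundary where river.id = :river_id and kind.id = :kind_id");
            query.setParameter("river_id", riverId);
            query.setParameter("kind_id", kindId);

            return query.list();
        }
    }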
--- a/flys-backend/src/main/java/de/intevation/flys/model/Line.java Thu Feb 28 11:06:20 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,108 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.math.BigDecimal;
-import java.util.List;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import org.hibernate.annotations.Type;
-
-import com.vividsolutions.jts.geom.LineString;
-
-import de.intevation.flys.backend.SessionHolder;
-
-
-@Entity
-@Table(name = "lines")
-public class Line
-implements Serializable
-{
-    private Integer id;
-    private String kind;
-    private River river;
-    private LineString geom;
-    private BigDecimal z;
-
-    public Line() {
-    }
-
-
-    @Id
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-
-    @OneToOne
-    @JoinColumn(name = "river_id")
-    public River getRiver() {
-        return river;
-    }
-
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-
-    @Column(name = "kind")
-    public String getKind() {
-        return kind;
-    }
-
-
-    public void setKind(String kind) {
-        this.kind = kind;
-    }
-
-
-    @Column(name = "geom")
-    @Type(type = "org.hibernatespatial.GeometryUserType")
-    public LineString getGeom() {
-        return geom;
-    }
-
-
-    public void setGeom(LineString geom) {
-        this.geom = geom;
-    }
-
-
-    @Column(name = "z")
-    public BigDecimal getZ() {
-        return z;
-    }
-
-
-    public void setZ(BigDecimal z) {
-        this.z = z;
-    }
-
-
-    public static List<Line> getLines(int riverId, String name) {
-        Session session = SessionHolder.HOLDER.get();
-
-        Query query = session.createQuery(
-            "from Line where river.id =:river_id and name=:name");
-        query.setParameter("river_id", riverId);
-        query.setParameter("name", name);
-
-        return query.list();
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SectieKind.java Thu Feb 28 11:48:17 2013 +0100
@@ -0,0 +1,44 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "sectie_kinds")
+public class SectieKind implements Serializable {
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the kind of sectie as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SobekKind.java Thu Feb 28 11:48:17 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "sobek_kinds")
+public class SobekKind implements Serializable {
+
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the kind of sobek as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
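SectieKind and SobekKind are plain lookup tables (id, name). If an importer or service needs to resolve a kind row by its name, a by-name HQL lookup in the style of the other model helpers would look roughly like the sketch below. The class and method are illustrative assumptions, not part of the commit.

    import java.util.List;

    import org.hibernate.Query;
    import org.hibernate.Session;

    import de.intevation.flys.backend.SessionHolder;
    import de.intevation.flys.model.SobekKind;

    public class KindLookup {

        /** Return the SobekKind with the given name, or null if unknown (illustrative only). */
        public static SobekKind getSobekKind(String name) {
            Session session = SessionHolder.HOLDER.get();

            Query query = session.createQuery(
                "from SobekKind where name = :name");
            query.setParameter("name", name);

            List<SobekKind> kinds = query.list();
            return kinds.isEmpty() ? null : kinds.get(0);
        }
    }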
--- a/flys-client/pom.xml Thu Feb 28 11:06:20 2013 +0100
+++ b/flys-client/pom.xml Thu Feb 28 11:48:17 2013 +0100
@@ -227,6 +227,11 @@
       <id>org.mapfish</id>
       <url>http://dev.mapfish.org/maven/repository</url>
     </repository>
+    <repository>
+      <id>osgeo</id>
+      <name>Open Source Geospatial Foundation Repository</name>
+      <url>http://download.osgeo.org/webdav/geotools/</url>
+    </repository>
   </repositories>
 </project>