changeset 5279:8e66a200a0b9

merge
author Tom Gottfried <tom.gottfried@intevation.de>
date Wed, 13 Mar 2013 19:07:52 +0100
parents 3ddf5843ede3 (diff) 2435968b30b1 (current diff)
children dba703edfff1
files flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java
diffstat 101 files changed, 1585 insertions(+), 2690 deletions(-)
--- a/.hgtags	Thu Mar 07 09:46:11 2013 +0100
+++ b/.hgtags	Wed Mar 13 19:07:52 2013 +0100
@@ -29,3 +29,9 @@
 f459911fdbfbe2b2d23e06faba4e338514dd7b54 2.9.10
 f459911fdbfbe2b2d23e06faba4e338514dd7b54 2.9.10
 8c65acf01adc7083c5936d0f8acf67374c97140b 2.9.10
+42bb6ff78d1b734341732772ab24db2a913311b0 2.9.11
+3b86bf214d53da51d85cd8c8ecfeec71aa9da9e4 2.9.12
+3b86bf214d53da51d85cd8c8ecfeec71aa9da9e4 2.9.12
+0000000000000000000000000000000000000000 2.9.12
+0000000000000000000000000000000000000000 2.9.12
+88e3473a38467e8b5bb7d99e92c3f1a795515bf5 2.9.12
--- a/contrib/make_flys_release/README	Thu Mar 07 09:46:11 2013 +0100
+++ b/contrib/make_flys_release/README	Wed Mar 13 19:07:52 2013 +0100
@@ -1,34 +1,42 @@
 Configuration:
 ==============
-The `confs` directory contains configuration files that have to be adapted
-for each FLYS installation (ports, hosts, database connection, etc.).
-
-In `make_flys_release.sh` the variable `RELEASE` selects a tag from the
-HG repository which is then used to build FLYS.
+The make_release script is configured via environment variables, or by
+changing the corresponding variables directly in the script.
 
-In addition, `make_flys_release` has to be configured for either an Oracle
-or a PostgreSQL database. The directory contains database-specific libraries
-in the `libs_oracle` and `libs_postgresql` directories. Currently, lines 71-77
-of `make_flys_release` determine which libs (Oracle / PostgreSQL) are removed
-from the target directory again.
+Important variables are:
+FLYS_SOURCE_DIR
+TOMCAT_PORT
+MAPSERVER_URL
+WIKI_URL
+LOG_DIR
+DEVELOPER
+DEFAULT_WD
 
-TODOS:
-======
-- react to the return codes of the individual calls (mvn package, etc.) and
-  abort the build if necessary
-- integrate a config option for building for Oracle or PostgreSQL
-- integrate the PostgreSQL / Oracle libs into the build process in a better way
+# Seddb Configuration
+SEDDBURL
+SEDDBPORT
+SEDDBBACK
+SEDDBUSER
+SEDDBPASS
+
+# Backend configuration
+BACKENDURL
+BACKENDPORT
+BACKENDBACK
+BACKENDUSER
+BACKENDPASS
 
 Process:
 ========
 After the configuration has been adapted, the script can be started with
-  sh make_release.sh
+  sh make_release.sh VERSION
 from the console. Afterwards the sources of dive4elements, the HTTP client
 and FLYS are checked out from the HG repository via
-SSH. For this, the name of the HG user has to be adjusted in the second
-line of `make_flys_release.sh`. The sources are then built with Maven.
+SSH and placed in FLYS_SOURCE_DIR.
+
+If the option -t is used to additionally tag this version, the account name
+used to push the tag has to be set as DEVELOPER in make_flys_release.sh.
 
 For the client, OpenLayers-2.11 is downloaded and moved into the client.
 Currently the complete OpenLayers-2.11 directory is copied into the client
@@ -41,18 +49,34 @@
 
 Importer:
 =========
-The script to build and package the importer is located at
-bin/make-importer-package.sh; it has to be adapted by specifying the paths
-under which additional packages can be found.
-rpm2cpio has to be installed to build the package.
+The script to build and package the importer is located at
+bin/make-importer-package.sh
+It has to be adapted by setting a few paths.
 
-The configured directories have to contain:
-EXTRAS:
-    - libgdal1-1.9.0-intevation1.x86\_64.rpm
-    - gdal-1.9.0-intevation1.x86\_64.rpm 
-    - python-gdal-1.9.0-intevation1.x86\_64.rpm
-ORACLE:
-    - cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm
-    - instantclient-basic-linux-x86-64-11.2.0.2.0.zip
-    - instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
-    - instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
+If you want to build a "standalone" package, you can pass this script a
+parameter pointing to a tarball that is packed into the importer package.
+This tarball can contain dependencies (gdal / proj / oracle).
+The script to create this tarball for SLES is
+bin/make-opt-package.sh
+
+Deployment:
+===========
+The tarball can be transferred to a target system and unpacked there.
+On the Intevation test systems the FLYS installations are usually located
+under /opt/flys/flys-version
+
+Afterwards, deploy the flys-client into the webapps directory of Tomcat
+(e.g. /usr/share/tomcat6/webapps )
+and check / adapt the web.xml in WEB-INF if necessary.
+
+For a setup with Apache vhosts, a corresponding vhost has to be added to the
+Apache configuration.
+
+Afterwards, make sure that matching WMS scripts are available in the
+mapserver. /srv/www/cgi-bin has to contain river-wms and user-wms files
+that point to the correct, current version.
+The WMS URLs are configured in server/conf/floodmap.xml and server/conf/rivermap.xml.
+
+Now the server can be started. To do so, change into the corresponding server
+directory and execute ./bin/run. The server has to be started with this working
+directory.
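
A minimal sketch of the deployment steps described above, assuming a hypothetical version 2.9.12, a hypothetical target host named "target", and the default paths from this README; the actual tarball, war file and Tomcat paths may differ:

  # copy and unpack the release tarball on the target system
  scp flys-2.9.12.tar.gz target:/opt/flys/
  ssh target 'cd /opt/flys && tar xzf flys-2.9.12.tar.gz'
  # deploy the web client into Tomcat's webapps directory
  cp /opt/flys/flys-2.9.12/client/flys-2.9.12.war /usr/share/tomcat6/webapps/
  # check WEB-INF/web.xml, the Apache vhost and the river-wms/user-wms scripts as described above,
  # then start the artifact server from its own directory
  cd /opt/flys/flys-2.9.12/server && ./bin/run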
--- a/contrib/make_flys_release/bin/make-importer-package.sh	Thu Mar 07 09:46:11 2013 +0100
+++ b/contrib/make_flys_release/bin/make-importer-package.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -5,20 +5,29 @@
 
 # The working directory. Resulting tarball will be placed in the directory above.
 PKG_DIR=/tmp/flys-importer
-# Path to oracle zip archives and an oracle_cx rpm
-ORACLE=/path/to/oracle/archives
 # Default conf
 CONF_DIR=/path/to/conf/dir
 # Path to the flys checkout
 FLYS_DIR=/path/to/flys/root
-# Extra packages
-EXTRAS=/path/to/gdal
+# Tarball that will be extracted into flys-importer/opt
+EXTRAS=$1
 
 rm -fr $PKG_DIR
 mkdir -p $PKG_DIR/hydr_morph
 mkdir -p $PKG_DIR/geodaesie
 mkdir -p $PKG_DIR/opt/lib64
 mkdir -p $PKG_DIR/schema
+mkdir -p $PKG_DIR/conf
+
+cat > "$PKG_DIR/conf/log4j.properties" << "EOF"
+log4j.rootLogger=DEBUG, IMPORTER
+log4j.appender.IMPORTER.layout=org.apache.log4j.PatternLayout
+log4j.appender.IMPORTER.layout.ConversionPattern=%d [%t] %-5p %c - %m%n
+log4j.appender.IMPORTER=org.apache.log4j.RollingFileAppender
+log4j.appender.IMPORTER.File=./import.log
+log4j.appender.IMPORTER.MaxFileSize=100000KB
+log4j.appender.IMPORTER.MaxBackupIndex=10
+EOF
 
 cd ${FLYS_DIR}/flys-backend
 mvn -f pom-oracle.xml clean compile assembly:single
@@ -32,44 +41,19 @@
     ${FLYS_DIR}/flys-backend/contrib/run_hydr_morph.sh \
     ${FLYS_DIR}/flys-backend/contrib/import_river.sh \
     $PKG_DIR
-cp -r ${CONF_DIR} $PKG_DIR
 cp ${FLYS_DIR}/flys-backend/doc/annotation-types.xml $PKG_DIR/conf
-ln -s /usr/lib64/libproj.so.0.6.6 $PKG_DIR/opt/lib64/libproj.so # workaround for bad packaging
-rm -rf /tmp/other_rpms
-mkdir /tmp/other_rpms
-cd /tmp/other_rpms
-
-rpm2cpio ${EXTRAS}/libgdal1-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
-rpm2cpio ${EXTRAS}/gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
-rpm2cpio ${EXTRAS}/python-gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
-rpm2cpio ${ORACLE}/cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm | cpio -i --make-directories
-cp -r /tmp/other_rpms/usr/* $PKG_DIR/opt
-rm -rf /tmp/other_rpms
+if [ -f "$EXTRAS" ]; then
+    cd $PKG_DIR
+    tar -xzf "$EXTRAS"
+fi
 
 cp ${FLYS_DIR}/flys-backend/doc/schema/*.sql $PKG_DIR/schema
 cp ${FLYS_DIR}/flys-backend/doc/documentation/de/importer-manual.pdf $PKG_DIR
 
-# Oracle (Do not distribute)
-unzip ${ORACLE}/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
-unzip ${ORACLE}/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
-unzip ${ORACLE}/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
-
-mkdir $PKG_DIR//opt/instantclient_11_2/lib
-cd $PKG_DIR//opt/instantclient_11_2/lib
-ln -s ../libclntsh.so.11.1 .
-ln -s ../libclntsh.so.11.1 libclntsh.so
-ln -s ../libnnz11.so .
-ln -s ../libocci.so.11.1 .
-ln -s ../libocci.so.11.1 libocci.so
-ln -s ../libociei.so .
-ln -s ../libocijdbc11.so .
-ln -s ../libsqlplusic.so .
-ln -s ../libsqlplus.so .
-# End Oracle
-
 sed -i 's/shpimporter\/shp/geodaesie\/shp/' $PKG_DIR/run_geo.sh
 
 cd $PKG_DIR/..
 DATE=$(date +%Y%m%d%H%M)
-tar -czf flys-importer${DATE}.tar.gz flys-importer
-sha1sum flys-importer${DATE}.tar.gz > flys-importer${DATE}.tar.gz.sha1
+tar -czf flys-importer_${DATE}.tar.gz flys-importer
+sha1sum flys-importer_${DATE}.tar.gz > flys-importer_${DATE}.tar.gz.sha1
+echo Package is at: `readlink -f flys-importer_${DATE}.tar.gz`
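
For reference, a possible invocation of this packaging script, passing the optional dependency tarball produced by bin/make-opt-package.sh (the path below is simply that script's default output location):

  sh contrib/make_flys_release/bin/make-importer-package.sh /tmp/gdalbuild/flys-importer-opt.tar.gz

If no such tarball is passed, the importer package is built without the bundled opt/ dependencies.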
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/make_flys_release/bin/make-opt-package.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -0,0 +1,86 @@
+# Required packages are the build essentials (make, gcc, etc.)
+# and:
+# postgresql-devel libexpat-devel python-devel
+set -e
+# This script is intended to be run on SUSE Linux Enterprise (SLES)
+
+# Path to the oracle zip archives
+ORACLE_LOC=/home/intevation
+# Path to the Source tarballs of gdal-1.9.2.tar.gz proj-4.8.0.tar.gz cx_Oracle-5.1.2.tar.gz
+SOURCES=/home/intevation/Downloads
+#mkdir -p $SOURCES
+#cd $SOURCES
+#wget http://download.osgeo.org/gdal/gdal-1.9.2.tar.gz
+#wget http://download.osgeo.org/proj/proj-4.8.0.tar.gz
+#wget http://downloads.sourceforge.net/project/cx-oracle/5.1.2/cx_Oracle-5.1.2.tar.gz
+
+DEVELDIR=/tmp/gdalbuild
+INSTALL_PREFIX=$DEVELDIR/opt
+export ORACLE_HOME=$DEVELDIR/opt/instantclient_11_2
+export LD_LIBRARY_PATH=$ORACLE_HOME/lib:$LD_LIBRARY_PATH
+
+rm -rf $DEVELDIR
+mkdir -p $DEVELDIR
+
+mkdir -p $SOURCES
+cd $SOURCES
+wget http://download.osgeo.org/gdal/gdal-1.9.2.tar.gz
+wget http://download.osgeo.org/proj/proj-4.8.0.tar.gz
+wget http://downloads.sourceforge.net/project/cx-oracle/5.1.2/cx_Oracle-5.1.2.tar.gz
+
+
+# Oracle
+unzip $ORACLE_LOC/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt
+unzip $ORACLE_LOC/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt
+unzip $ORACLE_LOC/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt
+mkdir $ORACLE_HOME/lib
+cd $ORACLE_HOME/lib
+ln -s ../libclntsh.so.11.1 .
+ln -s ../libclntsh.so.11.1 libclntsh.so
+ln -s ../libnnz11.so .
+ln -s ../libocci.so.11.1 .
+ln -s ../libocci.so.11.1 libocci.so
+ln -s ../libociei.so .
+ln -s ../libocijdbc11.so .
+ln -s ../libsqlplusic.so .
+ln -s ../libsqlplus.so .
+cd $ORACLE_HOME
+ln -s libclntsh.so.11.1 libclntsh.so
+
+cd $DEVELDIR
+tar -xf $SOURCES/proj-4.8.0.tar.gz 
+cd proj-4.8.0
+./configure --prefix=$INSTALL_PREFIX && make && make install
+
+
+cd $DEVELDIR
+tar -xf $SOURCES/gdal-1.9.2.tar.gz 
+cd gdal-1.9.2
+patch -l -p0 << "EOF"
+Index: ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp
+===================================================================
+--- ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp    (revision 25700)
++++ ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp    (working copy)
+@@ -264,7 +264,7 @@
+         char **papszResult;
+         int iDim = -1;
+ 
+-               oDimCmd.Append( "SELECT COUNT(*) FROM ALL_SDO_GEOM_METADATA u," );
++               oDimCmd.Append( "SELECT COUNT(*) FROM USER_SDO_GEOM_METADATA u," );
+                oDimCmd.Append( "  TABLE(u.diminfo) t" );
+                oDimCmd.Append( "  WHERE u.table_name = '" );
+                oDimCmd.Append( osTableName );
+EOF
+LDFLAGS="-Wl,--no-as-needed" ./configure --with-python --with-oci=$ORACLE_HOME \
+    --prefix=$INSTALL_PREFIX && make && make install
+
+cd $DEVELDIR
+tar -xf $SOURCES/cx_Oracle-5.1.2.tar.gz
+cd cx_Oracle-5.1.2
+python setup.py build
+python setup.py install --prefix=$INSTALL_PREFIX
+
+cd $DEVELDIR
+tar -czf flys-importer-opt.tar.gz opt
+echo "Package is:"
+readlink -f flys-importer-opt.tar.gz
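
An optional sanity check of the resulting opt tree before it is packaged into the importer; this is only a sketch assuming the default DEVELDIR above, and the lib vs. lib64 paths may differ depending on how the configure prefixes resolve:

  export LD_LIBRARY_PATH=/tmp/gdalbuild/opt/lib:/tmp/gdalbuild/opt/instantclient_11_2:$LD_LIBRARY_PATH
  /tmp/gdalbuild/opt/bin/ogrinfo --formats | grep -i oci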
--- a/contrib/make_flys_release/confs/artifact-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<database>
-    <user>SA</user>
-    <password></password>
-    <url>jdbc:h2:${artifacts.config.dir}/../artifactsdb/artifacts</url>
-</database>
--- a/contrib/make_flys_release/confs/backend-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<backend-database>
-
-    <user>flys293</user>
-    	<password>flys293</password>
-    	<dialect>org.hibernate.dialect.PostgreSQLDialect</dialect>
-    	<driver>org.postgresql.Driver</driver>
-    <url>jdbc:postgresql://czech-republic.atlas.intevation.de:5432/flys293</url>
-
-    <!--
-	<user>flys27</user>
-	<password>flys27</password>
-	<dialect>org.hibernatespatial.oracle.OracleSpatial10gDialect</dialect>
-	<driver>oracle.jdbc.driver.OracleDriver</driver>
-	<url>jdbc:oracle:thin:@//czech-republic.atlas.intevation.de:1521/XE</url>
-    -->
-
-</backend-database>
--- a/contrib/make_flys_release/confs/datacage-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<datacage>
-    <user>SA</user>
-    <password></password>
-    <url>jdbc:h2:${artifacts.config.dir}/../datacagedb/datacage</url>
-</datacage>
--- a/contrib/make_flys_release/confs/floodmap.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<floodmap>
-    <shapefile-path value="${artifacts.config.dir}/../shapefiles"/>
-    <mapserver>
-        <server path="http://czech-republic.intevation.de/cgi-bin/flys-default"/>
-        <mapfile path="${artifacts.config.dir}/../flys.map"/>
-        <templates path="${artifacts.config.dir}/mapserver/"/>
-        <map-template path="mapfile.vm"/>
-    </mapserver>
-
-    <velocity>
-        <logfile path="${artifacts.config.dir}/../velocity_log.log"/>
-    </velocity>
-
-    <river name="Saar">
-        <srid value="31466"/>
-        <river-wms url="http://czech-republic.intevation.de/cgi-bin/user-wms" layers="FLYS-Map"/>
-        <background-wms url="http://osm.wheregroup.com/cgi-bin/osm_basic.xml?" layers="OSM_Basic"/>
-    </river>
-    <river name="Mosel">
-        <srid value="31466"/>
-        <river-wms url="http://czech-republic.intevation.de/cgi-bin/user-wms" layers="FLYS-Map"/>
-        <background-wms url="http://osm.wheregroup.com/cgi-bin/osm_basic.xml?" layers="OSM_Basic"/>
-    </river>
-    <river name="Elbe">
-        <srid value="31467"/>
-        <river-wms url="http://czech-republic.intevation.de/cgi-bin/elbe-wms"/>
-        <background-wms url="http://osm.wheregroup.com/cgi-bin/osm_basic.xml?" layers="OSM_Basic"/>
-    </river>
-</floodmap>
--- a/contrib/make_flys_release/confs/mapserver/fontset.txt	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-FreeSans /usr/share/splashy/themes/default/FreeSans.ttf
-DefaultFont /usr/share/splashy/themes/default/FreeSans.ttf
--- a/contrib/make_flys_release/confs/rest-server.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<rest-server>
-    <!--  The port which the ArtifactDatabase (ArtifactServer) will bind to. -->
-    <port>8999</port>
-    <listen>localhost</listen>
-</rest-server>
--- a/contrib/make_flys_release/confs/rivermap.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--// configuration fragment for static river WMS //-->
-<rivermap>
-    <mapserver>
-        <server path="http://example.com/cgi-bin/"/>
-        <mapfile path="${artifacts.config.dir}/../rivers.map"/>
-        <templates path="${artifacts.config.dir}/mapserver/"/>
-        <map-template path="river-mapfile.vm"/>
-    </mapserver>
-
-    <velocity>
-        <logfile path="${artifacts.config.dir}/../rivermap_velocity.log"/>
-    </velocity>
-    
-    <river name="Saar">
-        <srid value="31467"/>
-        <river-wms url="http://example.com/cgi-bin/river-wms" layers="Saar"/>
-        <background-wms url="http://osm.intevation.de/mapcache/?" layers="flys-wms"/>
-    </river>
-    <river name="Mosel">
-        <srid value="31467"/>
-        <river-wms url="http://example.com/cgi-bin/river-wms" layers="Mosel"/>
-        <background-wms url="http://osm.intevation.de/mapcache/?" layers="flys-wms"/>
-    </river>
-    <river name="Elbe">
-        <srid value="31467"/>
-        <river-wms url="http://example.com/cgi-bin/river-wms" layers="Elbe"/>
-        <background-wms url="http://osm.intevation.de/mapcache/?" layers="flys-wms"/>
-    </river>
-</rivermap>
--- a/contrib/make_flys_release/confs/seddb-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<seddb-database>
-    <!-- This is the default SedDB db configuration. -->
-    <user>seddb</user>
-    <password>seddbpass</password>
-    <dialect>org.hibernate.dialect.Oracle9iDialect</dialect>
-    <driver>oracle.jdbc.driver.OracleDriver</driver>
-    <url>jdbc:oracle:thin:@//czech-republic.atlas.intevation.de:1521/XE</url>
-</seddb-database>
--- a/contrib/make_flys_release/h2/artifacts-h2.sql	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,59 +0,0 @@
---
--- schema to store artifacts in H2 databases.
---
-
-BEGIN;
-
--- not using AUTO_INCREMENT to be more compatible with
--- other dbms.
-CREATE SEQUENCE ARTIFACTS_ID_SEQ;
-
-CREATE TABLE artifacts (
-    id          INT PRIMARY KEY NOT NULL,
-    gid         UUID            NOT NULL UNIQUE,
-    creation    TIMESTAMP       NOT NULL,
-    last_access TIMESTAMP       NOT NULL,
-    ttl         BIGINT, -- NULL means eternal
-    factory     VARCHAR(256)    NOT NULL,
-    data        BINARY
-);
-
-CREATE SEQUENCE USERS_ID_SEQ;
-
-CREATE TABLE users (
-    id   INT PRIMARY KEY NOT NULL,
-    gid  UUID            NOT NULL UNIQUE,
-    name VARCHAR(256)    NOT NULL,
-    account VARCHAR(256) NOT NULL UNIQUE,
-    role BINARY
-);
-
-CREATE SEQUENCE COLLECTIONS_ID_SEQ;
-
-CREATE TABLE collections (
-    id          INT PRIMARY KEY NOT NULL,
-    gid         UUID            NOT NULL UNIQUE,
-    name VARCHAR(256)           NOT NULL,
-    owner_id    INT             NOT NULL REFERENCES users(id),
-    creation    TIMESTAMP       NOT NULL,
-    last_access TIMESTAMP       NOT NULL,
-    ttl         BIGINT, -- NULL means eternal
-    attribute   BINARY
-);
-
-CREATE SEQUENCE COLLECTION_ITEMS_ID_SEQ;
-
-CREATE TABLE collection_items (
-    id            INT PRIMARY KEY NOT NULL,
-    collection_id INT             NOT NULL REFERENCES collections(id),
-    artifact_id   INT             NOT NULL REFERENCES artifacts(id),
-    attribute     BINARY,
-    creation      TIMESTAMP       NOT NULL,
-    UNIQUE (collection_id, artifact_id)
-);
-
-CREATE TRIGGER collections_access_update_trigger AFTER UPDATE
-    ON artifacts FOR EACH ROW 
-    CALL "de.intevation.artifactdatabase.h2.CollectionAccessUpdateTrigger";
-
-COMMIT;
--- a/contrib/make_flys_release/h2/createArtifacts.sh	Thu Mar 07 09:46:11 2013 +0100
+++ b/contrib/make_flys_release/h2/createArtifacts.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-mkdir artifactsdb
+mkdir -p artifactsdb
 
 DIR=`dirname $0`
 DIR=`readlink -f "$DIR"`
@@ -12,6 +12,10 @@
 
 export CLASSPATH
 
+if [ $# != 1 ]; then
+    echo "Usage: $0 <schema_file>"
+    exit 1
+fi
+
 java org.h2.tools.RunScript \
     -url jdbc:h2:`readlink -f artifactsdb`/artifacts \
-    -script $DIR/artifacts-h2.sql
+    -script "$1"
--- a/contrib/make_flys_release/h2/createDatacage.sh	Thu Mar 07 09:46:11 2013 +0100
+++ b/contrib/make_flys_release/h2/createDatacage.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-mkdir datacagedb
+mkdir -p datacagedb
 
 DIR=`dirname $0`
 DIR=`readlink -f "$DIR"`
@@ -11,7 +11,10 @@
 done
 
 export CLASSPATH
+if [ $# != 1 ]; then
+    echo "Usage: $0 <schema_file>"
+    exit 1
+fi
 
 java org.h2.tools.RunScript \
     -url jdbc:h2:`readlink -f datacagedb`/datacage \
-    -script $DIR/datacage.sql
+    -script "$1"
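
With the schema file now passed as an argument, the two helper scripts are invoked roughly like this (make_release.sh further below calls them the same way; the paths are examples):

  sh h2/createArtifacts.sh artifactsdb/artifacts-h2.sql
  sh h2/createDatacage.sh datacagedb/datacage.sql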
--- a/contrib/make_flys_release/h2/datacage.sql	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-BEGIN;
-
-CREATE SEQUENCE USERS_ID_SEQ;
-
-CREATE TABLE users (
-    id  INT  PRIMARY KEY NOT NULL,
-    gid UUID             NOT NULL UNIQUE
-);
-
-CREATE SEQUENCE COLLECTIONS_ID_SEQ;
-
-CREATE TABLE collections (
-    id       INT  PRIMARY KEY NOT NULL,
-    gid      UUID             NOT NULL UNIQUE,
-    user_id  INT              NOT NULL REFERENCES users(id) ON DELETE CASCADE,
-    name     VARCHAR(256)     NOT NULL,
-    creation TIMESTAMP        NOT NULL
-);
-
-CREATE SEQUENCE ARTIFACTS_ID_SEQ;
-
-CREATE TABLE artifacts (
-    id       INT  PRIMARY KEY NOT NULL,
-    gid      UUID             NOT NULL UNIQUE,
-    state    VARCHAR(256)     NOT NULL,
-    creation TIMESTAMP        NOT NULL
-);
-
-CREATE SEQUENCE COLLECTION_ITEMS_ID_SEQ;
-
-CREATE TABLE collection_items (
-    id            INT PRIMARY KEY NOT NULL,
-    collection_id INT             NOT NULL REFERENCES collections(id) ON DELETE CASCADE,
-    artifact_id   INT             NOT NULL REFERENCES artifacts(id)   ON DELETE CASCADE
-);
-
-CREATE SEQUENCE ARTIFACT_DATA_ID_SEQ;
-
-CREATE TABLE artifact_data (
-    id          INT PRIMARY KEY NOT NULL,
-    artifact_id INT             NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
-    kind        VARCHAR(256)    NOT NULL,
-    k           VARCHAR(256)    NOT NULL,
-    v           VARCHAR(256),   -- Maybe too short
-    UNIQUE (artifact_id, k)
-);
-
-CREATE SEQUENCE OUTS_ID_SEQ;
-
-CREATE TABLE outs (
-    id          INT PRIMARY KEY NOT NULL,
-    artifact_id INT             NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
-    name        VARCHAR(256)    NOT NULL,
-    description VARCHAR(256),
-    out_type    VARCHAR(256)
-);
-
-CREATE SEQUENCE FACETS_ID_SEQ;
-
-CREATE TABLE facets (
-    id          INT PRIMARY KEY NOT NULL,
-    out_id      INT             NOT NULL REFERENCES outs(id) ON DELETE CASCADE,
-    name        VARCHAR(256)    NOT NULL,
-    num         INT             NOT NULL,
-    state       VARCHAR(256)    NOT NULL,
-    description VARCHAR(256),
-    UNIQUE (out_id, num, name)
-);
-
-CREATE VIEW master_artifacts AS
-    SELECT a2.id             AS id,
-           a2.gid            AS gid,
-           a2.state          AS state,
-           a2.creation       AS creation,
-           ci2.collection_id AS collection_id
-    FROM   collection_items ci2 
-           JOIN artifacts a2 
-             ON ci2.artifact_id = a2.id 
-           JOIN (SELECT ci.collection_id AS c_id, 
-                        MIN(a.creation)  AS oldest_a 
-                 FROM   collection_items ci 
-                        JOIN artifacts a 
-                          ON ci.artifact_id = a.id 
-                 GROUP  BY ci.collection_id) o 
-             ON o.c_id = ci2.collection_id 
-    WHERE  a2.creation = o.oldest_a;
-
--- DROP VIEW master_artifacts;
--- DROP SEQUENCE USERS_ID_SEQ;
--- DROP SEQUENCE COLLECTIONS_ID_SEQ;
--- DROP SEQUENCE ARTIFACTS_ID_SEQ;
--- DROP SEQUENCE COLLECTION_ITEMS_ID_SEQ;
--- DROP SEQUENCE ARTIFACT_DATA_ID_SEQ;
--- DROP SEQUENCE OUTS_ID_SEQ;
--- DROP SEQUENCE FACETS_ID_SEQ;
--- DROP TABLE facets;
--- DROP TABLE outs;
--- DROP TABLE artifact_data;
--- DROP TABLE collection_items;
--- DROP TABLE collections;
--- DROP TABLE artifacts;
--- DROP TABLE users;
-
-COMMIT;
Binary file contrib/make_flys_release/libs_oracle/ojdbc5.jar has changed
--- a/contrib/make_flys_release/make_release.sh	Thu Mar 07 09:46:11 2013 +0100
+++ b/contrib/make_flys_release/make_release.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -1,147 +1,313 @@
 #!/bin/bash
+# Release script for Flys
+#
+# Authors:
+# Andre Heinecke <aheinecke@intevation.de>
+#
+# Copyright:
+# Copyright (C) 2013 Intevation GmbH
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 
-echo "INFO: define required variables"
+set -e
+DEFAULT_WD=/tmp/flys-release
+DEVELOPER=aheinecke
+
 ARTIFACTS_HG_REPO="http://wald.intevation.org/hg/dive4elements/artifacts"
 HTTPCLIIENT_HG_REPO="http://wald.intevation.org/hg/dive4elements/http-client"
 FLYS_HG_REPO="http://wald.intevation.org/hg/dive4elements/flys"
 
-ARTIFACTS_HG="hg.artifacts"
-HTTPCLIENT_HG="hg.http-client"
-FLYS_HG="hg.flys"
+REPOS="artifacts http-client flys"
+# Do not use spaces in path
+FLYS_SOURCE_DIR=/local-vol1/aheinecke/flys-release
 
-PREFIX="flys-"
-RELEASE=${RELEASE:-default}
-RELEASE_DATE=`date +'%Y-%m-%d'`
-DIRECTORY=$PREFIX$RELEASE-$RELEASE_DATE
+SCRIPT_DIR=$(readlink -f `dirname $0`)
+usage(){
+    cat << EOF
 
-ARTIFACT_PORT=${ARTIFACT_PORT:-9002}
-TOMCAT_PORT=${TOMCAT_PORT:-8005}
+usage: $0 [options] VERSION
 
+Create a flys package
+
+OPTIONS:
+   -?, --help                      Show this message
+   -w                              The working directory to use. (do not use spaces in path)
+                                   Default: $DEFAULT_WD
+   -t                              Tag the current default branch as "VERSION"
+   -o, --oracle                    Release is for oracle.
+   VERSION must be in the format MAJOR.MINOR.PATCH
+EOF
+exit 0
+}
+#   --backend-db-url                Url of database backend. Default: $BACKENDURL
+#   --backend-db-pass               Backend db password. Default: $BACKENDPASS
+#   --backend-db-port               Backend db port. Default: $BACKENDPORT
+#   --backend-db-user               Backend db user. Default: $BACKENDUSER
+#   --backend-db-backend            Backend db backend name. Default: $BACKENDBACK
+#   --seddb-url                     Sediment db url. Default: $SEDDBURL
+#   --seddb-port                    Sediment db port. Default: $SEDDBPORT
+#   --seddb-user                    Sediment db user. Default: $SEDDBUSER
+#   --seddb-pass                    Sediment db password. Default: $SEDDBPASS
+#   --seddb-back                    Sediment db backend. Default: $SEDDBBACK
+TOMCAT_PORT=${TOMCAT_PORT:-8282}
 MAPSERVER_URL=${MAPSERVER_URL:-czech-republic.atlas.intevation.de}
 WIKI_URL=${WIKI_URL:-https://flys-intern.intevation.de/Flys-3.0}
 
-echo "INFO: create server directories"
-mkdir -p $DIRECTORY/server/bin/lib/own
-mkdir $DIRECTORY/server/shapefiles
-mkdir $DIRECTORY/client
-
-echo "INFO: checkout sources"
-echo " ... checkout $ARTIFACTS_HG_REPO"
-
-rm -rf $ARTIFACTS_HG
-hg clone $ARTIFACTS_HG_REPO $ARTIFACTS_HG
-(cd $ARTIFACTS_HG && hg co $RELEASE)
+# Seddb Configuration
+SEDDBURL=${SEDDBURL:-czech-republic.atlas.intevation.de}
+SEDDBPORT=${SEDDBPORT:-1521}
+SEDDBBACK=${SEDDBBACK:-XE}
+SEDDBUSER=${SEDDBUSER:-seddb}
+SEDDBPASS=${SEDDBPASS:-seddbpass}
 
-echo " ... checkout $HTTPCLIIENT_HG_REPO"
-rm -rf $HTTPCLIENT_HG
-hg clone $HTTPCLIIENT_HG_REPO $HTTPCLIENT_HG
-(cd $HTTPCLIENT_HG && hg co $RELEASE)
+# Backend configuration
+BACKENDURL=${BACKENDURL:-czech-republic.atlas.intevation.de}
+BACKENDPORT=${BACKENDPORT:-5432}
+BACKENDBACK=${BACKENDBACK:-flys_2912}
+BACKENDUSER=${BACKENDUSER:-flys_dami}
+BACKENDPASS=${BACKENDPASS:-flys_dami}
+INITSQLS=${INITSQLS:-}
+LOG_DIR=/var/log/flys
 
-echo " ... checkout $FLYS_HG_REPO"
-rm -rf $FLYS_HG
-hg clone $FLYS_HG_REPO $FLYS_HG
-(cd $FLYS_HG && hg co $RELEASE)
+OPTS=`getopt -o ?w:,t,o \
+     -l help,oracle \
+     -n $0 -- "$@"`
 
-# adapt client configuration
-echo "INFO: prepare configuration of web client"
+if [ $? != 0 ] ; then usage; fi
+eval set -- "$OPTS"
+while true ; do
+  case "$1" in
+    "-?"|"--help")
+      usage;;
+    "--")
+      shift
+      break;;
+    "-w")
+      WORK_DIR=$2
+      shift 2;;
+    "-o"|"--oracle")
+      BUILD_ORACLE="TRUE"
+      shift;;
+    "-t")
+      DO_TAG="TRUE"
+      shift;;
+    *)
+      echo "Unknown Option $1"
+      usage;;
+  esac
+done
 
+if [ $# != 1 ]; then
+    usage
+fi
+
+VERSION=$1
+ARTIFACT_PORT=${ARTIFACT_PORT:-`echo 1$VERSION | sed 's/\.//g'`}
+
+if [ -z $WORK_DIR ]; then
+  WORK_DIR=$DEFAULT_WD
+fi
+
+mkdir -p $WORK_DIR
+
+if [ ! -d $FLYS_SOURCE_DIR ]; then
+    mkdir -p $FLYS_SOURCE_DIR
+    echo "Cloning sources"
+    cd $FLYS_SOURCE_DIR
+    hg clone $ARTIFACTS_HG_REPO artifacts
+    hg clone $HTTPCLIIENT_HG_REPO http-client
+    hg clone $FLYS_HG_REPO flys
+else
+    echo "Updating sources / Reverting changes"
+    cd $FLYS_SOURCE_DIR
+    for repo in $REPOS; do
+        cd $repo && hg pull && hg up && hg revert -a && cd $FLYS_SOURCE_DIR
+    done;
+fi
+
+if [ "$DO_TAG" = "TRUE" ]; then
+    echo "Tagging version $VERSION"
+    for repo in $REPOS; do
+        cd $repo
+        CHANGESET=$(hg log -l1 |head -1 | awk -F: '{print $3}')
+        echo ""
+        echo "Do you really want to tag $repo rev: $CHANGESET as Version $VERSION?"
+        echo "press enter to continue or CTRL+C to abort."
+        echo ""
+        hg log -l1
+        read
+        hg tag $VERSION -m "Added tag $VERSION for changeset $CHANGESET"
+        hg push ssh://$DEVELOPER@scm.wald.intevation.org/hg/dive4elements/$repo
+        cd $FLYS_SOURCE_DIR
+    done;
+fi
+
+# Update to current version
+for repo in $REPOS; do
+    cd $repo
+    hg up $VERSION
+    cd $FLYS_SOURCE_DIR
+done
+
+rm -rf "$WORK_DIR/server" "$WORK_DIR/client"
+cd $WORK_DIR
+mkdir -p "$WORK_DIR/server/bin/lib/own"
+mkdir "$WORK_DIR/server/shapefiles"
+mkdir "$WORK_DIR/client"
+
+echo "[INFO]: Preparing configuration of web client"
+echo "[INFO]: Tomcat Port: $TOMCAT_PORT"
+echo "[INFO]: Artifact Port: $ARTIFACT_PORT"
 sed -i -e "s@http://localhost:8181@http://localhost:$ARTIFACT_PORT@g" \
        -e "s@http://localhost:8888@http://localhost:$TOMCAT_PORT@g" \
-    $FLYS_HG/flys-client/src/main/webapp/WEB-INF/web.xml
-
-sed -i -e "s@/tmp/flys-client.log@/var/log/flys/client-${RELEASE}.log@g" \
-    $FLYS_HG/flys-client/src/main/webapp/WEB-INF/log4j.properties
-
-# Fix the Wiki URLs
-find $FLYS_HG/flys-artifacts/src/main/resources/ -name messages_\*.properties \
-    -exec sed -i "s@https://flys-intern.intevation.de/Flys-3.0@${WIKI_URL}@g" {} \;
+    $FLYS_SOURCE_DIR/flys/flys-client/src/main/webapp/WEB-INF/web.xml
 
-echo "INFO: download OpenLayers-2.11 for client"
-curl -O http://openlayers.org/download/OpenLayers-2.11.tar.gz
-tar xvfz OpenLayers-2.11.tar.gz
-# TODO: Remove more superfluous OpenLayers stuff.
-rm -rf OpenLayers-2.11/doc
-rm -rf OpenLayers-2.11/tests
-rm -rf OpenLayers-2.11/examples
-mv OpenLayers-2.11 $FLYS_HG/flys-client/src/main/webapp/
+sed -i -e "s@/tmp/flys-client.log@${LOG_DIR}/client-${VERSION}.log@g" \
+    $FLYS_SOURCE_DIR/flys/flys-client/src/main/webapp/WEB-INF/log4j.properties
 
-# compile and build our code stuff
+find $FLYS_SOURCE_DIR/flys/flys-artifacts/src/main/resources/ -name messages_\*.properties \
+    -exec sed -i "s@https://flys-intern.intevation.de/Flys-3.0@"'${WIKI_URL}'"@g" {} \;
+
+if [ ! -f $FLYS_SOURCE_DIR/OpenLayers-2.11.tar.gz ]; then
+    echo "INFO: download OpenLayers-2.11 for client"
+    cd $FLYS_SOURCE_DIR
+    curl -O http://openlayers.org/download/OpenLayers-2.11.tar.gz
+    tar xvfz OpenLayers-2.11.tar.gz
+    # TODO: Remove more superfluous OpenLayers stuff.
+    rm -rf OpenLayers-2.11/doc
+    rm -rf OpenLayers-2.11/tests
+    rm -rf OpenLayers-2.11/examples
+    cd $WORK_DIR
+fi
+cp -r $FLYS_SOURCE_DIR/OpenLayers-2.11 $FLYS_SOURCE_DIR/flys/flys-client/src/main/webapp/
+
 echo "INFO: compile and build sources"
-mvn -f $ARTIFACTS_HG/pom.xml clean compile package install
-mvn -f $FLYS_HG/flys-backend/pom.xml clean compile package install
-mvn -f $FLYS_HG/flys-artifacts/pom.xml clean compile package dependency:copy-dependencies install
-mvn -f $HTTPCLIENT_HG/pom.xml clean compile package install
-mvn -f $FLYS_HG/flys-client/pom.xml clean compile package
+mvn -f $FLYS_SOURCE_DIR/artifacts/pom.xml clean compile package install
+mvn -f $FLYS_SOURCE_DIR/flys/flys-backend/pom.xml clean compile package install
+mvn -f $FLYS_SOURCE_DIR/flys/flys-artifacts/pom.xml clean compile package dependency:copy-dependencies install
+mvn -f $FLYS_SOURCE_DIR/http-client/pom.xml clean compile package install
+mvn -f $FLYS_SOURCE_DIR/flys/flys-client/pom.xml clean compile package
 
-## fetch the java stuff
+
 echo "INFO: copy dependencies and libs"
-cp $ARTIFACTS_HG/artifact-database/target/artifact-database-1.0-SNAPSHOT.jar $DIRECTORY/server/bin/lib/own/
-cp $ARTIFACTS_HG/artifacts/target/artifacts-1.0-SNAPSHOT.jar $DIRECTORY/server/bin/lib/own/
-cp $ARTIFACTS_HG/artifacts-common/target/artifacts-common-1.0-SNAPSHOT.jar $DIRECTORY/server/bin/lib/own/
-cp $FLYS_HG/flys-backend/target/flys-backend-1.0-SNAPSHOT.jar $DIRECTORY/server/bin/lib/own/
-cp $FLYS_HG/flys-artifacts/target/flys-artifacts-1.0-SNAPSHOT.jar $DIRECTORY/server/bin/lib/own/
-cp $FLYS_HG/flys-client/target/FLYS-1.0-SNAPSHOT.war $DIRECTORY/client/flys-${RELEASE}.war
-cp $FLYS_HG/flys-artifacts/target/dependency/* $DIRECTORY/server/bin/lib/
+cp $FLYS_SOURCE_DIR/artifacts/artifact-database/target/artifact-database-1.0-SNAPSHOT.jar $WORK_DIR/server/bin/lib/own/
+cp $FLYS_SOURCE_DIR/artifacts/artifacts/target/artifacts-1.0-SNAPSHOT.jar $WORK_DIR/server/bin/lib/own/
+cp $FLYS_SOURCE_DIR/artifacts/artifacts-common/target/artifacts-common-1.0-SNAPSHOT.jar $WORK_DIR/server/bin/lib/own/
+cp $FLYS_SOURCE_DIR/flys/flys-backend/target/flys-backend-1.0-SNAPSHOT.jar $WORK_DIR/server/bin/lib/own/
+cp $FLYS_SOURCE_DIR/flys/flys-artifacts/target/flys-artifacts-1.0-SNAPSHOT.jar $WORK_DIR/server/bin/lib/own/
+cp $FLYS_SOURCE_DIR/flys/flys-client/target/FLYS-1.0-SNAPSHOT.war $WORK_DIR/client/flys-${VERSION}.war
+cp $FLYS_SOURCE_DIR/flys/flys-artifacts/target/dependency/* $WORK_DIR/server/bin/lib/
 
 echo "INFO: copy scripts and libraries to target destination"
-cp bin/run.sh $DIRECTORY/server/bin/
-cp bin/wsplgen.exe $DIRECTORY/server/bin/
-cp libs/* $DIRECTORY/server/bin/lib/
+cp ${SCRIPT_DIR}/bin/run.sh $WORK_DIR/server/bin/
+cp ${SCRIPT_DIR}/bin/wsplgen.exe $WORK_DIR/server/bin/
+cp ${SCRIPT_DIR}/libs/* $WORK_DIR/server/bin/lib/
 
-#echo "INFO: remove PostgreSQL and PostGIS libraries"
-#rm $DIRECTORY/server/bin/lib/postg*
-#rm $DIRECTORY/server/bin/lib/hibernate-spatial-postgis*
 
-echo "INFO: remove Oralce libraries"
-rm -f $DIRECTORY/server/bin/lib/hibernate-spatial-oracle-1.1.jar
-rm -f $DIRECTORY/server/bin/lib/ojdbc*
+if [ "$BUILD_ORACLE" = "TRUE" ]; then
+    echo "INFO: remove PostgreSQL and PostGIS libraries"
+    rm $WORK_DIR/server/bin/lib/postg*
+    rm $WORK_DIR/server/bin/lib/hibernate-spatial-postgis*
+else
+    echo "INFO: remove Oracle libraries"
+    rm -f $WORK_DIR/server/bin/lib/hibernate-spatial-oracle-1.1.jar
+    rm -f $WORK_DIR/server/bin/lib/ojdbc*
+fi
 
 # fetch the configuration stuff
 echo "INFO: copy default configuration to target destination"
-cp -R $FLYS_HG/flys-artifacts/doc/conf $DIRECTORY/server/
-
-#cp confs/* $DIRECTORY/server/conf/
-mkdir -p $DIRECTORY/server/conf
-
-sed "s/8999/$ARTIFACT_PORT/g" \
-    confs/rest-server.xml \
-    > $DIRECTORY/server/conf/rest-server.xml
-
-sed -e "s@http://example.com/@http://${MAPSERVER_URL}/@g" \
-    confs/floodmap.xml \
-    > $DIRECTORY/server/conf/floodmap.xml
+cp -R $FLYS_SOURCE_DIR/flys/flys-artifacts/doc/conf $WORK_DIR/server/
 
-sed -e "s@http://example.com/@http://${MAPSERVER_URL}/@g" \
-    confs/rivermap.xml \
-    > $DIRECTORY/server/conf/rivermap.xml
-
-sed "s@/tmp/flys-server-default.log@/var/log/flys/server-${RELEASE}.log@" \
-    confs/log4j.properties \
-    > $DIRECTORY/server/conf/log4j.properties
+sed -i "s/8181/$ARTIFACT_PORT/g" \
+    $WORK_DIR/server/conf/rest-server.xml
 
-# TODO: Use templating here
-cp confs/seddb-db.xml $DIRECTORY/server/conf/seddb-db.xml
-cp confs/backend-db.xml $DIRECTORY/server/conf/backend-db.xml
-cp confs/artifact-db.xml $DIRECTORY/server/conf/artifact-db.xml
-cp confs/datacage-db.xml $DIRECTORY/server/conf/datacage-db.xml
-cp confs/mapserver/fontset.txt $DIRECTORY/server/conf/mapserver/fontset.txt
+sed -i -e "s@http://example.com/@http://${MAPSERVER_URL}/@g" \
+    $WORK_DIR/server/conf/floodmap.xml
 
-cp $ARTIFACTS_HG/artifact-database/doc/schema-h2.sql h2/artifacts-h2.sql
-cp $FLYS_HG/flys-artifacts/doc/conf/datacage.sql h2/datacage.sql
+sed -i -e "s@http://example.com/@http://${MAPSERVER_URL}/@g" \
+    $WORK_DIR/server/conf/rivermap.xml
+
+sed -i -e "s@/tmp/flys-rivers-wms.log@${LOG_DIR}/rivers-wms-${VERSION}.log@g" \
+    $WORK_DIR/server/conf/mapserver/river-mapfile.vm
+
+sed -i -e "s@/tmp/flys-user-wms.log@${LOG_DIR}/user-wms-${VERSION}.log@g" \
+    $WORK_DIR/server/conf/mapserver/mapfile.vm
+
+sed "s@/tmp/flys-server-default.log@${LOG_DIR}/server-${VERSION}.log@" \
+    $SCRIPT_DIR/confs/log4j.properties \
+    > $WORK_DIR/server/conf/log4j.properties
+
+cat > $WORK_DIR/server/conf/seddb-db.xml << EOF
+<?xml version="1.0" encoding="UTF-8" ?>
+<seddb-database>
+    <!-- This is the default SedDB db configuration. -->
+    <user>$SEDDBUSER</user>
+    <password>$SEDDBPASS</password>
+    <dialect>org.hibernate.dialect.Oracle9iDialect</dialect>
+    <driver>oracle.jdbc.driver.OracleDriver</driver>
+    <url>jdbc:oracle:thin:@//$SEDDBURL:$SEDDBPORT/$SEDDBBACK</url>
+</seddb-database>
+EOF
+
+if [ "$BUILD_ORACLE" = "TRUE" ]; then
+    # Oracle backend configuration
+    cat > $WORK_DIR/server/conf/backend-db.xml << EOF
+<?xml version="1.0" encoding="UTF-8" ?>
+<backend-database>
+    <user>$BACKENDUSER</user>
+    <password>$BACKENDPASS</password>
+    <dialect>org.hibernatespatial.oracle.OracleSpatial10gDialect</dialect>
+    <driver>oracle.jdbc.driver.OracleDriver</driver>
+    <url>jdbc:oracle:thin:@//$BACKENDURL:$BACKENDPORT/$BACKENDBACK</url>
+    <connection-init-sqls>$INITSQLS</connection-init-sqls>
+</backend-database>
+EOF
+else
+    #Postgresql backend configuration
+    cat > $WORK_DIR/server/conf/backend-db.xml << EOF
+<?xml version="1.0" encoding="UTF-8" ?>
+<backend-database>
+    <user>$BACKENDUSER</user>
+    <password>$BACKENDPASS</password>
+    <dialect>org.hibernate.dialect.PostgreSQLDialect</dialect>
+    <driver>org.postgresql.Driver</driver>
+    <url>jdbc:postgresql://$BACKENDURL:$BACKENDPORT/$BACKENDBACK</url>
+    <connection-init-sqls>$INITSQLS</connection-init-sqls>
+</backend-database>
+EOF
+fi
+
+mkdir $WORK_DIR/artifactsdb
+mkdir $WORK_DIR/datacagedb
+
+cp $FLYS_SOURCE_DIR/artifacts/artifact-database/doc/schema-h2.sql $WORK_DIR/artifactsdb/artifacts-h2.sql
+cp $FLYS_SOURCE_DIR/flys/flys-artifacts/doc/conf/datacage.sql $WORK_DIR/datacagedb/datacage.sql
 
 echo "INFO: create h2 database for artifacts and datacage"
-h2/createArtifacts.sh
-h2/createDatacage.sh
+$SCRIPT_DIR/h2/createArtifacts.sh $WORK_DIR/artifactsdb/artifacts-h2.sql
+$SCRIPT_DIR/h2/createDatacage.sh $WORK_DIR/datacagedb/datacage.sql
 
-mv artifactsdb $DIRECTORY/server/
-mv datacagedb $DIRECTORY/server/
+mv $WORK_DIR/artifactsdb $WORK_DIR/server/
+mv $WORK_DIR/datacagedb $WORK_DIR/server/
 
 echo "INFO: create tarball"
-tar cvfz $DIRECTORY.tar.gz $DIRECTORY
+mkdir $WORK_DIR/flys-$VERSION
+mv $WORK_DIR/server $WORK_DIR/client $WORK_DIR/flys-$VERSION
+cd $WORK_DIR
+tar cfz flys-$VERSION.tar.gz flys-$VERSION
+echo "INFO: cleanup"
+#rm -r $WORK_DIR/flys-$VERSION
 
-echo "INFO: remove temporary files and directories"
-rm -rf $ARTIFACTS_HG
-rm -rf $HTTPCLIENT_HG
-rm -rf $FLYS_HG
-rm -rf $DIRECTORY
-rm -rf OpenLayers.2.11.tar.gz
+echo "DONE: $WORK_DIR/flys-$VERSION.tar.gz"
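
For reference, a hypothetical invocation of the release script that overrides some of the defaults documented above via environment variables; all values are examples only:

  BACKENDURL=db.example.com BACKENDPORT=1521 BACKENDBACK=XE \
  BACKENDUSER=flys BACKENDPASS=secret TOMCAT_PORT=8282 \
  sh make_release.sh -o -w /tmp/flys-release 2.9.12

Adding -t would additionally tag and push the given version, which requires DEVELOPER in the script to be set to an account that is allowed to push to the repositories.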
--- a/flys-aft/src/main/java/de/intevation/aft/DIPSGauge.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/java/de/intevation/aft/DIPSGauge.java	Wed Mar 13 19:07:52 2013 +0100
@@ -113,7 +113,7 @@
         String stationString = element.getAttribute("STATIONIERUNG");
         if (stationString.length() == 0) {
             log.warn("DIPS: Setting station of gauge '" + name + "' to zero.");
-            stationString = "0";
+            stationString = "-99999";
         }
         station = Double.parseDouble(stationString);
         if (station == 0d) {
--- a/flys-aft/src/main/java/de/intevation/aft/River.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/java/de/intevation/aft/River.java	Wed Mar 13 19:07:52 2013 +0100
@@ -65,7 +65,8 @@
     public boolean sync(SyncContext context) throws SQLException {
         log.info("sync river: " + this);
 
-        Map<Long, DIPSGauge> dipsGauges = context.getDIPSGauges();
+        // Only take relevant gauges into account.
+        Map<Long, DIPSGauge> dipsGauges = context.getDIPSGauges(name, from, to);
 
         ConnectedStatements flysStatements = context.getFlysStatements();
         ConnectedStatements aftStatements  = context.getAftStatements();
@@ -78,14 +79,18 @@
             .getStatement("select.messstelle")
             .clearParameters()
             .setInt("GEWAESSER_NR", id2)
-            .setDouble("START_KM", from)
-            .setDouble("END_KM", to)
             .executeQuery();
 
         try {
             while (messstellenRs.next()) {
                 String name = messstellenRs.getString("NAME");
                 String num  = messstellenRs.getString("MESSSTELLE_NR");
+                double station = messstellenRs.getDouble("STATIONIERUNG");
+
+                if (!messstellenRs.wasNull() && !inside(station)) {
+                    log.warn("Station found in AFT but not in range: " + station);
+                    continue;
+                }
 
                 Long number = SyncContext.numberToLong(num);
                 if (number == null) {
@@ -166,12 +171,93 @@
         boolean modified = false;
 
         for (DIPSGauge gauge: gauges) {
+            modified |= updateBfGIdOnMasterDischargeTable(context, gauge);
             modified |= updateGauge(context, gauge);
         }
 
         return modified;
     }
 
+    protected boolean updateBfGIdOnMasterDischargeTable(
+        SyncContext context,
+        DIPSGauge   gauge
+    ) throws SQLException {
+        log.info(
+            "FLYS: Updating master discharge table bfg_id for '" +
+            gauge.getAftName() + "'");
+        ConnectedStatements flysStatements = context.getFlysStatements();
+
+        ResultSet rs = flysStatements
+            .getStatement("select.gauge.master.discharge.table")
+            .clearParameters()
+            .setInt("gauge_id", gauge.getFlysId())
+            .executeQuery();
+
+        int flysId;
+
+        try {
+            if (!rs.next()) {
+                log.error(
+                    "FLYS: No master discharge table found for gauge '" +
+                    gauge.getAftName() + "'");
+                return false;
+            }
+            String bfgId = rs.getString("bfg_id");
+            if (!rs.wasNull()) { // already has BFG_ID
+                return false;
+            }
+            flysId = rs.getInt("id");
+        } finally {
+            rs.close();
+        }
+
+        // We need to find out the BFG_ID of the current discharge table
+        // for this gauge in AFT.
+
+        ConnectedStatements aftStatements = context.getAftStatements();
+
+        rs = aftStatements
+            .getStatement("select.bfg.id.current")
+            .clearParameters()
+            .setString("number", "%" + gauge.getOfficialNumber())
+            .executeQuery();
+
+        String bfgId = null;
+
+        try {
+            if (rs.next()) {
+                bfgId = rs.getString("BFG_ID");
+            }
+        } finally {
+            rs.close();
+        }
+
+        if (bfgId == null) {
+            log.warn(
+                "No BFG_ID found for current discharge table of gauge '" +
+                gauge + "'");
+            return false;
+        }
+
+        // Set the BFG_ID in FLYS.
+        flysStatements.beginTransaction();
+        try {
+            flysStatements
+                .getStatement("update.bfg.id.discharge.table")
+                .clearParameters()
+                .setInt("id", flysId)
+                .setString("bfg_id", bfgId)
+                .executeUpdate();
+            flysStatements.commitTransaction();
+        } catch (SQLException sqle) {
+            flysStatements.rollbackTransaction();
+            log.error(sqle, sqle);
+            return false;
+        }
+
+        return true;
+    }
+
     protected boolean updateGauge(
         SyncContext context,
         DIPSGauge   gauge
--- a/flys-aft/src/main/java/de/intevation/aft/SyncContext.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/java/de/intevation/aft/SyncContext.java	Wed Mar 13 19:07:52 2013 +0100
@@ -87,6 +87,36 @@
         return numberToGauge;
     }
 
+    public Map<Long, DIPSGauge> getDIPSGauges(
+        String riverName,
+        double from,
+        double to
+    ) {
+        if (from > to) {
+            double t = from;
+            from = to;
+            to = t;
+        }
+
+        riverName = riverName.toLowerCase();
+
+        Map<Long, DIPSGauge> result = new HashMap<Long, DIPSGauge>();
+
+        for (Map.Entry<Long, DIPSGauge> entry: numberToGauge.entrySet()) {
+            DIPSGauge gauge = entry.getValue();
+            // XXX: Maybe a bit too sloppy.
+            if (!riverName.contains(gauge.getRiverName().toLowerCase())) {
+                continue;
+            }
+            double station = gauge.getStation();
+            if (station >= from && station <= to) {
+                result.put(entry.getKey(), gauge);
+            }
+        }
+
+        return result;
+    }
+
     protected static Map<Long, DIPSGauge> indexByNumber(Document document) {
         Map<Long, DIPSGauge> map = new HashMap<Long, DIPSGauge>();
         NodeList nodes = document.getElementsByTagName("PEGELSTATION");
--- a/flys-aft/src/main/resources/sql/aft-common.properties	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/resources/sql/aft-common.properties	Wed Mar 13 19:07:52 2013 +0100
@@ -1,8 +1,9 @@
-select.gewaesser = SELECT GEWAESSER_NR, NAME FROM SL_GEWAESSER
+select.gewaesser = \
+    SELECT GEWAESSER_NR, NAME FROM SL_GEWAESSER
 select.messstelle = \
-    SELECT NAME, MESSSTELLE_NR \
+    SELECT NAME, MESSSTELLE_NR, STATIONIERUNG \
     FROM MESSSTELLE \
-    WHERE GEWAESSER_NR = :GEWAESSER_NR AND STATIONIERUNG BETWEEN :START_KM AND :END_KM
+    WHERE GEWAESSER_NR = :GEWAESSER_NR
 select.abflusstafel = \
     SELECT ABFLUSSTAFEL_NR, \
            ABFLUSSTAFEL_BEZ, \
@@ -12,6 +13,13 @@
            BFG_ID \
     FROM ABFLUSSTAFEL \
     WHERE MESSSTELLE_NR LIKE :number
-select.tafelwert = SELECT TAFELWERT_NR AS id, WASSERSTAND AS w, ABFLUSS AS q FROM TAFELWERT \
-                          WHERE ABFLUSSTAFEL_NR = :number
-
+select.tafelwert = \
+    SELECT TAFELWERT_NR AS id, WASSERSTAND AS w, ABFLUSS AS q FROM TAFELWERT \
+    WHERE ABFLUSSTAFEL_NR = :number
+select.bfg.id.current = \
+    SELECT BFG_ID AS BFG_ID FROM ABFLUSSTAFEL \
+    WHERE GUELTIG_VON IN ( \
+        SELECT min(GUELTIG_VON) FROM ABFLUSSTAFEL \
+        WHERE GUELTIG_VON IS NOT NULL AND GUELTIG_BIS IS NULL \
+        AND MESSSTELLE_NR LIKE :number) \
+    AND MESSSTELLE_NR LIKE :number
--- a/flys-aft/src/main/resources/sql/flys-common.properties	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/resources/sql/flys-common.properties	Wed Mar 13 19:07:52 2013 +0100
@@ -6,29 +6,52 @@
         JOIN wst_column_values wcv ON wcv.wst_column_id = wc.id \
     WHERE w.kind = 0 \
     GROUP BY r.id, r.name
-select.gauges = SELECT id, name, official_number FROM gauges WHERE river_id = :river_id
-next.gauge.id = SELECT NEXTVAL('GAUGES_ID_SEQ') AS gauge_id
-insert.gauge = INSERT INTO gauges (id, name, river_id, station, aeo, official_number, datum) \
-                      VALUES(:id, :name, :river_id, :station, :aeo, :official_number, :datum)
-select.timeintervals = SELECT id, start_time, stop_time FROM time_intervals
-next.timeinterval.id = SELECT NEXTVAL('TIME_INTERVALS_ID_SEQ') AS time_interval_id
-insert.timeinterval = INSERT INTO time_intervals (id, start_time, stop_time) VALUES (:id, :start_time, :stop_time)
-next.discharge.id = SELECT NEXTVAL('DISCHARGE_TABLES_ID_SEQ') AS discharge_table_id
+select.gauges = \
+    SELECT id, name, official_number \
+    FROM gauges \
+    WHERE river_id = :river_id
+next.gauge.id = \
+    SELECT NEXTVAL('GAUGES_ID_SEQ') AS gauge_id
+insert.gauge = \
+    INSERT INTO gauges (id, name, river_id, station, aeo, official_number, datum) \
+    VALUES(:id, :name, :river_id, :station, :aeo, :official_number, :datum)
+select.timeintervals = \
+    SELECT id, start_time, stop_time FROM time_intervals
+next.timeinterval.id = \
+    SELECT NEXTVAL('TIME_INTERVALS_ID_SEQ') AS time_interval_id
+insert.timeinterval = \
+    INSERT INTO time_intervals (id, start_time, stop_time) \
+    VALUES (:id, :start_time, :stop_time)
+next.discharge.id = \
+    SELECT NEXTVAL('DISCHARGE_TABLES_ID_SEQ') AS discharge_table_id
 insert.dischargetable = \
     INSERT INTO discharge_tables \
     (id, gauge_id, description, bfg_id, kind, time_interval_id) \
     VALUES (:id, :gauge_id, :description, :bfg_id, 1, :time_interval_id)
-select.discharge.table.values = SELECT id, w, q FROM discharge_table_values WHERE table_id = :table_id
-next.discharge.table.values.id = SELECT NEXTVAL('DISCHARGE_TABLE_VALUES_ID_SEQ') AS discharge_table_values_id
-insert.discharge.table.value = INSERT INTO discharge_table_values (id, table_id, w, q) VALUES (:id, :table_id, :w, :q)
-delete.discharge.table.value = DELETE FROM discharge_table_values WHERE id = :id
+select.discharge.table.values = \
+    SELECT id, w, q FROM discharge_table_values WHERE table_id = :table_id
+next.discharge.table.values.id = \
+    SELECT NEXTVAL('DISCHARGE_TABLE_VALUES_ID_SEQ') AS discharge_table_values_id
+insert.discharge.table.value = \
+    INSERT INTO discharge_table_values (id, table_id, w, q) \
+    VALUES (:id, :table_id, :w, :q)
+delete.discharge.table.value = \
+    DELETE FROM discharge_table_values WHERE id = :id
 select.gauge.discharge.tables = \
     SELECT \
-        dt.id AS id, \
+        dt.id          AS id, \
         dt.description AS description, \
-        ti.start_time AS start_time, \
-        ti.stop_time AS stop_time, \
-        dt.bfg_id AS bfg_id \
+        ti.start_time  AS start_time, \
+        ti.stop_time   AS stop_time, \
+        dt.bfg_id      AS bfg_id \
     FROM discharge_tables dt \
     LEFT OUTER JOIN time_intervals ti ON dt.time_interval_id = ti.id \
     WHERE gauge_id = :gauge_id
+select.gauge.master.discharge.table = \
+    SELECT \
+        dt.id AS id, \
+        dt.bfg_id AS bfg_id \
+    FROM discharge_tables dt JOIN gauges g ON dt.gauge_id = g.id \
+    WHERE g.id = :gauge_id AND dt.kind = 0
+update.bfg.id.discharge.table = \
+    UPDATE discharge_tables SET bfg_id = :bfg_id WHERE id = :id
--- a/flys-aft/src/main/resources/sql/flys-oracle-jdbc-oracledriver.properties	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-aft/src/main/resources/sql/flys-oracle-jdbc-oracledriver.properties	Wed Mar 13 19:07:52 2013 +0100
@@ -1,5 +1,8 @@
-next.gauge.id = SELECT GAUGES_ID_SEQ.NEXTVAL AS gauge_id FROM DUAL
-next.timeinterval.id = SELECT TIME_INTERVALS_ID_SEQ.NEXTVAL AS time_interval_id FROM DUAL
-next.discharge.id = SELECT DISCHARGE_TABLES_ID_SEQ.NEXTVAL AS discharge_table_id FROM DUAL
-next.discharge.table.values.id = SELECT DISCHARGE_TABLE_VALUES_ID_SEQ.NEXTVAL AS discharge_table_values_id FROM DUAL
-
+next.gauge.id = \
+    SELECT GAUGES_ID_SEQ.NEXTVAL AS gauge_id FROM DUAL
+next.timeinterval.id = \
+    SELECT TIME_INTERVALS_ID_SEQ.NEXTVAL AS time_interval_id FROM DUAL
+next.discharge.id = \
+    SELECT DISCHARGE_TABLES_ID_SEQ.NEXTVAL AS discharge_table_id FROM DUAL
+next.discharge.table.values.id = \
+    SELECT DISCHARGE_TABLE_VALUES_ID_SEQ.NEXTVAL AS discharge_table_values_id FROM DUAL
--- a/flys-artifacts/doc/conf/artifact-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/artifact-db.xml	Wed Mar 13 19:07:52 2013 +0100
@@ -6,5 +6,5 @@
     <password></password>
     <!-- For use with a postgresql database use the appropriate driver-->
     <!--driver>org.postgresql.Driver</driver-->
-    <url>jdbc:h2:${artifacts.config.dir}/../artifactdb/artifacts.db</url>
+    <url>jdbc:h2:${artifacts.config.dir}/../artifactsdb/artifacts</url>
 </database>
--- a/flys-artifacts/doc/conf/datacage-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/datacage-db.xml	Wed Mar 13 19:07:52 2013 +0100
@@ -3,5 +3,5 @@
     <user>SA</user>
     <password/>
     <driver>org.h2.Driver</driver>
-    <url>jdbc:h2:${artifacts.config.dir}/../h2/datacage</url>
+    <url>jdbc:h2:${artifacts.config.dir}/../datacagedb/datacage</url>
 </datacage>
--- a/flys-artifacts/doc/conf/floodmap.xml	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/floodmap.xml	Wed Mar 13 19:07:52 2013 +0100
@@ -21,13 +21,13 @@
     <river name="Mosel">
         <srid value="31467"/>
         <dgm-srid value="31466"/>
-        <river-wms url="http://example.com/cgi-bin/user-wms" layers="Mosel"/>
+        <river-wms url="http://example.com/cgi-bin/river-wms" layers="Mosel"/>
         <background-wms url="http://osm.intevation.de/mapcache/?" layers="OSM-WMS-Dienst"/>
     </river>
     <river name="Elbe">
         <srid value="31467"/>
         <dgm-srid value="31467"/>
-        <river-wms url="http://example.com/cgi-bin/elbe-wms" layers="Elbe"/>
+        <river-wms url="http://example.com/cgi-bin/river-wms" layers="Elbe"/>
         <background-wms url="http://osm.intevation.de/mapcache/?" layers="OSM-WMS-Dienst"/>
     </river>
 </floodmap>
--- a/flys-artifacts/doc/conf/mapserver/river-mapfile.vm	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/mapserver/river-mapfile.vm	Wed Mar 13 19:07:52 2013 +0100
@@ -14,7 +14,7 @@
     END
 
     DEBUG 3
-    CONFIG "MS_ERRORFILE" "log/rivers.log"
+    CONFIG "MS_ERRORFILE" "/tmp/flys-rivers-wms.log"
 
     WEB
       METADATA
@@ -57,4 +57,4 @@
     #foreach ($LAYER in $LAYERS)
         include "$LAYER"
     #end
-END
\ No newline at end of file
+END
--- a/flys-artifacts/doc/conf/meta-data.xml	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/meta-data.xml	Wed Mar 13 19:07:52 2013 +0100
@@ -972,12 +972,15 @@
                 <dems>
                   <dc:context>
                     <dc:statement>
-                      SELECT id    AS dem_id,
-                             lower AS dem_lower,
-                             upper AS dem_upper,
-                             name AS name,
-                             projection || ' | ' || year_from || ' - ' || year_to AS info
-                      FROM dem WHERE river_id = ${river_id}
+                      SELECT d.id    AS dem_id,
+                             r.a     AS dem_lower,
+                             r.b     AS dem_upper,
+                             d.name  AS name,
+                             d.projection || ' | ' || t.start_time || ' - ' || t.stop_time AS info
+                             FROM dem d
+                             JOIN ranges r ON d.range_id = r.id
+                             JOIN time_intervals t ON d.time_interval_id = t.id
+                             WHERE d.river_id = ${river_id}
                     </dc:statement>
                     <dc:elements>
                         <dem>
--- a/flys-artifacts/doc/conf/seddb-db.xml	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/doc/conf/seddb-db.xml	Wed Mar 13 19:07:52 2013 +0100
@@ -6,4 +6,7 @@
     <dialect>org.hibernate.dialect.PostgreSQLDialect</dialect>
     <driver>org.postgresql.Driver</driver>
     <url>jdbc:postgresql://localhost:5432/seddb</url>
+    <!--
+    <connection-init-sqls>ALTER SESSION SET CURRENT_SCHEMA=SEDDB</connection-init-sqls>
+    -->
 </seddb-database>
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/MapArtifact.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/MapArtifact.java	Wed Mar 13 19:07:52 2013 +0100
@@ -158,7 +158,7 @@
                 getID(), hash,
                 getUrl());
 
-            String name = type + "-" + artifact.identifier();
+            String name = artifact.getDataAsString("river");
 
             facet.addLayer(name);
             facet.setExtent(getExtent(false));
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/DGMSelect.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/DGMSelect.java	Wed Mar 13 19:07:52 2013 +0100
@@ -89,8 +89,8 @@
             throw new IllegalArgumentException(ERR_INVALID_DGM);
         }
 
-        double l = dgm.getLower().doubleValue();
-        double u = dgm.getUpper().doubleValue();
+        double l = dgm.getRange().getA().doubleValue();
+        double u = dgm.getRange().getB().doubleValue();
 
         double[] range = FLYSUtils.getKmFromTo(flys);
 
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java	Wed Mar 13 19:07:52 2013 +0100
@@ -50,6 +50,7 @@
 import org.geotools.feature.FeatureCollection;
 import org.geotools.feature.FeatureCollections;
 import org.geotools.feature.simple.SimpleFeatureBuilder;
+import org.hibernate.HibernateException;
 import org.opengis.feature.simple.SimpleFeature;
 import org.opengis.feature.simple.SimpleFeatureType;
 
@@ -621,7 +622,7 @@
         try {
             axes = RiverAxis.getRiverAxis(river);
         }
-        catch (IllegalArgumentException iae) {
+        catch (HibernateException iae) {
             logger.warn("No valid river axis found for " + river);
             return;
         }
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java	Wed Mar 13 19:07:52 2013 +0100
@@ -30,6 +30,7 @@
 import org.geotools.geometry.jts.JTS;
 import org.geotools.geometry.jts.ReferencedEnvelope;
 import org.geotools.referencing.CRS;
+import org.hibernate.HibernateException;
 import org.opengis.feature.simple.SimpleFeature;
 import org.opengis.feature.simple.SimpleFeatureType;
 import org.opengis.referencing.FactoryException;
@@ -71,7 +72,7 @@
                 return max;
             }
         }
-        catch(IllegalArgumentException iae) {
+        catch(HibernateException iae) {
             logger.warn("No vaild river axis forund for " + rivername);
             return null;
         }
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java	Wed Mar 13 19:07:52 2013 +0100
@@ -14,7 +14,7 @@
     private static final Logger logger = Logger.getLogger(MapUtils.class);
 
     public static final Pattern DB_URL_PATTERN =
-        Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z_-]+)");
+        Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([\\.a-zA-Z0-9_-]+)");
 
     public static final Pattern DB_PSQL_URL_PATTERN =
         Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z0-9_-]+)");
@@ -56,15 +56,15 @@
         String connection = null;
 
         if (FLYSUtils.isUsingOracle()) {
-            if (groups < 3) {
+            if (groups < 4) {
                 logger.warn("Could only partially parse connection string.");
                 return null;
             }
 
             String host = m.group(2);
             String port = m.group(3);
-
-            connection = user + "/" + pass + "@" + host;
+            String backend = m.group(4);
+            connection = user + "/" + pass + "@" + host + "/" + backend;
         }
         else {
             if (groups < 4) {
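
Note on the MapUtils change above: group 4 of DB_URL_PATTERN now also accepts dots and digits, so
Oracle service names such as "flys.example" parse, and the Oracle branch appends that backend name
to the user/pass@host string. A minimal sketch of the new parsing (Python, not part of the
changeset; URL, user and password are made up):

    import re

    # Same shape as the widened DB_URL_PATTERN, group 4 now [.a-zA-Z0-9_-]+
    DB_URL_PATTERN = re.compile(r"(.*)//(.*):([0-9]+)/([.a-zA-Z0-9_-]+)")

    m = DB_URL_PATTERN.match("jdbc:oracle:thin:@//dbhost:1521/flys.example")
    prefix, host, port, backend = m.groups()
    # Oracle connection string as now built by getConnection(): user/pass@host/backend
    print("flys" + "/" + "secret" + "@" + host + "/" + backend)
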
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java	Wed Mar 13 19:07:52 2013 +0100
@@ -20,6 +20,7 @@
 import org.apache.log4j.Logger;
 
 import org.apache.velocity.Template;
+import org.hibernate.HibernateException;
 
 public class RiverMapfileGenerator extends MapfileGenerator {
 
@@ -70,7 +71,7 @@
             try {
                 riverAxis = RiverAxis.getRiverAxis(river.getName());
             }
-            catch (IllegalArgumentException iae) {
+            catch (HibernateException iae) {
                 logger.error("No valid riveraxis found for " + river.getName());
                 continue;
             }
--- a/flys-backend/contrib/import_river.sh	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/import_river.sh	Wed Mar 13 19:07:52 2013 +0100
@@ -39,7 +39,7 @@
 MIN_MEMORY="8024m"
 
 if [ -z "$OPTIONAL_LIBS" ]; then
-    OPTIONAL_LIBS="${DIR}"/../opt
+    OPTIONAL_LIBS="$(dirname $0)/opt"
 fi
 
 if [ -d "$OPTIONAL_LIBS" ]; then
@@ -90,16 +90,16 @@
       shift
       break;;
     "-u"|"--username")
-      USER=$2
+      DBUSER=$2
       shift 2;;
     "-w"|"--password")
-      PASS=$2
+      DBPASS=$2
       shift 2;;
     "-h"|"--host")
-      HOST=$2
+      DBHOST=$2
       shift 2;;
     "-p"|"--port")
-      PORT=$2
+      DBPORT=$2
       shift 2;;
     "-l"|"--log-dir")
       LOG=$2
@@ -128,17 +128,17 @@
   esac
 done
 
-if [ -z $USER ]; then
-  USER=$DEFAULT_USER
+if [ -z $DBUSER ]; then
+  DBUSER=$DEFAULT_USER
 fi
-if [ -z $PASS ]; then
-  PASS=$DEFAULT_PASS
+if [ -z $DBPASS ]; then
+  DBPASS=$DEFAULT_PASS
 fi
-if [ -z $PORT ]; then
-  PORT=$DEFAULT_PORT
+if [ -z $DBPORT ]; then
+  DBPORT=$DEFAULT_PORT
 fi
-if [ -z $HOST ]; then
-  HOST=$DEFAULT_HOST
+if [ -z $DBHOST ]; then
+  DBHOST=$DEFAULT_HOST
 fi
 if [ -z $BACKEND_NAME ]; then
   BACKEND_NAME=$DEFAULT_BACKEND_NAME
@@ -167,8 +167,8 @@
       echo "Could not find Postgres importer $JAR"
       exit 1
     fi
-    OGR_CONNECTION="PG:dbname=$BACKEND_NAME host=$HOST port=$PORT \
-      user=$USER password=$PASS"
+    OGR_CONNECTION="PG:dbname=$BACKEND_NAME host=$DBHOST port=$DBPORT \
+      user=$DBUSER password=$DBPASS"
     BACKEND_DB_PREFIX="jdbc:postgresql:"
     BACKEND_DB_DRIVER="org.postgresql.Driver"
     BACKEND_DB_DIALECT="org.hibernate.dialect.PostgreSQLDialect"
@@ -178,7 +178,7 @@
     BACKEND_DB_DIALECT="org.hibernate.dialect.OracleDialect"
 fi
 
-BACKEND_URL=$BACKEND_DB_PREFIX//$HOST:$PORT/$BACKEND_NAME
+BACKEND_URL=$BACKEND_DB_PREFIX//$DBHOST:$DBPORT/$BACKEND_NAME
 
 echo "Importing $RIVER_NAME into $BACKEND_URL."
 
@@ -191,8 +191,8 @@
     -Xmx$MIN_MEMORY \
     -server \
     -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
-    -Dflys.backend.user=$USER \
-    -Dflys.backend.password=$PASS \
+    -Dflys.backend.user=$DBUSER \
+    -Dflys.backend.password=$DBPASS \
     -Dflys.backend.url=$BACKEND_URL \
     -Dflys.backend.driver=$BACKEND_DB_DRIVER \
     -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
@@ -236,8 +236,8 @@
     -Xmx$MIN_MEMORY \
     -server \
     -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
-    -Dflys.backend.user=$USER \
-    -Dflys.backend.password=$PASS \
+    -Dflys.backend.user=$DBUSER \
+    -Dflys.backend.password=$DBPASS \
     -Dflys.backend.url=$BACKEND_URL \
     -Dflys.backend.driver=$BACKEND_DB_DRIVER \
     -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
@@ -281,8 +281,8 @@
     -Xmx$MIN_MEMORY \
     -server \
     -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
-    -Dflys.backend.user=$USER \
-    -Dflys.backend.password=$PASS \
+    -Dflys.backend.user=$DBUSER \
+    -Dflys.backend.password=$DBPASS \
     -Dflys.backend.url=$BACKEND_URL \
     -Dflys.backend.driver=$BACKEND_DB_DRIVER \
     -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
@@ -330,9 +330,9 @@
     --directory $RIVER_PATH \
     --river_name $RIVER_NAME \
     --ogr_connection "$OGR_CONNECTION" \
-    --host $HOST \
-    --user $USER \
-    --password $PASS \
+    --host $DBHOST \
+    --user $DBUSER \
+    --password $DBPASS \
     --verbose 1 \
     > "$LOG_FILE" 2>&1
 }
--- a/flys-backend/contrib/shpimporter/boundaries.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/boundaries.py	Wed Mar 13 19:07:52 2013 +0100
@@ -4,6 +4,7 @@
     import ogr
 
 from importer import Importer
+import utils
 
 TABLE_NAME="hydr_boundaries"
 TABLE_NAME_POLY="hydr_boundaries_poly"
@@ -23,7 +24,10 @@
         return NAME
 
     def isGeometryValid(self, geomType):
-        return geomType == ogr.wkbLineString
+        return geomType in [ogr.wkbLineString,
+                            ogr.wkbLineString25D,
+                            ogr.wkbMultiLineString25D,
+                            ogr.wkbMultiLineString]
 
     def isShapeRelevant(self, name, path):
         shp = ogr.Open(path)
@@ -48,7 +52,7 @@
 
         newFeat  = ogr.Feature(featureDef)
         geometry = feat.GetGeometryRef()
-        geometry.SetCoordinateDimension(2)
+        geometry.SetCoordinateDimension(3)
 
         newFeat.SetGeometry(geometry)
         newFeat.SetField("name", args['name'])
@@ -64,7 +68,7 @@
         else:
             newFeat.SetField("river_id", self.river_id)
 
-        return newFeat
+        return utils.convertToMultiLine(newFeat)
 
 class HydrBoundaryPoly(HydrBoundary):
 
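
The hydr_boundaries import above now accepts 2D and 3D line and multiline geometries, forces the
coordinate dimension to 3 and passes the feature through utils.convertToMultiLine, matching the
MULTILINESTRING column further down in postgresql-spatial.sql. utils.convertToMultiLine itself is
not part of this diff, so the following is only an assumed sketch of such a helper:

    from osgeo import ogr

    def convert_to_multiline(feature):
        # Promote plain LINESTRING geometries to MULTILINESTRING so that single
        # and multi features end up with one uniform geometry type.
        geom = feature.GetGeometryRef()
        if geom.GetGeometryType() in (ogr.wkbLineString, ogr.wkbLineString25D):
            feature.SetGeometry(ogr.ForceToMultiLineString(geom))
        return feature
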
--- a/flys-backend/contrib/shpimporter/crosssectiontracks.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/crosssectiontracks.py	Wed Mar 13 19:07:52 2013 +0100
@@ -37,6 +37,12 @@
         newFeat.SetGeometry(feat.GetGeometryRef())
         newFeat.SetField("name", args['name'])
 
+        if args['path'].lower().endswith("/qps.shp") and \
+                not "sonstige" in args['path'].lower():
+            newFeat.SetField("kind_id", 1) # offical
+        else:
+            newFeat.SetField("kind_id", 0) # misc
+
         if self.IsFieldSet(feat, "river_id"):
             newFeat.SetField("river_id", feat.GetField("river_id"))
         else:
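
The kind_id written here corresponds to the new cross_section_track_kinds lookup added below in the
schema files (0 = Sonstige, 1 = Aktuell). A compact sketch of the path test (not part of the
changeset):

    def cross_section_kind(path):
        p = path.lower()
        # official qps.shp outside any "Sonstige" folder -> 1, everything else -> 0
        return 1 if p.endswith("/qps.shp") and "sonstige" not in p else 0

    assert cross_section_kind("Geodaesie/Querprofile/QP-Spuren/qps.shp") == 1
    assert cross_section_kind("Geodaesie/Querprofile/Sonstige/qps.shp") == 0
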
--- a/flys-backend/contrib/shpimporter/dgm.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/dgm.py	Wed Mar 13 19:07:52 2013 +0100
@@ -2,16 +2,17 @@
 
 import codecs
 import utils
+import datetime
 
 def latin(string):
     return unicode(string, "latin1")
 
+import logging
+logger = logging.getLogger("DGM")
+
+
 # <dbfield> : (<csvfield>, conversion function)
 DGM_MAP = {
-    "lower"           : ("km_von", lambda x: float(x)),
-    "upper"           : ("km_bis", lambda x: float(x)),
-    "year_from"       : "Jahr_von",
-    "year_to"         : "Jahr_bis",
     "projection"      : "Projektion",
     "elevation_state" : latin("Höhenstatus"),
     "format"          : "Format",
@@ -23,10 +24,79 @@
     "path"            : ("Pfad_Bestand", lambda x: x),
     }
 
-SQL_INSERT_DGT = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \
-        ") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
-SQL_INSERT_DGT_ORA = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \
-        ") VALUES (:s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s)"
+SQL_INSERT_DGT = "INSERT INTO dem (river_id, name," \
+        " time_interval_id, range_id, " + ", ".join(DGM_MAP.keys()) + \
+        ") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+SQL_INSERT_DGT_ORA = "INSERT INTO dem (river_id, name," \
+        " time_interval_id, range_id, " + ", ".join(DGM_MAP.keys()) + \
+        ") VALUES (:s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s)"
+SQL_SELECT_TIME_ID = """
+SELECT id FROM time_intervals WHERE start_time = %s AND stop_time = %s
+"""
+SQL_INSERT_TIME_ID = """
+INSERT INTO time_intervals (id, start_time, stop_time) VALUES (%s, %s, %s)
+"""
+SQL_SELECT_TIME_ID_ORA = """
+SELECT id FROM time_intervals WHERE start_time = :s AND stop_time = :s
+"""
+SQL_INSERT_TIME_ID_ORA = """
+INSERT INTO time_intervals (id, start_time, stop_time) VALUES (:s, :s, :s)
+"""
+SQL_SELECT_RANGE_ID = """
+SELECT id FROM ranges WHERE river_id = %s AND a = %s AND b = %s
+"""
+SQL_INSERT_RANGE_ID = """
+INSERT INTO ranges (id, river_id, a, b) VALUES (%s, %s, %s, %s)
+"""
+SQL_SELECT_RANGE_ID_ORA = """
+SELECT id FROM ranges WHERE river_id = :s AND a = :s AND b = :s
+"""
+SQL_INSERT_RANGE_ID_ORA = """
+INSERT INTO ranges (id, river_id, a, b) VALUES (:s, :s, :s, :s)
+"""
+SQL_NEXT_ID     = "SELECT coalesce(max(id), -1) + 1 FROM %s"
+
+
+NEXT_IDS = {}
+def next_id(cur, relation):
+    idx = NEXT_IDS.get(relation)
+    if idx is None:
+        cur.execute(SQL_NEXT_ID % relation)
+        idx = cur.fetchone()[0]
+    NEXT_IDS[relation] = idx + 1
+    return idx
+
+def get_range_id(cur, river_id, a, b, oracle):
+    if oracle:
+        cur.execute(SQL_SELECT_RANGE_ID_ORA, (river_id, a, b))
+    else:
+        cur.execute(SQL_SELECT_RANGE_ID, (river_id, a, b))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "ranges")
+    if oracle:
+        cur.execute(SQL_INSERT_RANGE_ID_ORA, (idx, river_id, a, b))
+    else:
+        cur.execute(SQL_INSERT_RANGE_ID, (idx, river_id, a, b))
+    cur.connection.commit()
+    return idx
+
+def get_time_interval_id(cur, a, b, oracle):
+    if not a or not b:
+        return None
+    if oracle:
+        cur.execute(SQL_SELECT_TIME_ID_ORA, (a, b))
+    else:
+        cur.execute(SQL_SELECT_TIME_ID, (a, b))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "time_intervals")
+    if oracle:
+        cur.execute(SQL_INSERT_TIME_ID_ORA, (idx, a, b))
+    else:
+        cur.execute(SQL_INSERT_TIME_ID, (idx, a, b))
+    cur.connection.commit()
+    return idx
 
 def insertRiverDgm(dbconn, dgmfile, river_name, dry_run, oracle):
     with codecs.open(dgmfile, "r", "latin1") as csvfile:
@@ -42,7 +112,8 @@
         for line in csvfile:
             fields = line.split(";")
             if not fields: continue
-            if fields[namedict[latin("Gewässer")]] != river_name:
+            if fields[namedict[latin("Gewässer")]] != \
+                    unicode(utils.getUTF8(river_name),'UTF-8'):
                 continue
             else:
                 values=[]
@@ -52,15 +123,32 @@
                     else:
                         values.append(unicode.encode(
                             fields[namedict[val]], "UTF-8"))
-                name = "%s KM %s - %s" % (river_name, fields[namedict["km_von"]],
-                        fields[namedict["km_bis"]])
+                km_von = fields[namedict["km_von"]]
+                km_bis = fields[namedict["km_bis"]]
+                year_from = None
+                year_to = None
+                try:
+                    year_from = datetime.datetime(
+                        int(fields[namedict["Jahr_von"]]), 1, 1)
+                    year_to = datetime.datetime(
+                        int(fields[namedict["Jahr_bis"]]),1 ,1)
+                except ValueError:
+                    logger.warn("Invalid numbers (or none) found in year_from and year_to")
+
+                name = "%s KM %s - %s" % (river_name, km_von, km_bis)
                 cur = dbconn.cursor()
+                range_id = get_range_id(cur, river_id, float(km_von),
+                    float(km_bis), oracle)
+                time_interval_id = get_time_interval_id(cur, year_from,
+                    year_to, oracle)
+
                 if oracle:
                     stmt = SQL_INSERT_DGT_ORA
                 else:
                     stmt = SQL_INSERT_DGT
 
-                cur.execute(stmt, [river_id, name] + values)
+                cur.execute(stmt, [river_id, name, time_interval_id,
+                    range_id] + values)
 
         if not dry_run:
             dbconn.commit()
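
The new helpers in dgm.py reuse existing ranges and time_intervals rows and allocate ids themselves
from a per-relation cache seeded with coalesce(max(id), -1) + 1. Only the first call per relation
queries the database; later ids are counted up in memory, which assumes a single importer process
writes these tables. A reduced sketch of that caching (not part of the changeset):

    NEXT_IDS = {}

    def next_id(fetch_max_plus_one, relation):
        idx = NEXT_IDS.get(relation)
        if idx is None:
            # first use of this relation: ask the database once
            idx = fetch_max_plus_one(relation)
        NEXT_IDS[relation] = idx + 1
        return idx

    assert next_id(lambda rel: 5, "ranges") == 5
    assert next_id(lambda rel: 99, "ranges") == 6   # cache wins, no second query
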
--- a/flys-backend/contrib/shpimporter/fixpoints.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/fixpoints.py	Wed Mar 13 19:07:52 2013 +0100
@@ -4,6 +4,9 @@
     import ogr, osr
 
 from importer import Importer
+import logging
+logger = logging.getLogger("Fixpoints")
+fixpoints_no_km_logged = False
 
 TABLE_NAME="fixpoints"
 PATH="Geodaesie/Festpunkte"
@@ -49,6 +52,10 @@
         elif self.IsFieldSet(feat, "ELBE_KM"):
             newFeat.SetField("km", feat.GetFieldAsDouble("ELBE_KM"))
         else:
+            global fixpoints_no_km_logged
+            if not fixpoints_no_km_logged:
+                logger.error("Could not find KM attribute")
+                fixpoints_no_km_logged = True
             return None
 
         if self.IsFieldSet(feat, "X"):
--- a/flys-backend/contrib/shpimporter/floodplains.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/floodplains.py	Wed Mar 13 19:07:52 2013 +0100
@@ -39,6 +39,12 @@
         newFeat.SetGeometry(geometry)
         newFeat.SetField("name", args['name'])
 
+        if args['path'].lower().endswith("/talaue.shp") and \
+                not "sonstige" in args['path'].lower():
+            newFeat.SetField("kind_id", 1) # offical
+        else:
+            newFeat.SetField("kind_id", 0) # misc
+
         if self.IsFieldSet(feat, "river_id"):
             newFeat.SetField("river_id", feat.GetField("river_id"))
         else:
--- a/flys-backend/contrib/shpimporter/hws.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/hws.py	Wed Mar 13 19:07:52 2013 +0100
@@ -10,12 +10,13 @@
 import utils
 
 import logging
-logger = logging.getLogger("Hochwasserschutzanlagen")
+logger = logging.getLogger("HWS")
 
 PATH="Hydrologie/HW-Schutzanlagen"
 NAME="HWS"
 
 # Keep in sync with hws_kinds table:
+# strings need to be lowercase
 HWS_KIND = {
         "durchlass" : 1,
         "damm" : 2,
@@ -23,11 +24,14 @@
         "hochufer" : 2,
         "graben" : 3,
         "rohr1" : 1,
+        "rohr 1" : 1,
+        "rohr 2" : 1,
         "hauptdeich" : 2,
         "sommerdeich" : 2
     }
 
 # Keep in sync with fed_states table:
+# strings need to be lowercase
 FED_STATES = {
     "bayern" : 1,
     "hessen" : 2,
@@ -67,6 +71,8 @@
             "H[oeö]{0,2}he_{0,1}IST$" : "z",
         }
 
+    printedforpath = []
+
     def getPath(self, base):
         return "%s/%s" % (base, PATH)
 
@@ -83,6 +89,17 @@
         shp = ogr.Open(path)
         return self.isGeometryValid(shp.GetLayerByName(name).GetGeomType())
 
+    def getFedStateIDfromPath(self, path):
+        """
+        Tries to extract a federal state (Bundesland) from the path.
+        """
+        for state in sorted(FED_STATES.keys(), key = len, reverse = True):
+            if state in path.lower():
+                if not path in self.printedforpath:
+                    logger.info("Extracted federal state from path: %s" % state)
+                    self.printedforpath.append(path)
+                return FED_STATES[state]
+
     def createNewFeature(self, featureDef, feat, **args):
         newFeat  = ogr.Feature(featureDef)
         geometry = feat.GetGeometryRef()
@@ -109,9 +126,14 @@
 
             if not fed_id:
                 logger.warn("Unknown Bundesland: %s" % \
-                        feat.GetField("Bundesland"))
+                        feat.GetField(fname))
             else:
                 newFeat.SetField("fed_state_id", fed_id)
+        else:
+            # Try to get the bundesland from path
+            fed_id = self.getFedStateIDfromPath(args['path'])
+            if fed_id:
+                newFeat.SetField("fed_state_id", fed_id)
 
         fname = self.searchField("(ufer$)|(flussseite$)")
         if self.IsFieldSet(feat, fname):
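
getFedStateIDfromPath checks the state names longest first because some names contain others
("sachsen" is a substring of "niedersachsen" and "sachsen-anhalt"), so the longer match must win.
A minimal illustration (not part of the changeset; the ids below are made up, the real ones come
from the fed_states table):

    FED_STATES = {"sachsen": 14, "niedersachsen": 9, "sachsen-anhalt": 15}

    def fed_state_from_path(path):
        # longest names first, so "niedersachsen" is not shadowed by "sachsen"
        for state in sorted(FED_STATES, key=len, reverse=True):
            if state in path.lower():
                return FED_STATES[state]

    assert fed_state_from_path("HWS/Niedersachsen/deiche.shp") == 9
    assert fed_state_from_path("HWS/Sachsen-Anhalt/deiche.shp") == 15
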
--- a/flys-backend/contrib/shpimporter/km.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/km.py	Wed Mar 13 19:07:52 2013 +0100
@@ -29,7 +29,7 @@
 
 
     def isShapeRelevant(self, name, path):
-        return name == "km"
+        return name.lower() == "km"
 
 
     def createNewFeature(self, featureDef, feat, **args):
--- a/flys-backend/contrib/shpimporter/shpimporter.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/shpimporter.py	Wed Mar 13 19:07:52 2013 +0100
@@ -22,6 +22,8 @@
 
 logger = logging.getLogger("shpimporter")
 
+os.environ["NLS_LANG"] = ".AL32UTF8"
+
 def initialize_logging(level):
     """Initializes the logging system"""
     root = logging.getLogger()
@@ -196,11 +198,12 @@
         river_id = utils.getRiverId(dbconn_raw, river_name, oracle)
 
         if not river_id:
-            logger.info("Could not find river in database. Skipping: %s"
-                  % river_name)
+            logger.info(u"Could not find river in database. Skipping: %s"
+                  % unicode(utils.getUTF8(river_name), "UTF-8"))
             continue
         else:
-            logger.info("Importing River: %s" % river_name)
+            logger.info(u"Importing River: %s" % unicode(
+                       utils.getUTF8(river_name), "UTF-8"))
 
         for importer in getImporters(river_id, dbconn, config.dry_run):
             if skip_importer(config, importer):
--- a/flys-backend/contrib/shpimporter/uesg.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/uesg.py	Wed Mar 13 19:07:52 2013 +0100
@@ -7,7 +7,7 @@
 
 
 TABLE_NAME="floodmaps"
-PATH="Hydrologie/UeSG/Berechnung"
+PATH="Hydrologie/UeSG"
 NAME="UESG"
 
 
@@ -42,7 +42,7 @@
             else:
                 kind = kind + 20
 
-            if path.find("Land") > 0:
+            if path.find("Bundesl") > 0:
                 kind = kind + 2
             else:
                 kind = kind + 1
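
The kind values computed here line up with the ids of the new floodmap_kinds lookup added to the
spatial schemas below (200, 111, 112, 121, 122). A sketch of the full encoding as it appears from
this hunk; the Messung and Aktuell branches are outside the hunk and therefore assumptions:

    def floodmap_kind(path):
        if "Messung" in path:
            return 200                                  # assumed branch, not in the hunk
        kind = 100
        kind += 10 if "Aktuell" in path else 20         # Aktuell / Potenziell (assumed)
        kind += 2 if path.find("Bundesl") > 0 else 1    # Bundeslaender / BfG (this hunk)
        return kind

    assert floodmap_kind("Hydrologie/UeSG/Berechnung/Aktuell/Bundeslaender/w.shp") == 112
    assert floodmap_kind("Hydrologie/UeSG/Berechnung/Potenziell/BfG/w.shp") == 121
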
--- a/flys-backend/contrib/shpimporter/utils.py	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/contrib/shpimporter/utils.py	Wed Mar 13 19:07:52 2013 +0100
@@ -41,7 +41,7 @@
         stmt = SQL_SELECT_RIVER_ID_ORA
     else:
         stmt = SQL_SELECT_RIVER_ID
-    cur.execute(stmt, (name,))
+    cur.execute(stmt, (getUTF8(name),))
     row = cur.fetchone()
     if row:
         return row[0]
--- a/flys-backend/doc/schema/oracle-drop-minfo.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-drop-minfo.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -42,6 +42,10 @@
 ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_tinterval_id;
 ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_river_id;
 ALTER TABLE sq_relation_value DROP CONSTRAINT fk_sqr_id;
+ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_river_id;
+ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_range_id;
+ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_reference_gauge_id;
+ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_observation_timerange_id;
 
 DROP TABLE bed_height_type;
 DROP TABLE location_system;
@@ -69,6 +73,7 @@
 DROP TABLE waterlevel_difference;
 DROP TABLE waterlevel_difference_column;
 DROP TABLE waterlevel_difference_values;
+DROP TABLE measurement_station;
 DROP TABLE sq_relation_value;
 DROP TABLE sq_relation;
 
--- a/flys-backend/doc/schema/oracle-drop-spatial.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-drop-spatial.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -73,3 +73,5 @@
 DROP TABLE fed_states;
 DROP TABLE axis_kinds;
 DROP TABLE boundary_kinds;
+DROP TABLE cross_section_track_kinds;
+DROP TABLE floodplain_kinds;
--- a/flys-backend/doc/schema/oracle-drop.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-drop.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -29,6 +29,7 @@
 ALTER TABLE wst_columns DROP CONSTRAINT cWstColumnsWsts;
 ALTER TABLE wst_q_ranges DROP CONSTRAINT cWstQRangesRanges;
 ALTER TABLE wsts DROP CONSTRAINT cWstsRivers;
+ALTER TABLE wsts DROP CONSTRAINT cWstsWstKinds;
 DROP TABLE annotation_types;
 DROP TABLE annotations;
 DROP TABLE attributes;
@@ -57,6 +58,7 @@
 DROP TABLE wst_columns;
 DROP TABLE wst_q_ranges;
 DROP TABLE wsts;
+DROP TABLE wst_kinds;
 DROP SEQUENCE ANNOTATION_TYPES_ID_SEQ;
 DROP SEQUENCE ANNOTATIONS_ID_SEQ;
 DROP SEQUENCE ATTRIBUTES_ID_SEQ;
--- a/flys-backend/doc/schema/oracle-minfo.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-minfo.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -20,15 +20,19 @@
     CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
 );
 
-CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
 
+-- lookup table for bedheight types
 CREATE TABLE bed_height_type (
     id          NUMBER(38,0) NOT NULL,
-    name        VARCHAR(16)  NOT NULL,
-    description VARCHAR(255),
+    name        VARCHAR(64)  NOT NULL,
     PRIMARY KEY(id)
 );
-
+INSERT INTO bed_height_type VALUES (1, 'Querprofile');
+INSERT INTO bed_height_type VALUES (2, 'Flächenpeilung');
+INSERT INTO bed_height_type VALUES (3, 'Flächen- u. Querprofilpeilungen');
+INSERT INTO bed_height_type VALUES (4, 'DGM');
+INSERT INTO bed_height_type VALUES (5, 'TIN');
+INSERT INTO bed_height_type VALUES (6, 'Modell');
 
 
 CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
@@ -281,75 +285,25 @@
 );
 
 
-CREATE SEQUENCE WATERLEVEL_ID_SEQ;
-
-CREATE TABLE waterlevel (
-    id          NUMBER(38,0) NOT NULL,
-    river_id    NUMBER(38,0) NOT NULL,
-    unit_id     NUMBER(38,0) NOT NULL,
-    description VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
-    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
-
-CREATE TABLE waterlevel_q_range (
-    id              NUMBER(38,0) NOT NULL,
-    waterlevel_id   NUMBER(38,0) NOT NULL,
-    q               NUMBER(38,2) NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
-
-CREATE TABLE waterlevel_values (
-    id                      NUMBER(38,0) NOT NULL,
-    waterlevel_q_range_id   NUMBER(38,0) NOT NULL,
-    station                 NUMBER(38,3) NOT NULL,
-    w                       NUMBER(38,2) NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
-
-CREATE TABLE waterlevel_difference (
-    id          NUMBER(38,0) NOT NULL,
-    river_id    NUMBER(38,0) NOT NULL,
-    unit_id     NUMBER(38,0) NOT NULL,
-    description VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
-    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
-
-CREATE TABLE waterlevel_difference_column (
-    id              NUMBER(38,0) NOT NULL,
-    difference_id   NUMBER(38,0) NOT NULL,
-    description     VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
-
-CREATE TABLE waterlevel_difference_values (
-    id          NUMBER(38,0) NOT NULL,
-    column_id   NUMBER(38,0) NOT NULL,
-    station     NUMBER(38,3) NOT NULL,
-    value       NUMBER(38,2) NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+CREATE SEQUENCE MEASUREMENT_STATION_ID_SEQ;
+CREATE TABLE measurement_station (
+    id                       NUMBER(38)   NOT NULL,
+    name                     VARCHAR(256) NOT NULL,
+    river_id                 NUMBER(38)   NOT NULL,
+    station                  NUMBER(38,3) NOT NULL,
+    range_id                 NUMBER(38)   NOT NULL,
+    measurement_type         VARCHAR(64)  NOT NULL,
+    riverside                VARCHAR(16),
+    reference_gauge_id       NUMBER(38),
+    observation_timerange_id NUMBER(38),
+    operator                 VARCHAR(64),
+    description              VARCHAR(512),
+    PRIMARY KEY              (id),
+    CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
+    UNIQUE (river_id, station)
 );
 
 
--- a/flys-backend/doc/schema/oracle-spatial.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-spatial.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -14,7 +14,7 @@
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
     river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
-    kind_id  NUMBER(38) REFERENCES axis_kinds(id) NOT NULL,
+    kind_id  NUMBER(38) DEFAULT 0 NOT NULL REFERENCES axis_kinds(id),
     name     VARCHAR(64),
     path     VARCHAR(256),
     ID NUMBER PRIMARY KEY NOT NULL
@@ -34,7 +34,7 @@
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
     river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
-    km NUMBER(6,3),
+    km NUMBER(7,3),
     name     VARCHAR(64),
     path     VARCHAR(256),
     ID NUMBER PRIMARY KEY NOT NULL
@@ -49,11 +49,19 @@
 
 
 --Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE TABLE cross_section_track_kinds(
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64)
+);
+INSERT INTO cross_section_track_kinds(id, name) VALUES (0, 'Sonstige');
+INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'Aktuell');
+
 CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
 CREATE TABLE cross_section_tracks (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
     river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    kind_id  NUMBER(38) DEFAULT 0 NOT NULL REFERENCES cross_section_track_kinds(id),
     km       NUMBER(38,12) NOT NULL,
     z        NUMBER(38,12) DEFAULT 0 NOT NULL,
     name     VARCHAR(64),
@@ -112,11 +120,19 @@
 
 
 -- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE TABLE floodplain_kinds(
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64)
+);
+INSERT INTO floodplain_kinds(id, name) VALUES (0, 'Sonstige');
+INSERT INTO floodplain_kinds(id, name) VALUES (1, 'Aktuell');
+
 CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
 CREATE TABLE floodplain(
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
     river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    kind_id  NUMBER(38) DEFAULT 0 NOT NULL REFERENCES floodplain_kinds(id),
     name     VARCHAR(64),
     path     VARCHAR(256),
     ID NUMBER PRIMARY KEY NOT NULL
@@ -130,27 +146,22 @@
 --CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
 
 
--- TODO: Test-Me. Fix Importer-Skript.
--- NOTE: It's not a spatial schema!
 -- Geodaesie/Hoehenmodelle/*
 CREATE SEQUENCE DEM_ID_SEQ;
 CREATE TABLE dem (
-    ID NUMBER PRIMARY KEY NOT NULL,
-    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
-    -- XXX Should we use the ranges table instead?
-    name            VARCHAR(64),
-    lower           NUMBER(19,5),
-    upper           NUMBER(19,5),
-    year_from       VARCHAR(32),
-    year_to         VARCHAR(32),
-    projection      VARCHAR(32),
-    elevation_state VARCHAR(32),
-    srid            NUMBER NOT NULL,
-    format          VARCHAR(32),
-    border_break    NUMBER(1) DEFAULT 0 NOT NULL,
-    resolution      VARCHAR(16),
-    description     VARCHAR(256),
-    path            VARCHAR(256) NOT NULL UNIQUE
+    ID               NUMBER PRIMARY KEY NOT NULL,
+    river_id         NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    name             VARCHAR(64),
+    range_id         NUMBER(38) REFERENCES ranges(id),
+    time_interval_id NUMBER(38) REFERENCES time_intervals(id),
+    projection       VARCHAR(32),
+    elevation_state  VARCHAR(32),
+    srid             NUMBER NOT NULL,
+    format           VARCHAR(32),
+    border_break     NUMBER(1) DEFAULT 0 NOT NULL,
+    resolution       VARCHAR(16),
+    description      VARCHAR(256),
+    path             VARCHAR(256) NOT NULL UNIQUE
 );
 CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW
     BEGIN
@@ -275,18 +286,29 @@
 /
 
 --Hydrologie/UeSG
+CREATE TABLE floodmap_kinds (
+    id 	     NUMBER PRIMARY KEY NOT NULL,
+    name     varchar(64) NOT NULL
+);
+INSERT INTO floodmap_kinds VALUES (200, 'Messung');
+INSERT INTO floodmap_kinds VALUES (111, 'Berechnung-Aktuell-BfG');
+INSERT INTO floodmap_kinds VALUES (112, 'Berechnung-Aktuell-Bundesländer');
+INSERT INTO floodmap_kinds VALUES (121, 'Berechnung-Potenziell-BfG');
+INSERT INTO floodmap_kinds VALUES (122, 'Berechnung-Potenziell-Bundesländer');
+
 CREATE SEQUENCE FLOODMAPS_ID_SEQ;
 CREATE TABLE floodmaps (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
     river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name VARCHAR(255),
-    kind NUMBER(38),
+    kind NUMBER NOT NULL REFERENCES floodmap_kinds(id),
     diff NUMBER(19,5),
     count NUMBER(38),
     area NUMBER(19,5),
     perimeter NUMBER(19,5),
     path     VARCHAR(256),
+    source   varchar(64),
     id NUMBER PRIMARY KEY NOT NULL
 );
 INSERT INTO USER_SDO_GEOM_METADATA VALUES ('floodmaps', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
--- a/flys-backend/doc/schema/oracle-spatial_idx.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle-spatial_idx.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -22,7 +22,7 @@
 
 -- TODO: index prevents importing on 11g.
 -- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
--- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+-- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTILINE');
 
 CREATE INDEX hws_points_spatial_idx ON hws_points(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
 CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTILINE');
--- a/flys-backend/doc/schema/oracle.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/oracle.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -115,14 +115,17 @@
 
 CREATE TABLE gauges (
     id                  NUMBER(38,0) NOT NULL,
-    aeo                 NUMBER(38,2),
-    datum               NUMBER(38,2), 
-    name                VARCHAR2(255),
-    station             NUMBER(38,2),
-    official_number     NUMBER(38,0),
-    range_id            NUMBER(38,0),
+    aeo                 NUMBER(38,2) NOT NULL,
+    datum               NUMBER(38,2) NOT NULL, 
+    name                VARCHAR2(255) NOT NULL,
+    station             NUMBER(38,2) NOT NULL,
+    official_number     NUMBER(38,0) UNIQUE,
+    range_id            NUMBER(38,0) NOT NULL,
+    -- river_id could be removed because range_id already references the river
     river_id            NUMBER(38,0),
-    PRIMARY KEY         (id)
+    PRIMARY KEY         (id),
+    UNIQUE (name, river_id),
+    UNIQUE (river_id, station)
 );
 
 
@@ -261,7 +264,7 @@
 -- TIME_INTERVALS
 CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
 
-CREATE TABLE time_intervals_ (
+CREATE TABLE time_intervals (
     id                  NUMBER(38,0) NOT NULL, 
     start_time          TIMESTAMP NOT NULL,
     stop_time           TIMESTAMP,
@@ -329,6 +332,21 @@
 
 
 -- WSTS
+--lookup table for wst kinds
+CREATE TABLE wst_kinds (
+    id 	     NUMBER PRIMARY KEY NOT NULL,
+    kind     VARCHAR(64) NOT NULL
+);
+INSERT INTO wst_kinds (id, kind) VALUES (0, 'basedata');
+INSERT INTO wst_kinds (id, kind) VALUES (1, 'basedata_additionals_marks');
+INSERT INTO wst_kinds (id, kind) VALUES (2, 'basedata_fixations_wst');
+INSERT INTO wst_kinds (id, kind) VALUES (3, 'basedata_officials');
+INSERT INTO wst_kinds (id, kind) VALUES (4, 'basedata_heightmarks-points-relative_points');
+INSERT INTO wst_kinds (id, kind) VALUES (5, 'basedata_flood-protections_relative_points');
+INSERT INTO wst_kinds (id, kind) VALUES (6, 'morpho_waterlevel-differences');
+INSERT INTO wst_kinds (id, kind) VALUES (7, 'morpho_waterlevels');
+
+
 CREATE SEQUENCE WSTS_ID_SEQ;
 
 CREATE TABLE wsts (
@@ -375,6 +393,7 @@
 ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE;
 ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES ON DELETE CASCADE;
 ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
+ALTER TABLE wsts ADD CONSTRAINT cWstsWstKinds FOREIGN KEY (kind) REFERENCES wst_kinds;
 
 -- VIEWS
 
--- a/flys-backend/doc/schema/postgresql-minfo.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/postgresql-minfo.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -20,15 +20,19 @@
     CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
 );
 
-CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
 
+-- lookup table for bedheight types
 CREATE TABLE bed_height_type (
     id          int NOT NULL,
-    name        VARCHAR(16)  NOT NULL,
-    description VARCHAR(255),
+    name        VARCHAR(64) NOT NULL,
     PRIMARY KEY(id)
 );
-
+INSERT INTO bed_height_type VALUES (1, 'Querprofile');
+INSERT INTO bed_height_type VALUES (2, 'Flächenpeilung');
+INSERT INTO bed_height_type VALUES (3, 'Flächen- u. Querprofilpeilungen');
+INSERT INTO bed_height_type VALUES (4, 'DGM');
+INSERT INTO bed_height_type VALUES (5, 'TIN');
+INSERT INTO bed_height_type VALUES (6, 'Modell');
 
 
 CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
@@ -281,97 +285,25 @@
 );
 
 
-CREATE SEQUENCE WATERLEVEL_ID_SEQ;
-
-CREATE TABLE waterlevel (
-    id          int NOT NULL,
-    river_id    int NOT NULL,
-    unit_id     int NOT NULL,
-    description VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
-    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
-
-CREATE TABLE waterlevel_q_range (
-    id              int NOT NULL,
-    waterlevel_id   int NOT NULL,
-    q               NUMERIC NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id) ON DELETE CASCADE
-);
-
-
-CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
-
-CREATE TABLE waterlevel_values (
-    id                      int NOT NULL,
-    waterlevel_q_range_id   int NOT NULL,
-    station                 NUMERIC NOT NULL,
-    w                       NUMERIC NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id) ON DELETE CASCADE
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
-
-CREATE TABLE waterlevel_difference (
-    id          int NOT NULL,
-    river_id    int NOT NULL,
-    unit_id     int NOT NULL,
-    description VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id) ON DELETE CASCADE,
-    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
-
-CREATE TABLE waterlevel_difference_column (
-    id              int NOT NULL,
-    difference_id   int NOT NULL,
-    description     VARCHAR(256),
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id) ON DELETE CASCADE
-);
-
-
-CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
-
-CREATE TABLE waterlevel_difference_values (
-    id          int NOT NULL,
-    column_id   int NOT NULL,
-    station     NUMERIC NOT NULL,
-    value       NUMERIC NOT NULL,
-    PRIMARY KEY (id),
-    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id) ON DELETE CASCADE
-);
-
-
 CREATE SEQUENCE MEASUREMENT_STATION_ID_SEQ;
 CREATE TABLE measurement_station (
-	id			             int          NOT NULL,
-	name		             VARCHAR(256) NOT NULL,
-	river_id                 int          NOT NULL,
-	station                  NUMERIC      NOT NULL,
-	range_id                 int          NOT NULL,
-	measurement_type         VARCHAR(64)  NOT NULL,
-	riverside                VARCHAR(16),
-	reference_gauge_id       int,
-	observation_timerange_id int,
-	operator				 VARCHAR(64),
-	comment					 VARCHAR(512),
-	PRIMARY KEY (id),
-	CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
-	CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
-	CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
-	CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
-	UNIQUE (river_id, station)
+    id                       int          NOT NULL,
+    name                     VARCHAR(256) NOT NULL,
+    river_id                 int          NOT NULL,
+    station                  NUMERIC      NOT NULL,
+    range_id                 int          NOT NULL,
+    measurement_type         VARCHAR(64)  NOT NULL,
+    riverside                VARCHAR(16),
+    reference_gauge_id       int,
+    observation_timerange_id int,
+    operator                 VARCHAR(64),
+    description              VARCHAR(512),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
+    CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
+    UNIQUE (river_id, station)
 );
 
 
--- a/flys-backend/doc/schema/postgresql-spatial.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/postgresql-spatial.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -36,10 +36,18 @@
 
 
 --Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE TABLE cross_section_track_kinds(
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64)
+);
+INSERT INTO cross_section_track_kinds(id, name) VALUES (0, 'Sonstige');
+INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'Aktuell');
+
 CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
 CREATE TABLE cross_section_tracks (
     id       int PRIMARY KEY NOT NULL,
     river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    kind_id  int REFERENCES cross_section_track_kinds(id) NOT NULL DEFAULT 0,
     km       FLOAT8 NOT NULL,
     z        FLOAT8 NOT NULL DEFAULT 0,
     name     VARCHAR(64),
@@ -78,10 +86,18 @@
 
 
 -- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE TABLE floodplain_kinds(
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64)
+);
+INSERT INTO floodplain_kinds(id, name) VALUES (0, 'Sonstige');
+INSERT INTO floodplain_kinds(id, name) VALUES (1, 'Aktuell');
+
 CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
 CREATE TABLE floodplain (
     id       int PRIMARY KEY NOT NULL,
     river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    kind_id  int REFERENCES floodplain_kinds(id) NOT NULL DEFAULT 0,
     name     VARCHAR(64),
     path     VARCHAR(256)
 );
@@ -95,19 +111,17 @@
     id       int PRIMARY KEY NOT NULL,
     river_id int REFERENCES rivers(id) ON DELETE CASCADE,
     -- XXX Should we use the ranges table instead?
-    name            VARCHAR(64),
-    lower           FLOAT8,
-    upper           FLOAT8,
-    year_from       VARCHAR(32),
-    year_to         VARCHAR(32),
-    projection      VARCHAR(32),
-    srid	    int NOT NULL,
-    elevation_state VARCHAR(32),
-    format          VARCHAR(32),
-    border_break    BOOLEAN NOT NULL DEFAULT FALSE,
-    resolution      VARCHAR(16),
-    description     VARCHAR(256),
-    path            VARCHAR(256) NOT NULL UNIQUE
+    name             VARCHAR(64),
+    range_id         INT REFERENCES ranges(id),
+    time_interval_id INT REFERENCES time_intervals(id),
+    projection       VARCHAR(32),
+    srid             int NOT NULL,
+    elevation_state  VARCHAR(32),
+    format           VARCHAR(32),
+    border_break     BOOLEAN NOT NULL DEFAULT FALSE,
+    resolution       VARCHAR(16),
+    description      VARCHAR(256),
+    path             VARCHAR(256) NOT NULL UNIQUE
 );
 ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ');
 
@@ -194,25 +208,28 @@
 
 --
 --Hydrologie/UeSG
---
--- 'kind' can be one of:
--- 200 = Messung
--- 111 = Berechnung->Aktuell->BfG
--- 112 = Berechnung->Aktuell->Land
--- 121 = Berechnung->Potenziell->BfG
--- 122 = Berechnung->Potenziell->Land
---
+CREATE TABLE floodmap_kinds (
+    id 	     int PRIMARY KEY NOT NULL,
+    name     varchar(64) NOT NULL
+);
+INSERT INTO floodmap_kinds VALUES (200, 'Messung');
+INSERT INTO floodmap_kinds VALUES (111, 'Berechnung-Aktuell-BfG');
+INSERT INTO floodmap_kinds VALUES (112, 'Berechnung-Aktuell-Bundesländer');
+INSERT INTO floodmap_kinds VALUES (121, 'Berechnung-Potenziell-BfG');
+INSERT INTO floodmap_kinds VALUES (122, 'Berechnung-Potenziell-Bundesländer');
+
 CREATE SEQUENCE FLOODMAPS_ID_SEQ;
 CREATE TABLE floodmaps (
     id         int PRIMARY KEY NOT NULL,
     river_id   int REFERENCES rivers(id) ON DELETE CASCADE,
     name       varchar(64) NOT NULL,
-    kind       int NOT NULL,
+    kind       int NOT NULL REFERENCES floodmap_kinds(id),
     diff       FLOAT8,
     count      int,
     area       FLOAT8,
     perimeter  FLOAT8,
-    path     VARCHAR(256)
+    path     VARCHAR(256),
+    source   varchar(64)
 );
 SELECT AddGeometryColumn('floodmaps', 'geom', 31467, 'MULTIPOLYGON', 2);
 ALTER TABLE floodmaps DROP CONSTRAINT enforce_geotype_geom;
@@ -255,7 +272,7 @@
     sobek      int REFERENCES sobek_kinds(id),
     path       VARCHAR(256)
 );
-SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'LINESTRING',3);
+SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'MULTILINESTRING',3);
 ALTER TABLE hydr_boundaries ALTER COLUMN id SET DEFAULT NEXTVAL('HYDR_BOUNDARIES_ID_SEQ');
 
 
--- a/flys-backend/doc/schema/postgresql.sql	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/doc/schema/postgresql.sql	Wed Mar 13 19:07:52 2013 +0100
@@ -81,15 +81,16 @@
 CREATE TABLE gauges (
     id              int PRIMARY KEY NOT NULL,
     name            VARCHAR(256)    NOT NULL,
+    -- river_id could be removed because range_id already references the river
     river_id        int             NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
-    station         NUMERIC         NOT NULL UNIQUE,
+    station         NUMERIC         NOT NULL,
     aeo             NUMERIC         NOT NULL,
-    official_number int8                     UNIQUE,
+    official_number int8            UNIQUE,
 
     -- Pegelnullpunkt
     datum    NUMERIC NOT NULL,
     -- Streckengueltigkeit
-    range_id int REFERENCES ranges (id) ON DELETE CASCADE,
+    range_id int NOT NULL REFERENCES ranges (id) ON DELETE CASCADE,
 
     UNIQUE (name, river_id),
     UNIQUE (river_id, station)
@@ -166,13 +167,28 @@
 );
 
 -- WST files
+--lookup table for wst kinds
+CREATE TABLE wst_kinds (
+    id 	     int PRIMARY KEY NOT NULL,
+    kind     VARCHAR(64) NOT NULL
+);
+INSERT INTO wst_kinds (id, kind) VALUES (0, 'basedata');
+INSERT INTO wst_kinds (id, kind) VALUES (1, 'basedata_additionals_marks');
+INSERT INTO wst_kinds (id, kind) VALUES (2, 'basedata_fixations_wst');
+INSERT INTO wst_kinds (id, kind) VALUES (3, 'basedata_officials');
+INSERT INTO wst_kinds (id, kind) VALUES (4, 'basedata_heightmarks-points-relative_points');
+INSERT INTO wst_kinds (id, kind) VALUES (5, 'basedata_flood-protections_relative_points');
+INSERT INTO wst_kinds (id, kind) VALUES (6, 'morpho_waterlevel-differences');
+INSERT INTO wst_kinds (id, kind) VALUES (7, 'morpho_waterlevels');
+
+
 CREATE SEQUENCE WSTS_ID_SEQ;
 
 CREATE TABLE wsts (
     id          int PRIMARY KEY NOT NULL,
     river_id    int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     description VARCHAR(256) NOT NULL,
-    kind        int NOT NULL DEFAULT 0,
+    kind        int NOT NULL REFERENCES wst_kinds(id) DEFAULT 0,
     -- TODO: more meta infos
     UNIQUE (river_id, description)
 );
--- a/flys-backend/src/main/java/de/intevation/flys/backend/Credentials.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/Credentials.java	Wed Mar 13 19:07:52 2013 +0100
@@ -7,6 +7,7 @@
     protected String   dialect;
     protected String   driver;
     protected String   url;
+    protected String   connectionInitSqls;
     protected Class [] classes;
 
     public Credentials() {
@@ -18,14 +19,16 @@
         String   dialect,
         String   driver,
         String   url,
+        String   connectionInitSqls,
         Class [] classes
     ) {
-        this.user     = user;
-        this.password = password;
-        this.dialect  = dialect;
-        this.driver   = driver;
-        this.url      = url;
-        this.classes  = classes;
+        this.user               = user;
+        this.password           = password;
+        this.dialect            = dialect;
+        this.driver             = driver;
+        this.url                = url;
+        this.connectionInitSqls = connectionInitSqls;
+        this.classes            = classes;
     }
 
     public String getUser() {
@@ -68,6 +71,14 @@
         this.url = url;
     }
 
+    public String getConnectionInitSqls() {
+        return connectionInitSqls;
+    }
+
+    public void setConnectionInitSqls(String connectionInitSqls) {
+        this.connectionInitSqls = connectionInitSqls;
+    }
+
     public Class [] getClasses() {
         return classes;
     }
--- a/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java	Wed Mar 13 19:07:52 2013 +0100
@@ -17,6 +17,7 @@
 import de.intevation.flys.model.CrossSectionLine;
 import de.intevation.flys.model.CrossSectionPoint;
 import de.intevation.flys.model.CrossSectionTrack;
+import de.intevation.flys.model.CrossSectionTrackKind;
 import de.intevation.flys.model.DGM;
 import de.intevation.flys.model.Depth;
 import de.intevation.flys.model.DischargeTable;
@@ -28,6 +29,7 @@
 import de.intevation.flys.model.Fixpoint;
 import de.intevation.flys.model.Floodmaps;
 import de.intevation.flys.model.Floodplain;
+import de.intevation.flys.model.FloodplainKind;
 import de.intevation.flys.model.FlowVelocityMeasurement;
 import de.intevation.flys.model.FlowVelocityMeasurementValue;
 import de.intevation.flys.model.FlowVelocityModel;
@@ -66,12 +68,6 @@
 import de.intevation.flys.model.SedimentYieldValue;
 import de.intevation.flys.model.TimeInterval;
 import de.intevation.flys.model.Unit;
-import de.intevation.flys.model.Waterlevel;
-import de.intevation.flys.model.WaterlevelDifference;
-import de.intevation.flys.model.WaterlevelDifferenceColumn;
-import de.intevation.flys.model.WaterlevelDifferenceValue;
-import de.intevation.flys.model.WaterlevelQRange;
-import de.intevation.flys.model.WaterlevelValue;
 import de.intevation.flys.model.Wst;
 import de.intevation.flys.model.WstColumn;
 import de.intevation.flys.model.WstColumnQRange;
@@ -96,6 +92,9 @@
     public static final String XPATH_URL =
         "/artifact-database/backend-database/url/text()";
 
+    public static final String XPATH_CONNECTION_INIT_SQLS =
+        "/artifact-database/backend-database/connection-init-sqls/text()";
+
     public static final String DEFAULT_USER =
         System.getProperty("flys.backend.user", "flys");
 
@@ -117,6 +116,10 @@
             "flys.backend.url",
             "jdbc:postgresql://localhost:5432/flys");
 
+    public static final String DEFAULT_CONNECTION_INIT_SQLS =
+        System.getProperty(
+            "flys.backend.connection.init.sqls");
+
     public static final Class [] CLASSES = {
         Annotation.class,
         AnnotationType.class,
@@ -133,6 +136,7 @@
         CrossSectionLine.class,
         CrossSectionPoint.class,
         CrossSectionTrack.class,
+        CrossSectionTrackKind.class,
         Depth.class,
         DGM.class,
         DischargeTable.class,
@@ -143,6 +147,7 @@
         FedState.class,
         Fixpoint.class,
         Floodplain.class,
+        FloodplainKind.class,
         Floodmaps.class,
         FlowVelocityMeasurement.class,
         FlowVelocityMeasurementValue.class,
@@ -182,12 +187,6 @@
         SQRelationValue.class,
         TimeInterval.class,
         Unit.class,
-        Waterlevel.class,
-        WaterlevelDifference.class,
-        WaterlevelDifferenceColumn.class,
-        WaterlevelDifferenceValue.class,
-        WaterlevelQRange.class,
-        WaterlevelValue.class,
         WstColumn.class,
         WstColumnQRange.class,
         WstColumnValue.class,
@@ -203,9 +202,11 @@
         String password,
         String dialect,
         String driver,
-        String url
+        String url,
+        String connectionInitSqls
     ) {
-        super(user, password, dialect, driver, url, CLASSES);
+        super(
+            user, password, dialect, driver, url, connectionInitSqls, CLASSES);
     }
 
     private static Credentials instance;
@@ -222,9 +223,13 @@
                 Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
             String url =
                 Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+            String connectionInitSqls =
+                Config.getStringXPath(
+                    XPATH_CONNECTION_INIT_SQLS,
+                    DEFAULT_CONNECTION_INIT_SQLS);
 
             instance = new FLYSCredentials(
-                user, password, dialect, driver, url);
+                user, password, dialect, driver, url, connectionInitSqls);
         }
         return instance;
     }
@@ -235,7 +240,8 @@
             DEFAULT_PASSWORD,
             DEFAULT_DIALECT,
             DEFAULT_DRIVER,
-            DEFAULT_URL);
+            DEFAULT_URL,
+            DEFAULT_CONNECTION_INIT_SQLS);
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SedDBCredentials.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SedDBCredentials.java	Wed Mar 13 19:07:52 2013 +0100
@@ -73,6 +73,9 @@
     public static final String XPATH_URL =
         "/artifact-database/seddb-database/url/text()";
 
+    public static final String XPATH_CONNECTION_INIT_SQLS =
+        "/artifact-database/seddb-database/connection-init-sqls/text()";
+
     public static final String DEFAULT_USER =
         System.getProperty("flys.seddb.user", "seddb");
 
@@ -94,6 +97,10 @@
             "flys.seddb.url",
             "jdbc:postgresql://localhost:5432/seddb");
 
+    public static final String DEFAULT_CONNECTION_INIT_SQLS =
+        System.getProperty(
+            "flys.seddb.connection.init.sqls");
+
     public static final Class [] CLASSES = {
         BezugspegelgewId.class,
         Bezugspegelgew.class,
@@ -159,9 +166,11 @@
         String password,
         String dialect,
         String driver,
-        String url
+        String url,
+        String connectionInitSqls
     ) {
-        super(user, password, dialect, driver, url, CLASSES);
+        super(
+            user, password, dialect, driver, url, connectionInitSqls, CLASSES);
     }
 
     public static synchronized Credentials getInstance() {
@@ -176,9 +185,13 @@
                 Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
             String url =
                 Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+            String connectionInitSqls =
+                Config.getStringXPath(
+                    XPATH_CONNECTION_INIT_SQLS,
+                    DEFAULT_CONNECTION_INIT_SQLS);
 
             instance = new SedDBCredentials(
-                user, password, dialect, driver, url);
+                user, password, dialect, driver, url, connectionInitSqls);
         }
         return instance;
     }
@@ -189,7 +202,8 @@
             DEFAULT_PASSWORD,
             DEFAULT_DIALECT,
             DEFAULT_DRIVER,
-            DEFAULT_URL);
+            DEFAULT_URL,
+            DEFAULT_CONNECTION_INIT_SQLS);
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java	Wed Mar 13 19:07:52 2013 +0100
@@ -137,6 +137,11 @@
         props.setProperty(Environment.DRIVER,  credentials.getDriver());
         props.setProperty(Environment.URL,     credentials.getUrl());
 
+        String connectionInitSqls = credentials.getConnectionInitSqls();
+        if (connectionInitSqls != null) {
+            props.setProperty("connectionInitSqls", connectionInitSqls);
+        }
+
         cfg.mergeProperties(props);
 
         return cfg;
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Wed Mar 13 19:07:52 2013 +0100
@@ -4,6 +4,7 @@
 
 import org.apache.log4j.Logger;
 
+import org.hibernate.HibernateException;
 import org.hibernate.Query;
 import org.hibernate.Session;
 
@@ -93,7 +94,7 @@
                 logger.warn("River has no RiverAxis.");
             }
         }
-        catch(IllegalArgumentException iae) {
+        catch(HibernateException iae) {
             logger.warn("No vaild river axis found for " + river.getName());
             return;
         }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightType.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightType.java	Wed Mar 13 19:07:52 2013 +0100
@@ -16,19 +16,22 @@
         Logger.getLogger(ImportBedHeightType.class);
 
     protected String name;
-    protected String description;
 
     protected BedHeightType peer;
 
+    public ImportBedHeightType(BedHeightType peer)  {
+        this.peer = peer;
+        name = peer.getName();
+    }
 
-    public ImportBedHeightType(String name, String description) {
+
+    public ImportBedHeightType(String name) {
         this.name        = name;
-        this.description = description;
     }
 
 
     public void storeDependencies() {
-        BedHeightType type = getPeer();
+        getPeer();
     }
 
 
@@ -37,16 +40,14 @@
             Session session = ImporterSession.getInstance().getDatabaseSession();
 
             Query query = session.createQuery(
-                "from BedHeightType where " +
-                "name=:name and description=:description");
+                "from BedHeightType where name=:name and description=:description");
 
             query.setParameter("name", name);
-            query.setParameter("description", description);
 
             List<BedHeightType> types = query.list();
 
             if (types.isEmpty()) {
-                peer = new BedHeightType(name, description);
+                peer = new BedHeightType(name);
                 session.save(peer);
             }
             else {
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Wed Mar 13 19:07:52 2013 +0100
@@ -130,6 +130,12 @@
 
     protected List<ImportWst> floodProtection;
 
+    /** Wst-structures from waterlevel-csv files. */
+    protected List<ImportWst> waterlevels;
+
+    /** Wst-structures from waterlevel-difference-csv files. */
+    protected List<ImportWst> waterlevelDifferences;
+
     protected List<ImportBedHeight> bedHeightSingles;
 
     protected List<ImportBedHeight> bedHeightEpochs;
@@ -144,10 +150,6 @@
 
     protected List<ImportSedimentYield> sedimentYields;
 
-    protected List<ImportWaterlevel> waterlevels;
-
-    protected List<ImportWaterlevelDifference> waterlevelDiffs;
-
     protected List<ImportMeasurementStation> measurementStations;
 
     protected List<ImportSQRelation> sqRelations;
@@ -204,14 +206,14 @@
         fixations                 = new ArrayList<ImportWst>();
         officialLines             = new ArrayList<ImportWst>();
         floodWater                = new ArrayList<ImportWst>();
+        waterlevels               = new ArrayList<ImportWst>();
+        waterlevelDifferences     = new ArrayList<ImportWst>();
         floodProtection           = new ArrayList<ImportWst>();
         sedimentDensities         = new ArrayList<ImportSedimentDensity>();
         morphologicalWidths       = new ArrayList<ImportMorphWidth>();
         flowVelocityModels        = new ArrayList<ImportFlowVelocityModel>();
         flowVelocityMeasurements  = new ArrayList<ImportFlowVelocityMeasurement>();
         sedimentYields            = new ArrayList<ImportSedimentYield>();
-        waterlevels               = new ArrayList<ImportWaterlevel>();
-        waterlevelDiffs           = new ArrayList<ImportWaterlevelDifference>();
         measurementStations       = new ArrayList<ImportMeasurementStation>();
         sqRelations               = new ArrayList<ImportSQRelation>();
     }
@@ -550,7 +552,7 @@
         File[] files = wspDir.listFiles();
 
         if (files == null) {
-            log.warn("Cannot read directory '" + wspDir + "'");
+            log.warn("Cannot read directory for wl '" + wspDir + "'");
             return;
         }
 
@@ -562,9 +564,10 @@
 
         // The parsed ImportWaterlevels are converted to
         // waterlevel wsts (kind 6) now.
-        for(ImportWst iw: parser.exportWsts()) {
-            //iw.setDescription("CSV" + iw.getDescription());
-            fixations.add(iw);
+        for(ImportWst iw: parser.getWaterlevels()) {
+            iw.setDescription("CSV/" + iw.getDescription());
+            iw.setKind(6);
+            waterlevels.add(iw);
         }
     }
 
@@ -624,7 +627,12 @@
             parser.parse(file);
         }
 
-        waterlevelDiffs = parser.getDifferences();
+        // WaterlevelDifferences become Wsts now.
+        for(ImportWst iw: parser.getDifferences()) {
+            iw.setDescription("CSV/" + iw.getDescription());
+            iw.setKind(7);
+            waterlevelDifferences.add(iw);
+        }
     }
 
 
@@ -1122,16 +1130,43 @@
     }
 
     public void storeFixations() {
-        if (!Config.INSTANCE.skipFixations() || !Config.INSTANCE.skipWaterlevels()) {
-            log.info("store fixation wsts and/or csvs");
+        if (!Config.INSTANCE.skipFixations()) {
+            log.info("store fixation wsts");
             River river = getPeer();
-            for (ImportWst wst: fixations) {
-                log.debug("name: " + wst.getDescription());
-                wst.storeDependencies(river);
+            for (ImportWst fWst: fixations) {
+                log.debug("Fixation name: " + fWst.getDescription());
+                fWst.storeDependencies(river);
             }
         }
     }
 
+
+    /** Store wsts from waterlevel-csv files. */
+    public void storeWaterlevels() {
+        if (!Config.INSTANCE.skipWaterlevels()) {
+            log.info("store waterlevel wsts from csv");
+            River river = getPeer();
+            for (ImportWst wWst: waterlevels) {
+                log.debug("Waterlevel name: " + wWst.getDescription());
+                wWst.storeDependencies(river);
+            }
+        }
+    }
+
+
+    /** Store wsts from waterlevel-difference-csv files. */
+    public void storeWaterlevelDifferences() {
+        if (!Config.INSTANCE.skipWaterlevelDifferences()) {
+            log.info("store waterlevel-difference wsts from csv");
+            River river = getPeer();
+            for (ImportWst dWst: waterlevelDifferences) {
+                log.debug("Waterlevel difference name: " + dWst.getDescription());
+                dWst.storeDependencies(river);
+            }
+        }
+    }
+
+
     public void storeExtraWsts() {
         if (!Config.INSTANCE.skipExtraWsts()) {
             log.info("store extra wsts");
@@ -1165,6 +1200,7 @@
         }
     }
 
+
     public void storeFloodProtection() {
         if (!Config.INSTANCE.skipFloodProtection()) {
             log.info("store flood protection wsts");
@@ -1343,40 +1379,6 @@
     }
 
 
-    public void storeWaterlevels() {
-        if (!Config.INSTANCE.skipWaterlevels()) {
-            log.info("store waterlevels");
-
-            River river = getPeer();
-
-            for (ImportWaterlevel waterlevel: waterlevels) {
-                waterlevel.storeDependencies(river);
-            }
-        }
-    }
-
-
-    public void storeWaterlevelDifferences() {
-        if (!Config.INSTANCE.skipWaterlevelDifferences()) {
-            log.info("store waterlevel differences");
-
-            River river = getPeer();
-
-            for (ImportWaterlevelDifference diff: waterlevelDiffs) {
-                try {
-                    diff.storeDependencies(river);
-                }
-                catch (SQLException sqle) {
-                    log.error("Error while storing waterlevel diff.", sqle);
-                }
-                catch (ConstraintViolationException cve) {
-                    log.error("Error while storing waterlevel diff.", cve);
-                }
-            }
-        }
-    }
-
-
     public void storeMeasurementStations() {
         if (!Config.INSTANCE.skipMeasurementStations()) {
             log.info("store measurement stations");
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevel.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import de.intevation.flys.model.River;
-import de.intevation.flys.model.Unit;
-import de.intevation.flys.model.Waterlevel;
-
-
-public class ImportWaterlevel {
-
-    private static final Logger log = Logger.getLogger(ImportWaterlevel.class);
-
-    private ImportUnit unit;
-
-    private String description;
-
-    private List<ImportWaterlevelQRange> qRanges;
-
-    private Waterlevel peer;
-
-    public ImportWaterlevel(String description) {
-        this.qRanges = new ArrayList<ImportWaterlevelQRange>();
-
-        this.description = description;
-    }
-
-    public String getDescription() {
-        return this.description;
-    }
-
-    public void setUnit(ImportUnit unit) {
-        this.unit = unit;
-    }
-
-    public ImportUnit getUnit() {
-        return this.unit;
-    }
-
-    public void addValue(ImportWaterlevelQRange qRange) {
-        this.qRanges.add(qRange);
-    }
-
-    public List<ImportWaterlevelQRange> getQRanges() {
-        return this.qRanges;
-    }
-
-    public void storeDependencies(River river) {
-        log.info("store dependencies");
-
-        Waterlevel peer = getPeer(river);
-
-        if (peer != null) {
-            int i = 0;
-
-            for (ImportWaterlevelQRange qRange : qRanges) {
-                qRange.storeDependencies(peer);
-                i++;
-            }
-
-            log.info("stored " + i + " waterlevel q ranges");
-        }
-    }
-
-    public Waterlevel getPeer(River river) {
-        Unit u = unit != null ? unit.getPeer() : null;
-        if (u == null) {
-            log.warn("skip invalid waterlevel - no unit set!");
-            return null;
-        }
-
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery("from Waterlevel where "
-                + "   river=:river and " + "   unit=:unit and "
-                + "   description=:description");
-
-            query.setParameter("river", river);
-            query.setParameter("unit", u);
-            query.setParameter("description", description);
-
-            List<Waterlevel> wsts = query.list();
-            if (wsts.isEmpty()) {
-                peer = new Waterlevel(river, u, description);
-                session.save(peer);
-            }
-            else {
-                peer = wsts.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,95 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
-
-import de.intevation.flys.model.River;
-import de.intevation.flys.model.Unit;
-import de.intevation.flys.model.WaterlevelDifference;
-
-
-public class ImportWaterlevelDifference {
-
-    private static final Logger log = Logger
-        .getLogger(ImportWaterlevelDifference.class);
-
-    private ImportUnit unit;
-
-    private String description;
-
-    private List<ImportWaterlevelDifferenceColumn> columns;
-
-    private WaterlevelDifference peer;
-
-    public ImportWaterlevelDifference(String description) {
-        this.columns = new ArrayList<ImportWaterlevelDifferenceColumn>();
-
-        this.description = description;
-    }
-
-    public void setUnit(ImportUnit unit) {
-        this.unit = unit;
-    }
-
-    public void addValue(ImportWaterlevelDifferenceColumn column) {
-        this.columns.add(column);
-    }
-
-    public void storeDependencies(River river) throws SQLException,
-        ConstraintViolationException {
-        log.info("store dependencies");
-
-        WaterlevelDifference peer = getPeer(river);
-
-        if (peer != null) {
-            int i = 0;
-
-            for (ImportWaterlevelDifferenceColumn column : columns) {
-                column.storeDependencies(peer);
-                i++;
-            }
-
-            log.info("stored " + i + " waterlevel difference columns");
-        }
-    }
-
-    public WaterlevelDifference getPeer(River river) {
-        Unit u = unit != null ? unit.getPeer() : null;
-        if (u == null) {
-            log.warn("IWD: skip invalid waterlevel difference - no unit set!");
-            return null;
-        }
-
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session
-                .createQuery("from WaterlevelDifference where "
-                    + "   river=:river and " + "   unit=:unit and "
-                    + "   description=:description");
-
-            query.setParameter("river", river);
-            query.setParameter("unit", u);
-            query.setParameter("description", description);
-
-            List<WaterlevelDifference> diffs = query.list();
-            if (diffs.isEmpty()) {
-                peer = new WaterlevelDifference(river, u, description);
-                session.save(peer);
-            }
-            else {
-                peer = diffs.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,81 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import de.intevation.flys.model.WaterlevelDifference;
-import de.intevation.flys.model.WaterlevelDifferenceColumn;
-
-
-public class ImportWaterlevelDifferenceColumn {
-
-    private static final Logger log =
-        Logger.getLogger(ImportWaterlevelDifferenceColumn.class);
-
-
-    private String description;
-
-    private List<ImportWaterlevelDifferenceValue> values;
-
-    private WaterlevelDifferenceColumn peer;
-
-
-    public ImportWaterlevelDifferenceColumn(String description) {
-        this.values = new ArrayList<ImportWaterlevelDifferenceValue>();
-
-        this.description = description;
-    }
-
-
-    public void addValue(ImportWaterlevelDifferenceValue value) {
-        this.values.add(value);
-    }
-
-
-    public void storeDependencies(WaterlevelDifference difference) {
-        log.info("store dependencies");
-
-        WaterlevelDifferenceColumn peer = getPeer(difference);
-
-        int i = 0;
-
-        for (ImportWaterlevelDifferenceValue value: values) {
-            value.storeDependencies(peer);
-            i++;
-        }
-
-        log.info("stored " + i + " waterlevel difference values");
-    }
-
-
-    public WaterlevelDifferenceColumn getPeer(WaterlevelDifference diff) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance().getDatabaseSession();
-            Query query = session.createQuery(
-                "from WaterlevelDifferenceColumn where " +
-                "   difference=:difference and " +
-                "   description=:description"
-            );
-
-            query.setParameter("difference", diff);
-            query.setParameter("description", description);
-
-            List<WaterlevelDifferenceColumn> cols = query.list();
-            if (cols.isEmpty()) {
-                peer = new WaterlevelDifferenceColumn(diff, description);
-                session.save(peer);
-            }
-            else {
-                peer = cols.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,64 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import de.intevation.flys.model.WaterlevelDifferenceColumn;
-import de.intevation.flys.model.WaterlevelDifferenceValue;
-
-
-public class ImportWaterlevelDifferenceValue {
-
-    private static final Logger log =
-        Logger.getLogger(ImportWaterlevelDifferenceValue.class);
-
-
-    private Double station;
-    private Double value;
-
-    private WaterlevelDifferenceValue peer;
-
-
-    public ImportWaterlevelDifferenceValue(Double station, Double value) {
-        this.station = station;
-        this.value   = value;
-    }
-
-
-    public void storeDependencies(WaterlevelDifferenceColumn column) {
-        getPeer(column);
-    }
-
-
-    public WaterlevelDifferenceValue getPeer(WaterlevelDifferenceColumn column) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance().getDatabaseSession();
-            Query query = session.createQuery(
-                "from WaterlevelDifferenceValue where " +
-                "   column=:column and " +
-                "   station=:station and " +
-                "   value=:value"
-            );
-
-            query.setParameter("column", column);
-            query.setParameter("station", station);
-            query.setParameter("value", value);
-
-            List<WaterlevelDifferenceValue> values = query.list();
-            if (values.isEmpty()) {
-                peer = new WaterlevelDifferenceValue(column, station, value);
-                session.save(peer);
-            }
-            else {
-                peer = values.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,86 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import de.intevation.flys.model.Waterlevel;
-import de.intevation.flys.model.WaterlevelQRange;
-
-
-/** Has a Q and list of W,km values. */
-public class ImportWaterlevelQRange {
-
-    private static final Logger log =
-        Logger.getLogger(ImportWaterlevelQRange.class);
-
-    private Double q;
-
-    private List<ImportWaterlevelValue> values;
-
-    private WaterlevelQRange peer;
-
-
-    public ImportWaterlevelQRange(Double q) {
-        this.values = new ArrayList<ImportWaterlevelValue>();
-        this.q      = q;
-    }
-
-    public void addValue(ImportWaterlevelValue value) {
-        this.values.add(value);
-    }
-
-    public Double getQ() {
-        return this.q;
-    }
-
-    public List<ImportWaterlevelValue> getValues() {
-        return values;
-    }
-
-    public void storeDependencies(Waterlevel waterlevel) {
-        log.info("store dependencies");
-
-        WaterlevelQRange peer = getPeer(waterlevel);
-
-        int i = 0;
-
-        for (ImportWaterlevelValue value: values) {
-            value.storeDependencies(peer);
-            i++;
-        }
-
-        log.info("stored " + i + " waterlevel values");
-    }
-
-
-    public WaterlevelQRange getPeer(Waterlevel waterlevel) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance().getDatabaseSession();
-            Query query = session.createQuery(
-                "from WaterlevelQRange where " +
-                "   waterlevel=:waterlevel and " +
-                "   q=:q"
-            );
-
-            query.setParameter("waterlevel", waterlevel);
-            query.setParameter("q", q);
-
-            List<WaterlevelQRange> qRanges = query.list();
-            if (qRanges.isEmpty()) {
-                peer = new WaterlevelQRange(waterlevel, q);
-                session.save(peer);
-            }
-            else {
-                peer = qRanges.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-package de.intevation.flys.importer;
-
-import java.util.List;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import de.intevation.flys.model.WaterlevelQRange;
-import de.intevation.flys.model.WaterlevelValue;
-
-
-/** W and a station. */
-public class ImportWaterlevelValue {
-
-    private Double station;
-    private Double w;
-
-    private WaterlevelValue peer;
-
-
-    public ImportWaterlevelValue(Double station, Double w) {
-        this.station = station;
-        this.w       = w;
-    }
-
-
-    public void storeDependencies(WaterlevelQRange qRange) {
-        getPeer(qRange);
-    }
-
-
-    public Double getStation() {
-        return this.station;
-    }
-
-
-    public Double getW() {
-        return this.w;
-    }
-
-    public WaterlevelValue getPeer(WaterlevelQRange qRange) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance().getDatabaseSession();
-            Query query = session.createQuery(
-                "from WaterlevelValue where " +
-                "   qrange=:qrange and " +
-                "   station=:station and " +
-                "   w=:w"
-            );
-
-            query.setParameter("qrange", qRange);
-            query.setParameter("station", station);
-            query.setParameter("w", w);
-
-            List<WaterlevelValue> values = query.list();
-
-            if (values.isEmpty()) {
-                peer = new WaterlevelValue(qRange, station, w);
-                session.save(peer);
-            }
-            else {
-                peer = values.get(0);
-            }
-        }
-
-        return peer;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Wed Mar 13 19:07:52 2013 +0100
@@ -53,6 +53,7 @@
         this.description = description;
     }
 
+    /** Create columns that can be accessed with getColumn. */
     public void setNumberColumns(int numColumns) {
         for (int i = 0; i < numColumns; ++i) {
             columns.add(new ImportWstColumn(this, null, null, i));
@@ -67,6 +68,15 @@
         return columns.get(index);
     }
 
+    public List<ImportWstColumn> getColumns() {
+        return columns;
+    }
+
+    /** Adds a column. Assumes that the column's wst is this instance. */
+    public void addColumn(ImportWstColumn column) {
+        columns.add(column);
+    }
+
     public ImportUnit getUnit() {
         return unit;
     }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Wed Mar 13 19:07:52 2013 +0100
@@ -93,6 +93,13 @@
             new ImportWstColumnQRange(this, columnQRange));
     }
 
+
+    /** Get the Column Values stored in this column. */
+    public List<ImportWstColumnValue> getColumnValues() {
+        return columnValues;
+    }
+
+
     public void storeDependencies(River river) {
         log.info("store column '" + name + "'");
         WstColumn column = getPeer(river);
--- a/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Wed Mar 13 19:07:52 2013 +0100
@@ -76,7 +76,7 @@
             catch (HibernateException he) {
                 Throwable t = he.getCause();
                 while (t instanceof SQLException) {
-                    SQLException sqle = (SQLException)t;
+                    SQLException sqle = (SQLException) t;
                     log.error("SQL exeception chain:", sqle);
                     t = sqle.getNextException();
                 }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java	Wed Mar 13 19:07:52 2013 +0100
@@ -156,7 +156,7 @@
                     attribute, position, range, edge, type);
 
                 if (!annotations.add(annotation)) {
-                    log.warn("ANN: duplicated annotation '" + parts[0] +
+                    log.info("ANN: duplicated annotation '" + parts[0] +
                         "' in line " + in.getLineNumber());
                 }
             }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java	Wed Mar 13 19:07:52 2013 +0100
@@ -31,7 +31,7 @@
 import de.intevation.flys.importer.ImportTimeInterval;
 import de.intevation.flys.importer.ImportUnit;
 import de.intevation.flys.model.BedHeightType;
-
+import de.intevation.flys.importer.ImporterSession;
 
 public abstract class BedHeightParser {
 
@@ -331,15 +331,15 @@
         if (m.matches()) {
             String tmp = m.group(1).replace(";", "");
 
-            String name = BedHeightType.getBedHeightName(tmp);
+            BedHeightType bht = BedHeightType.fetchBedHeightTypeForType(
+                tmp, ImporterSession.getInstance().getDatabaseSession());
 
-            if (name != null) {
-                obj.setType(new ImportBedHeightType(name, tmp));
+            if (bht != null) {
+                obj.setType(new ImportBedHeightType(bht));
                 return true;
             }
-            else {
-                log.warn("Unknown bed height type: '" + tmp + "'");
-            }
+
+            log.warn("Unknown bed height type: '" + tmp + "'");
         }
 
         return false;
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java	Wed Mar 13 19:07:52 2013 +0100
@@ -32,29 +32,29 @@
 
     public static final String FRAKTION_START = "Fraktion:";
 
-    public static final Pattern FRACTION_COARSE_STR =
-        Pattern.compile(".*Grobkorn.*");
-
-    public static final Pattern FRACTION_FINE_MIDDLE_STR =
-        Pattern.compile(".*Fein-Mittel-Kies.*");
-
-    public static final Pattern FRACTION_SUSP_SAND_BED =
-        Pattern.compile(".*bettbild_Anteil_susp_Sand.*");
+    public static final String FRACTION_COARSE_STR =
+        ".*Grobkorn.*";
 
-    public static final Pattern FRACTION_SUSP_SAND_BED_EPOCH =
-        Pattern.compile(".*susp_Sand_bettbildAnteil.*");
-
-    public static final Pattern FRACTION_SUSP_SAND =
-        Pattern.compile(".*susp_Sand.*");
+    public static final String FRACTION_FINE_MIDDLE_STR =
+        ".*Fein-Mittel-Kies.*";
 
-    public static final Pattern FRACTION_SAND =
-        Pattern.compile(".*Sand.*");
+    public static final String FRACTION_SAND =
+        ".*Sand.*";
 
-    public static final Pattern FRACTION_SUSPENDED_SEDIMENT =
-        Pattern.compile(".*Schwebstoff.*");
+    public static final String FRACTION_SUSP_SAND =
+        ".*susp_Sand.*";
 
-    public static final Pattern FRACTION_TOTAL =
-        Pattern.compile(".*gesamt.*");
+    public static final String FRACTION_SUSP_SAND_BED =
+        ".*bettbild_Anteil_susp_Sand.*";
+
+    public static final String FRACTION_SUSP_SAND_BED_EPOCH =
+        ".*susp_Sand_bettbildAnteil.*";
+
+    public static final String FRACTION_SUSPENDED_SEDIMENT =
+        ".*Schwebstoff.*";
+
+    public static final String FRACTION_TOTAL =
+        ".*gesamt.*";
 
 
     public static final Pattern TIMEINTERVAL_SINGLE =
@@ -357,33 +357,33 @@
             }
         }
 
-        log.warn("SYP: Unknow grain fraction: '" + gfStr + "'");
+        log.warn("SYP: Unknown grain fraction: '" + gfStr + "'");
 
         return null;
     }
 
 
     public static String getGrainFractionTypeName(String filename) {
-        if (filename.matches(FRACTION_COARSE_STR)) {
-            return GrainFraction.COARSE;
+        if (Pattern.matches(FRACTION_COARSE_STR, filename)) {
+            return GrainFraction.COARSE;
         }
-        else if (filename.matches(FRACTION_FINE_MIDDLE_STR)) {
+        else if (Pattern.matches(FRACTION_FINE_MIDDLE_STR, filename)) {
             return GrainFraction.FINE_MIDDLE;
         }
-        else if (filename.matches(FRACTION_SUSP_SAND_BED) ||
-            filename.matches(FRACTION_SUSP_SAND_BED_EPOCH)) {
+        else if (Pattern.matches(FRACTION_SUSP_SAND_BED, filename) ||
+            Pattern.matches(FRACTION_SUSP_SAND_BED_EPOCH, filename)) {
             return GrainFraction.SUSP_SAND_BED;
         }
-        else if (filename.matches(FRACTION_SUSP_SAND) {
+        else if (Pattern.matches(FRACTION_SUSP_SAND, filename)) {
             return GrainFraction.SUSP_SAND;
         }
-        else if (filename.matches(FRACTION_SAND) {
+        else if (Pattern.matches(FRACTION_SAND, filename)) {
             return GrainFraction.SAND;
         }
-        else if (filename.matches(FRACTION_SUSPENDED_SEDIMENT)) {
+        else if (Pattern.matches(FRACTION_SUSPENDED_SEDIMENT, filename)) {
             return GrainFraction.SUSPENDED_SEDIMENT;
         }
-        else if (filename.matches(FRACTION_TOTAL)) {
+        else if (Pattern.matches(FRACTION_TOTAL, filename)) {
             return GrainFraction.TOTAL;
         }
         else {
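
Illustration (not part of the changeset): with the fraction constants turned into plain regex strings, classification reduces to Pattern.matches(regex, filename). A standalone sketch with a made-up filename and only one of the constants copied in:

    import java.util.regex.Pattern;

    // Sketch only: String-based matching as used in getGrainFractionTypeName.
    // The filename below is invented; the real constants live in
    // SedimentYieldParser.
    public class FractionMatchSketch {
        public static final String FRACTION_TOTAL = ".*gesamt.*";

        public static void main(String[] args) {
            String filename = "Fracht_gesamt_1990-2000.csv";
            System.out.println(Pattern.matches(FRACTION_TOTAL, filename)); // true
        }
    }
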
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java	Wed Mar 13 19:07:52 2013 +0100
@@ -2,6 +2,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.text.NumberFormat;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -12,11 +13,14 @@
 import org.apache.log4j.Logger;
 
 import de.intevation.flys.importer.ImportUnit;
-import de.intevation.flys.importer.ImportWaterlevelDifference;
-import de.intevation.flys.importer.ImportWaterlevelDifferenceColumn;
-import de.intevation.flys.importer.ImportWaterlevelDifferenceValue;
+
+import de.intevation.flys.importer.ImportWst;
+import de.intevation.flys.importer.ImportWstColumn;
 
 
+/**
+ * Parse WaterlevelDifferences CSV file.
+ */
 public class WaterlevelDifferencesParser extends LineParser {
 
     private static final Logger log =
@@ -28,32 +32,40 @@
     public static final Pattern META_UNIT =
         Pattern.compile("^Einheit: \\[(.*)\\].*");
 
-
-    private List<ImportWaterlevelDifference> differences;
+    /** List of parsed differences as ImportWst objects. */
+    private List<ImportWst> differences;
 
-    private ImportWaterlevelDifferenceColumn[] columns;
+    private ImportWstColumn[] columns;
 
-    private ImportWaterlevelDifference current;
+    /** The currently processed dataset. */
+    private ImportWst current;
 
 
     public WaterlevelDifferencesParser() {
-        differences = new ArrayList<ImportWaterlevelDifference>();
+        differences = new ArrayList<ImportWst>();
     }
 
 
-    public List<ImportWaterlevelDifference> getDifferences() {
+    /** Get the differences parsed so far, as ImportWst objects. */
+    public List<ImportWst> getDifferences() {
         return differences;
     }
 
 
+    /**
+     * Parse a waterlevel-differences CSV file and create an ImportWst
+     * object from it.
+     */
     @Override
     public void parse(File file) throws IOException {
-        current = new ImportWaterlevelDifference(file.getName());
+        current = new ImportWst(file.getName());
+        current.setKind(7);
 
         super.parse(file);
     }
 
 
+    /** No rewind implemented. */
     @Override
     protected void reset() {
     }
@@ -62,8 +74,10 @@
     @Override
     protected void finish() {
         if (columns != null && current != null) {
-            for (ImportWaterlevelDifferenceColumn col: columns) {
-                current.addValue(col);
+            // TODO figure out if it's needed, as the columns
+            //      are registered at their construction time.
+            for (ImportWstColumn col: columns) {
+                // TODO place a current.addColumn(col); here?
             }
 
             differences.add(current);
@@ -73,6 +87,7 @@
         columns = null;
     }
 
+
     @Override
     protected void handleLine(int lineNum, String line) {
         if (line.startsWith(START_META_CHAR)) {
@@ -130,13 +145,15 @@
 
 
     private void initColumns(String[] cols) {
-        columns = new ImportWaterlevelDifferenceColumn[cols.length];
+        current.setNumberColumns(cols.length);
+        columns = current.getColumns().toArray(new ImportWstColumn[cols.length]);
 
         for (int i = 0; i < cols.length; i++) {
             String name = cols[i].replace("\"", "");
 
             log.debug("Create new column '" + name + "'");
-            columns[i] = new ImportWaterlevelDifferenceColumn(name);
+            current.getColumn(i).setName(name);
+            current.getColumn(i).setDescription(name);
         }
     }
 
@@ -145,7 +162,7 @@
         String[] cols = line.split(SEPERATOR_CHAR);
 
         if (cols == null || cols.length < 2) {
-            log.warn("skip invalid waterlevel line: '" + line + "'");
+            log.warn("skip invalid waterlevel-diff line: '" + line + "'");
             return;
         }
 
@@ -163,10 +180,9 @@
                 String value = cols[idx];
 
                 try {
-                    columns[i].addValue(new ImportWaterlevelDifferenceValue(
-                        station,
-                        nf.parse(value).doubleValue()
-                    ));
+                    columns[i].addColumnValue(
+                        new BigDecimal(station),
+                        new BigDecimal(nf.parse(value).doubleValue()));
                 }
                 catch (ParseException pe) {
                     log.warn("Error while parsing value: '" + value + "'");
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Wed Mar 13 19:07:52 2013 +0100
@@ -13,13 +13,11 @@
 import org.apache.log4j.Logger;
 
 import de.intevation.flys.importer.ImportUnit;
-import de.intevation.flys.importer.ImportWaterlevel;
-import de.intevation.flys.importer.ImportWaterlevelQRange;
-import de.intevation.flys.importer.ImportWaterlevelValue;
 
-import de.intevation.flys.importer.ImportWstColumn;
 import de.intevation.flys.importer.ImportRange;
 import de.intevation.flys.importer.ImportWst;
+import de.intevation.flys.importer.ImportWstColumn;
+import de.intevation.flys.importer.ImportWstColumnValue;
 import de.intevation.flys.importer.ImportWstQRange;
 
 
@@ -43,71 +41,28 @@
     public static final Pattern META_UNIT =
         Pattern.compile("^Einheit: \\[(.*)\\].*");
 
-    private List<ImportWaterlevel> waterlevels;
+    private List<ImportWst> waterlevels;
 
-    private ImportWaterlevel current;
+    private ImportWst current;
 
-    private ImportWaterlevelQRange currentQ;
+    /** The waterlevel wsts always have exactly one column. */
+    private ImportWstColumn column;
+
+    /** The current (incomplete) Q Range. */
+    private ImportWstQRange currentQRange;
+
+    /** The current (incomplete) km range for Q Range. */
+    private ImportRange currentRange;
 
     private String currentDescription;
 
 
     public WaterlevelParser() {
-        waterlevels = new ArrayList<ImportWaterlevel>();
+        waterlevels = new ArrayList<ImportWst>();
     }
 
 
-    /**
-     * Create ImportWst objects from ImportWaterlevel
-     * objects.
-     */
-    public List<ImportWst> exportWsts() {
-        List<ImportWst> wsts = new ArrayList<ImportWst>();
-        for(ImportWaterlevel waterlevel: getWaterlevels()) {
-            String description = waterlevel.getDescription();
-            ImportWst wst = new ImportWst();
-            wsts.add(wst);
-            wst.setDescription(description);
-            // Fixation kind.
-            wst.setKind(2);
-            wst.setUnit(waterlevel.getUnit());
-
-            // Fake WST has but 1 column.
-            wst.setNumberColumns(1);
-            ImportWstColumn column = wst.getColumn(0);
-            column.setDescription(description);
-            column.setName(description);
-            column.setPosition(0);
-
-            // Build Q Range.
-            List<ImportWaterlevelQRange> qRanges = waterlevel.getQRanges();
-            for(ImportWaterlevelQRange range: qRanges) {
-                List<ImportWaterlevelValue> values = range.getValues();
-                if (values.size() < 2) {
-                    log.warn ("Not enough values to build valid QRange");
-                    continue;
-                }
-                ImportRange iRange = new ImportRange(
-                   BigDecimal.valueOf(values.get(0).getStation()),
-                   BigDecimal.valueOf(values.get(values.size() -1).getStation()));
-                column.addColumnQRange(
-                     new ImportWstQRange(iRange, BigDecimal.valueOf(range.getQ())));
-            }
-
-            // The other W/KM values.
-            for(ImportWaterlevelQRange range: qRanges) {
-                for(ImportWaterlevelValue value: range.getValues()) {
-                    column.addColumnValue(BigDecimal.valueOf(value.getStation()),
-                                          BigDecimal.valueOf(value.getW()));
-                }
-            }
-            // TODO Maybe set a timeinterval.
-        }
-        return wsts;
-    }
-
-
-    public List<ImportWaterlevel> getWaterlevels() {
+    public List<ImportWst> getWaterlevels() {
         return waterlevels;
     }
 
@@ -122,16 +77,25 @@
 
     @Override
     protected void reset() {
-        currentQ = null;
-        current  = new ImportWaterlevel(currentDescription);
+        currentQRange = null;
+        current       = new ImportWst(currentDescription);
+        current.setNumberColumns(1);
+        column        = current.getColumn(0);
+        column.setName(currentDescription);
+        column.setDescription(currentDescription);
+        current.setKind(6);
     }
 
 
     @Override
     protected void finish() {
         if (current != null) {
-            if (currentQ != null) {
-                current.addValue(currentQ);
+            if (currentQRange != null) {
+                List<ImportWstColumnValue> cValues = column.getColumnValues();
+                // Set end of range to last station.
+                currentRange.setB(cValues.get(cValues.size() -1).getPosition());
+                currentQRange.setRange(currentRange);
+                column.addColumnQRange(currentQRange);
             }
 
             waterlevels.add(current);
@@ -172,23 +136,21 @@
         if (m.matches()) {
             String unitStr  = m.group(1);
             String valueStr = m.group(2);
-
-            if (currentQ != null) {
-                if (current != null) {
-                    current.addValue(currentQ);
+            try {
+                if (currentQRange != null) {
+                    // Finish off the last one.
+                    List<ImportWstColumnValue> cValues = column.getColumnValues();
+                    // Set end of range to last station.
+                    currentRange.setB(cValues.get(cValues.size() -1).getPosition());
+                    currentQRange.setRange(currentRange);
+                    column.addColumnQRange(currentQRange);
                 }
-                else {
-                    // this should never happen
-                    log.warn("Try to add Q range without waterlevel!");
-                }
-            }
+                currentQRange = new ImportWstQRange(null,
+                    new BigDecimal(nf.parse(valueStr).doubleValue()));
+                currentRange = new ImportRange();
 
-            try {
                 log.debug("Found new Q range: Q=" + valueStr);
 
-                currentQ = new ImportWaterlevelQRange(
-                    nf.parse(valueStr).doubleValue());
-
                 return true;
             }
             catch (ParseException pe) {
@@ -209,10 +171,17 @@
         }
 
         try {
+            // Store the value and remember the position for QRange, if needed.
             Double station = nf.parse(cols[0]).doubleValue();
             Double value   = nf.parse(cols[1]).doubleValue();
 
-            currentQ.addValue(new ImportWaterlevelValue(station, value));
+            BigDecimal stationBD = new BigDecimal(station);
+
+            column.addColumnValue(stationBD, new BigDecimal(value));
+
+            if (currentRange.getA() == null) {
+                currentRange.setA(stationBD);
+            }
         }
         catch (ParseException pe) {
             log.warn("Error while parsing number values: '" + line + "'");
--- a/flys-backend/src/main/java/de/intevation/flys/model/BedHeightType.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightType.java	Wed Mar 13 19:07:52 2013 +0100
@@ -2,6 +2,8 @@
 
 import java.io.Serializable;
 
+import java.util.List;
+
 import javax.persistence.Entity;
 import javax.persistence.Id;
 import javax.persistence.Table;
@@ -12,6 +14,10 @@
 
 import org.apache.log4j.Logger;
 
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import de.intevation.flys.backend.SessionHolder;
 
 @Entity
 @Table(name = "bed_height_type")
@@ -22,15 +28,13 @@
 
     private Integer id;
     private String  name;
-    private String  description;
 
 
     public BedHeightType() {
     }
 
-    public BedHeightType(String name, String description) {
-        this.name        = name;
-        this.description = description;
+    public BedHeightType(String name) {
+        this.name = name;
     }
 
     @Id
@@ -59,36 +63,24 @@
         this.name = name;
     }
 
-    @Column(name = "description")
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
+    public static BedHeightType fetchBedHeightTypeForType(String type) {
+        return fetchBedHeightTypeForType(type, null);
     }
 
+    public static BedHeightType fetchBedHeightTypeForType(String name, Session session) {
 
-    public static String getBedHeightName(String description) {
-        if (description.equals("Flächenpeilung")) {
-            return "FP";
-        }
-        else if ("Querprofile".equals(description)) {
-            return "QP";
-        }
-        else if ("Querprofil".equals(description)) {
-            return "QP";
-        }
-        else if ("TIN".equals(description)) {
-            return "TIN";
-        }
-        else if ("Flächen- u. Querprofilpeilungen".equals(description)) {
-            return "FP-QP";
-        }
-        else {
-            log.warn("Unknown bed height type: " + description);
-            return null;
-        }
+	if (session == null) {
+	    session = SessionHolder.HOLDER.get();
+	}
+
+        Query query = session.createQuery(
+            "from BedHeightType where name=:name");
+
+        query.setParameter("name", name);
+
+        List<Object> results = query.list();
+
+        return results.isEmpty() ? null : (BedHeightType)results.get(0);
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
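
Illustrative usage (not part of the changeset): a lookup by name through the new query-based method, mirroring what BedHeightParser does above; "FP" is just an example name:

    import org.hibernate.Session;

    import de.intevation.flys.importer.ImporterSession;
    import de.intevation.flys.model.BedHeightType;

    // Sketch only: resolve a bed height type by name within the importer session.
    public class BedHeightTypeLookupSketch {
        public static void main(String[] args) {
            Session session = ImporterSession.getInstance().getDatabaseSession();
            BedHeightType type =
                BedHeightType.fetchBedHeightTypeForType("FP", session);
            System.out.println(type == null
                ? "No bed height type named 'FP'."
                : "Found type: " + type.getName());
        }
    }
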
--- a/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java	Wed Mar 13 19:07:52 2013 +0100
@@ -152,6 +152,7 @@
 
         Query query = session.createQuery(
             "from CrossSectionTrack where river.name =:river " +
+            "and kind_id = 1 " +
             "order by abs( km - :mykm)");
         query.setParameter("river", river);
         query.setParameter("mykm", new BigDecimal(km));
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrackKind.java	Wed Mar 13 19:07:52 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "cross_section_track_kinds")
+public class CrossSectionTrackKind implements Serializable {
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return The name of the Cross section kind as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+
--- a/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Wed Mar 13 19:07:52 2013 +0100
@@ -24,15 +24,15 @@
 @Table(name = "dem")
 public class DGM implements Serializable {
 
-    private Integer    id;
-    private Integer    srid;
+    private Integer      id;
+    private Integer      srid;
 
-    private River      river;
+    private River        river;
 
-    private BigDecimal lower;
-    private BigDecimal upper;
+    private Range        range;
+    private TimeInterval time_interval;
 
-    private String     path;
+    private String       path;
 
 
     public DGM() {
@@ -66,24 +66,6 @@
         return river;
     }
 
-    public void setLower(BigDecimal lower) {
-        this.lower = lower;
-    }
-
-    @Column(name = "lower")
-    public BigDecimal getLower() {
-        return lower;
-    }
-
-    public void setUpper(BigDecimal upper) {
-        this.upper = upper;
-    }
-
-    @Column(name = "upper")
-    public BigDecimal getUpper() {
-        return upper;
-    }
-
     public void setPath(String path) {
         this.path = path;
     }
@@ -120,8 +102,8 @@
 
         Query query = session.createQuery(
             "from DGM where river.name =:river and " +
-            "lower <=:lower and upper >=:lower and " +
-            "lower <=:upper and upper >=:upper");
+            "range.a <=:lower and range.b >=:lower and " +
+            "range.a <=:upper and range.b >=:upper");
         query.setParameter("river", river);
         query.setParameter("lower", new BigDecimal(lower));
         query.setParameter("upper", new BigDecimal(upper));
@@ -130,5 +112,27 @@
 
         return result.isEmpty() ? null : result.get(0);
     }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return time_interval;
+    }
+
+    public void setTimeInterval(TimeInterval time_interval) {
+        this.time_interval = time_interval;
+    }
+
+
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java	Wed Mar 13 19:07:52 2013 +0100
@@ -24,11 +24,13 @@
 public class Floodplain
 implements   Serializable
 {
-    private Integer id;
+    private Integer        id;
 
-    private River   river;
+    private FloodplainKind kind;
 
-    private Polygon geom;
+    private River          river;
+
+    private Polygon        geom;
 
 
     public Floodplain() {
@@ -55,6 +57,16 @@
         this.river = river;
     }
 
+    @OneToOne
+    @JoinColumn(name = "kind_id")
+    public FloodplainKind getKind() {
+        return kind;
+    }
+
+    public void setKind(FloodplainKind kind) {
+        this.kind = kind;
+    }
+
     @Column(name = "geom")
     @Type(type = "org.hibernatespatial.GeometryUserType")
     public Polygon getGeom() {
@@ -69,8 +81,10 @@
     public static Floodplain getFloodplain(String river) {
         Session session = SessionHolder.HOLDER.get();
 
+        // kind_id 0 -> Official
+        // kind_id 1 -> Misc.
         Query query = session.createQuery(
-            "from Floodplain where river.name =:river");
+            "from Floodplain where river.name =:river and kind_id=1");
         query.setParameter("river", river);
 
         List<Floodplain> result = query.list();
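
Illustration (not part of the changeset): with the added kind filter, the lookup only returns the kind-1 ("Misc.") floodplain; the river name is an example:

    import de.intevation.flys.model.Floodplain;

    // Sketch only: requires a session bound via SessionHolder, as elsewhere
    // in the backend; "Mosel" is an example river name.
    public class FloodplainLookupSketch {
        public static void main(String[] args) {
            Floodplain floodplain = Floodplain.getFloodplain("Mosel");
            System.out.println(floodplain == null
                ? "No kind-1 floodplain found."
                : "Floodplain kind: " + floodplain.getKind().getName());
        }
    }
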
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FloodplainKind.java	Wed Mar 13 19:07:52 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "floodplain_kinds")
+public class FloodplainKind implements Serializable {
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return The name of the Floodplain Kind as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+
--- a/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java	Wed Mar 13 19:07:52 2013 +0100
@@ -33,7 +33,7 @@
     private HWSKind  kind;
     private FedState fedState;
     private River    river;
-    private Integer  offical;
+    private Integer  official;
     private Integer  shoreSide;
     private String   name;
     private String   path;
@@ -93,22 +93,22 @@
 
 
     /**
-     * Get offical.
+     * Get official.
      *
-     * @return offical as Integer.
+     * @return official as Integer.
      */
-    @Column(name = "offical")
-    public Integer getOffical() {
-        return offical;
+    @Column(name = "official")
+    public Integer getOfficial() {
+        return official;
     }
 
     /**
-     * Set offical.
+     * Set official.
      *
-     * @param offical the value to set.
+     * @param official the value to set.
      */
-    public void setOffical(Integer offical) {
-        this.offical = offical;
+    public void setOfficial(Integer official) {
+        this.official = official;
     }
 
     /**
--- a/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java	Wed Mar 13 19:07:52 2013 +0100
@@ -28,7 +28,7 @@
     private HWSKind    kind;
     private FedState   fedState;
     private River      river;
-    private Integer    offical;
+    private Integer    official;
     private Integer    shoreSide;
     private String     name;
     private String     path;
@@ -94,22 +94,22 @@
 
 
     /**
-     * Get offical.
+     * Get official.
      *
-     * @return offical as Integer.
+     * @return official as Integer.
      */
-    @Column(name = "offical")
-    public Integer getOffical() {
-        return offical;
+    @Column(name = "official")
+    public Integer getOfficial() {
+        return official;
     }
 
     /**
-     * Set offical.
+     * Set official.
      *
-     * @param offical the value to set.
+     * @param official the value to set.
      */
-    public void setOffical(Integer offical) {
-        this.offical = offical;
+    public void setOfficial(Integer official) {
+        this.official = official;
     }
 
     /**
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Wed Mar 13 19:07:52 2013 +0100
@@ -17,7 +17,7 @@
 import org.hibernate.Query;
 import org.hibernate.annotations.Type;
 
-import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.MultiLineString;
 
 import de.intevation.flys.backend.SessionHolder;
 
@@ -32,7 +32,7 @@
     private SobekKind  sobek;
     private String     name;
     private River      river;
-    private LineString geom;
+    private MultiLineString geom;
     private BoundaryKind kind;
 
     public HydrBoundary() {
@@ -82,12 +82,12 @@
 
     @Column(name = "geom")
     @Type(type = "org.hibernatespatial.GeometryUserType")
-    public LineString getGeom() {
+    public MultiLineString getGeom() {
         return geom;
     }
 
 
-    public void setGeom(LineString geom) {
+    public void setGeom(MultiLineString geom) {
         this.geom = geom;
     }
 
--- a/flys-backend/src/main/java/de/intevation/flys/model/MeasurementStation.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MeasurementStation.java	Wed Mar 13 19:07:52 2013 +0100
@@ -21,7 +21,7 @@
     private String measurementType;
     private String riverside;
     private String operator;
-    private String comment;
+    private String description;
 
     private Double station;
     private Range range;
@@ -37,7 +37,7 @@
 
     public MeasurementStation(River river, String name, String measurementType,
         String riverside, Double station, Range range, Gauge gauge,
-        TimeInterval observationTimerange, String operator, String comment) {
+        TimeInterval observationTimerange, String operator, String description) {
         this.river = river;
         this.name = name;
         this.measurementType = measurementType;
@@ -47,7 +47,7 @@
         this.gauge = gauge;
         this.observationTimerange = observationTimerange;
         this.operator = operator;
-        this.comment = comment;
+        this.description = description;
     }
 
     @Id
@@ -147,13 +147,13 @@
         this.operator = operator;
     }
 
-    @Column(name = "comment")
-    public String getComment() {
-        return comment;
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
     }
 
-    public void setComment(String comment) {
-        this.comment = comment;
+    public void setDescription(String description) {
+        this.description = description;
     }
 
 }
--- a/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java	Wed Mar 13 19:07:52 2013 +0100
@@ -107,20 +107,15 @@
     }
 
     public static List<RiverAxis> getRiverAxis(String river, int kind)
-    throws IllegalArgumentException {
+    throws HibernateException {
         Session session = SessionHolder.HOLDER.get();
         Query query = session.createQuery(
             "from RiverAxis where river.name =:river AND kind.id =:kind");
         query.setParameter("river", river);
         query.setParameter("kind", kind);
 
-        try {
-            List<RiverAxis> list = query.list();
-            return list.isEmpty() ? null : list;
-        }
-        catch (RuntimeException re) {
-            throw re.getCause();
-        }
+        List<RiverAxis> list = query.list();
+        return list.isEmpty() ? null : list;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
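With the try/catch removed, getRiverAxis() lets a HibernateException from query.list() propagate to the caller instead of rethrowing the unwrapped cause. A hedged caller sketch (logger, river name and kind are illustrative; assumes org.hibernate.HibernateException is visible to the caller):

    // Illustrative only: callers now handle the propagated HibernateException.
    try {
        List<RiverAxis> axes = RiverAxis.getRiverAxis("Mosel", 1);
        if (axes == null) {
            logger.warn("No river axis found.");
        }
    }
    catch (HibernateException he) {
        logger.error("Fetching river axis failed: " + he.getMessage());
    }
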
--- a/flys-backend/src/main/java/de/intevation/flys/model/Waterlevel.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,113 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.List;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.OneToMany;
-
-import org.apache.log4j.Logger;
-
-
-
-/** Mapped Waterlevel. */
-@Entity
-@Table(name = "waterlevel")
-public class Waterlevel
-implements   Serializable
-{
-    private static Logger logger = Logger.getLogger(Waterlevel.class);
-
-    private Integer id;
-
-    private River river;
-
-    private Unit unit;
-
-    private String  description;
-
-    private List<WaterlevelQRange> qRanges;
-
-
-    public Waterlevel() {
-    }
-
-    public Waterlevel(River river, Unit unit) {
-        this.river = river;
-        this.unit  = unit;
-    }
-
-    public Waterlevel(River river, Unit unit, String description) {
-        this(river, unit);
-        this.description = description;
-    }
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_ID_SEQ",
-        sequenceName   = "WATERLEVEL_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "river_id" )
-    public River getRiver() {
-        return river;
-    }
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "unit_id")
-    public Unit getUnit() {
-        return unit;
-    }
-
-    public void setUnit(Unit unit) {
-        this.unit = unit;
-    }
-
-    @Column(name = "description")
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    @OneToMany
-    @JoinColumn(name="waterlevel_id")
-    public List<WaterlevelQRange> getQRanges() {
-        return qRanges;
-    }
-
-    public void setQRanges(List<WaterlevelQRange> qRanges) {
-        this.qRanges = qRanges;
-    }
-
-    public void addQRange(WaterlevelQRange qRange) {
-        qRanges.add(qRange);
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifference.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,119 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.OneToMany;
-
-import org.apache.log4j.Logger;
-
-
-@Entity
-@Table(name = "waterlevel_difference")
-public class WaterlevelDifference
-implements   Serializable
-{
-    private static Logger logger = Logger.getLogger(WaterlevelDifference.class);
-
-    private Integer id;
-
-    private River river;
-
-    private Unit unit;
-
-    private List<WaterlevelDifferenceColumn> columns;
-
-    private String description;
-
-
-    public WaterlevelDifference() {
-        columns = new ArrayList<WaterlevelDifferenceColumn>();
-    }
-
-
-    public WaterlevelDifference(River river, Unit unit) {
-        this();
-
-        this.river = river;
-        this.unit  = unit;
-    }
-
-
-    public WaterlevelDifference(River river, Unit unit, String description) {
-        this(river, unit);
-
-        this.description = description;
-    }
-
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ",
-        sequenceName   = "WATERLEVEL_DIFFERENCE_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "river_id" )
-    public River getRiver() {
-        return river;
-    }
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "unit_id")
-    public Unit getUnit() {
-        return unit;
-    }
-
-    public void setUnit(Unit unit) {
-        this.unit = unit;
-    }
-
-    @Column(name = "description")
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    @OneToMany
-    @JoinColumn(name = "difference_id")
-    public List<WaterlevelDifferenceColumn> getColumns() {
-        return columns;
-    }
-
-    public void setColumns(List<WaterlevelDifferenceColumn> columns) {
-        this.columns = columns;
-    }
-
-    public void addColumn(WaterlevelDifferenceColumn column) {
-        this.columns.add(column);
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceColumn.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.OneToMany;
-
-import org.apache.log4j.Logger;
-
-
-@Entity
-@Table(name = "waterlevel_difference_column")
-public class WaterlevelDifferenceColumn
-implements   Serializable
-{
-    private static Logger logger =
-        Logger.getLogger(WaterlevelDifferenceColumn.class);
-
-
-    private Integer id;
-
-    private WaterlevelDifference difference;
-
-    private List<WaterlevelDifferenceValue> values;
-
-    private String description;
-
-
-    public WaterlevelDifferenceColumn() {
-        values = new ArrayList<WaterlevelDifferenceValue>();
-    }
-
-    public WaterlevelDifferenceColumn(
-        WaterlevelDifference difference,
-        String               description
-    ) {
-        this();
-
-        this.difference = difference;
-        this.description = description;
-    }
-
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ",
-        sequenceName   = "WATERLEVEL_DIFF_COLUMN_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "difference_id" )
-    public WaterlevelDifference getDifference() {
-        return difference;
-    }
-
-    public void setDifference(WaterlevelDifference difference) {
-        this.difference = difference;
-    }
-
-    @Column(name = "description")
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    @OneToMany
-    @JoinColumn(name = "column_id")
-    public List<WaterlevelDifferenceValue> getValues() {
-        return values;
-    }
-
-    public void setValues(List<WaterlevelDifferenceValue> values) {
-        this.values = values;
-    }
-
-    public void addValue(WaterlevelDifferenceValue value) {
-        this.values.add(value);
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-
-import org.apache.log4j.Logger;
-
-
-@Entity
-@Table(name = "waterlevel_difference_values")
-public class WaterlevelDifferenceValue
-implements   Serializable
-{
-    private static Logger logger =
-        Logger.getLogger(WaterlevelDifferenceValue.class);
-
-
-    private Integer id;
-
-    private WaterlevelDifferenceColumn column;
-
-    private Double station;
-    private Double value;
-
-
-    public WaterlevelDifferenceValue() {
-    }
-
-    public WaterlevelDifferenceValue(
-        WaterlevelDifferenceColumn column,
-        Double                     station,
-        Double                     value
-    ) {
-        this.column  = column;
-        this.station = station;
-        this.value   = value;
-    }
-
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ",
-        sequenceName   = "WATERLEVEL_DIFF_VALUES_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "column_id" )
-    public WaterlevelDifferenceColumn getColumn() {
-        return column;
-    }
-
-    public void setColumn(WaterlevelDifferenceColumn column) {
-        this.column = column;
-    }
-
-    @Column(name = "station")
-    public Double getStation() {
-        return station;
-    }
-
-    public void setStation(Double station) {
-        this.station = station;
-    }
-
-    @Column(name = "value")
-    public Double getValue() {
-        return value;
-    }
-
-    public void setValue(Double value) {
-        this.value = value;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelQRange.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,100 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.OneToMany;
-
-import org.apache.log4j.Logger;
-
-
-
-
-@Entity
-@Table(name = "waterlevel_q_range")
-public class WaterlevelQRange
-implements   Serializable
-{
-    private static Logger logger = Logger.getLogger(WaterlevelQRange.class);
-
-    private Integer id;
-
-    private Waterlevel waterlevel;
-
-    private Double q;
-
-    private List<WaterlevelValue> values;
-
-
-    public WaterlevelQRange() {
-        this.values = new ArrayList<WaterlevelValue>();
-    }
-
-    public WaterlevelQRange(Waterlevel waterlevel, Double q) {
-        this();
-        this.q          = q;
-        this.waterlevel = waterlevel;
-    }
-
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ",
-        sequenceName   = "WATERLEVEL_Q_RANGES_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "waterlevel_id" )
-    public Waterlevel getWaterlevel() {
-        return waterlevel;
-    }
-
-    public void setWaterlevel(Waterlevel waterlevel) {
-        this.waterlevel = waterlevel;
-    }
-
-    @Column(name = "q")
-    public Double getQ() {
-        return q;
-    }
-
-    public void setQ(Double q) {
-        this.q = q;
-    }
-
-    @OneToMany
-    @Column(name = "waterlevel_q_range_id")
-    public List<WaterlevelValue> getValues() {
-        return values;
-    }
-
-    public void setValues(List<WaterlevelValue> values) {
-        this.values = values;
-    }
-
-    public void addValue(WaterlevelValue value) {
-        values.add(value);
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelValue.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,90 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Column;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-
-import org.apache.log4j.Logger;
-
-
-
-
-@Entity
-@Table(name = "waterlevel_values")
-public class WaterlevelValue
-implements   Serializable
-{
-    private static Logger logger = Logger.getLogger(WaterlevelValue.class);
-
-    private Integer id;
-
-    private WaterlevelQRange qrange;
-
-    private Double station;
-    private Double w;
-
-
-    public WaterlevelValue() {
-    }
-
-    public WaterlevelValue(WaterlevelQRange qrange, Double station, Double w) {
-        this.qrange  = qrange;
-        this.station = station;
-        this.w       = w;
-    }
-
-
-    @Id
-    @SequenceGenerator(
-        name           = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ",
-        sequenceName   = "WATERLEVEL_VALUES_ID_SEQ",
-        allocationSize = 1)
-    @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ")
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-    @OneToOne
-    @JoinColumn(name = "waterlevel_q_range_id" )
-    public WaterlevelQRange getQrange() {
-        return qrange;
-    }
-
-    public void setQrange(WaterlevelQRange qrange) {
-        this.qrange = qrange;
-    }
-
-    @Column(name = "station")
-    public Double getStation() {
-        return station;
-    }
-
-    public void setStation(Double station) {
-        this.station = station;
-    }
-
-    @Column(name = "w")
-    public Double getW() {
-        return w;
-    }
-
-    public void setW(Double w) {
-        this.w = w;
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java	Wed Mar 13 19:07:52 2013 +0100
@@ -21,6 +21,8 @@
 import java.util.Iterator;
 import java.util.Properties;
 import java.util.Map;
+import java.util.Collections;
+import java.util.StringTokenizer;
 
 import org.apache.commons.dbcp.BasicDataSource;
 import org.apache.commons.dbcp.BasicDataSourceFactory;
@@ -194,6 +196,13 @@
             ds = (BasicDataSource)BasicDataSourceFactory
                 .createDataSource(dbcpProperties);
 
+            // This needs to be done manually as it is somehow ignored
+            // by the BasicDataSourceFactory if you set it as a dbcpProperty
+            String connectionInitSqls = props.getProperty("connectionInitSqls");
+            if (connectionInitSqls != null) {
+                StringTokenizer tokenizer = new StringTokenizer(connectionInitSqls, ";");
+                ds.setConnectionInitSqls(Collections.list(tokenizer));
+            }
             // The BasicDataSource has lazy initialization
             // borrowing a connection will start the DataSource
             // and make sure it is configured correctly.
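The block added above works around BasicDataSourceFactory by splitting the semicolon-separated connectionInitSqls property and setting the statements on the BasicDataSource directly. A minimal sketch of that splitting; the property value is purely hypothetical (assumes java.util.ArrayList in addition to the imports added above):

    // Illustrative only: split a semicolon-separated init-SQL property.
    String value = "SET search_path TO flys;SET statement_timeout = 0";
    StringTokenizer tokenizer = new StringTokenizer(value, ";");
    ArrayList<Object> statements = Collections.list(tokenizer); // one entry per statement
    // statements is what ds.setConnectionInitSqls(...) receives above.
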
--- a/flys-backend/src/main/java/de/intevation/flys/utils/DgmSqlConverter.java	Thu Mar 07 09:46:11 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,483 +0,0 @@
-package de.intevation.flys.utils;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.List;
-
-import au.com.bytecode.opencsv.CSVReader;
-
-
-/**
- * A converter for CSV files with DGM information. The result of a conversion
- * is an SQL file with "INSERT INTO dem ..." statements.
- * <br>
- * To start the converter, at least the following three system properties are
- * required:
- * <br>
- * <ul>
- * <ol><b>gew.dir</b>: This property must point to the directory where all
- * rivers are stored.</ol>
- * <ol><b>csv</b>: This property must point to the CSV file that contains the
- * DGM information.</ol>
- * <ol><b>sql</b>: This property must point to a (not yet existing) file that
- * will be generated by this converter.</ol>
- * </ul>
- * <br>
- * In addiation, the following properties are accepted to modify log messages,
- * etc.
- * <ul>
- * <ol><b>verbose</b>: Accepts integer values (0, 1, 2, 3) to modify the log
- * messages. The higher the value the more log messages are printed to STDOUT.
- * </ol>
- * <ol><b>full</b>: Accepts true|false values. If true is set, all rivers
- * included in the CSV file are taken into account while parsing. Otherwise,
- * the converter reads information for 'Saar', 'Mosel' and 'Eble' only.</ol>
- * </ul>
- *
- * @author Ingo Weinzierl <a href="mailto:ingo.weinzierl@intevation.de">
- * ingo.weinzierl@intevation.de</a>
- *
- */
-public class DgmSqlConverter {
-
-    public static final String SQL_INSERT = "INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,"
-        + "projection, elevation_state, format, border_break, resolution, description, path) VALUES ("
-        + "%s, '%s', %s, %s, %s, %s, '%s', '%s', '%s', %s, '%s', '%s', '%s');";
-
-    public static final String SQL_SELECT_RIVER = "(SELECT id from rivers WHERE name = '%s')";
-
-    public static final char DEFAULT_SEPERATOR = ',';
-    public static final char DEFAULT_QUOTE = '"';
-    public static final int DEFAULT_LOG_LEVEL = 2;
-
-    public static final boolean FULL_MODE = Boolean.getBoolean("full");
-    public static final String GEW_DIR = System.getProperty("gew.dir", null);
-    public static final String CSV_FILE = System.getProperty("csv");
-    public static final String SQL_FILE = System.getProperty("sql");
-    public static final int LOG_LEVEL = Integer.getInteger("verbose",
-        DEFAULT_LOG_LEVEL);
-
-    public static final int MIN_COLUMN_COUNT = 15;
-
-    public static final int IDX_RIVERNAME = 0;
-    public static final int IDX_NAME = 12;
-    public static final int IDX_LOWER = 1;
-    public static final int IDX_UPPER = 2;
-    public static final int IDX_YEAR_FROM = 3;
-    public static final int IDX_YEAR_TO = 4;
-    public static final int IDX_PROJECTION = 7;
-    public static final int IDX_ELEVATION_STATE = 8;
-    public static final int IDX_FORMAT = 9;
-    public static final int IDX_BORDER_BREAK = 10;
-    public static final int IDX_RESOLUTION = 11;
-    public static final int IDX_DESCRIPTION = 14;
-    public static final int IDX_FILE_NAME = 5;
-    public static final int IDX_FILE_PATH = 6;
-
-    private class DGM {
-
-        public String river;
-        public String name;
-        public String projection;
-        public String elevationState;
-        public String format;
-        public String resolution;
-        public String description;
-        public String path;
-
-        public double lower;
-        public double upper;
-        public Integer yearFrom;
-        public Integer yearTo;
-
-        public boolean borderBreak;
-
-        public DGM() {
-            borderBreak = false;
-        }
-
-        public String toSQL() {
-            String riverId = String.format(SQL_SELECT_RIVER, river);
-            String lower = String.valueOf(this.lower);
-            String upper = String.valueOf(this.upper);
-            String yearFrom = this.yearFrom != null ? String
-                .valueOf(this.yearFrom) : "";
-            String yearTo = this.yearTo != null ? String.valueOf(this.yearTo)
-                : "";
-
-            return String.format(SQL_INSERT, riverId, name, lower, upper,
-                yearFrom, yearTo, projection, elevationState, format,
-                borderBreak, resolution, description, path);
-        }
-    }
-
-    private File riverDir;
-    private File csv;
-    private File sql;
-
-    private List<DGM> dgms;
-
-    public static void debug(String msg) {
-        if (LOG_LEVEL >= 3) {
-            System.out.println("DEBUG: " + msg);
-        }
-    }
-
-    public static void info(String msg) {
-        if (LOG_LEVEL >= 2) {
-            System.out.println("INFO: " + msg);
-        }
-    }
-
-    public static void warn(String msg) {
-        if (LOG_LEVEL >= 1) {
-            System.out.println("WARN: " + msg);
-        }
-    }
-
-    public static void error(String msg) {
-        System.out.println("ERROR: " + msg);
-    }
-
-    public static File getRiverDir(String[] args) {
-        if (GEW_DIR != null && GEW_DIR.length() > 0) {
-            return new File(GEW_DIR);
-        }
-        else if (args != null && args.length > 0) {
-            return new File(args[0]);
-        }
-
-        return null;
-    }
-
-    public static File getCSVFile(String[] args) {
-        if (CSV_FILE != null && CSV_FILE.length() > 0) {
-            return new File(CSV_FILE);
-        }
-        else if (args != null && args.length > 1) {
-            return new File(args[1]);
-        }
-
-        return null;
-    }
-
-    public static File getSQLFile(String[] args) {
-        if (SQL_FILE != null && SQL_FILE.length() > 0) {
-            return new File(SQL_FILE);
-        }
-        else if (args != null && args.length > 2) {
-            return new File(args[2]);
-        }
-
-        return null;
-    }
-
-    public static void main(String[] args) {
-        info("Start convering CSV -> SQL statements");
-
-        if (!FULL_MODE) {
-            info("You are running in DEMO mode; other rivers than 'Saar', 'Mosel' and 'Elbe' are ignored.");
-        }
-
-        File riverDir = getRiverDir(args);
-
-        if (riverDir == null) {
-            warn("No rivers directory specified!");
-            return;
-        }
-        else if (!riverDir.isDirectory()) {
-            warn("Specified rivers directory is not a directory!");
-            return;
-        }
-        else if (!riverDir.canRead()) {
-            warn("Unable to read '" + riverDir.toString() + "'");
-            return;
-        }
-
-        File csv = getCSVFile(args);
-
-        if (csv == null) {
-            warn("No CSV file specified!");
-            return;
-        }
-        else if (csv.isDirectory()) {
-            warn("Specified CSV file is a directory!");
-            return;
-        }
-        else if (!csv.canRead()) {
-            warn("Unable to read '" + csv.toString() + "'");
-            return;
-        }
-
-        File sql = getSQLFile(args);
-
-        if (sql == null) {
-            warn("No destination file specified!");
-            return;
-        }
-        else if (sql.isDirectory()) {
-            warn("Specified destination file is a directory!");
-            return;
-        }
-        else if (sql.exists() && !sql.canWrite()) {
-            warn("Unable to write to '" + sql.toString() + "'");
-            return;
-        }
-        else if (!sql.exists()) {
-            try {
-                sql.createNewFile();
-            }
-            catch (IOException ioe) {
-                warn("Unable to write to '" + sql.toString() + "'");
-                return;
-            }
-        }
-
-        info("Start parsing CSV file '" + csv.toString() + "'");
-
-        try {
-            DgmSqlConverter parser = new DgmSqlConverter(riverDir, csv, sql);
-            parser.read();
-            parser.write();
-        }
-        catch (Exception e) {
-            error("Unexpected error: " + e.getMessage());
-            e.printStackTrace();
-        }
-
-        info("Finished converting CSV -> SQL regularly.");
-    }
-
-    public DgmSqlConverter(File riverDir, File csv, File sql) {
-        this.riverDir = riverDir;
-        this.csv = csv;
-        this.sql = sql;
-        this.dgms = new ArrayList<DGM>();
-    }
-
-    public void read() {
-        info("Read DGM information from CSV file: " + csv.getAbsolutePath());
-
-        InputStream in = null;
-
-        try {
-            in = new BufferedInputStream(new FileInputStream(csv));
-        }
-        catch (FileNotFoundException e) {
-            error("File not found: " + e.getMessage());
-            return;
-        }
-
-        Reader reader = new InputStreamReader(in);
-        CSVReader csvReader = new CSVReader(reader, DEFAULT_SEPERATOR,
-            DEFAULT_QUOTE);
-
-        List<String[]> rows = new ArrayList<String[]>();
-
-        int success = 0;
-
-        try {
-            rows = csvReader.readAll();
-
-            for (int idx = 0; idx < rows.size(); idx++) {
-                String[] row = rows.get(idx);
-                if (readRow(row)) {
-                    success++;
-                }
-                else {
-                    warn("Unable to parse row " + (idx + 1));
-                }
-            }
-        }
-        catch (IOException e) {
-            error("Error while parsing CSV: " + e.getMessage());
-            return;
-        }
-
-        info("Parsed CSV file: " + rows.size() + " lines.");
-        info("Parsed " + success + " line successful");
-    }
-
-    private boolean readRow(String[] row) {
-        if (row == null) {
-            warn("Row is null!");
-            return false;
-        }
-
-        if (row.length < MIN_COLUMN_COUNT) {
-            warn("invalid column count: " + row.length);
-            return false;
-        }
-
-        StringBuffer rowBuffer = new StringBuffer();
-        for (String col : row) {
-            rowBuffer.append(col);
-            rowBuffer.append(" | ");
-        }
-        debug(rowBuffer.toString());
-
-        try {
-            DGM dgm = new DGM();
-            dgm.river = readRiver(row[IDX_RIVERNAME]);
-            dgm.name = row[IDX_NAME];
-            dgm.projection = row[IDX_PROJECTION];
-            dgm.elevationState = row[IDX_ELEVATION_STATE];
-            dgm.format = row[IDX_FORMAT];
-            dgm.resolution = row[IDX_RESOLUTION];
-            dgm.description = row[IDX_DESCRIPTION];
-            dgm.lower = readLower(row[IDX_LOWER]);
-            dgm.upper = readUpper(row[IDX_UPPER]);
-            dgm.yearFrom = readFromYear(row[IDX_YEAR_FROM]);
-            dgm.yearTo = readToYear(row[IDX_YEAR_TO]);
-            dgm.borderBreak = readBorderBreak(row[IDX_BORDER_BREAK]);
-            dgm.path = readPath(dgm.river, row[IDX_FILE_PATH],
-                row[IDX_FILE_NAME]);
-
-            dgms.add(dgm);
-
-            return true;
-        }
-        catch (IllegalArgumentException iae) {
-            warn(iae.getMessage());
-        }
-
-        return false;
-    }
-
-    private String readRiver(String rivername) throws IllegalArgumentException {
-        if (rivername == null || rivername.length() == 0) {
-            throw new IllegalAccessError("Invalid rivername: " + rivername);
-        }
-
-        if (!FULL_MODE
-            && !(rivername.equals("Saar") || rivername.equals("Mosel") || rivername
-                .equals("Elbe"))) {
-            throw new IllegalArgumentException("In DEMO mode; skip river: "
-                + rivername);
-        }
-
-        return rivername;
-    }
-
-    private Double readLower(String lower) throws IllegalArgumentException {
-        try {
-            return Double.valueOf(lower);
-        }
-        catch (NumberFormatException nfe) {
-        }
-
-        throw new IllegalArgumentException("Attribute 'lower' invalid: "
-            + lower);
-    }
-
-    private Double readUpper(String upper) throws IllegalArgumentException {
-        try {
-            return Double.valueOf(upper);
-        }
-        catch (NumberFormatException nfe) {
-        }
-
-        throw new IllegalArgumentException("Attribute 'upper' invalid: "
-            + upper);
-    }
-
-    private Integer readFromYear(String from) throws IllegalArgumentException {
-        try {
-            return Integer.valueOf(from);
-        }
-        catch (NumberFormatException nfe) {
-        }
-
-        return null;
-    }
-
-    private Integer readToYear(String to) throws IllegalArgumentException {
-        try {
-            return Integer.valueOf(to);
-        }
-        catch (NumberFormatException nfe) {
-        }
-
-        return null;
-    }
-
-    private String readPath(String rivername, String dir, String filename)
-        throws IllegalArgumentException {
-        File riverDir = new File(this.riverDir, rivername);
-        File dgmDir = new File(riverDir, dir);
-        File dgmFile = new File(dgmDir, filename);
-
-        try {
-            debug("Path of DGM = " + dgmFile.getAbsolutePath());
-
-            if (dgmFile == null || !dgmFile.exists()) {
-                throw new IllegalAccessError(
-                    "Specified DGM file does not exist: "
-                        + dgmFile.getAbsolutePath());
-            }
-
-            if (!dgmFile.isFile()) {
-                throw new IllegalArgumentException(
-                    "Specified DGM file is no file: "
-                        + dgmFile.getAbsolutePath());
-            }
-        }
-        catch (IllegalAccessError iae) {
-            throw new IllegalArgumentException("Cannot find DGM file (river="
-                + rivername + " | directory=" + dir + " | filename=" + filename
-                + ")");
-        }
-
-        return dgmFile.getAbsolutePath();
-    }
-
-    private boolean readBorderBreak(String borderBreak) {
-        if (borderBreak == null || borderBreak.length() == 0) {
-            return true;
-        }
-        else if (borderBreak.toLowerCase().equals("ja")) {
-            return true;
-        }
-        else if (borderBreak.toLowerCase().equals("nein")) {
-            return false;
-        }
-        else {
-            return true;
-        }
-    }
-
-    public void write() {
-        info("Write DEM information to SQL file: " + sql.getAbsolutePath());
-
-        BufferedWriter bufferedWriter = null;
-        try {
-            bufferedWriter = new BufferedWriter(new FileWriter(sql));
-
-            for (DGM dgm : dgms) {
-                bufferedWriter.write(dgm.toSQL());
-                bufferedWriter.newLine();
-            }
-        }
-        catch (IOException ioe) {
-            error(ioe.getMessage());
-        }
-        finally {
-            if (bufferedWriter != null) {
-                try {
-                    bufferedWriter.close();
-                }
-                catch (IOException ioe) {
-                }
-            }
-        }
-    }
-}
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/AbstractUIProvider.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/AbstractUIProvider.java	Wed Mar 13 19:07:52 2013 +0100
@@ -41,25 +41,25 @@
 {
     private static final long serialVersionUID = -1610874613377494184L;
 
-    /** The message class that provides i18n strings.*/
+    /** The message class that provides i18n strings. */
     protected FLYSConstants MSG = GWT.create(FLYSConstants.class);
 
-    /** The StepForwardHandlers.*/
+    /** The StepForwardHandlers. */
     protected List<StepForwardHandler> forwardHandlers;
 
-    /** The StepForwardHandlers.*/
+    /** The StepForwardHandlers. */
     protected List<StepBackHandler> backHandlers;
 
-    /** The container that is used to position helper widgets.*/
+    /** The container that is used to position helper widgets. */
     protected VLayout helperContainer;
 
-    /** The artifact that contains status information.*/
+    /** The artifact that contains status information. */
     protected Artifact artifact;
 
-    /** The Collection.*/
+    /** The Collection. */
     protected Collection collection;
 
-    /** The ParameterList.*/
+    /** The ParameterList. */
     protected ParameterList parameterList;
 
     /**
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/DoubleArrayPanel.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/DoubleArrayPanel.java	Wed Mar 13 19:07:52 2013 +0100
@@ -9,6 +9,7 @@
 import com.smartgwt.client.widgets.form.fields.StaticTextItem;
 import com.smartgwt.client.widgets.form.fields.TextItem;
 import com.smartgwt.client.widgets.form.fields.events.BlurHandler;
+import com.smartgwt.client.widgets.form.fields.events.FocusHandler;
 
 import de.intevation.flys.client.client.FLYSConstants;
 
@@ -33,7 +34,7 @@
         double[] values,
         BlurHandler handler)
     {
-        this(title, values, handler, TitleOrientation.RIGHT);
+        this(title, values, handler, null, TitleOrientation.RIGHT);
     }
 
 
@@ -44,12 +45,14 @@
      * @param name The name of the TextItem.
      * @param title The title of the TextItem.
      * @param values The double values that should be displayed initially.
-     * @param handler The BlurHandler that is used to valide the input.
+     * @param blurHandler The BlurHandler that is used to validate the input.
+     * @param focusHandler The FocusHandler that is used to track the currently focused input.
      */
     public DoubleArrayPanel(
         String title,
         double[] values,
-        BlurHandler handler,
+        BlurHandler blurHandler,
+        FocusHandler focusHandler,
         TitleOrientation titleOrientation)
     {
         this.title = title;
@@ -60,7 +63,10 @@
         sti.setShowTitle(false);
         sti.setValue(title);
 
-        ti.addBlurHandler(handler);
+        ti.addBlurHandler(blurHandler);
+        if (focusHandler != null) {
+            ti.addFocusHandler(focusHandler);
+        }
 
         if (titleOrientation == TitleOrientation.RIGHT) {
             setFields(ti, sti);
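The widened constructor takes an optional FocusHandler; the delegating three-argument constructor above passes null and keeps the old behaviour. A hedged usage sketch mirroring how WQAdaptedInputPanel wires itself further down (title, blurHandler and the empty handler body are illustrative):

    // Illustrative only: wire a panel with blur validation and focus tracking.
    FocusHandler focusHandler = new FocusHandler() {
        @Override
        public void onFocus(FocusEvent event) {
            // e.g. remember which DoubleArrayPanel was focused last
        }
    };
    DoubleArrayPanel dap = new DoubleArrayPanel(
        "Q [m³/s]", null, blurHandler, focusHandler, TitleOrientation.LEFT);
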
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/QSegmentedInputPanel.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/QSegmentedInputPanel.java	Wed Mar 13 19:07:52 2013 +0100
@@ -297,7 +297,7 @@
             String title = item.getLabel();
 
             DoubleArrayPanel dap = new DoubleArrayPanel(
-                createLineTitle(title), null, this, TitleOrientation.LEFT);
+                createLineTitle(title), null, this, null, TitleOrientation.LEFT);
 
             wqranges.put(title, dap);
 
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java	Thu Mar 07 09:46:11 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java	Wed Mar 13 19:07:52 2013 +0100
@@ -4,17 +4,23 @@
 import com.google.gwt.i18n.client.NumberFormat;
 import com.google.gwt.user.client.rpc.AsyncCallback;
 
+import com.smartgwt.client.data.Record;
 import com.smartgwt.client.types.TitleOrientation;
 import com.smartgwt.client.types.VerticalAlignment;
 import com.smartgwt.client.util.SC;
 import com.smartgwt.client.widgets.Canvas;
 import com.smartgwt.client.widgets.Label;
 import com.smartgwt.client.widgets.form.DynamicForm;
+import com.smartgwt.client.widgets.form.fields.FormItem;
 import com.smartgwt.client.widgets.form.fields.RadioGroupItem;
 import com.smartgwt.client.widgets.form.fields.events.BlurEvent;
 import com.smartgwt.client.widgets.form.fields.events.BlurHandler;
 import com.smartgwt.client.widgets.form.fields.events.ChangeEvent;
 import com.smartgwt.client.widgets.form.fields.events.ChangeHandler;
+import com.smartgwt.client.widgets.form.fields.events.FocusEvent;
+import com.smartgwt.client.widgets.form.fields.events.FocusHandler;
+import com.smartgwt.client.widgets.grid.events.CellClickEvent;
+import com.smartgwt.client.widgets.grid.events.CellClickHandler;
 import com.smartgwt.client.widgets.layout.HLayout;
 import com.smartgwt.client.widgets.layout.VLayout;
 import com.smartgwt.client.widgets.tab.Tab;
@@ -52,10 +58,13 @@
  */
 public class WQAdaptedInputPanel
 extends      AbstractUIProvider
-implements   ChangeHandler, BlurHandler
+implements   ChangeHandler, BlurHandler, FocusHandler
 {
     private static final long serialVersionUID = -3218827566805476423L;
 
+    /** The message class that provides i18n strings. */
+    protected FLYSConstants MESSAGE = GWT.create(FLYSConstants.class);
+
     public static final String FIELD_WQ_MODE = "wq_isq";
     public static final String FIELD_WQ_W    = "W";
     public static final String FIELD_WQ_Q    = "Q";
@@ -95,12 +104,18 @@
     /** The RadioGroupItem that determines the w/q input mode.*/
     protected DynamicForm modes;
 
+    /** Table holding Q and D values. */
     protected QDTable qdTable;
 
+    /** Table holding W values. */
     protected WTable wTable;
 
+    /** Tabs in the input helper area. */
     protected TabSet tabs;
 
+    /** The currently focussed Input element. */
+    protected DoubleArrayPanel itemWithFocus;
+
 
     public WQAdaptedInputPanel() {
         wqranges = new HashMap<String, DoubleArrayPanel>();
@@ -108,6 +123,7 @@
         wranges  = new HashMap<String, double[]>();
         qdTable  = new QDTable();
         wTable   = new WTable();
+        initTableListeners();
     }
 
 
@@ -133,18 +149,19 @@
     }
 
 
+    /** Inits the helper panel. */
+    // TODO duplicate in WQInputPanel
     protected void initHelperPanel() {
         tabs = new TabSet();
         tabs.setWidth100();
         tabs.setHeight100();
 
-        // TODO i18n
-        Tab wTab = new Tab("W");
-        Tab qTab = new Tab("Q / D");
+        Tab wTab = new Tab(MESSAGE.wq_table_w());
+        Tab qTab = new Tab(MESSAGE.wq_table_q());
 
+        qdTable.showSelect();
         wTab.setPane(wTable);
         qTab.setPane(qdTable);
-        qdTable.hideIconFields();
 
         tabs.addTab(wTab, 0);
         tabs.addTab(qTab, 1);
@@ -155,6 +172,31 @@
     }
 
 
+    /**
+     * Initializes the listeners of the WQD tables.
+     */
+    // TODO dupe from WQInputPanel
+    protected void initTableListeners() {
+        CellClickHandler handler = new CellClickHandler() {
+            @Override
+            public void onCellClick(CellClickEvent e) {
+                if (isWMode() || qdTable.isLocked()) {
+                    return;
+                }
+
+                int    idx = e.getColNum();
+                Record r   = e.getRecord();
+                double val = r.getAttributeAsDouble("value");
+
+                if (itemWithFocus != null) {
+                    itemWithFocus.setValues(new double[]{val});
+                }
+            }
+        };
+
+        qdTable.addCellClickHandler(handler);
+    }
+
     @Override
     public Canvas createOld(DataList dataList) {
         List<Data> all = dataList.getAll();
@@ -422,7 +464,7 @@
             String title = item.getLabel();
             String label = item.getStringValue();
             DoubleArrayPanel dap = new DoubleArrayPanel(
-                label, null, this, TitleOrientation.LEFT);
+                label, null, this, this, TitleOrientation.LEFT);
 
             wqranges.put(title, dap);
 
@@ -593,6 +635,13 @@
     }
 
 
+    /** Store the currently focussed DoubleArrayPanel. */
+    @Override
+    public void onFocus(FocusEvent event) {
+        itemWithFocus = (DoubleArrayPanel) event.getForm();
+    }
+
+
     @Override
     public void onBlur(BlurEvent event) {
         DoubleArrayPanel dap = (DoubleArrayPanel) event.getForm();
