changeset 5135:cad911029c9d

Merged with 'dc-km-filter-rr' branch.
author Raimund Renkert <rrenkert@intevation.de>
date Thu, 28 Feb 2013 12:47:24 +0100
parents 8e52b4829cd1 (diff) 86e79fbb8fa3 (current diff)
children ec3430d12d2e
files flys-artifacts/doc/conf/meta-data.xml
diffstat 118 files changed, 3654 insertions(+), 1853 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/init.d/README.txt	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,15 @@
+SLES init script for Dive4Elements River:
+
+Installation (as root):
+- Copy to /etc/init.d/d4e-river
+- chmod 755 /etc/init.d/d4e-river
+- insserv /etc/init.d/d4e-river
+- /etc/init.d/d4e-river start
+
+Uninstallation (as root):
+- /etc/init.d/d4e-river stop
+- insserv -r /etc/init.d/d4e-river
+- rm /var/log/d4e-river.log /var/run/d4e-river.pid /etc/init.d/d4e-river
+
+TODO:
+- If needed, configure logrotate for the log file /var/log/d4e-river.log
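
The TODO above leaves the logrotate setup open. Below is a minimal sketch of a
logrotate drop-in for /var/log/d4e-river.log; the weekly schedule and the
retention of four compressed copies are assumptions, and copytruncate is chosen
because the server keeps the log file open through the shell redirection in the
init script:

    # Assumed rotation policy; adjust schedule and retention as needed.
    printf '%s\n' \
        '/var/log/d4e-river.log {' \
        '    weekly' \
        '    rotate 4' \
        '    compress' \
        '    missingok' \
        '    notifempty' \
        '    copytruncate' \
        '}' > /etc/logrotate.d/d4e-river
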
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/init.d/d4e-river	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,70 @@
+#!/bin/bash
+#
+### BEGIN INIT INFO
+# Provides: d4e-server
+# Required-Start: $network $syslog $remote_fs
+# Should-Start: $named $syslog $time
+# Required-Stop: $network $syslog
+# Should-Stop: $named $syslog $time
+# Default-Start:  3 5
+# Default-Stop:   0 1 2 6
+# Short-Description: Dive4Elements server
+# Description:    Start Dive4Elements server
+### END INIT INFO
+
+RUNAS=flys
+DIR="/opt/flys/current/server"
+
+CLASSPATH=
+for l in `find "$DIR/bin/lib" -name \*.jar -print`; do
+    CLASSPATH=$CLASSPATH:$l
+done
+
+
+LOGFILE=/var/log/d4e-river.log
+PIDFILE=/var/run/d4e-river.pid
+ARGS="-Xmx256m \
+     -server \
+     -Djava.awt.headless=true \
+     -Dflys.datacage.recommendations.development=false \
+     -Djava.io.tmpdir=\"$DIR/cache\" \
+     -Dflys.backend.enablejmx=true \
+     -Dflys.uesk.keep.artifactsdir=false \
+     -Dwsplgen.bin.path=\"$DIR/bin/wsplgen.exe\" \
+     -Dwsplgen.log.output=false \
+     -Dartifact.database.dir=\"$DIR/conf\""
+MAINCLASS=de.intevation.artifactdatabase.App
+
+# For SELinux we need to use 'runuser' not 'su'
+if [ -x "/sbin/runuser" ]; then
+    SU="/sbin/runuser"
+else
+    SU="/bin/su"
+fi
+
+case "$1" in
+  start)
+    echo "Starting D4E-river server..."
+    $SU - $RUNAS -c "/usr/bin/java -classpath $CLASSPATH $ARGS $MAINCLASS" &> $LOGFILE &
+    PID=$!
+    echo $PID > $PIDFILE
+    ;;
+  stop)
+    echo "Stopping D4E-river server..."
+    PID=`cat $PIDFILE`
+    STOPRES=0
+    while [ $STOPRES -le 0 ]
+    do
+      kill -15 $PID &> /dev/null
+      STOPRES=$?
+      sleep 1
+    done
+    echo "done."
+    ;;
+  restart)
+    $0 stop && $0 start
+    ;;
+  *)
+    echo "Usage: $0 [start|stop|restart]"
+esac
+
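
The script records its PID in /var/run/d4e-river.pid but provides no status
action. A hypothetical check against that PID file (not part of the script
above) could look like this:

    # Hypothetical status check based on the PID file written by 'start'.
    PIDFILE=/var/run/d4e-river.pid
    if [ -f "$PIDFILE" ] && kill -0 "$(cat "$PIDFILE")" 2>/dev/null; then
        echo "d4e-river is running (PID $(cat "$PIDFILE"))"
    else
        echo "d4e-river is not running"
    fi
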
--- a/contrib/make_flys_release/README	Tue Feb 19 10:46:41 2013 +0100
+++ b/contrib/make_flys_release/README	Thu Feb 28 12:47:24 2013 +0100
@@ -38,3 +38,21 @@
 `server` and `client`. The server contains all configurations as well as the
 libraries required to start the FLYS server. The client contains only the WAR
 archive for a servlet container (e.g. Tomcat).
+
+Importer:
+=========
+The script that builds and packages the importer is located at
+bin/make-importer-package.sh. It has to be adapted by setting the paths
+to the directories that contain the additional packages.
+To build the package, rpm2cpio must be installed.
+
+The configured directories have to provide the following files:
+EXTRAS:
+    - libgdal1-1.9.0-intevation1.x86\_64.rpm
+    - gdal-1.9.0-intevation1.x86\_64.rpm
+    - python-gdal-1.9.0-intevation1.x86\_64.rpm
+ORACLE:
+    - cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm
+    - instantclient-basic-linux-x86-64-11.2.0.2.0.zip
+    - instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
+    - instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
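
Because bin/make-importer-package.sh runs with 'set -e', a missing package
aborts the build halfway through. A small pre-flight check over the files
listed above, assuming the same EXTRAS and ORACLE variables as in the script:

    # Sketch: verify prerequisites before running make-importer-package.sh.
    EXTRAS=/path/to/gdal                 # directory with the GDAL RPMs
    ORACLE=/path/to/oracle/archives      # directory with the Oracle files
    command -v rpm2cpio >/dev/null || echo "missing: rpm2cpio"
    for f in \
        "$EXTRAS"/libgdal1-1.9.0-intevation1.x86_64.rpm \
        "$EXTRAS"/gdal-1.9.0-intevation1.x86_64.rpm \
        "$EXTRAS"/python-gdal-1.9.0-intevation1.x86_64.rpm \
        "$ORACLE"/cx_Oracle-5.1.2-11g-py26-1.x86_64.rpm \
        "$ORACLE"/instantclient-basic-linux-x86-64-11.2.0.2.0.zip \
        "$ORACLE"/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip \
        "$ORACLE"/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
    do
        [ -f "$f" ] || echo "missing: $f"
    done
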
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/make_flys_release/bin/make-importer-package.sh	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,70 @@
+#!/bin/bash
+set -e
+
+# See README for more information
+
+# The working directory. Resulting tarball will be placed in the directory above.
+PKG_DIR=/tmp/flys-importer
+# Path to the Oracle instantclient zip archives and the cx_Oracle RPM
+ORACLE=/path/to/oracle/archives
+# Default conf
+CONF_DIR=/path/to/conf/dir
+# Path to the flys checkout
+FLYS_DIR=/path/to/flys/root
+# Extra packages
+EXTRAS=/path/to/gdal
+
+rm -fr $PKG_DIR
+mkdir -p $PKG_DIR/hydr_morph
+mkdir -p $PKG_DIR/geodaesie
+mkdir -p $PKG_DIR/opt/lib64
+mkdir -p $PKG_DIR/schema
+
+cd ${FLYS_DIR}/flys-backend
+mvn -f pom-oracle.xml clean compile assembly:single
+cp target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar \
+    $PKG_DIR/hydr_morph/importer.jar
+cp ${FLYS_DIR}/flys-backend/contrib/shpimporter/*.py $PKG_DIR/geodaesie
+cp ${FLYS_DIR}/flys-backend/contrib/run_geo.sh \
+    ${FLYS_DIR}/flys-backend/contrib/run_hydr_morph.sh \
+    $PKG_DIR
+cp -r ${CONF_DIR} $PKG_DIR
+ln -s /usr/lib64/libproj.so.0.6.6 $PKG_DIR/opt/lib64/libproj.so # workaround for bad packaging
+rm -rf /tmp/other_rpms
+mkdir /tmp/other_rpms
+cd /tmp/other_rpms
+
+rpm2cpio ${EXTRAS}/libgdal1-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
+rpm2cpio ${EXTRAS}/gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
+rpm2cpio ${EXTRAS}/python-gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories
+rpm2cpio ${ORACLE}/cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm | cpio -i --make-directories
+cp -r /tmp/other_rpms/usr/* $PKG_DIR/opt
+rm -rf /tmp/other_rpms
+
+cp ${FLYS_DIR}/flys-backend/doc/schema/*.sql $PKG_DIR/schema
+cp ${FLYS_DIR}/flys-backend/doc/documentation/de/importer-manual.pdf $PKG_DIR
+
+# Oracle (Do not distribute)
+unzip ${ORACLE}/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
+unzip ${ORACLE}/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
+unzip ${ORACLE}/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt
+
+mkdir $PKG_DIR//opt/instantclient_11_2/lib
+cd $PKG_DIR//opt/instantclient_11_2/lib
+ln -s ../libclntsh.so.11.1 .
+ln -s ../libclntsh.so.11.1 libclntsh.so
+ln -s ../libnnz11.so .
+ln -s ../libocci.so.11.1 .
+ln -s ../libocci.so.11.1 libocci.so
+ln -s ../libociei.so .
+ln -s ../libocijdbc11.so .
+ln -s ../libsqlplusic.so .
+ln -s ../libsqlplus.so .
+# End Oracle
+
+sed -i 's/shpimporter\/shp/geodaesie\/shp/' $PKG_DIR/run_geo.sh
+
+cd $PKG_DIR/..
+DATE=$(date +%Y%m%d%H%M)
+tar -czf flys-importer${DATE}.tar.gz flys-importer
+sha1sum flys-importer${DATE}.tar.gz > flys-importer${DATE}.tar.gz.sha1
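
The script ends by writing a SHA1 checksum file next to the tarball (both land
in /tmp, the parent of PKG_DIR). A recipient can verify a downloaded archive
with sha1sum's check mode, for example:

    # Verify the most recently built importer tarball against its checksum.
    cd /tmp
    sha1sum -c "$(ls -t flys-importer*.tar.gz.sha1 | head -n 1)"
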
--- a/contrib/make_flys_release/make_release.sh	Tue Feb 19 10:46:41 2013 +0100
+++ b/contrib/make_flys_release/make_release.sh	Thu Feb 28 12:47:24 2013 +0100
@@ -18,6 +18,7 @@
 TOMCAT_PORT=${TOMCAT_PORT:-8005}
 
 MAPSERVER_URL=${MAPSERVER_URL:-czech-republic.atlas.intevation.de}
+WIKI_URL=${WIKI_URL:-https://flys-intern.intevation.de/Flys-3.0}
 
 echo "INFO: create server directories"
 mkdir -p $DIRECTORY/server/bin/lib/own
@@ -48,9 +49,13 @@
        -e "s@http://localhost:8888@http://localhost:$TOMCAT_PORT@g" \
     $FLYS_HG/flys-client/src/main/webapp/WEB-INF/web.xml
 
-sed -i -e "s@/tmp/flys-client.log@/tmp/flys-client-${RELEASE}.log@g" \
+sed -i -e "s@/tmp/flys-client.log@/var/log/flys/client-${RELEASE}.log@g" \
     $FLYS_HG/flys-client/src/main/webapp/WEB-INF/log4j.properties
 
+# Fix the Wiki URLs
+find $FLYS_HG/flys-artifacts/src/main/resources/ -name messages_\*.properties \
+    -exec sed -i "s@https://flys-intern.intevation.de/Flys-3.0@${WIKI_URL}@g" {} \;
+
 echo "INFO: download OpenLayers-2.11 for client"
 curl -O http://openlayers.org/download/OpenLayers-2.11.tar.gz
 tar xvfz OpenLayers-2.11.tar.gz
@@ -110,8 +115,7 @@
     confs/rivermap.xml \
     > $DIRECTORY/server/conf/rivermap.xml
 
-
-sed "s@/tmp/flys-server-default.log@/tmp/flys-server-${RELEASE}.log" \
+sed "s@/tmp/flys-server-default.log@/var/log/flys/server-${RELEASE}.log@" \
     confs/log4j.properties \
     > $DIRECTORY/server/conf/log4j.properties
 
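
Since make_release.sh reads WIKI_URL (like TOMCAT_PORT and MAPSERVER_URL) via a
${VAR:-default} expansion, a release pointing at a different wiki host can be
built by overriding the variable in the environment. A hypothetical invocation;
any further arguments of make_release.sh are not shown in this changeset:

    # Override the defaulted settings, then run the release script as usual.
    export WIKI_URL=https://wiki.example.org/Flys-3.0
    export TOMCAT_PORT=8015
    ./make_release.sh
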
--- a/flys-artifacts/doc/conf/artifacts/fixanalysis.xml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/doc/conf/artifacts/fixanalysis.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -270,6 +270,7 @@
                         <facet name="w_differences.manualpoints" description="Manuelle Punkte"/>
                         <facet name="longitudinal_section.manualpoints" description="Manuelle Punkte"/>
                         <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
+                        <facet name="longitudinal_section.area" description="facet.longitudinal_section.area"/>
                     </facets>
                 </outputmode>
                 <outputmode name="fix_wq_curve" description="output.fix_wq_curve" mine-type="image/png" type="chart">
@@ -304,6 +305,11 @@
                     <facet name="hyk"                        description="hyks"/>
                   </facets>
                 </outputmode>
+                <outputmode name="fix_wq_curve_at_export" description="output.fix_wq_curve_at_export" mime-type="text/plain" type="export">
+                    <facets>
+                        <facet name="at" description="facet.fix_wq_curve_export.at"/>
+                    </facets>
+                </outputmode>
                 <outputmode name="fix_waterlevel_export" description="output.fix_waterlevel_export" mine-type="text/plain" type="export">
                     <facets>
                         <facet name="csv" description="facet.waterlevel_export.csv" />
--- a/flys-artifacts/doc/conf/conf.xml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/doc/conf/conf.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -39,24 +39,15 @@
             <artifact-factory name="wmsqpsfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
                 ttl="3600000"
                 artifact="de.intevation.flys.artifacts.WMSQPSArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
-            <artifact-factory name="wmshwsfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
-                ttl="3600000"
-                artifact="de.intevation.flys.artifacts.WMSHwsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
             <artifact-factory name="wmshydrboundariesfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
                 ttl="3600000"
                 artifact="de.intevation.flys.artifacts.WMSHydrBoundaryArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
             <artifact-factory name="wmshydrboundariespolyfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
                 ttl="3600000"
                 artifact="de.intevation.flys.artifacts.WMSHydrBoundaryPolyArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
-            <artifact-factory name="wmscatchmentfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
-                ttl="3600000"
-                artifact="de.intevation.flys.artifacts.WMSCatchmentArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
             <artifact-factory name="wmsfloodplainfactory" description="Factory to create an artifact that generates WMS facets for CrossSectionTracks."
                 ttl="3600000"
                 artifact="de.intevation.flys.artifacts.WMSFloodplainArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
-            <artifact-factory name="wmslinefactory" description="Factory to create an artifact to be used in WINFO"
-                ttl="3600000"
-                artifact="de.intevation.flys.artifacts.WMSLineArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
             <artifact-factory name="wmsbuildingsfactory" description="Factory to create an artifact to be used in WINFO"
                 ttl="3600000"
                 artifact="de.intevation.flys.artifacts.WMSBuildingsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
--- a/flys-artifacts/doc/conf/meta-data.xml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/doc/conf/meta-data.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -737,6 +737,9 @@
                <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
                  <dc:call-macro name="qsectors"/>
                </dc:if>
+               <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')">
+                 <dc:call-macro name="annotations"/>
+               </dc:if>
                <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
                  <dc:call-macro name="annotations"/>
                </dc:if>
@@ -1019,23 +1022,6 @@
                   </dc:elements>
                 </dc:context>
               </dc:macro>
-              <dc:macro name="flood-map-hws">
-                <dc:context>
-                  <dc:statement>
-                    SELECT count(*) as km_exists, name as name
-                    FROM hws WHERE river_id = ${river_id} GROUP BY name
-                  </dc:statement>
-                   <dc:elements>
-                    <dc:if test="$km_exists>0">
-                      <hws>
-                        <dc:attribute name="factory" value="wmshwsfactory"/>
-                        <dc:attribute name="ids" value="${river_id};${name}"/>
-                        <dc:attribute name="name" value="${name}"/>
-                      </hws>
-                    </dc:if>
-                  </dc:elements>
-                </dc:context>
-              </dc:macro>
               <dc:macro name="flood-map-hydr-boundaries">
                 <hydr_boundaries_lines>
                   <dc:call-macro name="flood-map-hydr-boundaries-lines"/>
@@ -1116,23 +1102,6 @@
                   </dc:context>
                 </land>
               </dc:macro>
-              <dc:macro name="flood-map-catchments">
-                <dc:context>
-                  <dc:statement>
-                    SELECT count(*) as km_exists, name as name
-                    FROM catchment WHERE river_id = ${river_id} GROUP BY name
-                  </dc:statement>
-                   <dc:elements>
-                    <dc:if test="$km_exists>0">
-                      <catchment>
-                        <dc:attribute name="factory" value="wmscatchmentfactory"/>
-                        <dc:attribute name="ids" value="${river_id};${name}"/>
-                        <dc:attribute name="name" value="${name}"/>
-                      </catchment>
-                    </dc:if>
-                  </dc:elements>
-                </dc:context>
-              </dc:macro>
               <dc:macro name="flood-map-floodplain">
                 <dc:context>
                   <dc:statement>
@@ -1149,11 +1118,11 @@
                   </dc:elements>
                 </dc:context>
               </dc:macro>
-              <dc:macro name="flood-map-lines">
+              <dc:macro name="flood-map-hwslines">
                 <dc:context>
                   <dc:statement>
                     SELECT count(*) as km_exists, name as name
-                    FROM lines WHERE river_id = ${river_id} GROUP BY name
+                    FROM hws_lines WHERE river_id = ${river_id} GROUP BY name
                   </dc:statement>
                    <dc:elements>
                     <dc:if test="$km_exists>0">
@@ -1313,9 +1282,6 @@
                   <fixpoints>
                     <dc:call-macro name="flood-map-fixpoints"/>
                   </fixpoints>
-                  <hws>
-                    <dc:call-macro name="flood-map-hws"/>
-                  </hws>
                   <hydrboundaries>
                     <dc:call-macro name="flood-map-hydr-boundaries"/>
                     <dc:call-macro name="flood-map-floodplain"/>
@@ -1328,9 +1294,9 @@
                     <dc:call-macro name="flood-map-km"/>
                     <dc:call-macro name="flood-map-qps"/>
                   </kilometrage>
-                  <lines>
-                    <dc:call-macro name="flood-map-lines"/>
-                  </lines>
+                  <hwslines>
+                    <dc:call-macro name="flood-map-hwslines"/>
+                  </hwslines>
                   <dc:call-macro name="flood-map-uesk"/>
                   <gaugelocations>
                     <dc:call-macro name="flood-map-gaugelocations"/>
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/CollectionMonitor.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/CollectionMonitor.java	Thu Feb 28 12:47:24 2013 +0100
@@ -22,10 +22,12 @@
 
 import de.intevation.flys.artifacts.datacage.Recommendations;
 
+/** Monitors collection changes. */
 public class CollectionMonitor implements Hook {
 
     public static final String XPATH_RESULT = "/art:result";
 
+
     @Override
     public void setup(Node cfg) {
     }
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSCatchmentArtifact.java	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,166 +0,0 @@
-package de.intevation.flys.artifacts;
-
-import java.util.List;
-
-import org.w3c.dom.Document;
-
-import org.apache.log4j.Logger;
-
-import com.vividsolutions.jts.geom.Envelope;
-
-import de.intevation.artifacts.ArtifactFactory;
-import de.intevation.artifacts.CallMeta;
-
-import de.intevation.artifactdatabase.state.DefaultOutput;
-import de.intevation.artifactdatabase.state.Facet;
-import de.intevation.artifactdatabase.state.State;
-
-import de.intevation.flys.model.Catchment;
-import de.intevation.flys.model.River;
-
-import de.intevation.flys.artifacts.model.FacetTypes;
-import de.intevation.flys.artifacts.model.RiverFactory;
-import de.intevation.flys.utils.FLYSUtils;
-import de.intevation.flys.utils.GeometryUtils;
-
-
-public class WMSCatchmentArtifact extends WMSDBArtifact {
-
-    public static final String NAME = "catchment";
-
-
-    private static final Logger logger =
-        Logger.getLogger(WMSCatchmentArtifact.class);
-
-
-    @Override
-    public void setup(
-        String          identifier,
-        ArtifactFactory factory,
-        Object          context,
-        CallMeta        callMeta,
-        Document        data)
-    {
-        logger.debug("WMSCatchmentArtifact.setup");
-
-        super.setup(identifier, factory, context, callMeta, data);
-    }
-
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-
-    @Override
-    public State getCurrentState(Object cc) {
-        State s = new CatchmentState(this);
-
-        List<Facet> fs = getFacets(getCurrentStateId());
-
-        DefaultOutput o = new DefaultOutput(
-            "floodmap",
-            "floodmap",
-            "image/png",
-            fs,
-            "map");
-
-        s.getOutputs().add(o);
-
-        return s;
-    }
-
-
-    public static class CatchmentState extends WMSDBState implements FacetTypes
-    {
-        private static final Logger logger =
-            Logger.getLogger(CatchmentState.class);
-
-        protected int riverId;
-
-        public CatchmentState(WMSDBArtifact artifact) {
-            super(artifact);
-            riverId = 0;
-        }
-
-        public int getRiverId() {
-            if (riverId == 0) {
-                String   ids   = artifact.getDataAsString("ids");
-                String[] parts = ids.split(";");
-
-                try {
-                    riverId = Integer.parseInt(parts[0]);
-                }
-                catch (NumberFormatException nfe) {
-                    logger.error("Cannot parse river id from '" + ids + "'");
-                }
-            }
-
-            return riverId;
-        }
-
-        @Override
-        protected String getFacetType() {
-            return FLOODMAP_CATCHMENT;
-        }
-
-        @Override
-        protected String getUrl() {
-            return FLYSUtils.getUserWMSUrl(artifact.identifier());
-        }
-
-        @Override
-        protected String getSrid() {
-            River river = RiverFactory.getRiver(getRiverId());
-            return FLYSUtils.getRiverSrid(river.getName());
-        }
-
-        @Override
-        protected Envelope getExtent(boolean reproject) {
-            List<Catchment> catchments =
-                Catchment.getCatchments(getRiverId(), getName());
-
-            Envelope max = null;
-
-            for (Catchment c: catchments) {
-                Envelope env = c.getGeom().getEnvelopeInternal();
-
-                if (max == null) {
-                    max = env;
-                    continue;
-                }
-
-                max.expandToInclude(env);
-            }
-
-            return max != null && reproject
-                ? GeometryUtils.transform(max, getSrid())
-                : max;
-        }
-
-        @Override
-        protected String getFilter() {
-            return "river_id=" + String.valueOf(getRiverId())
-                + " AND name='" + getName() + "'";
-        }
-
-        @Override
-        protected String getDataString() {
-            String srid = getSrid();
-
-            if (FLYSUtils.isUsingOracle()) {
-                return "geom FROM catchment USING SRID " + srid;
-            }
-            else {
-                return "geom FROM catchment USING UNIQUE id USING SRID " + srid;
-            }
-        }
-
-        @Override
-        protected String getGeometryType() {
-            return "POLYGON";
-        }
-    } // end of WMSKmState
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHwsArtifact.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHwsArtifact.java	Thu Feb 28 12:47:24 2013 +0100
@@ -15,8 +15,8 @@
 import de.intevation.artifactdatabase.state.Facet;
 import de.intevation.artifactdatabase.state.State;
 
+import de.intevation.flys.model.HWSLine;
 import de.intevation.flys.model.River;
-import de.intevation.flys.model.Hws;
 
 import de.intevation.flys.artifacts.model.FacetTypes;
 import de.intevation.flys.artifacts.model.RiverFactory;
@@ -118,11 +118,11 @@
 
         @Override
         protected Envelope getExtent(boolean reproject) {
-            List<Hws> hws = Hws.getHws(getRiverId(), getName());
+            List<HWSLine> hws = HWSLine.getLines(getRiverId(), getName());
 
             Envelope max = null;
 
-            for (Hws h: hws) {
+            for (HWSLine h: hws) {
                 Envelope env = h.getGeom().getEnvelopeInternal();
 
                 if (max == null) {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSLineArtifact.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSLineArtifact.java	Thu Feb 28 12:47:24 2013 +0100
@@ -16,7 +16,7 @@
 import de.intevation.artifactdatabase.state.State;
 
 import de.intevation.flys.model.River;
-import de.intevation.flys.model.Line;
+import de.intevation.flys.model.HWSLine;
 
 import de.intevation.flys.artifacts.model.FacetTypes;
 import de.intevation.flys.artifacts.model.RiverFactory;
@@ -118,11 +118,11 @@
 
         @Override
         protected Envelope getExtent(boolean reproject) {
-            List<Line> lines = Line.getLines(getRiverId(), getName());
+            List<HWSLine> lines = HWSLine.getLines(getRiverId(), getName());
 
             Envelope max = null;
 
-            for (Line l: lines) {
+            for (HWSLine l: lines) {
                 Envelope env = l.getGeom().getEnvelopeInternal();
 
                 if (max == null) {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/FixAccess.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/FixAccess.java	Thu Feb 28 12:47:24 2013 +0100
@@ -7,6 +7,7 @@
 
 import org.apache.log4j.Logger;
 
+/** Access for Fixation related data. */
 public class FixAccess
 extends      RangeAccess
 {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/FixRealizingAccess.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/FixRealizingAccess.java	Thu Feb 28 12:47:24 2013 +0100
@@ -9,6 +9,8 @@
 
 import org.apache.log4j.Logger;
 
+
+/** Fix-Realizing (Volmer/Ausgelagerte Wasserspiegellage) access. */
 public class FixRealizingAccess
 extends      FixAccess
 {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Segment.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Segment.java	Thu Feb 28 12:47:24 2013 +0100
@@ -53,6 +53,7 @@
         return from < to;
     }
 
+    /** Checks whether the given km lies inside the to/from bounds of this segment. */
     public boolean inside(double km) {
         return from < to
             ? km >= from && km <= to
@@ -120,6 +121,7 @@
         return referencePoint;
     }
 
+    /** Use DoubleUtil to parse Segments. */
     public static List<Segment> parseSegments(String input) {
 
         final List<Segment> segments = new ArrayList<Segment>();
@@ -176,18 +178,21 @@
 
                 DischargeTable dt = gauge.fetchMasterDischargeTable();
 
+                //TODO: Change scale from 100 to 1 immediately after
+                //      discharge table import changed to cm!
                 double [][] table =
-                    DischargeTables.loadDischargeTableValues(dt, 1);
+                    DischargeTables.loadDischargeTableValues(dt, 100);
 
                 // need the original values for naming
                 segment.backup();
 
                 for (int i = 0; i < values.length; ++i) {
-                    double w = values[i] / 100.0;
+                    //TODO: s.o.
+                    double w = values[i]; /* / 100.0; */
                     double [] qs = DischargeTables.getQsForW(table, w);
                     if (qs.length == 0) {
                         log.warn("No Qs found for W = " + values[i]);
-                        report.addProblem("cannot.find.w.for.q", values[i]);
+                        report.addProblem("cannot.find.q.for.w", values[i]);
                         values[i] = Double.NaN;
                         success = false;
                     }
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/FileUploadService.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/FileUploadService.java	Thu Feb 28 12:47:24 2013 +0100
@@ -18,6 +18,7 @@
 import org.w3c.dom.Element;
 
 /**
+ * Service that receives XML-packed Shapefile bundles from the client.
  * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a>
  */
 public class FileUploadService extends FLYSService {
@@ -35,7 +36,6 @@
     public FileUploadService() {
     }
 
-
     @Override
     protected Document doProcess(
         Document data,
@@ -44,6 +44,13 @@
     ) {
         logger.debug("FileUploadService.process");
 
+        Document doc = XMLUtils.newDocument();
+        ElementCreator ec = new ElementCreator(doc, null, null);
+        Element resp   = ec.create("response");
+        Element status = ec.create("status");
+        resp.appendChild(status);
+        doc.appendChild(resp);
+
         String uuid = extractUuid(data);
 
         byte[] fileData = extractData(data);
@@ -58,6 +65,9 @@
                     new File(artifactDir, "user-rgd.zip"));
                 try {
                     fos.write(fileData);
+
+                    // Write operation successful
+                    status.setTextContent("Upload erfolgreich!"); // TODO: i18n
                 }
                 finally {
                     fos.close();
@@ -65,27 +75,32 @@
             }
             catch (IOException ioe) {
                 logger.warn(ioe, ioe);
+                status.setTextContent("Upload fehlgeschlagen!");
             }
         }
         else {
             logger.debug("No data in uploaded xml.");
+            status.setTextContent("Upload fehlgeschlagen!");
         }
 
-        Document doc = XMLUtils.newDocument();
-        ElementCreator ec = new ElementCreator(doc, null, null);
-        Element resp = ec.create("response");
-        doc.appendChild(resp);
-
         return doc;
     }
 
-
+    /**
+     * Extracts the UUID from the XML document.
+     * @param data the incoming XML request document
+     * @return the artifact UUID
+     */
     protected String extractUuid(Document data) {
         return XMLUtils.xpathString(
             data, XPATH_ARTIFACT_UUID, ArtifactNamespaceContext.INSTANCE);
     }
 
-
+    /**
+     * Extracts the base64 encoded ZIP file from the XML document.
+     * @param data the incoming XML request document
+     * @return the decoded file data (ZIP archive bytes)
+     */
     protected byte[] extractData(Document data) {
         String b64Data = XMLUtils.xpathString(
             data, XPATH_DATA, ArtifactNamespaceContext.INSTANCE);
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java	Thu Feb 28 12:47:24 2013 +0100
@@ -408,8 +408,9 @@
         File         dir,
         WSPLGENJob   job
     ) {
+        String river   = artifact.getDataAsString("river");
         String geoJSON = artifact.getDataAsString("uesk.barriers");
-        String srid    = FLYSUtils.getRiverSrid(artifact);
+        String srid    = FLYSUtils.getRiverDGMSrid(river);
         String srs     = "EPSG:" + srid;
 
         if (geoJSON == null || geoJSON.length() == 0) {
@@ -719,7 +720,7 @@
         }
 
         String river = artifact.getDataAsString("river");
-        String srid  = FLYSUtils.getRiverSrid(artifact);
+        String srid  = FLYSUtils.getRiverDGMSrid(river);
         String srs   = "EPSG:" + srid;
 
         Floodplain plain = Floodplain.getFloodplain(river);
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/InputDoubleState.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/InputDoubleState.java	Thu Feb 28 12:47:24 2013 +0100
@@ -6,7 +6,7 @@
 
 
 /**
- * State to keep a double value and validate it against a range
+ * State to keep a double value and validate it against a range.
  */
 public class InputDoubleState extends MinMaxState {
 
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/WQAdapted.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/WQAdapted.java	Thu Feb 28 12:47:24 2013 +0100
@@ -59,6 +59,7 @@
     public static final GaugeOrder GAUGE_UP   = new GaugeOrder(true);
     public static final GaugeOrder GAUGE_DOWN = new GaugeOrder(false);
 
+
     /** Trivial, empty constructor. */
     public WQAdapted() {
     }
@@ -117,6 +118,7 @@
     }
 
 
+    /** Create the items for input to the ranges per mode. */
     protected Element[] createValueItems(
         XMLUtils.ElementCreator cr,
         Artifact    artifact,
@@ -165,7 +167,7 @@
                 double[] mmW = gauge.determineMinMaxW();
 
                 elements.add(createItem(
-                    cr, new String[] { from + ";" + to, ""}, mmQ, mmW));
+                    cr, new String[] { from + ";" + to, gauge.getName()}, mmQ, mmW));
             }
         }
         else {
@@ -186,7 +188,7 @@
                 double[] mmW = gauge.determineMinMaxW();
 
                 elements.add(createItem(
-                    cr, new String[] { to + ";" + from, ""}, mmQ, mmW));
+                    cr, new String[] { to + ";" + from, gauge.getName()}, mmQ, mmW));
             }
         }
 
@@ -449,7 +451,7 @@
             double lower = Double.parseDouble(parts[0]);
             double upper = Double.parseDouble(parts[1]);
 
-            String[] values = parts[2].split(",");
+            String[] values = parts[3].split(",");
 
             int      num = values.length;
             double[] res = new double[num];
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/fixation/FixRealizingCompute.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/fixation/FixRealizingCompute.java	Thu Feb 28 12:47:24 2013 +0100
@@ -130,6 +130,9 @@
                 new DataFacet(PDF, "PDF data", ComputeType.ADVANCE, hash, id));
         }
 
+        facets.add(
+            new DataFacet(AT, "AT data", ComputeType.ADVANCE, hash, id));
+
         facets.add(new FixWQCurveFacet(
             0, Resources.getMsg(meta, I18N_WQ_CURVE, I18N_WQ_CURVE)));
 
--- a/flys-artifacts/src/main/java/de/intevation/flys/collections/FLYSArtifactCollection.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/collections/FLYSArtifactCollection.java	Thu Feb 28 12:47:24 2013 +0100
@@ -376,7 +376,7 @@
         AttributeParser parser    = new AttributeParser(oldAttrs);
         CollectionAttribute cAttr = parser.getCollectionAttribute();
 
-        Output   output   = cAttr.getOutput(name);
+        Output output = cAttr.getOutput(name);
         Settings settings = null;
         if (output != null) {
             settings = output.getSettings();
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/ATWriter.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/ATWriter.java	Thu Feb 28 12:47:24 2013 +0100
@@ -25,6 +25,7 @@
 
 import org.apache.log4j.Logger;
 
+/** Write AT files. */
 public class ATWriter
 {
     private static Logger logger = Logger.getLogger(ATWriter.class);
@@ -156,7 +157,7 @@
     {
         PrintWriter out = new PrintWriter(writer);
 
-        // a header is required, because the desktop version of FLYS will skip
+        // A header is required, because the desktop version of FLYS will skip
         // the first row.
         if (gName != null) {
             printGaugeHeader(out, meta, river, km, gName, datum, date);
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/AbstractExporter.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/AbstractExporter.java	Thu Feb 28 12:47:24 2013 +0100
@@ -74,6 +74,12 @@
     /** The master artifact. */
     protected Artifact master;
 
+    private NumberFormat kmFormat;
+
+    private NumberFormat wFormat;
+
+    private NumberFormat qFormat;
+
 
     /**
      * Concrete subclasses need to use this method to write their special data
@@ -116,6 +122,12 @@
         this.master = master;
     }
 
+    /** Get the callcontext that this exporter has been initialized
+     * with. */
+    public CallContext getCallContext() {
+        return this.context;
+    }
+
 
     @Override
     public void setCollection(FLYSArtifactCollection collection) {
@@ -234,6 +246,10 @@
         return Resources.getMsg(context.getMeta(), key, def);
     }
 
+    protected String msg(String key, String def, Object[] args) {
+        return Resources.getMsg(context.getMeta(), key, def, args);
+    }
+
 
     /**
      * This method starts CSV creation. It makes use of writeCSVData() which has
@@ -295,7 +311,10 @@
      * @return the number formatter for kilometer values.
      */
     protected NumberFormat getKmFormatter() {
-        return Formatter.getWaterlevelKM(context);
+        if (kmFormat == null) {
+            kmFormat = Formatter.getWaterlevelKM(context);
+        }
+        return kmFormat;
     }
 
 
@@ -305,7 +324,10 @@
      * @return the number formatter for W values.
      */
     protected NumberFormat getWFormatter() {
-        return Formatter.getWaterlevelW(context);
+        if (wFormat == null) {
+            wFormat = Formatter.getWaterlevelW(context);
+        }
+        return wFormat;
     }
 
 
@@ -315,7 +337,10 @@
      * @return the number formatter for Q values.
      */
     protected NumberFormat getQFormatter() {
-        return Formatter.getWaterlevelQ(context);
+        if (qFormat == null) {
+            qFormat = Formatter.getWaterlevelQ(context);
+        }
+        return qFormat;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/CrossSectionGenerator.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/CrossSectionGenerator.java	Thu Feb 28 12:47:24 2013 +0100
@@ -17,6 +17,7 @@
 
 import de.intevation.artifactdatabase.state.ArtifactAndFacet;
 import de.intevation.artifacts.DataProvider;
+import de.intevation.flys.artifacts.FLYSArtifact;
 import de.intevation.flys.artifacts.geom.Lines;
 import de.intevation.flys.artifacts.model.CrossSectionFacet;
 import de.intevation.flys.artifacts.model.FacetTypes;
@@ -28,6 +29,7 @@
 import de.intevation.flys.themes.LineStyle;
 import de.intevation.flys.themes.TextStyle;
 import de.intevation.flys.themes.ThemeAccess;
+import de.intevation.flys.utils.FLYSUtils;
 import de.intevation.flys.utils.Formatter;
 import de.intevation.flys.utils.ThemeUtil;
 
@@ -232,7 +234,13 @@
 
     @Override
     protected String getDefaultYAxisLabel(int pos) {
-        return msg(I18N_YAXIS_LABEL, I18N_YAXIS_LABEL_DEFAULT);
+        FLYSArtifact flys = (FLYSArtifact) master;
+
+        String unit = FLYSUtils.getRiver(flys).getWstUnit().getName();
+
+        return msg(I18N_YAXIS_LABEL,
+                   I18N_YAXIS_LABEL_DEFAULT,
+                   new Object[] { unit });
     }
 
 
@@ -341,7 +349,11 @@
         if (ThemeUtil.parseShowLevel(theme) && lines.points.length > 1
                 && lines.points[1].length > 0) {
             NumberFormat nf = Formatter.getMeterFormat(this.context);
-            String labelAdd = "W=" + nf.format(lines.points[1][0]) + "NN+m";
+            FLYSArtifact flys = (FLYSArtifact) master;
+
+            String unit = FLYSUtils.getRiver(flys).getWstUnit().getName();
+
+            String labelAdd = "W=" + nf.format(lines.points[1][0]) + unit;
             if (series.getLabel().length() == 0) {
                 series.setLabel(labelAdd);
             }
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java	Thu Feb 28 12:47:24 2013 +0100
@@ -32,10 +32,13 @@
 
 import de.intevation.flys.model.Gauge;
 
+import de.intevation.flys.artifacts.access.FixRealizingAccess;
 import de.intevation.flys.artifacts.access.RangeAccess;
+import de.intevation.flys.artifacts.FixationArtifact;
 import de.intevation.flys.artifacts.FLYSArtifact;
 import de.intevation.flys.artifacts.WINFOArtifact;
 import de.intevation.flys.artifacts.model.CalculationResult;
+import de.intevation.flys.artifacts.model.Segment;
 import de.intevation.flys.artifacts.model.WQCKms;
 import de.intevation.flys.artifacts.model.WQKms;
 import de.intevation.flys.artifacts.model.WKmsJRDataSource;
@@ -107,7 +110,6 @@
     public static final String CSV_NOT_IN_GAUGE_RANGE =
         "export.waterlevel.csv.not.in.gauge.range";
 
-
     public static final Pattern NUMBERS_PATTERN =
         Pattern.compile("\\D*(\\d++.\\d*)\\D*");
 
@@ -183,7 +185,7 @@
      * @param wqkms A WQKms object that should be prepared.
      */
     protected String getColumnTitle(WINFOArtifact winfo, WQKms wqkms) {
-        logger.debug("WaterlevelExporter.prepareNamedValue");
+        logger.debug("WaterlevelExporter.getColumnTitle");
 
         String name = wqkms.getName();
 
@@ -424,10 +426,12 @@
     ) {
         logger.info("WaterlevelExporter.writeCSVHeader");
 
+        String unit = FLYSUtils.getRiver((FLYSArtifact) master).getWstUnit().getName();
+
         if (atGauge) {
             writer.writeNext(new String[] {
                 msg(CSV_KM_HEADER, DEFAULT_CSV_KM_HEADER),
-                msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER),
+                msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }),
                 msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER),
                 (isQ
                     ? msg(CSV_Q_DESC_HEADER, DEFAULT_CSV_Q_DESC_HEADER)
@@ -439,7 +443,8 @@
         else {
             writer.writeNext(new String[] {
                 msg(CSV_KM_HEADER, DEFAULT_CSV_KM_HEADER),
-                msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER),
+                    // TODO flys/issue1128 (unit per river)
+                msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }),
                 msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER),
                 msg(CSV_LOCATION_HEADER, DEFAULT_CSV_LOCATION_HEADER)
             });
@@ -447,6 +452,50 @@
     }
 
 
+    /** Linearly search for the gauge that is valid at the given km. */
+    private Gauge findGauge(double km, List<Gauge> gauges) {
+        for (Gauge gauge: gauges) {
+            if (km >= gauge.getRange().getA().doubleValue()
+                && km <= gauge.getRange().getB().doubleValue()) {
+                return gauge;
+            }
+        }
+        return null;
+    }
+
+
+    private void writeRow4(CSVWriter writer, double wqkm[], FLYSArtifact flys) {
+        NumberFormat kmf = getKmFormatter();
+        NumberFormat wf  = getWFormatter();
+        NumberFormat qf  = getQFormatter();
+
+        writer.writeNext(new String[] {
+            kmf.format(wqkm[2]),
+            wf.format(wqkm[0]),
+            qf.format(wqkm[1]),
+            FLYSUtils.getLocationDescription(flys, wqkm[2])
+        });
+    }
+
+
+    /** Write a CSV row at a gauge location. */
+    private void writeRow6(CSVWriter writer, double wqkm[], String wOrQDesc,
+        FLYSArtifact flys, String gaugeName) {
+        NumberFormat kmf = getKmFormatter();
+        NumberFormat wf  = getWFormatter();
+        NumberFormat qf  = getQFormatter();
+
+        writer.writeNext(new String[] {
+            kmf.format(wqkm[2]),
+            wf.format(wqkm[0]),
+            qf.format(wqkm[1]),
+            wOrQDesc,
+            FLYSUtils.getLocationDescription(flys, wqkm[2]),
+            gaugeName
+        });
+    }
+
+
     /**
      * Write "rows" of csv data from wqkms with writer.
      */
@@ -471,6 +520,7 @@
         double[] result = new double[3];
 
         FLYSArtifact flys       = (FLYSArtifact) master;
+        List<Gauge>  gauges     = FLYSUtils.getGauges(flys);
         Gauge        gauge      = FLYSUtils.getGauge(flys);
         String       gaugeName  = gauge.getName();
         String       desc       = "";
@@ -493,36 +543,51 @@
         long startTime = System.currentTimeMillis();
 
         String colDesc = desc;
+        List<Segment> segments = null;
+        boolean isFixRealize = false;
         if (flys instanceof WINFOArtifact) {
             if (wqkms != null && wqkms.getRawValue() != null) {
                 WINFOArtifact winfo = (WINFOArtifact) flys;
                 colDesc = FLYSUtils.getNamedMainValue(winfo, wqkms.getRawValue());
             }
         }
+        else if (flys instanceof FixationArtifact) {
+            // Get W/Q input per gauge for this case.
+            FixRealizingAccess fixAccess = new FixRealizingAccess(flys, getCallContext());
+            segments = fixAccess.getSegments();
+            if (segments != null && segments.size() > 0) {
+                isFixRealize = true;
+            }
+        }
 
         for (int i = 0; i < size; i ++) {
             result = wqkms.get(i, result);
 
+            // Check if there has been W input per Gauge and use it.
+            if (segments != null) {
+                for (Segment segment: segments) {
+                    if (segment.inside(result[2])) {
+                        colDesc = "" + segment.getValues()[0];
+                    }
+                }
+            }
+
             if (atGauge) {
-                writer.writeNext(new String[] {
-                    kmf.format(result[2]),
-                    wf.format(result[0]),
-                    qf.format(result[1]),
-                    colDesc,
-                    FLYSUtils.getLocationDescription(flys, result[2]),
+                String gaugeN;
+                // TODO issue1131, name gauge
+                if (isFixRealize) {
+                    gaugeN = findGauge(result[2], gauges).getName();
+                }
+                else {
                     // TODO issue1114: Take correct gauge
-                    result[2] >= a && result[2] <= b
+                    gaugeN = result[2] >= a && result[2] <= b
                         ? gaugeName
-                        : notinrange
-                });
+                        : notinrange;
+                }
+                writeRow6(writer, result, colDesc, flys, gaugeN);
             }
             else {
-                writer.writeNext(new String[] {
-                    kmf.format(result[2]),
-                    wf.format(result[0]),
-                    qf.format(result[1]),
-                    FLYSUtils.getLocationDescription(flys, result[2])
-                });
+                writeRow4(writer, result, flys);
             }
         }
 
@@ -559,6 +624,9 @@
 
         for (WQKms[] tmp: data) {
             for (WQKms wqkms: tmp) {
+                if (wqkms instanceof ConstantWQKms) {
+                    continue;
+                }
                 int size = wqkms != null ? wqkms.size() : 0;
 
                 addWSTColumn(writer, wqkms);
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/fixings/FixATExport.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/fixings/FixATExport.java	Thu Feb 28 12:47:24 2013 +0100
@@ -15,6 +15,7 @@
 import de.intevation.flys.artifacts.model.Parameters;
 
 import de.intevation.flys.artifacts.model.fixings.FixAnalysisResult;
+import de.intevation.flys.artifacts.model.fixings.FixResult;
 
 import de.intevation.flys.exports.AbstractExporter;
 
@@ -46,8 +47,8 @@
         if (data instanceof CalculationResult) {
             CalculationResult cr = (CalculationResult)data;
             Object resData = cr.getData();
-            if (resData instanceof FixAnalysisResult) {
-                this.parameters = ((FixAnalysisResult)resData).getParameters();
+            if (resData instanceof FixResult) {
+                this.parameters = ((FixResult)resData).getParameters();
             }
         }
         else {
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/DoubleUtil.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/DoubleUtil.java	Thu Feb 28 12:47:24 2013 +0100
@@ -177,7 +177,7 @@
 
                 vs.resetQuick();
 
-                for (String valueStr: parts[2].split(",")) {
+                for (String valueStr: parts[3].split(",")) {
                     vs.add(round(Double.parseDouble(valueStr.trim())));
                 }
 
--- a/flys-artifacts/src/main/resources/messages.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/resources/messages.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -151,7 +151,7 @@
 chart.cross_section.title = Cross Section for river {0}
 chart.cross_section.subtitle = {0}-km: {1,number,#.###}
 chart.cross_section.xaxis.label = Distance [m]
-chart.cross_section.yaxis.label = W [NN + m]
+chart.cross_section.yaxis.label = W [{0}]
 
 chart.discharge.curve.title = Discharge Curve
 chart.discharge.curve.xaxis.label = Q [m\u00b3/s]
@@ -301,7 +301,7 @@
 
 
 export.waterlevel.csv.header.km = River-Km
-export.waterlevel.csv.header.w = W [NN + m]
+export.waterlevel.csv.header.w = W [{0}]
 export.waterlevel.csv.header.q = Q [m\u00b3/s]
 export.waterlevel.csv.header.q.desc = Description
 export.waterlevel.csv.header.location = Location
--- a/flys-artifacts/src/main/resources/messages_de.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/resources/messages_de.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -144,7 +144,7 @@
 chart.cross_section.title = Querprofildiagramm f\u00fcr Gew\u00e4sser {0}
 chart.cross_section.subtitle = {0}-km: {1,number,#.###}
 chart.cross_section.xaxis.label = Abstand [m]
-chart.cross_section.yaxis.label = W [NN + m]
+chart.cross_section.yaxis.label = W [{0}]
 
 chart.longitudinal.section.title = W-L\u00e4ngsschnitt
 chart.longitudinal.section.subtitle = Bereich: {0}-km {1,number,#.###} - {2,number,#.###}
@@ -291,7 +291,7 @@
 chart.beddifference.yaxis.label.heights = Absolute H\u00f6he [m]
 
 export.waterlevel.csv.header.km = Fluss-Km
-export.waterlevel.csv.header.w = W [NN + m]
+export.waterlevel.csv.header.w = W [{0}]
 export.waterlevel.csv.header.q = Q [m\u00b3/s]
 export.waterlevel.csv.header.q.desc = Bezeichnung
 export.waterlevel.csv.header.location = Lage
--- a/flys-artifacts/src/main/resources/messages_de_DE.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/resources/messages_de_DE.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -145,7 +145,7 @@
 chart.cross_section.title = Querprofildiagramm f\u00fcr Gew\u00e4sser {0}
 chart.cross_section.subtitle = {0}-km: {1,number,#.###}
 chart.cross_section.xaxis.label = Abstand [m]
-chart.cross_section.yaxis.label = W [NN + m]
+chart.cross_section.yaxis.label = W [{0}]
 
 chart.longitudinal.section.title = W-L\u00e4ngsschnitt
 chart.longitudinal.section.subtitle = Bereich: {0}-km {1,number,#.###} - {2,number,#.###}
@@ -300,7 +300,7 @@
 chart.beddifference.yaxis.label.heights = Absolute H\u00f6he [m]
 
 export.waterlevel.csv.header.km = Fluss-Km
-export.waterlevel.csv.header.w = W [NN + m]
+export.waterlevel.csv.header.w = W [{0}]
 export.waterlevel.csv.header.q = Q [m\u00b3/s]
 export.waterlevel.csv.header.q.desc = Bezeichnung
 export.waterlevel.csv.header.location = Lage
--- a/flys-artifacts/src/main/resources/messages_en.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-artifacts/src/main/resources/messages_en.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -143,7 +143,7 @@
 chart.cross_section.title = Cross Section for river {0}
 chart.cross_section.subtitle = {0}-km: {1,number,#.###}
 chart.cross_section.xaxis.label = Distance [m]
-chart.cross_section.yaxis.label = W [NN + m]
+chart.cross_section.yaxis.label = W [{0}]
 
 chart.longitudinal.section.title = W-Longitudinal Section
 chart.longitudinal.section.subtitle = Range: {0}-km {1,number,#.###} - {2,number,#.###}
@@ -303,7 +303,7 @@
 chart.beddifference.yaxis.label.heights = Absolute Height [m]
 
 export.waterlevel.csv.header.km = River-Km
-export.waterlevel.csv.header.w = W [NN + m]
+export.waterlevel.csv.header.w = W [{0}]
 export.waterlevel.csv.header.q = Q [m\u00b3/s]
 export.waterlevel.csv.header.q.desc = Description
 export.waterlevel.csv.header.location = Location
--- a/flys-backend/README	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/README	Thu Feb 28 12:47:24 2013 +0100
@@ -9,6 +9,12 @@
 $ createuser --no-createrole --no-superuser --pwprompt --no-createdb flys
 $ createdb --encoding=UTF-8 --owner flys flystest1
 
+
+Build an importer package:
+mvn -f pom.xml clean compile assembly:single
+Alternatively with oracle:
+mvn -f pom-oracle.xml clean compile assembly:single
+
 Standalone DateGuesser testing example:
 
 mvn -e -Dexec.mainClass=de.intevation.flys.utils.DateGuesser -Dexec.args="110803" exec:java <<EOF
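
The two mvn invocations above produce the assembly jar that
bin/make-importer-package.sh later copies as hydr_morph/importer.jar. A
build-and-inspect sketch, with the checkout path as a placeholder assumption:

    # Build the Oracle variant of the importer and check the assembly output.
    cd /path/to/flys/root/flys-backend
    mvn -f pom-oracle.xml clean compile assembly:single
    ls -l target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar
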
--- a/flys-backend/contrib/hws_schema.diff	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,128 +0,0 @@
-diff -r 0bb0525bca52 flys-backend/doc/schema/postgresql-drop-spatial.sql
---- a/flys-backend/doc/schema/postgresql-drop-spatial.sql	Fri Jan 25 15:38:34 2013 +0100
-+++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql	Fri Jan 25 15:42:05 2013 +0100
-@@ -27,8 +27,14 @@
- DROP TABLE catchment;
- DROP SEQUENCE CATCHMENT_ID_SEQ;
- 
--DROP TABLE hws;
--DROP SEQUENCE HWS_ID_SEQ;
-+--DROP TABLE hws;
-+--DROP SEQUENCE HWS_ID_SEQ;
-+
-+DROP TABLE hws_points;
-+DROP SEQUENCE HWS_POINTS_ID_SEQ;
-+
-+DROP TABLE hws_lines;
-+DROP SEQUENCE HWS_LINES_ID_SEQ;
- 
- DROP TABLE floodmaps;
- DROP SEQUENCE FLOODMAPS_ID_SEQ;
-@@ -42,4 +48,7 @@
- DROP TABLE gauge_location;
- DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
- 
-+DROP TABLE fed_states;
-+DROP TABLE hws_kinds;
-+
- COMMIT;
-diff -r 0bb0525bca52 flys-backend/doc/schema/postgresql-spatial.sql
---- a/flys-backend/doc/schema/postgresql-spatial.sql	Fri Jan 25 15:38:34 2013 +0100
-+++ b/flys-backend/doc/schema/postgresql-spatial.sql	Fri Jan 25 15:42:05 2013 +0100
-@@ -132,20 +132,87 @@
- SELECT AddGeometryColumn('catchment','geom',31467,'POLYGON',2);
- ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ');
- 
-+-- Static lookup tables for Hochwasserschutzanlagen
-+CREATE TABLE hws_kinds (
-+    id int PRIMARY KEY NOT NULL,
-+    kind VARCHAR(64) NOT NULL
-+);
-+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass');
-+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm');
-+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben');
- 
----Hydrologie/HW-Schutzanlagen/hws.shp
--CREATE SEQUENCE HWS_ID_SEQ;
--CREATE TABLE hws (
-+CREATE TABLE fed_states (
-     id int PRIMARY KEY NOT NULL,
-+    name VARCHAR(23) NOT NULL
-+);
-+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern');
-+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen');
-+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen');
-+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen');
-+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz');
-+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland');
-+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein');
-+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg');
-+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern');
-+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen');
-+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg');
-+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt');
-+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen');
-+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin');
-+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen');
-+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg');
-+
-+--Hydrologie/HW-Schutzanlagen/*Linien.shp
-+CREATE SEQUENCE HWS_LINES_ID_SEQ;
-+CREATE TABLE hws_lines (
-+    id int PRIMARY KEY NOT NULL,
-+    ogr_fid int,
-+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
-+    fed_state_id int REFERENCES fed_states(id),
-     river_id int REFERENCES rivers(id),
--    hws_facility VARCHAR(256),
--    type VARCHAR(256),
--    name VARCHAR(64),
--    path     VARCHAR(256)
-+    name VARCHAR(256),
-+    path VARCHAR(256),
-+    offical INT DEFAULT 0,
-+    agency VARCHAR(256),
-+    range VARCHAR(256),
-+    shore_side INT DEFAULT 0,
-+    source VARCHAR(256),
-+    status_date TIMESTAMP,
-+    description VARCHAR(256)
- );
--SELECT AddGeometryColumn('hws','geom',31467,'LINESTRING',2);
--ALTER TABLE hws ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_ID_SEQ');
-+SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 2);
-+SELECT AddGeometryColumn('hws_lines', 'geom_target', 31467, 'LINESTRING', 2); -- ?
-+SELECT AddGeometryColumn('hws_lines', 'geom_rated_level', 31467, 'LINESTRING', 2); -- ?
-+-- TODO: dike_km_from dike_km_to, are they geometries?
- 
-+ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ');
-+
-+--Hydrologie/HW-Schutzanlagen/*Punkte.shp
-+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
-+CREATE TABLE hws_points (
-+    id int PRIMARY KEY NOT NULL,
-+    ogr_fid int,
-+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
-+    fed_state_id int REFERENCES fed_states(id),
-+    river_id int REFERENCES rivers(id),
-+    name VARCHAR,
-+    path VARCHAR,
-+    offical INT DEFAULT 0,
-+    agency VARCHAR,
-+    range VARCHAR,
-+    shore_side INT DEFAULT 0,
-+    source VARCHAR,
-+    status_date VARCHAR,
-+    description VARCHAR,
-+    freeboard FLOAT8,
-+    dike_km FLOAT8,
-+    z FLOAT8,
-+    z_target FLOAT8,
-+    rated_level FLOAT8
-+);
-+SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2);
-+
-+ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ');
- 
- --
- --Hydrologie/UeSG
--- a/flys-backend/contrib/run_geo.sh	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/run_geo.sh	Thu Feb 28 12:47:24 2013 +0100
@@ -2,8 +2,7 @@
 
 # Required
 RIVER_PATH="/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar"
-RIVER_ID=1
-TARGET_SRS=31467
+RIVER_NAME="Saar"
 
 # Set this to your target database for Oracle
 HOST=localhost
@@ -17,7 +16,6 @@
 SKIP_AXIS=0
 SKIP_KMS=0
 SKIP_CROSSSECTIONS=0
-SKIP_LINES=0
 SKIP_FIXPOINTS=0
 SKIP_BUILDINGS=0
 SKIP_FLOODPLAINS=0
@@ -27,15 +25,26 @@
 SKIP_GAUGE_LOCATION=0
 SKIP_CATCHMENTS=0
 SKIP_UESG=0
+SKIP_DGM=0
 
+# There should be no need to change anything below this line
 
 DIR=`dirname $0`
 DIR=`readlink -f "$DIR"`
 
+OPTIONAL_LIBS="${DIR}"/opt
+if [ -d "$OPTIONAL_LIBS" ]; then
+    export PATH="$OPTIONAL_LIBS/bin:$PATH"
+    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH"
+    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH"
+    export PYTHONPATH="$OPTIONAL_LIBS/lib/python2.6/site-packages:$PYTHONPATH"
+    export PYTHONPATH="$OPTIONAL_LIBS/lib64/python2.6/site-packages:$PYTHONPATH"
+    export GDAL_DATA="$OPTIONAL_LIBS/share/gdal"
+fi
+
 exec python $DIR/shpimporter/shpimporter.py \
     --directory $RIVER_PATH \
-    --river_id $RIVER_ID \
-    --target_srs $TARGET_SRS \
+    --river_name $RIVER_NAME \
     --ogr_connection "$OGR_CONNECTION" \
     --host $HOST \
     --user $USER \
@@ -44,13 +53,12 @@
     --skip_axis $SKIP_AXIS \
     --skip_kms $SKIP_KMS \
     --skip_crosssections $SKIP_CROSSSECTIONS \
-    --skip_lines $SKIP_LINES \
     --skip_fixpoints $SKIP_FIXPOINTS \
     --skip_buildings $SKIP_BUILDINGS \
     --skip_floodplains $SKIP_FLOODPLAINS \
     --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \
     --skip_gauge_locations $SKIP_GAUGE_LOCATION \
-    --skip_catchments $SKIP_CATCHMENTS \
     --skip_uesgs $SKIP_UESG \
     --skip_hws_lines $SKIP_HWS_LINES \
-    --skip_hws_points $SKIP_HWS_POINTS
+    --skip_hws_points $SKIP_HWS_POINTS \
+    --skip_dgm $SKIP_DGM
--- a/flys-backend/contrib/run_hydr_morph.sh	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/run_hydr_morph.sh	Thu Feb 28 12:47:24 2013 +0100
@@ -8,6 +8,7 @@
 BACKEND_PORT="1521"
 BACKEND_NAME="XE"
 LOG4J_CONFIG="conf/log4j.properties"
+JAR="hydr_morph/importer.jar"
 #####################################################################
 
 
@@ -24,55 +25,48 @@
 IMPORTER_MAINVALUE_TYPES=QWTD
 IMPORTER_ANNOTATION_TYPES="conf/annotation-types.xml"
 
-IMPORTER_SKIP_GAUGES=false
-IMPORTER_SKIP_HISTORICAL_DISCHARGE_GAUGES=false
 IMPORTER_SKIP_ANNOTATIONS=false
-IMPORTER_SKIP_WST=false
-IMPORTER_SKIP_PRFS=false
+IMPORTER_SKIP_BWASTR=false
 IMPORTER_SKIP_DA50S=false
 IMPORTER_SKIP_DA66S=false
-IMPORTER_SKIP_W80S=false
-IMPORTER_SKIP_HYKS=false
 IMPORTER_SKIP_EXTRA_WST=false
 IMPORTER_SKIP_FIXATIONS=false
-IMPORTER_SKIP_OFFICIAL_LINES=false
 IMPORTER_SKIP_FLOOD_WATER=false
 IMPORTER_SKIP_FLOOD_PROTECTION=false
+IMPORTER_SKIP_GAUGES=false
+IMPORTER_SKIP_HISTORICAL_DISCHARGE_GAUGES=false
+IMPORTER_SKIP_HYKS=false
+IMPORTER_SKIP_OFFICIAL_LINES=false
+IMPORTER_SKIP_PRFS=false
+IMPORTER_SKIP_W80S=false
+IMPORTER_SKIP_WST=false
 
 IMPORTER_SKIP_BED_HEIGHT_SINGLE=false
 IMPORTER_SKIP_BED_HEIGHT_EPOCH=false
-IMPORTER_SKIP_SEDIMENT_DENSITY=false
+IMPORTER_SKIP_FLOW_VELOCITY=false
 IMPORTER_SKIP_MORPHOLOGICAL_WIDTH=false
-IMPORTER_SKIP_FLOW_VELOCITY=false
+IMPORTER_SKIP_SEDIMENT_DENSITY=false
 IMPORTER_SKIP_SEDIMENT_YIELD=false
+IMPORTER_SKIP_SQ_RELATION=false
 IMPORTER_SKIP_WATERLEVELS=false
 IMPORTER_SKIP_WATERLEVEL_DIFFERENCES=false
-IMPORTER_SKIP_SQ_RELATION=false
 #####################################################################
 
 #MIN_MEMORY="8192m"
 MIN_MEMORY="1024m"
 
 
-########################## Importer Settings ########################
-APP="de.intevation.flys.importer.Importer"
-DIR=`dirname $0`
-DIR=`readlink -f "$DIR/.."`
-#####################################################################
-
+######################### Run Importer ##############################
+OPTIONAL_LIBS="${DIR}"/../opt
+if [ -d "$OPTIONAL_LIBS" ]; then
+    export PATH="$OPTIONAL_LIBS/bin:$PATH"
+    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH"
+    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH"
+fi
 
-########################## Collect required libraries ###############
-CLASSPATH=
-for l in `find "$DIR/lib" -name \*.jar -print`; do
-   CLASSPATH=$CLASSPATH:$l
-done
+export LC_ALL=de_DE@euro # Workaround for encoding problems
 
-export CLASSPATH
-#####################################################################
-
-
-######################### Run Importer ##############################
-exec java \
+exec java -jar \
     -Xmx$MIN_MEMORY \
     -server \
     -Dlog4j.configuration=file://`readlink -f $LOG4J_CONFIG` \
@@ -80,32 +74,33 @@
     -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \
     -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \
     -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \
-    -Dflys.backend.importer.skip.historical.discharge.tables=$IMPORTER_SKIP_HISTORICAL_DISCHARGE_GAUGES \
-    -Dflys.backend.importer.skip.gauges=$IMPORTER_SKIP_GAUGES \
     -Dflys.backend.importer.skip.annotations=$IMPORTER_SKIP_ANNOTATIONS \
-    -Dflys.backend.importer.skip.prfs=$IMPORTER_SKIP_PRFS \
-    -Dflys.backend.importer.skip.w80s=$IMPORTER_SKIP_W80S \
+    -Dflys.backend.importer.skip.bed.height.single=$IMPORTER_SKIP_BED_HEIGHT_SINGLE \
+    -Dflys.backend.importer.skip.bed.height.epoch=$IMPORTER_SKIP_BED_HEIGHT_EPOCH \
+    -Dflys.backend.importer.skip.bwastr=$IMPORTER_SKIP_BWASTR \
     -Dflys.backend.importer.skip.da50s=$IMPORTER_SKIP_DA50S \
     -Dflys.backend.importer.skip.da66s=$IMPORTER_SKIP_DA66S \
-    -Dflys.backend.importer.skip.hyks=$IMPORTER_SKIP_HYKS \
-    -Dflys.backend.importer.skip.wst=$IMPORTER_SKIP_WST \
     -Dflys.backend.importer.skip.extra.wsts=$IMPORTER_SKIP_EXTRA_WST \
     -Dflys.backend.importer.skip.fixations=$IMPORTER_SKIP_FIXATIONS \
-    -Dflys.backend.importer.skip.official.lines=$IMPORTER_SKIP_OFFICIAL_LINES \
     -Dflys.backend.importer.skip.flood.water=$IMPORTER_SKIP_FLOOD_WATER \
     -Dflys.backend.importer.skip.flood.protection=$IMPORTER_SKIP_FLOOD_PROTECTION \
-    -Dflys.backend.importer.skip.bed.height.single=$IMPORTER_SKIP_BED_HEIGHT_SINGLE \
-    -Dflys.backend.importer.skip.bed.height.epoch=$IMPORTER_SKIP_BED_HEIGHT_EPOCH \
+    -Dflys.backend.importer.skip.flow.velocity=$IMPORTER_SKIP_FLOW_VELOCITY \
+    -Dflys.backend.importer.skip.gauges=$IMPORTER_SKIP_GAUGES \
+    -Dflys.backend.importer.skip.historical.discharge.tables=$IMPORTER_SKIP_HISTORICAL_DISCHARGE_GAUGES \
+    -Dflys.backend.importer.skip.hyks=$IMPORTER_SKIP_HYKS \
+    -Dflys.backend.importer.skip.morphological.width=$IMPORTER_SKIP_MORPHOLOGICAL_WIDTH \
+    -Dflys.backend.importer.skip.official.lines=$IMPORTER_SKIP_OFFICIAL_LINES \
+    -Dflys.backend.importer.skip.prfs=$IMPORTER_SKIP_PRFS \
     -Dflys.backend.importer.skip.sediment.density=$IMPORTER_SKIP_SEDIMENT_DENSITY \
-    -Dflys.backend.importer.skip.morphological.width=$IMPORTER_SKIP_MORPHOLOGICAL_WIDTH \
-    -Dflys.backend.importer.skip.flow.velocity=$IMPORTER_SKIP_FLOW_VELOCITY \
     -Dflys.backend.importer.skip.sediment.yield=$IMPORTER_SKIP_SEDIMENT_YIELD \
+    -Dflys.backend.importer.skip.sq.relation=$IMPORTER_SKIP_SQ_RELATION \
+    -Dflys.backend.importer.skip.w80s=$IMPORTER_SKIP_W80S \
     -Dflys.backend.importer.skip.waterlevels=$IMPORTER_SKIP_WATERLEVELS \
     -Dflys.backend.importer.skip.waterlevel.differences=$IMPORTER_SKIP_WATERLEVEL_DIFFERENCES \
-    -Dflys.backend.importer.skip.sq.relation=$IMPORTER_SKIP_SQ_RELATION \
+    -Dflys.backend.importer.skip.wst=$IMPORTER_SKIP_WST \
     -Dflys.backend.user=$BACKEND_USER \
     -Dflys.backend.password=$BACKEND_PASS \
     -Dflys.backend.url=$BACKEND_URL \
     -Dflys.backend.driver=$BACKEND_DB_DRIVER \
     -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
-     $APP
+     $JAR
--- a/flys-backend/contrib/shpimporter/axis.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/axis.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,7 +1,9 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
-import shpimporter
 
 NAME="Axis"
 TABLE_NAME="river_axes"
@@ -27,7 +29,7 @@
 
 
     def isShapeRelevant(self, name, path):
-        return name == "achse" or name.find("achse") >= 0
+        return "achse" in name.lower()
 
 
     def createNewFeature(self, featureDef, feat, **args):
--- a/flys-backend/contrib/shpimporter/boundaries.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/boundaries.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,10 +1,13 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
 TABLE_NAME="hydr_boundaries"
 TABLE_NAME_POLY="hydr_boundaries_poly"
-PATH="Hydrologie/Hydr.Grenzen/Linien"
+PATH="Hydrologie/Hydr.Grenzen"
 NAME="Hydr. Boundaries"
 
 
@@ -13,29 +16,74 @@
     def getPath(self, base):
         return "%s/%s" % (base, PATH)
 
-
     def getTablename(self):
         return TABLE_NAME
 
-
     def getName(self):
         return NAME
 
-
     def isGeometryValid(self, geomType):
-        return geomType == 2
-
+        return geomType == ogr.wkbLineString
 
     def isShapeRelevant(self, name, path):
-        return True
-
+        shp = ogr.Open(path)
+        if self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) and \
+                self.getKind(path) > 0:
+            return True
+        else:
+            return False
 
     def getKind(self, path):
-        if path.find("BfG") > 0:
+        if "linien/bfg" in path.lower():
             return 1
+        elif "linien/land" in path.lower():
+            return 2
+        elif "/sonstige/" in path.lower():
+            return 3
         else:
-            return 2
+            return 0
 
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+        newFeat.SetField("kind", kind)
+        if self.IsFieldSet(feat, "SECTIE"):
+            newFeat.SetField("sectie", feat.GetField("SECTIE"))
+
+        if self.IsFieldSet(feat, "SOBEK"):
+            newFeat.SetField("sobek", feat.GetField("SOBEK"))
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
+class HydrBoundaryPoly(HydrBoundary):
+
+    def getTablename(self):
+        return TABLE_NAME_POLY
+
+    def getName(self):
+        return "%s (Polygons)" % NAME
+
+    def isGeometryValid(self, geomType):
+        return geomType == ogr.wkbPolygon or geomType == ogr.wkbMultiPolygon
+
+    def isShapeRelevant(self, name, path):
+        shp = ogr.Open(path)
+        if self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) and \
+                self.getKind(path) > 0:
+            return True
+        else:
+            return False
 
     def createNewFeature(self, featureDef, feat, **args):
         kind  = self.getKind(args['path'])
@@ -48,39 +96,12 @@
         newFeat.SetField("name", args['name'])
         newFeat.SetField("kind", kind)
 
-        if self.IsFieldSet(feat, "river_id"):
-            newFeat.SetField("river_id", feat.GetField("river_id"))
-        else:
-            newFeat.SetField("river_id", self.river_id)
-
-        return newFeat
-
-
-
-class HydrBoundaryPoly(HydrBoundary):
-
-    def getTablename(self):
-        return TABLE_NAME_POLY
-
+        if self.IsFieldSet(feat, "SECTIE"):
+            newFeat.SetField("sectie", feat.GetField("SECTIE"))
 
-    def getName(self):
-        return "%s (Polygons)" % NAME
-
-
-    def isGeometryValid(self, geomType):
-        return geomType == 3 or geomType == 6
-
+        if self.IsFieldSet(feat, "SOBEK"):
+            newFeat.SetField("sobek", feat.GetField("SOBEK"))
 
-    def createNewFeature(self, featureDef, feat, **args):
-        kind  = self.getKind(args['path'])
-
-        newFeat  = ogr.Feature(featureDef)
-        geometry = feat.GetGeometryRef()
-        geometry.SetCoordinateDimension(2)
-
-        newFeat.SetGeometry(geometry)
-        newFeat.SetField("name", args['name'])
-        newFeat.SetField("kind", kind)
 
         if self.IsFieldSet(feat, "river_id"):
             newFeat.SetField("river_id", feat.GetField("river_id"))
--- a/flys-backend/contrib/shpimporter/buildings.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/buildings.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/catchments.py	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,53 +0,0 @@
-import ogr
-
-from importer import Importer
-
-TABLE_NAME="catchment"
-PATH="Hydrologie/Einzugsgebiet"
-NAME="Catchments"
-
-
-class Catchment(Importer):
-
-    def getPath(self, base):
-        return "%s/%s" % (base, PATH)
-
-
-    def getTablename(self):
-        return TABLE_NAME
-
-
-    def getName(self):
-        return NAME
-
-
-    def isGeometryValid(self, geomType):
-        return geomType == 3 or geomType == 6
-
-
-    def isShapeRelevant(self, name, path):
-        return True
-
-
-    def createNewFeature(self, featureDef, feat, **args):
-        newFeat  = ogr.Feature(featureDef)
-        geometry = feat.GetGeometryRef()
-        geometry.SetCoordinateDimension(2)
-
-        newFeat.SetGeometry(geometry)
-
-        if self.IsFieldSet(feat, "river_id"):
-            newFeat.SetField("river_id", feat.GetField("river_id"))
-        else:
-            newFeat.SetField("river_id", self.river_id)
-
-        if self.IsFieldSet(feat, "Name"):
-            newFeat.SetField("name", feat.GetField("name"))
-        else:
-            newFeat.SetField("name", args['name'])
-
-        if self.IsFieldSet(feat, "AREA"):
-            newFeat.SetField("area", feat.GetField("area"))
-
-        return newFeat
-
--- a/flys-backend/contrib/shpimporter/crosssectiontracks.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/crosssectiontracks.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/dgm.py	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+import codecs
+import utils
+
+def latin(string):
+    return unicode(string, "latin1")
+
+# <dbfield> : (<csvfield>, conversion function)
+DGM_MAP = {
+    "lower"           : ("km_von", lambda x: int(x)),
+    "upper"           : ("km_bis", lambda x: int(x)),
+    "year_from"       : "Jahr_von",
+    "year_to"         : "Jahr_bis",
+    "projection"      : "Projektion",
+    "elevation_state" : latin("Höhenstatus"),
+    "format"          : "Format",
+    "border_break"    : ("Bruchkanten",
+        lambda x: x.lower() == "ja"),
+    "resolution"      : (latin("Auflösung"), lambda x: x),
+#   "description"     : 
+    "srid"            : "SRID",
+    "path"            : ("Pfad_Bestand", lambda x: x),
+    }
+
+SQL_INSERT_DGT = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \
+        ") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+SQL_INSERT_DGT_ORA = "INSERT INTO dem (river_id, name, " + ", ".join(DGM_MAP.keys()) + \
+        ") VALUES (:s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s, :s)"
+
+def insertRiverDgm(dbconn, dgmfile, river_name, dry_run, oracle):
+    with codecs.open(dgmfile, "r", "latin1") as csvfile:
+        firstline = csvfile.readline()
+        names = firstline.split(";")
+        namedict = {}
+        field_nr = 0
+        for name in names:
+            namedict[name] = field_nr
+            field_nr += 1
+
+        river_id = utils.getRiverId(dbconn, river_name, oracle)
+        for line in csvfile:
+            fields = line.split(";")
+            if not fields: continue
+            if fields[namedict[latin("Gewässer")]] != river_name:
+                continue
+            else:
+                values=[]
+                for key, val in DGM_MAP.items():
+                    if isinstance(val, tuple):
+                        values.append(val[1](fields[namedict[val[0]]]))
+                    else:
+                        values.append(unicode.encode(
+                            fields[namedict[val]], "UTF-8"))
+                name = "%s KM %s - %s" % (river_name, fields[namedict["km_von"]],
+                        fields[namedict["km_bis"]])
+                cur = dbconn.cursor()
+                if oracle:
+                    stmt = SQL_INSERT_DGT_ORA
+                else:
+                    stmt = SQL_INSERT_DGT
+
+                cur.execute(stmt, [river_id, name] + values)
+
+        if not dry_run:
+            dbconn.commit()
+
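
The new dgm.py module is driven entirely by the DGM_MAP dictionary: each key names a
column of the dem table, and the value either names the CSV column to copy verbatim or
pairs it with a conversion function. A reduced sketch of that convention, using made-up
sample data instead of a real DGMs.csv row:

    # Sketch of the DGM_MAP convention; the columns and the sample row are
    # illustrative only.
    DGM_MAP = {
        "lower"  : ("km_von", lambda x: int(x)),   # convert before inserting
        "format" : "Format",                       # copy the CSV value as-is
    }
    namedict = {"km_von": 0, "Format": 1}          # CSV header -> column index
    fields   = ["42", "GeoTIFF"]                   # one CSV row, already split

    values = []
    for key, val in DGM_MAP.items():
        if isinstance(val, tuple):
            csv_column, convert = val
            values.append(convert(fields[namedict[csv_column]]))
        else:
            values.append(fields[namedict[val]])
    print values    # e.g. [42, 'GeoTIFF'] (order follows DGM_MAP.items())
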
--- a/flys-backend/contrib/shpimporter/fixpoints.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/fixpoints.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr, osr
+try:
+    from osgeo import ogr, osr
+except ImportError:
+    import ogr, osr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/floodplains.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/floodplains.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/gauges.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/gauges.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/hws.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/hws.py	Thu Feb 28 12:47:24 2013 +0100
@@ -3,97 +3,66 @@
 
 try:
     from osgeo import ogr
-except ImportErrror:
+except ImportError:
     import ogr
 
 from importer import Importer
 import utils
 
+import logging
+logger = logging.getLogger("Hochwasserschutzanlagen")
+
 PATH="Hydrologie/HW-Schutzanlagen"
 NAME="HWS"
 
 # Keep in sync with hws_kinds table:
 HWS_KIND = {
-        "Durchlass" : 1,
-        "Damm" : 2,
-        "Deich" : 2,
-        "Graben" : 3,
+        "durchlass" : 1,
+        "damm" : 2,
+        "deich" : 2,
+        "hochufer" : 2,
+        "graben" : 3,
+        "rohr1" : 1,
     }
 
 # Keep in sync with fed_states table:
 FED_STATES = {
-    "Bayern" : 1,
-    "Hessen" : 2,
-    "Niedersachsen" : 3,
-    "Nordrhein-Westfalen" : 4,
-    "Rheinland-Pfalz" : 5,
-    "Saarland" : 6,
-    "Schleswig-Holstein" : 7,
-    "Brandenburg" : 8,
-    "Mecklenburg-Vorpommern" : 9,
-    "Thüringen" : 10,
-    "Baden-Württemberg" : 11,
-    "Sachsen-Anhalt" : 12,
-    "Sachsen" : 13,
-    "Berlin" : 14,
-    "Bremen" : 15,
-    "Hamburg" : 16,
+    "bayern" : 1,
+    "hessen" : 2,
+    "niedersachsen" : 3,
+    "nordrhein-westfalen" : 4,
+    "nordrhein westfalen" : 4,
+    "rheinland-pfalz" : 5,
+    "rheinland pfalz" : 5,
+    "saarland" : 6,
+    "schleswig-holstein" : 7,
+    "schleswig holstein" : 7,
+    "brandenburg" : 8,
+    "mecklenburg-vorpommern" : 9,
+    "mecklenburg vorpommern" : 9,
+    "thüringen" : 10,
+    "baden-württemberg" : 11,
+    "baden württemberg" : 11,
+    "sachsen-anhalt" : 12,
+    "sachsen anhalt" : 12,
+    "sachsen" : 13,
+    "berlin" : 14,
+    "bremen" : 15,
+    "hamburg" : 16,
 }
 
-class HWSLines(Importer):
-
-    def getPath(self, base):
-        return "%s/%s" % (base, PATH)
-
-    def getTablename(self):
-        return "hws_lines"
-
-    def getName(self):
-        return "HWS_LINES"
-
-    def isGeometryValid(self, geomType):
-        return geomType == 2
-
-    def isShapeRelevant(self, name, path):
-        return True
-
-    def createNewFeature(self, featureDef, feat, **args):
-        newFeat  = ogr.Feature(featureDef)
-        geometry = feat.GetGeometryRef()
-        geometry.SetCoordinateDimension(2)
-
-        newFeat.SetGeometry(geometry)
-
-        if self.IsFieldSet(feat, "river_id"):
-            newFeat.SetField("river_id", feat.GetField("river_id"))
-        else:
-            newFeat.SetField("river_id", self.river_id)
-
-        if self.IsFieldSet(feat, "TYP"):
-            newFeat.SetField("type", feat.GetField("TYP"))
-
-        if self.IsFieldSet(feat, "Bauart"):
-            newFeat.SetField("hws_facility", feat.GetField("Bauart"))
-
-        if self.IsFieldSet(feat, "Name"):
-            newFeat.SetField("name", feat.GetField("name"))
-        else:
-            newFeat.SetField("name", args['name'])
-
-        return newFeat
-
 class HWSPoints(Importer):
     fieldmap = {
-            "Name" : "name",
-            "Quelle" : "source",
-            "Anmerkung" : "description",
-            "Stand" : "status_date",
-            "Verband" : "agency",
-            "Deich_KM" : "dike_km",
-            "Bereich" : "range",
-            "Höhe_SOLL" : "z_target",
-            "WSP_BfG100" : "rated_level",
-            "Hoehe_IST" : "z",
+            "name$" : "name",
+            "quelle$" : "source",
+            "anmerkung$" : "description",
+            "stand$" : "status_date",
+            "verband$" : "agency",
+            "Deich_{0,1}KM$" : "dike_km",
+            "Bereich$" : "range",
+            "H[oeö]{0,2}he_{0,1}SOLL$" : "z_target",
+            "(WSP_){0,1}BfG_{0,1}100$" : "rated_level",
+            "H[oeö]{0,2}he_{0,1}IST$" : "z",
         }
 
     def getPath(self, base):
@@ -106,13 +75,11 @@
         return "HWS_POINTS"
 
     def isGeometryValid(self, geomType):
-        return geomType == 1
+        return geomType == ogr.wkbPoint or geomType == ogr.wkbPoint25D
 
     def isShapeRelevant(self, name, path):
-        if "punkte" in os.path.basename(path).lower():
-            return True
-        else:
-            return False
+        shp = ogr.Open(path)
+        return self.isGeometryValid(shp.GetLayerByName(name).GetGeomType())
 
     def createNewFeature(self, featureDef, feat, **args):
         newFeat  = ogr.Feature(featureDef)
@@ -123,65 +90,108 @@
 
         newFeat.SetGeometry(geometry)
 
-        newFeat.SetFID(feat.GetFID())
-
-        newFeat.SetField("ogr_fid", feat.GetFID())
-
-        if self.IsFieldSet(feat, "Art"):
-            self.handled("Art")
-            kind_id = HWS_KIND.get(feat.GetField("Art"))
+        artname = self.searchField("art$")
+        if self.IsFieldSet(feat, artname):
+            self.handled(artname)
+            kind_id = HWS_KIND.get(feat.GetField(artname).lower())
             if not kind_id:
-                print ("Unbekannte Art: %s" % \
-                        feat.GetField("Art"))
+                logger.warn("Unknown Art: %s" % \
+                        feat.GetField(artname))
             else:
                 newFeat.SetField("kind_id", kind_id)
 
-        if self.IsFieldSet(feat, "Bundesland"):
-            self.handled("Bundesland")
-            fed_id = FED_STATES.get(feat.GetField("Bundesland"))
+        fname = self.searchField("Bundesland$")
+        if self.IsFieldSet(feat, fname):
+            self.handled(fname)
+            fed_id = FED_STATES.get(feat.GetField(fname).lower())
 
             if not fed_id:
-                print ("Unbekanntes Bundesland: %s" % \
+                logger.warn("Unknown Bundesland: %s" % \
                         feat.GetField(fname))
             else:
                 newFeat.SetField("fed_state_id", fed_id)
 
-        if self.IsFieldSet(feat, "river_id"):
-            self.handled("river_id")
-            if feat.GetField("river_id") != self.river_id:
-                print ("River_id mismatch between shapefile and"
-                     " importer parameter.")
-            newFeat.SetField("river_id", feat.GetField("river_id"))
-        else:
-            newFeat.SetField("river_id", self.river_id)
-
-        if self.IsFieldSet(feat, "Ufer"):
-            self.handled("Ufer")
-            shoreString = feat.GetField("Ufer")
+        fname = self.searchField("(ufer$)|(flussseite$)")
+        if self.IsFieldSet(feat, fname):
+            self.handled(fname)
+            shoreString = feat.GetField(fname)
             if "links" in shoreString.lower():
                 newFeat.SetField("shore_side", True)
             elif "rechts" in shoreString.lower():
                 newFeat.SetField("shore_side", False)
 
-        if not self.IsFieldSet(feat, "Name"):
-            self.handled("Name")
+
+        fname = self.searchField("river_{0,1}id$")
+        if self.IsFieldSet(feat, fname):
+            self.handled(fname)
+            if feat.GetField(fname) != self.river_id:
+                logger.warn("River_id mismatch between shapefile and"
+                     " importer parameter.")
+            newFeat.SetField("river_id", feat.GetField(fname))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        fname = self.searchField("name$")
+        if not self.IsFieldSet(feat, fname):
             newFeat.SetField("name", args['name'])
 
-        if self.IsFieldSet(feat, "offiziell"):
-            self.handled("offiziell")
-            offiziell = feat.GetField("offiziell")
+        fname = self.searchField("offiziell$")
+        if self.IsFieldSet(feat, fname):
+            self.handled(fname)
+            offiziell = feat.GetField(fname)
             if offiziell == "1" or offiziell == 1:
-                newFeat.SetField("offiziell", True)
+                newFeat.SetField("official", True)
             else:
-                newFeat.SetField("offiziell", False)
+                newFeat.SetField("official", False)
+        # Set the official value based on the file name as a fallback
+        elif args.get("name", "").lower() == "rohre_und_sperren" or \
+                args.get("name", "").lower() == "rohre-und-sperren":
+             newFeat.SetField("official", True)
 
         if self.IsFieldSet(newFeat, "z") and \
             self.IsFieldSet(newFeat, "rated_level"):
-            self.handled("Freibord_m")
+            fname = self.searchField("freibord(_m){0,1}$")
+            self.handled(fname)
             z = newFeat.GetFieldAsDouble("z")
             rl = newFeat.GetFieldAsDouble("rated_level")
             newFeat.SetField("freeboard", z - rl)
 
         return newFeat
 
+class HWSLines(HWSPoints):
 
+    # TODO: GEOM_target, GEOM_rated_level, dike_km_from, dike_km_to
+    fieldmap = {
+            "name$" : "name",
+            "quelle$" : "source",
+            "anmerkung$" : "description",
+            "stand$" : "status_date",
+            "verband$" : "agency",
+            "Bereich$" : "range",
+        }
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+    def getTablename(self):
+        return "hws_lines"
+
+    def getName(self):
+        return "HWS_LINES"
+
+    def isGeometryValid(self, geomType):
+        return geomType == ogr.wkbLineString or geomType == ogr.wkbLineString25D
+
+    def isShapeRelevant(self, name, path):
+        shp = ogr.Open(path)
+        return self.isGeometryValid(shp.GetLayerByName(name).GetGeomType())
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = HWSPoints.createNewFeature(self, featureDef, feat, **args)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(3)
+        newFeat.SetGeometry(geometry)
+
+        return newFeat
+
+
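
The Art and Bundesland lookups are now done on lowercased attribute values, so spelling
variants in the shapefiles resolve to the same ids. A small sketch of the effect, with
the dictionary reduced and an invented unknown value:

    # Sketch: attribute values are lowercased before the lookup, so
    # "Deich", "DEICH" and "deich" all map to the same hws_kinds id.
    HWS_KIND = {"durchlass": 1, "damm": 2, "deich": 2, "hochufer": 2, "graben": 3}
    for art in ("Deich", "DAMM", "Graben", "Mauer"):
        print art, HWS_KIND.get(art.lower())    # "Mauer" yields None -> warning
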
--- a/flys-backend/contrib/shpimporter/importer.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/importer.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,21 +1,24 @@
 try:
-    from osgeo import ogr
-except ImportErrror:
-    import ogr
-import osr
-import shpimporter
+    from osgeo import ogr, osr
+except ImportError:
+    import ogr, osr
 import utils
+import re
+import logging
+
+logger = logging.getLogger("importer")
 
 class Importer:
 
-    def __init__(self, config, dbconn):
-        self.config = config
+    def __init__(self, river_id, dbconn, dry_run):
+        self.river_id = river_id
         self.dbconn = dbconn
-        self.river_id = config.river_id
+        self.dry_run = dry_run
         self.dest_srs = osr.SpatialReference()
-        self.dest_srs.ImportFromEPSG(config.target_srs)
+        self.dest_srs.ImportFromEPSG(31467)
         self.handled_fields = []
         self.tracking_import = False
+        self.srcLayer = None
 
     def getKind(self, path):
         raise NotImplementedError("Importer.getKind is abstract!")
@@ -30,10 +33,39 @@
         raise NotImplementedError("Importer.getTablename is abstract!")
 
     def IsFieldSet(self, feat, name):
+        if not name:
+            return False
         if feat.GetFieldIndex(name) == -1:
             return False # Avoids an Error in IsFieldSet
         return feat.IsFieldSet(feat.GetFieldIndex(name))
 
+    def searchField(self, regex):
+        """
+        Searches for a field in the current src layer that matches
+        the expression regex.
+        Throws an exception if more than one field matches.
+
+        @param regex: The regular expression to look for
+
+        @returns: The field name as a string
+        """
+
+        if not hasattr(self.srcLayer, "fieldnames"):
+            self.srcLayer.fieldnames = []
+            for i in range(0, self.srcLayer.GetLayerDefn().GetFieldCount()):
+                self.srcLayer.fieldnames.append(
+                    self.srcLayer.GetLayerDefn().GetFieldDefn(i).GetNameRef())
+
+        result = None
+        for name in self.srcLayer.fieldnames:
+            match = re.match(regex, name, re.IGNORECASE)
+            if match:
+                if result:
+                    raise Exception("More than one field matches: %s" % regex)
+                else:
+                    result = match.group(0)
+        return result
+
     def IsDoubleFieldSet(self, feat, name):
         try:
             isset = feat.GetFieldAsDouble(name)
@@ -46,20 +78,23 @@
 
     def walkOverShapes(self, shape):
         (name, path) = shape
-        if not self.isShapeRelevant(name, path):
-            shpimporter.INFO("Skip shapefile '%s'" % path)
-            return
 
         shp = ogr.Open(shape[1])
         if shp is None:
-            shpimporter.ERROR("Shapefile '%s' could not be opened!" % path)
+            logger.error("Shapefile '%s' could not be opened!" % path)
             return
 
-        shpimporter.INFO("Processing shapefile '%s'" % path)
+        if not self.isShapeRelevant(name, path):
+            logger.info("Skip shapefile: '%s' of Type: %s" % (path,
+                utils.getWkbString(shp.GetLayerByName(name).GetGeomType())))
+            return
+
+
+        logger.info("Processing shapefile '%s'" % path)
         srcLayer = shp.GetLayerByName(name)
 
         if srcLayer is None:
-            shpimporter.ERROR("Layer '%s' was not found!" % name)
+            logger.error("Layer '%s' was not found!" % name)
             return
 
         return self.shape2Database(srcLayer, name, path)
@@ -69,11 +104,12 @@
         src_srs  = geometry.GetSpatialReference()
 
         if src_srs is None:
-            shpimporter.ERROR("No source SRS given! No transformation possible!")
+            logger.error("No source SRS given! No transformation possible!")
             return feat
 
         transformer = osr.CoordinateTransformation(src_srs, self.dest_srs)
-        geometry.Transform(transformer)
+        if geometry.Transform(transformer):
+            return None
 
         return feat
 
@@ -90,15 +126,19 @@
         """
         Checks the mapping dictionary for key value pairs to
         copy from the source to the destination feature.
+        The keys can be regular expressions that are matched
+        against the source field names.
 
         The Key is the attribute of the source feature to be copied
         into the target attribute named by the dict's value.
         """
         self.tracking_import = True
-        self.handled_fields.extend(mapping.keys())
         for key, value in mapping.items():
-            if src.GetFieldIndex(key) == -1:
+            realname = self.searchField(key)
+            if realname == None:
                 continue
+            if not realname in self.handled_fields:
+                self.handled_fields.append(realname)
             # 0 OFTInteger, Simple 32bit integer
             # 1 OFTIntegerList, List of 32bit integers
             # 2 OFTReal, Double Precision floating point
@@ -111,31 +151,32 @@
             # 9 OFTDate, Date
             # 10 OFTTime, Time
             # 11 OFTDateTime, Date and Time
-            if src.IsFieldSet(src.GetFieldIndex(key)):
-                if src.GetFieldType(key) == 2:
-                    target.SetField(value, src.GetFieldAsDouble(key))
+            if src.IsFieldSet(src.GetFieldIndex(realname)):
+                if src.GetFieldType(realname) == 2:
+                    target.SetField(value, src.GetFieldAsDouble(realname))
                 else:
-                    target.SetField(value, src.GetField(key))
+                    target.SetField(value, utils.getUTF8(src.GetField(realname)))
 
     def shape2Database(self, srcLayer, name, path):
         destLayer = self.dbconn.GetLayerByName(self.getTablename())
 
         if srcLayer is None:
-            shpimporter.ERROR("Shapefile is None!")
+            logger.error("Shapefile is None!")
             return -1
 
         if destLayer is None:
-            shpimporter.ERROR("No destination layer given!")
+            logger.error("No destination layer given!")
             return -1
 
         count = srcLayer.GetFeatureCount()
-        shpimporter.DEBUG("Try to add %i features to database." % count)
+        logger.debug("Try to add %i features to database." % count)
 
         srcLayer.ResetReading()
+        self.srcLayer = srcLayer
 
         geomType    = -1
         success     = 0
-        unsupported = 0
+        unsupported = {}
         creationFailed = 0
         featureDef  = destLayer.GetLayerDefn()
 
@@ -143,7 +184,7 @@
             geom     = feat.GetGeometryRef()
 
             if geom is None:
-                shpimporter.DEBUG("Unkown Geometry reference for feature")
+                logger.debug("Unknown geometry reference for feature")
                 continue
 
             geomType = geom.GetGeometryType()
@@ -151,25 +192,31 @@
             if self.isGeometryValid(geomType):
                 newFeat = self.createNewFeature(featureDef,
                                                 feat,
-                                                name=name,
+                                                name=utils.getUTF8(name),
                                                 path=path)
 
                 if newFeat is not None:
                     newFeat.SetField("path", utils.getUTF8Path(path))
                     newFeat = self.transform(newFeat)
-                    res = destLayer.CreateFeature(newFeat)
-                    if res is None or res > 0:
-                        shpimporter.ERROR("Unable to insert feature. Error: %r" % res)
+                    if newFeat:
+                        res = destLayer.CreateFeature(newFeat)
+                        if res is None or res > 0:
+                            logger.error("Unable to insert feature. Error: %r" % res)
+                        else:
+                            success = success + 1
                     else:
-                        success = success + 1
+                        logger.error("Could not transform feature: %s " % feat.GetFID())
+                        creationFailed += 1
                 else:
                     creationFailed = creationFailed + 1
             else:
-                unsupported = unsupported + 1
+                unsupported[utils.getWkbString(geomType)] = \
+                        unsupported.get(utils.getWkbString(geomType), 0) + 1
 
-        shpimporter.INFO("Inserted %i features" % success)
-        shpimporter.INFO("Failed to create %i features" % creationFailed)
-        shpimporter.INFO("Found %i unsupported features" % unsupported)
+        logger.info("Inserted %i features" % success)
+        logger.info("Failed to create %i features" % creationFailed)
+        for key, value in unsupported.items():
+            logger.info("Found %i unsupported features of type: %s" % (value, key))
 
         if self.tracking_import:
             unhandled = []
@@ -179,14 +226,14 @@
                     unhandled.append(act_field)
 
             if len(unhandled):
-                shpimporter.INFO("Did not import values from fields: %s " % \
+                logger.info("Did not import values from fields: %s " % \
                         " ".join(unhandled))
 
         try:
-            if self.config.dry_run > 0:
+            if self.dry_run:
                 return geomType
             destLayer.CommitTransaction()
         except Exception:
-            shpimporter.ERROR("Exception while committing transaction.")
+            logger.error("Exception while committing transaction.")
 
         return geomType
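
copyFields() and the HWS importers no longer rely on exact attribute names: every key of
a fieldmap is resolved through searchField(), which matches the key as a case-insensitive
regular expression against the source layer's field names. A standalone sketch of that
lookup, with invented field names:

    # -*- coding: utf-8 -*-
    # Sketch of the regex-based field-name resolution done by searchField();
    # the field names below are hypothetical.
    import re

    def search_field(fieldnames, regex):
        result = None
        for name in fieldnames:
            if re.match(regex, name, re.IGNORECASE):
                if result:
                    raise Exception("More than one field matches: %s" % regex)
                result = name
        return result

    fieldnames = ["Name", "Hoehe_SOLL", "WSP_BfG100"]
    print search_field(fieldnames, "H[oeö]{0,2}he_{0,1}SOLL$")    # Hoehe_SOLL
    print search_field(fieldnames, "(WSP_){0,1}BfG_{0,1}100$")    # WSP_BfG100
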
--- a/flys-backend/contrib/shpimporter/km.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/km.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/lines.py	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,56 +0,0 @@
-import ogr
-
-from importer import Importer
-
-TABLE_NAME="lines"
-PATH="Geodaesie/Linien"
-NAME="Lines"
-
-
-class Line(Importer):
-
-    def getPath(self, base):
-        return "%s/%s" % (base, PATH)
-
-
-    def getTablename(self):
-        return TABLE_NAME
-
-
-    def getName(self):
-        return NAME
-
-
-    def isGeometryValid(self, geomType):
-        return geomType == 2 or geomType == -2147483646
-
-
-    def isShapeRelevant(self, name, path):
-        return True
-
-
-    def createNewFeature(self, featureDef, feat, **args):
-        newFeat  = ogr.Feature(featureDef)
-        geometry = feat.GetGeometryRef()
-        geometry.SetCoordinateDimension(2)
-
-        newFeat.SetGeometry(geometry)
-        newFeat.SetField("name", args['name'])
-
-        if self.IsFieldSet(feat, "river_id"):
-            newFeat.SetField("river_id", feat.GetField("river_id"))
-        else:
-            newFeat.SetField("river_id", self.river_id)
-
-        if self.IsFieldSet(feat, "TYP"):
-            newFeat.SetField("kind", feat.GetFieldAsDouble("TYP"))
-        else:
-            newFeat.SetField("kind", "DAMM")
-
-        if self.IsFieldSet(feat, "Z"):
-            newFeat.SetField("z", feat.GetFieldAsDouble("Z"))
-        else:
-            newFeat.SetField("z", 9999)
-
-        return newFeat
-
--- a/flys-backend/contrib/shpimporter/run.sh	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/run.sh	Thu Feb 28 12:47:24 2013 +0100
@@ -20,7 +20,6 @@
 SKIP_HYDR_BOUNDARIES=0
 SKIP_HWS=0
 SKIP_GAUGE_LOCATION=0
-SKIP_CATCHMENTS=0
 SKIP_UESG=0
 
 exec python shpimporter.py \
@@ -41,6 +40,5 @@
     --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \
     --skip_hws $SKIP_HWS \
     --skip_gauge_locations $SKIP_GAUGE_LOCATION \
-    --skip_catchments $SKIP_CATCHMENTS \
     --skip_uesgs $SKIP_UESG
 
--- a/flys-backend/contrib/shpimporter/shpimporter.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/shpimporter.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,14 +1,16 @@
 try:
     from osgeo import ogr
-except ImportErrror:
+except ImportError:
     import ogr
 
 import utils, optparse
+import sys
+import os
+import logging
 
 from uesg  import UESG
 from axis  import Axis
 from km    import KM
-from lines import Line
 from fixpoints import Fixpoint
 from buildings import Building
 from crosssectiontracks import CrosssectionTrack
@@ -16,44 +18,33 @@
 from boundaries import HydrBoundary, HydrBoundaryPoly
 from hws import HWSLines, HWSPoints
 from gauges import GaugeLocation
-from catchments import Catchment
-
-
-VERBOSE_DEBUG=2
-VERBOSE_INFO=1
-
-
-def DEBUG(msg):
-    config = getConfig()
-    if config.verbose >= VERBOSE_DEBUG:
-        print "DEBUG: %s" % msg
-
-def INFO(msg):
-    config = getConfig()
-    if config.verbose >= VERBOSE_INFO:
-        print "INFO: %s" % msg
+from dgm import insertRiverDgm
 
-def ERROR(msg):
-    config = getConfig()
-    print "ERROR: %s" % msg
+logger = logging.getLogger("shpimporter")
 
+def initialize_logging(level):
+    """Initializes the logging system"""
+    root = logging.getLogger()
+    root.setLevel(level)
+    hdlr = logging.StreamHandler()
+    fmt = logging.Formatter("%(levelname)s %(name)s: %(message)s")
+    hdlr.setFormatter(fmt)
+    root.addHandler(hdlr)
 
-def getImporters(config, dbconn):
+def getImporters(river_id, dbconn, dry_run):
     return [
-        Axis(config, dbconn),
-        KM(config, dbconn),
-        CrosssectionTrack(config, dbconn),
-        Line(config, dbconn),
-        Fixpoint(config, dbconn),
-        Building(config, dbconn),
-        Floodplain(config, dbconn),
-        HydrBoundary(config, dbconn),
-        HydrBoundaryPoly(config, dbconn),
-        HWSLines(config, dbconn),
-        HWSPoints(config, dbconn),
-        GaugeLocation(config, dbconn),
-        Catchment(config, dbconn),
-        UESG(config, dbconn)
+        Axis(river_id, dbconn, dry_run),
+        KM(river_id, dbconn, dry_run),
+        CrosssectionTrack(river_id, dbconn, dry_run),
+        Fixpoint(river_id, dbconn, dry_run),
+        Building(river_id, dbconn, dry_run),
+        Floodplain(river_id, dbconn, dry_run),
+        HydrBoundary(river_id, dbconn, dry_run),
+        HydrBoundaryPoly(river_id, dbconn, dry_run),
+        HWSLines(river_id, dbconn, dry_run),
+        HWSPoints(river_id, dbconn, dry_run),
+        GaugeLocation(river_id, dbconn, dry_run),
+        UESG(river_id, dbconn, dry_run)
         ]
 
 
@@ -64,7 +55,7 @@
     parser.add_option("--host", type="string")
     parser.add_option("--user", type="string")
     parser.add_option("--password", type="string")
-    parser.add_option("--river_id", type="int")
+    parser.add_option("--river_name", type="string")
     parser.add_option("--verbose", type="int", default=1)
     parser.add_option("--dry_run", type="int", default=0)
     parser.add_option("--ogr_connection", type="string")
@@ -72,33 +63,36 @@
     parser.add_option("--skip_hydr_boundaries", type="int")
     parser.add_option("--skip_buildings", type="int")
     parser.add_option("--skip_crosssections", type="int")
-    parser.add_option("--skip_lines", type="int")
     parser.add_option("--skip_fixpoints", type="int")
     parser.add_option("--skip_floodplains", type="int")
     parser.add_option("--skip_hws_lines", type="int")
     parser.add_option("--skip_hws_points", type="int")
     parser.add_option("--skip_gauge_locations", type="int")
-    parser.add_option("--skip_catchments", type="int")
     parser.add_option("--skip_kms", type="int")
     parser.add_option("--skip_uesgs", type="int")
+    parser.add_option("--skip_dgm", type="int")
     (config, args) = parser.parse_args()
 
+    if config.verbose > 1:
+        initialize_logging(logging.DEBUG)
+    elif config.verbose == 1:
+        initialize_logging(logging.INFO)
+    else:
+        initialize_logging(logging.WARN)
+
     if config.directory == None:
-        ERROR("No river directory specified!")
+        logger.error("No river directory specified!")
         raise Exception("Invalid config")
     if not config.ogr_connection:
         if not config.host:
-            ERROR("No database host specified!")
+            logger.error("No database host specified!")
             raise Exception("Invalid config")
         if not config.user:
-            ERROR("No databaser user specified!")
+            logger.error("No databaser user specified!")
             raise Exception("Invalid config")
         if not config.password:
-            ERROR("No password specified!")
+            logger.error("No password specified!")
             raise Exception("Invalid config")
-    if config.river_id == None:
-        ERROR("No river id specified!")
-        raise Exception("Invalid config")
 
     return config
 
@@ -114,20 +108,17 @@
         return True
     elif config.skip_crosssections == 1 and isinstance(importer, CrosssectionTrack):
         return True
-    elif config.skip_lines == 1 and isinstance(importer, Line):
-        return True
     elif config.skip_fixpoints == 1 and isinstance(importer, Fixpoint):
         return True
     elif config.skip_floodplains == 1 and isinstance(importer, Floodplain):
         return True
-    elif config.skip_hws_points == 1 and isinstance(importer, HWSPoints):
-        return True
     elif config.skip_hws_lines == 1 and isinstance(importer, HWSLines):
         return True
+    elif config.skip_hws_points == 1 and isinstance(importer, HWSPoints) and \
+            not isinstance(importer, HWSLines):
+        return True
     elif config.skip_gauge_locations == 1 and isinstance(importer, GaugeLocation):
         return True
-    elif config.skip_catchments == 1 and isinstance(importer, Catchment):
-        return True
     elif config.skip_kms == 1 and isinstance(importer, KM):
         return True
     elif config.skip_uesgs == 1 and isinstance(importer, UESG):
@@ -135,7 +126,6 @@
 
     return False
 
-
 def main():
     config=None
     try:
@@ -144,48 +134,109 @@
         return -1
 
     if config == None:
-        ERROR("Unable to read config from command line!")
+        logger.error("Unable to read config from command line!")
         return
 
     if config.dry_run > 0:
-        INFO("You enable 'dry_run'. No database transaction will take place!")
+        logger.info("You enable 'dry_run'. No database transaction will take place!")
 
     if config.ogr_connection:
         connstr = config.ogr_connection
     else:
         connstr = 'OCI:%s/%s@%s' % (config.user, config.password, config.host)
 
+    oracle = False # Marker if oracle is used.
+    if 'OCI:' in connstr:
+        oracle = True
+        try:
+            import cx_Oracle as dbapi
+            raw_connstr=connstr.replace("OCI:", "")
+        except ImportError:
+            logger.error("Module cx_Oracle not found in: %s\n"
+                  "Neccessary to connect to a Oracle Database.\n"
+                  "Please refer to the installation "
+                  "documentation." % sys.path)
+            return -1
+
+    else: # Currently only Oracle and PostgreSQL are supported
+        try:
+            import psycopg2 as dbapi
+            raw_connstr=connstr.replace("PG:", "")
+        except ImportError:
+            logger.error("Module psycopg2 not found in: %s\n"
+                  "Neccessary to connect to a Posgresql Database.\n"
+                  "Please refer to the installation "
+                  "documentation." % sys.path)
+            return -1
+
+    dbconn_raw = dbapi.connect(raw_connstr)
     dbconn = ogr.Open(connstr)
 
     if dbconn == None:
-        ERROR("Could not connect to database %s" % connstr)
+        logger.error("Could not connect to database %s" % connstr)
         return -1
 
-    importers = getImporters(config, dbconn)
     types = {}
 
-    for importer in importers:
-        if skip_importer(config, importer):
-            INFO("Skip import of '%s'" % importer.getName())
-            continue
-
-        INFO("Start import of '%s'" % importer.getName())
-
-        shapes = utils.findShapefiles(importer.getPath(config.directory))
-        DEBUG("Found %i Shapefiles" % len(shapes))
+    directories = []
+    if not config.river_name:
+        for file in [os.path.join(config.directory, d) for d in \
+                os.listdir(config.directory)]:
+            if os.path.isdir(file):
+                directories.append(file)
+    else:
+        directories.append(config.directory)
 
-        for shpTuple in shapes:
-            geomType = importer.walkOverShapes(shpTuple)
-            try:
-                if geomType is not None:
-                    num = types[geomType]
-                    types[geomType] = num+1
-            except:
-                types[geomType] = 1
+    for directory in directories:
+        if not config.river_name:
+            river_name = utils.getUTF8Path(
+                    os.path.basename(os.path.normpath(directory)))
+        else:
+            river_name = config.river_name
+        river_id = utils.getRiverId(dbconn_raw, river_name, oracle)
 
-    for key in types:
-        DEBUG("%i x geometry type %s" % (types[key], key))
+        if not river_id:
+            logger.info("Could not find river in database. Skipping: %s"
+                  % river_name)
+            continue
+        else:
+            logger.info("Importing River: %s" % river_name)
 
+        for importer in getImporters(river_id, dbconn, config.dry_run):
+            if skip_importer(config, importer):
+                logger.info("Skip import of '%s'" % importer.getName())
+                continue
+
+            logger.info("Start import of '%s'" % importer.getName())
+
+            shapes = utils.findShapefiles(importer.getPath(config.directory))
+            logger.debug("Found %i Shapefiles" % len(shapes))
+
+            for shpTuple in shapes:
+                geomType = importer.walkOverShapes(shpTuple)
+                try:
+                    if geomType is not None:
+                        num = types[geomType]
+                        types[geomType] = num+1
+                except:
+                    types[geomType] = 1
+
+        for key in types:
+            logger.debug("%i x geometry type %s" % (types[key], key))
+
+        if not config.skip_dgm:
+            dgmfilename = os.path.join(
+                    config.directory, "..", "DGMs.csv")
+            if not os.access(dgmfilename, os.R_OK) or not \
+                    os.path.isfile(dgmfilename):
+                logger.info("Could not find or access DGM file: %s \n"
+                     "Skipping DGM import." % dgmfilename)
+            else:
+                logger.info("Inserting DGM meta information in 'dem' table.")
+                insertRiverDgm(dbconn_raw, dgmfilename, river_name,
+                        config.dry_run, oracle)
+        else:
+            logger.info("Skip import of DGM.")
 
 if __name__ == '__main__':
     main()
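
Because HWSLines is now derived from HWSPoints, skip_importer() has to exclude HWSLines
instances explicitly when evaluating --skip_hws_points; otherwise skipping the point
importer would silently skip the line importer as well. A tiny sketch of the isinstance()
behaviour behind this, with stand-in classes rather than the real importers:

    # Stand-in classes only; they mirror the new inheritance in hws.py.
    class HWSPoints(object):
        pass

    class HWSLines(HWSPoints):
        pass

    lines_importer = HWSLines()
    print isinstance(lines_importer, HWSPoints)      # True: would be skipped
    print (isinstance(lines_importer, HWSPoints)
           and not isinstance(lines_importer, HWSLines))    # False: kept
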
--- a/flys-backend/contrib/shpimporter/uesg.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/uesg.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,4 +1,7 @@
-import ogr
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
 
 from importer import Importer
 
--- a/flys-backend/contrib/shpimporter/utils.py	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/contrib/shpimporter/utils.py	Thu Feb 28 12:47:24 2013 +0100
@@ -1,8 +1,17 @@
 import os
 import sys
-from shpimporter import DEBUG, INFO, ERROR
+import logging
+
+try:
+    from osgeo import ogr
+except ImportError:
+    import ogr
+
+logger = logging.getLogger("utils")
 
 SHP='.shp'
+SQL_SELECT_RIVER_ID="SELECT id FROM rivers WHERE name = %s"
+SQL_SELECT_RIVER_ID_ORA="SELECT id FROM rivers WHERE name = :s"
 
 def findShapefiles(path):
     shapes = []
@@ -11,7 +20,7 @@
         if len(files) == 0:
             continue
 
-        DEBUG("Processing directory '%s' with %i files " % (root, len(files)))
+        logger.debug("Processing directory '%s' with %i files " % (root, len(files)))
 
         for f in files:
             idx = f.find(SHP)
@@ -20,6 +29,36 @@
 
     return shapes
 
+def getRiverId(dbconn, name, oracle):
+    """
+    Returns the id of the river "name"
+    Dbconn must be a python database connection api compliant object
+    """
+    cur = dbconn.cursor()
+    if oracle:
+        # This is stupid and should not be necessary. But I don't
+        # know how to make it work both ways. aheinecke - 02/2013
+        stmt = SQL_SELECT_RIVER_ID_ORA
+    else:
+        stmt = SQL_SELECT_RIVER_ID
+    cur.execute(stmt, (name,))
+    row = cur.fetchone()
+    if row:
+        return row[0]
+    else:
+        return 0
+
+def getUTF8(string):
+    """
+    Tries to convert the string to a UTF-8 encoding by first checking if it
+    is UTF-8 and then trying cp1252
+    """
+    try:
+        return unicode.encode(unicode(string, "UTF-8"), "UTF-8")
+    except UnicodeDecodeError:
+        # Probably European Windows names so lets try again
+        return unicode.encode(unicode(string, "cp1252"), "UTF-8")
+
 def getUTF8Path(path):
     """
     Tries to convert path to utf-8 by first checking the filesystemencoding
@@ -31,3 +70,30 @@
     except UnicodeDecodeError:
         # Probably European Windows names so lets try again
         return unicode.encode(unicode(path, "cp1252"), "UTF-8")
+
+WKB_MAP = {
+    ogr.wkb25Bit :                'wkb25Bit',
+    ogr.wkbGeometryCollection :   'wkbGeometryCollection',
+    ogr.wkbGeometryCollection25D :'wkbGeometryCollection25D',
+    ogr.wkbLineString :           'wkbLineString',
+    ogr.wkbLineString25D :        'wkbLineString25D',
+    ogr.wkbLinearRing :           'wkbLinearRing',
+    ogr.wkbMultiLineString :      'wkbMultiLineString',
+    ogr.wkbMultiLineString25D :   'wkbMultiLineString25D',
+    ogr.wkbMultiPoint :           'wkbMultiPoint',
+    ogr.wkbMultiPoint25D :        'wkbMultiPoint25D',
+    ogr.wkbMultiPolygon :         'wkbMultiPolygon',
+    ogr.wkbMultiPolygon25D :      'wkbMultiPolygon25D',
+    ogr.wkbNDR :                  'wkbNDR',
+    ogr.wkbNone :                 'wkbNone',
+    ogr.wkbPoint :                'wkbPoint',
+    ogr.wkbPoint25D :             'wkbPoint25D',
+    ogr.wkbPolygon :              'wkbPolygon',
+    ogr.wkbPolygon25D :           'wkbPolygon25D',
+    ogr.wkbUnknown :              'wkbUnknown',
+    ogr.wkbXDR :                  'wkbXDR'
+}
+
+def getWkbString(type):
+    return WKB_MAP.get(type) or "Unknown"
+
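
getRiverId() issues the same SELECT with driver-specific placeholders (%s for psycopg2,
:s for cx_Oracle) and returns 0 when the river is unknown, which the main loop treats as
"skip this directory". A hedged usage sketch, assuming a PostgreSQL backend, installed
psycopg2 and GDAL Python bindings, and an invented connection string:

    # Usage sketch for utils.getRiverId(); connection parameters are examples.
    import psycopg2
    import utils

    dbconn = psycopg2.connect("dbname=flys user=flys host=localhost")
    river_id = utils.getRiverId(dbconn, "Saar", oracle=False)
    if not river_id:
        print "River 'Saar' not found; the importer would skip it."
    else:
        print "River id:", river_id
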
--- a/flys-backend/doc/README.txt	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/README.txt	Thu Feb 28 12:47:24 2013 +0100
@@ -18,8 +18,9 @@
 System-Properties gesteuert werden. Diese werden im
 Allgemeinen in der Form -Dkey=value angegeben.
 
-Will man z.B. mit dem Importer nur in simulierierten Import
-durchführen, dann kann dies über die Angabe von
+Will man z.B. mit dem Importer nur einen simulierten Import
+durchführen (es wird also nichts in die Datenbank geschrieben),
+dann kann dies über die Angabe von
 '-Dflys.backend.importer.dry.run=true' erfolgen.
 
 !!! Der Import geht wie Desktop-FLYS davon aus, dass die Dateien
--- a/flys-backend/doc/documentation/de/importer-geodaesie.tex	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/documentation/de/importer-geodaesie.tex	Thu Feb 28 12:47:24 2013 +0100
@@ -1,10 +1,14 @@
 \section{Geodatenimport}
 
-Der Geodaten Importer ist ein in Python geschriebenes Kommandozeilen Tool zum
-Import von Shapefiles in eine Datenbank. Zum Lesen der Shapefiles und zum
-Schreiben der Geodaten in die Datenbank wird GDAL verwendet. Der Import in eine
-Oracle Datenbank erfordert, dass GDAL und GDAL Python Bindungs mit
-Oracle Unterstützung installiert sind. Weitere Details hierzu befinden sich im
+Der Geodaten-Importer ist ein in der Programmiersprache Python
+geschriebenes Kommandozeilenwerkzeug zum Import von Shapefiles in
+eine Datenbank.
+Zum Lesen der Shapefiles und zum Schreiben der Geodaten
+in die Datenbank wird die GDAL-Bibliothek verwendet.
+Um Daten in eine Oracle-Datenbank zu importieren, ist es nötig, dass
+GDAL und die GDAL-Python-Bindings mit Oracle-Unterstützung installiert
+sind. Bei der Verwendung von PostgreSQL entfällt dieser Schritt.
+Weitere Details hierzu befinden sich im
 Kapitel \ref{Systemanforderungen} und \ref{Installationsanleitung}.
 
 Der Importer kann mit einem Shellscript von der Kommandozeile gestartet werden
@@ -13,23 +17,32 @@
 importiert werden sollen. Für jede Klasse gibt es einen speziellen
 Parser, der die speziellen Attribute eines Shapefiles liest und in die entsprechende
 Relation der Datenbank schreibt. Die Parser sind speziell auf das
-Dateisystem der BfG ausgerichtet. So wird z.B. erwartet, dass die Shapefiles der
+Dateisystem der BfG ausgerichtet. So wird beispielsweise erwartet, dass die Shapefiles der
 Gewässerachse im Ordner $Geodaesie/Flussachse+km$ liegen. Weitere Informationen zu
 den einzelnen Parsern sind dem nächsten Kapitel \ref{Beschreibung der Parser} zu
 entnehmen. Der Erfolg oder Misserfolg eines Shape-Imports wird je nach
 Konfiguration im Logfile vermerkt. Folgende Einträge können dem Logfile
 entnommen werden:
 
+%TODO etwas zum srs schreiben.
+
 \textbf{INFO: Inserted 4 features}
 \\Gibt die Anzahl der erfolgreich importierten Features an.\\
 
 \textbf{INFO: Failed to create 2 features}
 \\Gibt die Anzahl der Features an, die nicht importiert werden konnten.\\
 
-\textbf{INFO: Found 3 unsupported features}
+\textbf{INFO: Found 3 unsupported features of type: wkbMultiLineString}
 \\Gibt die Anzahl der Features an, die aufgrund ihres Datentyps nicht importiert
-werden konnten. Z.B: es werden Linien erwartet, im Shapefile sind jedoch
-Polygone enthalten.\\
+werden konnten. Dies ist etwa der Fall, wenn Punkte erwartet wurden, sich im
+Shapefile jedoch Polygone befanden.\\
+
+\textbf{INFO: Did not import values from fields: TYP ID GRUENDUNG BHW}
+\\Manche Importer versuchen, neben der geographischen Information weitere
+Felder in die Datenbank einzulesen. Um festzustellen, ob ein Feld aufgrund
+von Tippfehlern oder unterschiedlicher Schreibweise nicht importiert wurde,
+gibt diese Information Auskunft darüber, welche Felder aus der Shape-Datei
+nicht verwendet wurden.\\
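Wie eine solche Meldung zustande kommen kann, deutet die folgende kleine
Python-Skizze an: Die Feldnamen der Shape-Datei werden mit der Liste der
erwarteten Felder verglichen und nicht zugeordnete Felder protokolliert.
Funktionsname und Parameter sind frei gewählte Annahmen, nicht der
tatsächliche Parser-Code.

\begin{lstlisting}
# Skizze: nicht zugeordnete Felder einer Shape-Datei melden
# (Annahme: 'layer' ist ein per ogr geoeffneter Layer,
#  'expected' die Liste der erwarteten Feldnamen)
def logUnusedFields(layer, expected, logger):
    defn = layer.GetLayerDefn()
    fields = [defn.GetFieldDefn(i).GetName()
              for i in range(defn.GetFieldCount())]
    wanted = set(name.lower() for name in expected)
    unused = [name for name in fields if name.lower() not in wanted]
    if unused:
        logger.info("Did not import values from fields: %s"
                    % " ".join(unused))
\end{lstlisting}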
 
 \textbf{ERROR: No source SRS given! No transformation possible!}
 \\Das Shapefile enthält keine Information, in welcher Projektion die Geometrien
@@ -88,7 +101,7 @@
 \begin{tabular}[t]{ll}
 Pfad        &   Hydrologie/Hydr.Grenzen/Linien \\
 Geometrie   &   LINESTRING, POLYGON \\
-Attribute   &   name, kind \\
+Attribute   &   name, kind, sectie, sobek \\
 \end{tabular}
 
 \subsubsection{Bauwerke}
@@ -149,8 +162,9 @@
 \hspace{1cm}
 \begin{tabular}[t]{ll}
 Pfad        &   Hydrologie/HW-Schutzanlagen \\
-Geometrie   &   LINESTRING \\
-Attribute   &   TYP, Bauart, Name, name \\
+Geometrie   &   LINESTRING, POINT \\
+Attribute   &   name, source, description, status\_date, agency,
+                dike\_km, range, z\_target, rated\_level, z \\
 \end{tabular}
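Die importierten HW-Schutzanlagen verweisen im Datenbankschema über
\textit{kind\_id} auf die Nachschlagetabelle \textit{hws\_kinds}
(Durchlass, Damm, Graben; Standardwert 2). Die folgende Python-Skizze deutet
an, wie ein textueller Typ auf diese IDs abgebildet werden kann; der konkrete
Attributname im Shapefile ist hier bewusst offen gelassen, die Zuordnung nur
beispielhaft.

\begin{lstlisting}
# Skizze: Abbildung eines textuellen HWS-Typs auf hws_kinds.id
# (2 = 'Damm' ist der Standardwert des Schemas)
HWS_KIND_MAP = {
    'durchlass': 1,
    'damm':      2,
    'graben':    3,
}

def getHwsKindId(value):
    if not value:
        return 2
    return HWS_KIND_MAP.get(value.strip().lower(), 2)
\end{lstlisting}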
 
 
@@ -163,19 +177,6 @@
 \end{tabular}
 
 
-\subsubsection{Linien}
-\hspace{1cm}
-\begin{tabular}[t]{ll}
-Pfad        &   Geodaesie/Linien \\
-Geometrie   &   LINESTRING, MULTILINESTRING \\
-Attribute   &   name, TYP, Z \\
-
-Anmerkung   & Wenn kein Attribut 'TYP' definiert ist, wird standardmäßig der Wert \\
-            & 'DAMM' angenommen. Fehlt ein Attribut 'Z' wird '9999' als Höhe \\
-            & angenommen. \\
-\end{tabular}
-
-
 \subsubsection{Überschwemmungsfläche}
 \hspace{1cm}
 \begin{tabular}[t]{ll}
@@ -184,79 +185,6 @@
 Attribut    &   name, diff, count, area, perimeter \\
 \end{tabular}
 
-
-\subsection{Systemanforderungen}
-\label{Systemanforderungen}
-\begin{itemize}
-  \item Oracle Datenbank inkl. Schema für FLYS
-  \item GDAL Binding für Python mit Oracle Support
-  \item ogr2ogr
-  \item Python $>=$ 2.6
-\end{itemize}
-
-
-\subsection{Installationsanleitung}
-\label{Installationsanleitung}
-\begin{itemize}
-
- \item Python\\
- Zum Starten des Importers ist es notwendig Python zu installieren. Dies können
- Sie mit folgendem Befehl auf der Kommandozeile erledigen:
-
- \begin{lstlisting}
-    zypper in python
- \end{lstlisting}
-
- \item Oracle Instantclient\\
- Der Oracle Instantclient 11.2 wird benötigt, damit der Importer mittels Python
- und GDAL in die bestehende Oracle Datenbank schreiben kann. Dazu ist es
- erforderlich, folgende Archive von Oracle herunterzuladen. Zu finden sind die
- folgenden Pakete unter\\
- \href{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
-
- \begin{itemize}
-    \item instantclient-basic-linux-x86-64-11.2.0.2.0.zip
-    \item instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
-    \item instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
- \end{itemize}
-
- Anschließend führen Sie folgende Befehle auf der Kommandozeile aus:
-
- \begin{lstlisting}
-
-    mkdir /opt
-
-    unzip ~/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d /opt
-    unzip ~/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d /opt
-    unzip ~/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d /opt
-
-    mkdir /opt/instantclient_11_2/lib
-    cd /opt/instantclient_11_2/lib
-    ln -s ../libclntsh.so.11.1 .
-    ln -s ../libclntsh.so.11.1 libclntsh.so
-    ln -s ../libnnz11.so .
-    ln -s ../libocci.so.11.1 .
-    ln -s ../libocci.so.11.1 libocci.so
-    ln -s ../libociei.so .
-    ln -s ../libocijdbc11.so .
-    ln -s ../libsqlplusic.so .
-    ln -s ../libsqlplus.so .
-
-    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal1180-1.8.0-intevation1.x86_64.rpm 
-    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal180-devel-1.8.0-intevation1.x86_64.rpm
-    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/gdal180-1.8.0-intevation1.x86_64.rpm
-
- \end{lstlisting}
-
- Sollten keine Fehler aufgetreten sein, haben Sie den \textit{Oracle
- Instantclient 11.2} erfolgreich entpackt und im Dateisystem unter
- \textit{/opt/instantclient\_11\_2} abgelegt. Mit den Befehlen $rpm -i --nodeps$
- haben Sie anschließend die notwendigen Bindings installiert, damit der Importer
- die Geodaten in die Oracle Datenbank schreiben kann.
-
-\end{itemize}
-
-
 \subsection{Konfiguration}
 \label{Konfiguration}
 Der Geodaten Importer kann über die Datei \textit{contrib/run\_geo.sh}
@@ -267,12 +195,11 @@
 \textbf{RIVER\_PATH}
 \\Der Pfad zum Gewässer im Dateisystem.
 
-\textbf{RIVER\_ID}
-\\Die Datenbank ID des zu importierenden Gewässers.
-
-\textbf{TARGET\_SRS}
-\\Das EPSG Referenzsystem in das die Geodaten beim Import projeziert werden
-sollen.
+\textbf{RIVER\_NAME}
+\\Der Datenbank Name des zu importierenden Gewässers. Wird dieser Parameter
+nicht übergeben, werden die Ordnernamen in dem mit dem Parameter RIVER\_PATH
+angegebenen Verzeichnis als Flussnamen interpretiert und es wird versucht,
+diese zu importieren.
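Die folgende Python-Skizze deutet dieses Verhalten an; sie ist nur eine
vereinfachte Annahme und nicht der tatsächliche Importer-Code.

\begin{lstlisting}
# Skizze: Flussnamen bestimmen, falls RIVER_NAME nicht gesetzt ist
# (Annahme: RIVER_PATH enthaelt je Gewaesser einen Unterordner)
import os

def determine_river_names(river_path, river_name=None):
    if river_name:
        return [river_name]
    return [entry for entry in os.listdir(river_path)
            if os.path.isdir(os.path.join(river_path, entry))]
\end{lstlisting}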
 
 \textbf{HOST}
 \\Der Host der Datenbank.
@@ -312,9 +239,6 @@
 \textbf{SKIP\_CROSSSECTIONS}
 \\Bei gesetztem Wert `1` werden keine Querprofilespuren importiert.
 
-\textbf{SKIP\_LINES}
-\\Bei gesetztem Wert `1` werden keine Linien importiert.
-
 \textbf{SKIP\_FIXPOINTS}
 \\Bei gesetztem Wert `1` werden keine Festpunkte importiert.
 
@@ -342,6 +266,9 @@
 \textbf{SKIP\_UESG}
 \\Bei gesetztem Wert `1` werden keine Überschwemmungsflächen importiert.
 
+\textbf{SKIP\_DGM}
+\\Bei gesetztem Wert `1` werden keine Informationen über Digitale Geländemodelle importiert.
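Die SKIP-Schalter lassen sich, wie in der folgenden Skizze angedeutet, als
einfache 0/1-Werte auswerten. Ob die Werte tatsächlich als
Umgebungsvariablen an das Python-Skript durchgereicht werden, ist hier
lediglich eine Annahme zur Veranschaulichung.

\begin{lstlisting}
# Skizze: Auswertung eines SKIP-Schalters
# (Annahme: run_geo.sh reicht die Variablen als Umgebungsvariablen durch)
import os

def skip(name):
    return os.environ.get(name, "0") == "1"

# Beispiel (importDgm ist hier nur ein angenommener Platzhalter):
# if not skip("SKIP_DGM"):
#     importDgm(...)
\end{lstlisting}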
+
 
 \subsection{Starten des Geodaten Importers}
 \label{Starten des Geodaten Importers}
--- a/flys-backend/doc/documentation/de/importer-hydr-morph.tex	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/documentation/de/importer-hydr-morph.tex	Thu Feb 28 12:47:24 2013 +0100
@@ -32,7 +32,7 @@
 \end{lstlisting}
 
 Auf gleiche Weise können dem Importer sämtliche Optionen zur Konfiguration
-beim Start mitgegeben werden. Im folgenden werden die möglichen System-Properties und
+beim Start mitgegeben werden. Im Folgenden werden die möglichen System-Properties und
 ihre Auswirkung auf den Import genauer beschrieben. In den Kapiteln
 \ref{configuration} und \ref{start-hydr} wird zur Einfachheit jedoch ein
 Shellskript verwendet, das eine Standardkonfiguration vorgibt und den Importer
@@ -53,6 +53,12 @@
 festgelegt, anhand derer die Klassifikation während des Import-Vorgangs
 vorgenommen wird. Details hierzu befinden sich im Kapitel \ref{annotation-types}.
 
+\subsubsection{Bundeswasserstraßen-IDs}
+Der Import und die Zuweisung der Bundeswasserstraßen-IDs können mit
+\textbf{-Dflys.backend.importer.skip.bwastr=true} unterbunden werden.
+Beim Import wird davon ausgegangen, dass sich die Datei \textbf{BWASTR\_ID.csv}
+neben der oder den zu importierenden gew-Dateien befindet.
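Der Importer selbst ist in Java implementiert; rein zur Veranschaulichung
der erwarteten Dateiablage zeigt die folgende Python-Skizze, wie die Datei
neben einer gew-Datei gesucht werden kann. Funktionsname und Rückgabewert
sind frei gewählte Annahmen.

\begin{lstlisting}
# Skizze: BWASTR_ID.csv neben der gew-Datei suchen
import os

def find_bwastr_file(gew_path):
    directory = os.path.dirname(os.path.abspath(gew_path))
    candidate = os.path.join(directory, "BWASTR_ID.csv")
    if os.path.isfile(candidate):
        return candidate
    return None
\end{lstlisting}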
+
 \subsubsection{Pegel, Stammdaten (*.glt, *.sta-Dateien)}
 Der Import von Pegel- und Stammdaten kann mit \textbf{'-Dflys.backend.importer.skip.gauges=true'}
 unterdrückt werden. Die .glt-Datei, die neben der .wst-Datei liegt, wird zuerst
@@ -65,7 +71,7 @@
 unterdrückt werden. Diese Flagge wird nur ausgewertet, wenn überhaupt
 Pegel-Daten (siehe oben) importiert werden.
 
-Mittels \textbf{-Dflys.backend.sta.parse.gauge.numbers=true'} wird versucht, die
+Mittels \textbf{-Dflys.backend.sta.parse.gauge.numbers=true} wird versucht, die
 offiziellen Pegelnummern aus den Stammdaten zu extrahieren.
 \textbf{Dies ist mit Vorsicht zu behandeln, denn die meisten STA-Dateien
 enthalten invalide Pegelnummern.}
@@ -100,6 +106,13 @@
 eines Längen- und eines MD5-Summen-Vergleichs inhaltliche Duplikate
 ausgeschlossen.
 
+\subsubsection{Profilspuren (*.w80-Dateien)}
+Der Import von W80-Profilspuren kann mit \textbf{-Dflys.backend.importer.skip.w80=true}
+unterdrückt werden. Es werden rekursiv alle *.w80-Dateien aus \textit{../../..}
+relativ zur gewaesser.wst-Datei betrachtet. Vor dem Import werden mit Hilfe
+eines Längen- und eines MD5-Summen-Vergleichs inhaltliche Duplikate
+ausgeschlossen.
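Das Prinzip des Längen- und MD5-Summen-Vergleichs lässt sich, unabhängig von
der eigentlichen Java-Implementierung, mit wenigen Zeilen skizzieren; die
folgende Python-Skizze dient nur der Veranschaulichung dieser Idee.

\begin{lstlisting}
# Skizze: Duplikate anhand von Dateilaenge und MD5-Summe ausschliessen
# (Annahme: 'paths' enthaelt die rekursiv gefundenen Dateien)
import hashlib
import os

def unique_files(paths):
    seen = set()
    result = []
    for path in paths:
        with open(path, "rb") as handle:
            digest = hashlib.md5(handle.read()).hexdigest()
        key = (os.path.getsize(path), digest)
        if key not in seen:
            seen.add(key)
            result.append(path)
    return result
\end{lstlisting}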
+
 \subsubsection{Hydraulische Kennzahlen (*.hyk)}
 Der Import von hydraulischen Kennzahlen kann mit \textbf{-Dflys.backend.importer.skip.hyks=true} unterdrückt
 werden. Es werden rekursiv alle *.hyk-Dateien aus \textit{../../..} relativ zur
@@ -747,7 +760,7 @@
 gestartet. Dazu führen folgenden Befehl aus:\\
 
 \begin{lstlisting}
-    contrib/run_hydr_morph.sh
+    ./run_hydr_morph.sh
 \end{lstlisting}
 
 Nachdem der Prompt der Konsole zurückkehrt, ist der Import abgeschlossen oder es
--- a/flys-backend/doc/documentation/de/importer-manual.tex	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/documentation/de/importer-manual.tex	Thu Feb 28 12:47:24 2013 +0100
@@ -19,14 +19,16 @@
 \usepackage{listings}
 \usepackage[hyperindex=true, bookmarks=true, breaklinks=true,
 colorlinks=true, linkcolor=red,bookmarksopen]{hyperref}
+% German Silbentrennung
+\usepackage[ngerman]{babel}
 
 %----------------------------------------------
 % Document DATE and VERSION
 % set these values when releasing a new version
 
-\newcommand{\documentdate}{30. August 2012}
-\newcommand{\documentversion}{1.0}
-\newcommand{\documentrevision}{rev5303}
+\newcommand{\documentdate}{19. Februar 2013}
+\newcommand{\documentversion}{1.1}
+\newcommand{\documentrevision}{rev5062}
 \newcommand{\documentID}{importer-manual.tex}
 %----------------------------------------------
 
--- a/flys-backend/doc/documentation/de/overview.tex	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/documentation/de/overview.tex	Thu Feb 28 12:47:24 2013 +0100
@@ -2,7 +2,7 @@
 
 Diese Dokumentation beschreibt die von Intevation entwickelten Werkzeuge zum
 Importieren der hydrologischen, morphologischen und geodätischen Daten der BfG.
-Die im folgenden\\ beschriebenen Werkzeuge zum Importieren der fachlichen und
+Die im Folgenden beschriebenen Werkzeuge zum Importieren der fachlichen und
 geodätischen Daten sind speziell auf das Verzeichnissystem der BfG ausgerichtet.
 Dabei wird angenommen, dass sich das Verzeichnis eines Gewässers auf oberster
 Ebene in drei Unterverzeichnisse aufgliedert:
@@ -22,10 +22,11 @@
 \subsubsection{Entpacken des Datenimporters}
 
 Damit die Software performant und korrekt ausgeführt werden kann, ist es
-erforderlich, dass sie auf dem selben System installiert und ausgeführt wird,
-wie auch die Datenbank installiert ist. Sollten Sie das Paket nicht auf dem
+erforderlich, dass sie auf demselben System wie die Datenbank installiert
+und ausgeführt wird.
+Sollten Sie das Paket nicht auf dem
 Zielsystem selbst heruntergeladen haben, sind ggf. weitere Werkzeuge notwendig.
-Im Fall, dass das Sie von einem Windows System auf das Zielsystem zugreifen
+Wenn Sie von einem Windows System auf das Zielsystem zugreifen
 wollen, können Sie beispielsweise folgende Werkzeuge verwenden:
 
 \begin{itemize}
@@ -48,8 +49,15 @@
 Bitte beachten Sie, dass diese Werkzeuge nicht zur Installtion und zum Betrieb
 der Software selbst notwendig sind!
 
+\subsection{Systemanforderungen}
+\label{Systemanforderungen}
+\begin{itemize}
+  \item Oracle oder PostgreSQL Datenbank inkl. Schema für FLYS
+  \item SUSE Enterprise Linux 11.2 SP 1
+\end{itemize}
 
-\subsubsection{Vorbereiten der Datenbank}
+\subsection{Installationsanleitung}
+\label{Installationsanleitung}
 
 Nachdem Sie das Paket nun in das Heimatverzeichnis des Nutzers auf das
 Zielsystem kopiert haben, entpacken Sie es mit folgenden Befehlen:
@@ -60,6 +68,95 @@
     cd flys-importer
 \end{lstlisting}
 
+\subsubsection{Java}
+Der flys-importer benötigt Java in Version 6. Um diese zu installieren, laden Sie
+bitte von \url{http://www.oracle.com/technetwork/java/javase/downloads/jdk6downloads-1902814.html}
+eine aktuelle Java Version als -rpm.bin herunter, zum Beispiel jdk-6u41-linux-x64-rpm.bin.
+
+Nach dem Herunterladen öffnen Sie eine Konsole und wechseln in das Downloadverzeichnis.
+Führen Sie nun folgende Befehle aus:
+
+ \begin{lstlisting}
+    su - # login als root
+    sh jdk-6u41-linux-x64-rpm.bin
+    <bestaetigen mit enter>
+    update-alternatives --install /usr/bin/java java /usr/java/jdk1.6.0_41/bin/java 5
+    update-alternatives --install /etc/alternatives/jre jre /usr/java/jdk1.6.0_41/jre 5
+    update-alternatives --config java
+ \end{lstlisting}
+
+\subsubsection{Python und GDAL}
+Installieren Sie nun die restlichen benötigten Pakete.
+Dazu installieren Sie zuerst einige Abhängigkeiten und anschließend die
+von der Intevation GmbH bereitgestellten speziellen Versionen von proj und libgeos.
+
+Um die Abhängigkeiten zu installieren, führen Sie bitte folgende Befehle aus:
+
+ \begin{lstlisting}
+     zypper ar http://download.opensuse.org/repositories/home:/intevation:/bfg/SLE_11/ "intevation:bfg"
+     rpm --import http://download.opensuse.org/repositories/home:/intevation:/bfg/SLE_11/repodata/repomd.xml.key
+     zypper ref # Paketliste neu laden
+     zypper in python libgeos0 libproj0 proj netcdf libnetcdf4 \
+        xerces-c libxerces-c-3_0 unixODBC postgresql-libs
+     zypper mr -d "intevation:bfg"
+     zypper ref # Paketliste neu laden
+ \end{lstlisting}
+
+%\subsubsection Oracle Instantclient\\
+%Der Oracle Instantclient 11.2.0.2.0 wird benötigt, damit der Importer mittels Python
+%und GDAL in die bestehende Oracle Datenbank schreiben kann. Wenn Sie
+%eine PosgreSQL Datenbank verwenden, können Sie diesen Schritt überspringen.
+%
+%Zur Anbindung von Oracle ist es erforderlich, folgende Archive von
+%Oracle herunterzuladen (Sie benötigen dafür ein Oracle Benutzerkonto):
+%
+%Der Oracle Instantclient 11.2 wird benötigt, damit der Importer mittels Python
+%und GDAL in die bestehende Oracle Datenbank schreiben kann. Dazu ist es
+%erforderlich, folgende Archive von Oracle herunterzuladen. 
+%
+% \begin{itemize}
+%    \item instantclient-basic-linux-x86-64-11.2.0.2.0.zip
+%    \item instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
+%    \item instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
+% \end{itemize}
+%
+%Zu finden sind die
+% Pakete unter:\\
+%\href{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
+%{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
+%
+%
+%Um diese Pakete zu installieren, öffnen Sie eine Konsole und wechseln
+%in das Verzeichnis in welches Sie die heruntergeladenen Pakete
+%gespeichert haben. (z.B.: cd /home/benutzername/Downloads )
+% Anschließend führen Sie folgende Befehle auf der Kommandozeile aus:
+%
+% \begin{lstlisting}
+%     unzip instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d /opt
+%     unzip instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d /opt
+%     unzip instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d /opt
+%
+%     mkdir /opt/instantclient_11_2/lib
+%     cd /opt/instantclient_11_2/lib
+%     ln -s ../libclntsh.so.11.1 .
+%     ln -s ../libclntsh.so.11.1 libclntsh.so
+%     ln -s ../libnnz11.so .
+%     ln -s ../libocci.so.11.1 .
+%     ln -s ../libocci.so.11.1 libocci.so
+%     ln -s ../libociei.so .
+%     ln -s ../libocijdbc11.so .
+%     ln -s ../libsqlplusic.so .
+%     ln -s ../libsqlplus.so .
+%
+%     echo "/opt/instantclient_11_2/lib/" > /etc/ld.so.conf.d/oci.conf 
+%     ldconfig
+% \end{lstlisting}
+%
+%Sollten keine Fehler aufgetreten sein, haben Sie den \textit{Oracle
+% Instantclient 11.2} erfolgreich entpackt und im Dateisystem unter
+% \textit{/opt/instantclient\_11\_2} abgelegt.
+%
+\subsubsection{Vorbereiten der Datenbank}
 Bevor die Importer verwendet werden können, ist es notwendig, dass eine leere
 Oracle Datenbank vorhanden ist. Anschließend müssen folgende SQL Skripte in
 diese Datenbank eingespielt werden:
@@ -81,24 +178,27 @@
 Mittels diesem SQL Skript werden die Indizes zum geodätischen Datenbankschema\\
 hinzugefügt.
 
-\item import-dems.sql \\
-In diesem Skript sind Befehle zum Einfügen der digitalen Geländemodelle
-enthalten. Die Dateipfade in diesem Skript sind so anzupassen, dass sie auf die
-entsprechenden Geländemodelle im Dateisystem verweisen. Es ist notwendig die
-Pfade absolut anzugeben.
-
 \end{enumerate}
 
 Zum Einspielen dieser Schemata setzen Sie folgende Befehle auf der Kommandozeile
 ab. Beachten Sie, dass $sqlplus$ im Pfad liegen muss, und der Linux-Nutzer
-dies Kommando ausführen können muss. Außerdem sind $benutzername$ und $passwort$
+dieses Kommando ausführen können muss. Außerdem sind $benutzername$ und $passwort$
 entsprechend Ihres Datenbank-Zugangs anzupassen.
+SQLPlus befindet sich in /opt/instantclient\_11\_2. Um es verfügbar zu machen,
+führen Sie im Importer Verzeichnis folgende Befehle aus:
+
+\begin{lstlisting}
+export LD_LIBRARY_PATH=/opt/instantclient_11_2/lib:$LD_LIBRARY_PATH
+export PATH=/opt/instantclient_11_2:$PATH
+\end{lstlisting}
+
+Nun erstellen Sie das Schema:
 
 \begin{lstlisting}
     sqlplus benutzername/passwort @schema/oracle.sql
     sqlplus benutzername/passwort @schema/oracle-minfo.sql
     sqlplus benutzername/passwort @schema/oracle-spatial.sql
     sqlplus benutzername/passwort @schema/oracle-spatial_idx.sql
-    sqlplus benutzername/passwort @schema/import-dems.sql
 \end{lstlisting}
 
+
--- a/flys-backend/doc/schema/import-dems.sql	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,24 +0,0 @@
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_00000_01010', 0.0, 101.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_00000_10110.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_00992_02030', 99.0, 203.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_09920_20300.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_02020_02998', 202.0, 300.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_20200_29980.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_02981_04010', 298.0, 401.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_29810_40100.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_04000_05009', 400.0, 501.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_40000_50090.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Elbe'), 'GRD_05001_05830', 500.0, 583.0, 2003, 2007, 'GK-3', 'DHHN92', 'ESRI-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_50010_58330.grd');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00000_00058', 0.0, 6.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0000-0580.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00058_00153', 6.0, 15.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0058-0153.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00153_00416', 15.0, 42.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0153-0416.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00414_01012_O', 41.0, 101.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', 'muss überarbeitet werden', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012O.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_00414_01012_W', 41.0, 101.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', 'muss überarbeitet werden', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012W.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01012_01488', 101.0, 145.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1012-1488.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01488_01666', 145.0, 167.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1488-1666.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01666_01960', 167.0, 196.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1666-1960.xyz');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_01960_02044', 196.0, 204.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1960-2044.XYZ');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_02044_02184', 204.0, 218.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/2044-2184.XYZ');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Mosel'), 'GRD_02184_02420', 218.0, 242.0, null, null, 'GK-2', 'DHHN85', 'ASCII-Grid', false, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/525480MO.XYZ');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00000_00079', 0.0, 8.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0000-0079_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00080_00204', 8.0, 20.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0080-0204_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00205_00314', 20.0, 31.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0205-0314_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00315_00541', 31.0, 54.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0315-0541_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00542_00655', 54.0, 65.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0542-0655_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00656_00828', 65.0, 83.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0656-0828_long.txt');
-INSERT INTO dem (river_id, name, lower, upper, year_from, year_to,projection, elevation_state, format, border_break, resolution, description, path) VALUES ((SELECT id from rivers WHERE name = 'Saar'), 'GRD_00829_00931', 83.0, 93.0, 1999, 2002, 'GK-2', '', 'ASCII-Grid', true, '2', '', '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0829-0931_erweitert.txt');
--- a/flys-backend/doc/schema/oracle-drop-spatial.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/oracle-drop-spatial.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -13,11 +13,6 @@
 DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CROSS_SECTION_TRACKS';
 DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
 
-DROP TRIGGER lines_trigger;
-DROP TABLE lines;
-DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'LINES';
-DROP SEQUENCE LINES_ID_SEQ;
-
 DROP TRIGGER buildings_trigger;
 DROP TABLE buildings;
 DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'BUILDINGS';
@@ -42,10 +37,15 @@
 DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CATCHMENT';
 DROP SEQUENCE CATCHMENT_ID_SEQ;
 
-DROP TRIGGER hws_trigger;
-DROP TABLE hws;
-DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS';
-DROP SEQUENCE HWS_ID_SEQ;
+DROP TRIGGER hws_lines_trigger;
+DROP TABLE hws_lines;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS_LINES';
+DROP SEQUENCE HWS_LINES_ID_SEQ;
+
+DROP TRIGGER hws_points_trigger;
+DROP TABLE hws_points;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS_POINTS';
+DROP SEQUENCE HWS_POINTS_ID_SEQ;
 
 DROP TRIGGER floodmaps_trigger;
 DROP TABLE floodmaps;
@@ -66,3 +66,9 @@
 DROP TABLE gauge_location;
 DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'GAUGE_LOCATION';
 DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
+
+DROP TABLE hws_kinds;
+DROP TABLE sectie_kinds;
+DROP TABLE sobek_kinds;
+DROP TABLE fed_states;
+DROP TABLE boundary_kinds;
--- a/flys-backend/doc/schema/oracle-drop.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/oracle-drop.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -88,3 +88,7 @@
 DROP VIEW wst_value_table;
 DROP VIEW wst_w_values ;
 DROP VIEW wst_q_values;
+DROP VIEW official_lines;
+DROP VIEW q_main_values;
+DROP VIEW official_q_values;
+DROP VIEW wst_ranges;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-spatial-migrate-dami.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,122 @@
+DROP TRIGGER hws_trigger;
+DROP TABLE hws;
+DROP SEQUENCE HWS_ID_SEQ;
+
+--Static lookup tables for Hochwasserschutzanlagen
+CREATE TABLE hws_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    kind VARCHAR(64) NOT NULL
+);
+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass');
+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm');
+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben');
+
+CREATE TABLE fed_states (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(23) NOT NULL
+);
+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern');
+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen');
+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen');
+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen');
+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz');
+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland');
+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein');
+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg');
+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern');
+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen');
+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg');
+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt');
+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen');
+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin');
+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen');
+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg');
+
+-- HWS-Lines
+CREATE SEQUENCE HWS_LINES_ID_SEQ;
+CREATE TABLE hws_lines (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    kind_id NUMBER(2) DEFAULT 2 REFERENCES hws_kinds(id),
+    fed_state_id NUMBER(2) REFERENCES fed_states(id),
+    river_id NUMBER(38) REFERENCES rivers(id),
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official NUMBER DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side NUMBER DEFAULT 0,
+    source VARCHAR(256),
+    status_date TIMESTAMP,
+    description VARCHAR(256),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+CREATE OR REPLACE TRIGGER hws_lines_trigger BEFORE INSERT ON hws_lines FOR each ROW
+    BEGIN
+        SELECT HWS_LINES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+
+-- HWS Points lookup tables
+CREATE TABLE sectie_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
+INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
+INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
+
+CREATE TABLE sobek_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
+INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
+
+CREATE TABLE boundary_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG');
+INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land');
+INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige');
+
+-- HWS Points
+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
+CREATE TABLE hws_points (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    kind_id NUMBER DEFAULT 2 REFERENCES hws_kinds(id),
+    fed_state_id NUMBER REFERENCES fed_states(id),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official NUMBER DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side NUMBER DEFAULT 0,
+    source VARCHAR(256),
+    status_date VARCHAR(256),
+    description VARCHAR(256),
+    freeboard NUMBER(19,5),
+    dike_km NUMBER(19,5),
+    z NUMBER(19,5),
+    z_target NUMBER(19,5),
+    rated_level NUMBER(19,5),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+
+-- Alterations to existing tables
+ALTER TABLE dem ADD srid NUMBER NOT NULL;
+ALTER TABLE hydr_boundaries_poly ADD sectie NUMBER REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD sobek NUMBER REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries ADD sectie NUMBER REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries ADD sobek NUMBER REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries ADD kind NUMBER REFERENCES boundary_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD kind NUMBER REFERENCES boundary_kinds(id);
--- a/flys-backend/doc/schema/oracle-spatial.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/oracle-spatial.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -1,9 +1,10 @@
+WHENEVER SQLERROR EXIT;
 -- Geodaesie/Flussachse+km/achse
 CREATE SEQUENCE RIVER_AXES_ID_SEQ;
 CREATE TABLE river_axes(
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     kind     NUMBER(38) DEFAULT 0 NOT NULL,
     name     VARCHAR(64),
     path     VARCHAR(256),
@@ -23,7 +24,7 @@
 CREATE TABLE river_axes_km(
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     km NUMBER(6,3),
     name     VARCHAR(64),
     path     VARCHAR(256),
@@ -43,7 +44,7 @@
 CREATE TABLE cross_section_tracks (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     km       NUMBER(38,12) NOT NULL,
     z        NUMBER(38,12) DEFAULT 0 NOT NULL,
     name     VARCHAR(64),
@@ -59,39 +60,12 @@
 --CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
 
 
--- TODO: TestMe. Fix Importer-Script. Fix oracle_spatial_idx.sql script.
--- Geodaesie/Linien/rohre-und-speeren
-CREATE SEQUENCE LINES_ID_SEQ;
-CREATE TABLE lines (
-    OGR_FID NUMBER(38),
-    GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
-    kind     VARCHAR2(16) NOT NULL,
-    z        NUMBER(38,12) DEFAULT 0,
-    name     VARCHAR(64),
-    path     VARCHAR(256),
-    ID NUMBER PRIMARY KEY NOT NULL
-);
-INSERT INTO USER_SDO_GEOM_METADATA VALUES ('lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
-CREATE OR REPLACE TRIGGER lines_trigger BEFORE INSERT ON lines FOR each ROW
-    BEGIN
-        SELECT LINES_ID_SEQ.nextval INTO :new.id FROM dual;
-    END;
-/
--- NOTE: Should lines should be 3D.
--- TODO: Test index. 
---CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
--- 'kind':
--- 0: ROHR1
--- 1: DAMM
-
-
 -- Geodaesie/Bauwerke/Wehre.shp
 CREATE SEQUENCE BUILDINGS_ID_SEQ;
 CREATE TABLE buildings(
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name VARCHAR2(255),
     path     VARCHAR(256),
     ID NUMBER PRIMARY KEY NOT NULL
@@ -110,7 +84,7 @@
 CREATE TABLE fixpoints (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     x NUMBER(38,11),
     y NUMBER(38,11),
     km NUMBER(38,11) NOT NULL,
@@ -133,7 +107,7 @@
 CREATE TABLE floodplain(
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name     VARCHAR(64),
     path     VARCHAR(256),
     ID NUMBER PRIMARY KEY NOT NULL
@@ -153,20 +127,21 @@
 CREATE SEQUENCE DEM_ID_SEQ;
 CREATE TABLE dem (
     ID NUMBER PRIMARY KEY NOT NULL,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     -- XXX Should we use the ranges table instead?
     name            VARCHAR(64),
     lower           NUMBER(19,5),
     upper           NUMBER(19,5),
-    year_from       VARCHAR(32) NOT NULL,
-    year_to         VARCHAR(32) NOT NULL,
-    projection      VARCHAR(32) NOT NULL,
+    year_from       VARCHAR(32),
+    year_to         VARCHAR(32),
+    projection      VARCHAR(32),
     elevation_state VARCHAR(32),
+    srid            NUMBER NOT NULL,
     format          VARCHAR(32),
-    border_break    BOOLEAN NOT NULL DEFAULT FALSE,
+    border_break    NUMBER(1) DEFAULT 0 NOT NULL,
     resolution      VARCHAR(16),
     description     VARCHAR(256),
-    path            VARCHAR(256)
+    path            VARCHAR(256) NOT NULL
 );
 CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW
     BEGIN
@@ -174,54 +149,128 @@
     END;
 /
 
+--Static lookup tables for Hochwasserschutzanlagen
+CREATE TABLE hws_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    kind VARCHAR(64) NOT NULL
+);
+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass');
+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm');
+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben');
 
--- Hydrologie/Einzugsgebiete/EZG.shp
-CREATE SEQUENCE CATCHMENT_ID_SEQ;
-CREATE TABLE catchment(
+CREATE TABLE fed_states (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(23) NOT NULL
+);
+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern');
+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen');
+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen');
+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen');
+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz');
+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland');
+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein');
+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg');
+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern');
+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen');
+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg');
+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt');
+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen');
+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin');
+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen');
+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg');
+
+--Hydrologie/HW-Schutzanlagen/hws.shp
+-- HWS-Lines
+CREATE SEQUENCE HWS_LINES_ID_SEQ;
+CREATE TABLE hws_lines (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
-    area NUMBER(19,5),
-    name VARCHAR2(255),
-    path     VARCHAR(256),
-    ID NUMBER PRIMARY KEY NOT NULL
+    kind_id NUMBER(2) DEFAULT 2 REFERENCES hws_kinds(id),
+    fed_state_id NUMBER(2) REFERENCES fed_states(id),
+    river_id NUMBER(38) REFERENCES rivers(id),
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official NUMBER DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side NUMBER DEFAULT 0,
+    source VARCHAR(256),
+    status_date TIMESTAMP,
+    description VARCHAR(256),
+    id NUMBER PRIMARY KEY NOT NULL
 );
-INSERT INTO USER_SDO_GEOM_METADATA VALUES ('CATCHMENT', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
-
-CREATE TRIGGER catchment_trigger BEFORE INSERT ON catchment FOR each ROW
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hws_lines_trigger BEFORE INSERT ON hws_lines FOR each ROW
     BEGIN
-        SELECT CATCHMENT_ID_SEQ.nextval INTO :new.id FROM dual;
+        SELECT HWS_LINES_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
---CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=polygon');
+-- HWS Points lookup tables
+CREATE TABLE sectie_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
+INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
+INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
 
---Hydrologie/HW-Schutzanlagen/hws.shp
-CREATE SEQUENCE HWS_ID_SEQ;
-CREATE TABLE hws(
+CREATE TABLE sobek_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
+INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
+
+CREATE TABLE boundary_kinds (
+    id NUMBER PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG');
+INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land');
+INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige');
+
+-- HWS Points
+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
+CREATE TABLE hws_points (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
-    hws_facility VARCHAR2(255),
-    type VARCHAR2(255),
-    name VARCHAR(64),
-    path     VARCHAR(256),
-    ID NUMBER PRIMARY KEY NOT NULL
+    kind_id NUMBER DEFAULT 2 REFERENCES hws_kinds(id),
+    fed_state_id NUMBER REFERENCES fed_states(id),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official NUMBER DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side NUMBER DEFAULT 0,
+    source VARCHAR(256),
+    status_date VARCHAR(256),
+    description VARCHAR(256),
+    freeboard NUMBER(19,5),
+    dike_km NUMBER(19,5),
+    z NUMBER(19,5),
+    z_target NUMBER(19,5),
+    rated_level NUMBER(19,5),
+    id NUMBER PRIMARY KEY NOT NULL
 );
-INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
-CREATE OR REPLACE TRIGGER hws_trigger BEFORE INSERT ON hws FOR each ROW
+
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws_points', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+
+CREATE OR REPLACE TRIGGER hws_points_trigger BEFORE INSERT ON hws_points FOR each ROW
     BEGIN
-        SELECT HWS_ID_SEQ.nextval INTO :new.id FROM dual;
+        SELECT HWS_POINTS_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
---CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
-
 
 --Hydrologie/UeSG
 CREATE SEQUENCE FLOODMAPS_ID_SEQ;
 CREATE TABLE floodmaps (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name VARCHAR(255),
     kind NUMBER(38),
     diff NUMBER(19,5),
@@ -237,17 +286,17 @@
         SELECT FLOODMAPS_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
-CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
-
 
 --Hydrologie/Hydr.Grenzen/Linien
 CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
 CREATE TABLE hydr_boundaries (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name VARCHAR(255),
-    kind NUMBER(38),
+    kind   NUMBER(38) REFERENCES boundary_kinds(id),
+    sectie NUMBER(38) REFERENCES sectie_kinds(id),
+    sobek  NUMBER(38) REFERENCES sobek_kinds(id),
     path     VARCHAR(256),
     id NUMBER PRIMARY KEY NOT NULL
 );
@@ -257,15 +306,16 @@
         SELECT HYDR_BOUNDARIES_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
-CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
 
 CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
 CREATE TABLE hydr_boundaries_poly (
     OGR_FID NUMBER(38),
     GEOM MDSYS.SDO_GEOMETRY,
-    river_id NUMBER(38),
+    river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
     name VARCHAR(255),
-    kind NUMBER(38),
+    kind   NUMBER(38) REFERENCES boundary_kinds(id),
+    sectie NUMBER(38) REFERENCES sectie_kinds(id),
+    sobek  NUMBER(38) REFERENCES sobek_kinds(id),
     path     VARCHAR(256),
     id NUMBER PRIMARY KEY NOT NULL
 );
@@ -275,8 +325,6 @@
         SELECT HYDR_BOUNDARIES_POLY_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
-CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
-
 
 -- Hydrologie/Streckendaten/
 CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ;
@@ -294,4 +342,3 @@
         SELECT GAUGE_LOCATION_ID_SEQ.nextval INTO :new.id FROM dual;
     END;
 /
-CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
--- a/flys-backend/doc/schema/oracle-spatial_idx.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/oracle-spatial_idx.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -1,9 +1,32 @@
-CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=multipolygon');
-CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
-CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
-CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
-CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
-CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
-CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+-- TODO: index prevents `DELETE FROM rivers' on 11g
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
+
+-- TODO: index prevents importing on 11g.
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+-- TODO: index prevents `DELETE FROM rivers' on 11g
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+
+-- TODO: index prevents importing on 11g.
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+-- TODO: index prevents importing on 11g.
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
 CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
-CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+-- TODO: index prevents importing on 11g.
+-- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein"
+-- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+CREATE INDEX hws_points_spatial_idx ON hws_points(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+
--- a/flys-backend/doc/schema/oracle.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/oracle.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -340,38 +340,40 @@
 
 
 -- ADD CONSTRAINTs
-ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes;
 ALTER TABLE annotations ADD CONSTRAINT cAnnotationsEdges FOREIGN KEY (edge_id) REFERENCES edges;
 ALTER TABLE annotations ADD CONSTRAINT cAnnotationsPositions FOREIGN KEY (position_id) REFERENCES positions;
-ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes;
 ALTER TABLE annotations ADD CONSTRAINT cAnnotationsTypes FOREIGN KEY (type_id) REFERENCES annotation_types;
-ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections;
-ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines;
-ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers;
 ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
-ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables foreign key (table_id) REFERENCES discharge_tables;
 ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
-ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
-ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers;
-ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges;
-ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks;
-ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations;
 ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFlowZoneTypes FOREIGN KEY (type_id) REFERENCES hyk_flow_zone_types;
-ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers;
-ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values;
 ALTER TABLE main_values ADD CONSTRAINT cMainValuesTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
-ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
-ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values;
 ALTER TABLE named_main_values ADD CONSTRAINT cNamedMainValuesMainValueTypes FOREIGN KEY (type_id) REFERENCES main_value_types;
-ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers;
 ALTER TABLE rivers ADD CONSTRAINT cRiversUnits FOREIGN KEY (wst_unit_id) REFERENCES units;
-ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
-ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges;
-ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
 ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
-ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts;
-ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES;
-ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers;
+
+-- Cascading references
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges ON DELETE CASCADE;
+ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections ON DELETE CASCADE;
+ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines ON DELETE CASCADE;
+ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
+ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges ON DELETE CASCADE;
+ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables FOREIGN KEY (table_id) REFERENCES discharge_tables ON DELETE CASCADE;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges ON DELETE CASCADE;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
+ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks ON DELETE CASCADE;
+ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations ON DELETE CASCADE;
+ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries ON DELETE CASCADE;
+ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges ON DELETE CASCADE;
+ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges ON DELETE CASCADE;
+ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts ON DELETE CASCADE;
+ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE;
+ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES ON DELETE CASCADE;
+ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
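+
+-- Illustrative example (hypothetical id): with the cascades above, removing a
+-- parent row cleans up its dependants transitively, e.g.
+--   DELETE FROM wsts WHERE id = 42;
+-- also removes that wst's wst_columns together with their wst_column_values
+-- and wst_column_q_ranges, provided no non-cascading references remain.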
 
 -- VIEWS
 
--- a/flys-backend/doc/schema/postgresql-drop-spatial.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -9,9 +9,6 @@
 DROP TABLE cross_section_tracks;
 DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
 
-DROP TABLE lines;
-DROP SEQUENCE LINES_ID_SEQ;
-
 DROP TABLE buildings;
 DROP SEQUENCE BUILDINGS_ID_SEQ;
 
@@ -24,11 +21,11 @@
 DROP TABLE dem;
 DROP SEQUENCE DEM_ID_SEQ;
 
-DROP TABLE catchment;
-DROP SEQUENCE CATCHMENT_ID_SEQ;
+DROP TABLE hws_points;
+DROP SEQUENCE HWS_POINTS_ID_SEQ;
 
-DROP TABLE hws;
-DROP SEQUENCE HWS_ID_SEQ;
+DROP TABLE hws_lines;
+DROP SEQUENCE HWS_LINES_ID_SEQ;
 
 DROP TABLE floodmaps;
 DROP SEQUENCE FLOODMAPS_ID_SEQ;
@@ -42,4 +39,10 @@
 DROP TABLE gauge_location;
 DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
 
+DROP TABLE fed_states;
+DROP TABLE hws_kinds;
+DROP TABLE sobek_kinds;
+DROP TABLE sectie_kinds;
+DROP TABLE boundary_kinds;
+
 COMMIT;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql-migrate-dami.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,127 @@
+DROP TABLE hws;
+DROP SEQUENCE HWS_ID_SEQ;
+DROP TABLE lines;
+DROP SEQUENCE LINES_ID_SEQ;
+DROP TABLE catchment;
+DROP SEQUENCE CATCHMENT_ID_SEQ;
+
+-- Static lookup tables for Hochwasserschutzanlagen
+CREATE TABLE hws_kinds (
+    id int PRIMARY KEY NOT NULL,
+    kind VARCHAR(64) NOT NULL
+);
+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass');
+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm');
+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben');
+
+CREATE TABLE fed_states (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(23) NOT NULL
+);
+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern');
+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen');
+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen');
+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen');
+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz');
+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland');
+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein');
+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg');
+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern');
+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen');
+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg');
+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt');
+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen');
+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin');
+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen');
+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg');
+
+CREATE TABLE sectie_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
+INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
+INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
+
+CREATE TABLE sobek_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
+INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
+
+CREATE TABLE boundary_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG');
+INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land');
+INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige');
+
+--Hydrologie/HW-Schutzanlagen/*Linien.shp
+CREATE SEQUENCE HWS_LINES_ID_SEQ;
+CREATE TABLE hws_lines (
+    id int PRIMARY KEY NOT NULL,
+    ogr_fid int,
+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
+    fed_state_id int REFERENCES fed_states(id),
+    river_id int REFERENCES rivers(id),
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official INT DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side INT DEFAULT 0,
+    source VARCHAR(256),
+    status_date TIMESTAMP,
+    description VARCHAR(256)
+);
+SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 3);
+-- TODO: clarify whether dike_km_from and dike_km_to are geometries.
+
+ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ');
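+
+-- Illustrative insert (hypothetical values): id is drawn from
+-- HWS_LINES_ID_SEQ and kind_id falls back to its default 2 ('Damm'), e.g.:
+--   INSERT INTO hws_lines (river_id, name, geom)
+--   VALUES (1, 'Beispieldeich',
+--       ST_GeomFromText('LINESTRING(3500000 5600000 0, 3500100 5600100 0)', 31467));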
+
+--Hydrologie/HW-Schutzanlagen/*Punkte.shp
+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
+CREATE TABLE hws_points (
+    id int PRIMARY KEY NOT NULL,
+    ogr_fid int,
+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
+    fed_state_id int REFERENCES fed_states(id),
+    river_id int REFERENCES rivers(id),
+    name VARCHAR,
+    path VARCHAR,
+    official INT DEFAULT 0,
+    agency VARCHAR,
+    range VARCHAR,
+    shore_side INT DEFAULT 0,
+    source VARCHAR,
+    status_date VARCHAR,
+    description VARCHAR,
+    freeboard FLOAT8,
+    dike_km FLOAT8,
+    z FLOAT8,
+    z_target FLOAT8,
+    rated_level FLOAT8
+);
+SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2);
+
+ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ');
+
+ALTER TABLE hydr_boundaries_poly ADD COLUMN sectie INT REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD COLUMN sobek INT REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries_poly ADD FOREIGN KEY (kind) REFERENCES boundary_kinds(id);
+ALTER TABLE hydr_boundaries ADD COLUMN sectie INT REFERENCES sectie_kinds(id);
+ALTER TABLE hydr_boundaries ADD COLUMN sobek INT REFERENCES sobek_kinds(id);
+ALTER TABLE hydr_boundaries ADD FOREIGN KEY (kind) REFERENCES boundary_kinds(id);
+ALTER TABLE dem ADD COLUMN srid INT NOT NULL;
+ALTER TABLE dem ALTER COLUMN year_from DROP NOT NULL;
+ALTER TABLE dem ALTER COLUMN year_to DROP NOT NULL;
+ALTER TABLE dem ALTER COLUMN projection DROP NOT NULL;
+ALTER TABLE dem ALTER COLUMN path SET NOT NULL;
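+
+-- Note (assuming dem already contains rows): ADD COLUMN ... NOT NULL without a
+-- DEFAULT and SET NOT NULL on columns holding NULLs will fail on PostgreSQL.
+-- In that case add and backfill first, e.g.:
+--   ALTER TABLE dem ADD COLUMN srid INT;
+--   UPDATE dem SET srid = 31467;  -- or the SRID matching the stored projection
+--   ALTER TABLE dem ALTER COLUMN srid SET NOT NULL;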
+
+COMMIT;
+
--- a/flys-backend/doc/schema/postgresql-minfo.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/postgresql-minfo.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -46,12 +46,12 @@
     evaluation_by           VARCHAR(255),
     description             VARCHAR(255),
     PRIMARY KEY(id),
-    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id),
     CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id),
     CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
     CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
-    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE
 );
 
 
@@ -72,7 +72,7 @@
     CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id),
     CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
     CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
-    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE
 );
 
 
@@ -88,7 +88,7 @@
     sounding_width          NUMERIC,
     width                   NUMERIC,
     PRIMARY KEY(id),
-    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id)
+    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id) ON DELETE CASCADE
 );
 
 
@@ -100,7 +100,7 @@
     station                 NUMERIC NOT NULL,
     height                  NUMERIC,
     PRIMARY KEY(id),
-    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id)
+    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id) ON DELETE CASCADE
 );
 
 
@@ -125,7 +125,7 @@
     unit_id     int NOT NULL,
     description VARCHAR(256),
     PRIMARY KEY(id),
-    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
     CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
 );
@@ -141,7 +141,7 @@
     description         VARCHAR(256),
     year                int,
     PRIMARY KEY(id),
-    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id)
+    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id) ON DELETE CASCADE
 );
 
 
@@ -152,7 +152,7 @@
     river_id    int NOT NULL,
     unit_id     int NOT NULL,
     PRIMARY KEY(id),
-    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id)
 );
 
@@ -166,7 +166,7 @@
     width                   NUMERIC NOT NULL,
     description             VARCHAR(256),
     PRIMARY KEY(id),
-    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id)
+    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id) ON DELETE CASCADE
 );
 
 
@@ -180,7 +180,7 @@
     lower_discharge         VARCHAR(16)  NOT NULL,
     upper_discharge         VARCHAR(16),
     PRIMARY KEY(id),
-    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE
 );
 
 
@@ -192,8 +192,8 @@
     discharge_zone_id   int NOT NULL,
     description         VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
-    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id)
+    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
+    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id) ON DELETE CASCADE
 );
 
 
@@ -208,7 +208,7 @@
     main_channel            NUMERIC NOT NULL,
     shear_stress            NUMERIC NOT NULL,
     PRIMARY KEY(id),
-    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id)
+    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id) ON DELETE CASCADE
 );
 
 
@@ -220,7 +220,7 @@
     river_id    int NOT NULL,
     description VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE
 );
 
 CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
@@ -235,7 +235,7 @@
     v               NUMERIC NOT NULL,
     description     VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id)
+    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id) ON DELETE CASCADE
 );
 
 
@@ -262,7 +262,7 @@
     time_interval_id    int NOT NULL,
     description         VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
     CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id),
     CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
@@ -277,7 +277,7 @@
     station             NUMERIC NOT NULL,
     value               NUMERIC NOT NULL,
     PRIMARY KEY (id),
-    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id)
+    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id) ON DELETE CASCADE
 );
 
 
@@ -289,7 +289,7 @@
     unit_id     int NOT NULL,
     description VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
 );
 
@@ -301,7 +301,7 @@
     waterlevel_id   int NOT NULL,
     q               NUMERIC NOT NULL,
     PRIMARY KEY (id),
-    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
+    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id) ON DELETE CASCADE
 );
 
 
@@ -313,7 +313,7 @@
     station                 NUMERIC NOT NULL,
     w                       NUMERIC NOT NULL,
     PRIMARY KEY (id),
-    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
+    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id) ON DELETE CASCADE
 );
 
 
@@ -325,7 +325,7 @@
     unit_id     int NOT NULL,
     description VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
+    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id) ON DELETE CASCADE,
     CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
 );
 
@@ -337,7 +337,7 @@
     difference_id   int NOT NULL,
     description     VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
+    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id) ON DELETE CASCADE
 );
 
 
@@ -349,7 +349,7 @@
     station     NUMERIC NOT NULL,
     value       NUMERIC NOT NULL,
     PRIMARY KEY (id),
-    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id) ON DELETE CASCADE
 );
 
 
@@ -367,9 +367,9 @@
 	operator				 VARCHAR(64),
 	comment					 VARCHAR(512),
 	PRIMARY KEY (id),
-	CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
-	CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id),
-	CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id),
+	CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
+	CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
+	CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
 	CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
 	UNIQUE (river_id, station)
 );
@@ -383,7 +383,7 @@
     time_interval_id int NOT NULL,
     description      VARCHAR(256),
     PRIMARY KEY (id),
-    CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sqr_tinterval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
 );
 
@@ -400,6 +400,6 @@
     a              NUMERIC NOT NULL,
     b              NUMERIC NOT NULL,
     PRIMARY KEY (id),
-    CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id)
+    CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id) ON DELETE CASCADE
 );
 COMMIT;
--- a/flys-backend/doc/schema/postgresql-spatial.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/postgresql-spatial.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -4,7 +4,7 @@
 CREATE SEQUENCE RIVER_AXES_ID_SEQ;
 CREATE TABLE river_axes (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
     kind     int             NOT NULL DEFAULT 0,
     name     VARCHAR(64),
     path     VARCHAR(256)
@@ -18,8 +18,8 @@
 CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ;
 CREATE TABLE river_axes_km (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    km       NUMERIC NOT NULL,
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    km       FLOAT8 NOT NULL,
     name     VARCHAR(64),
     path     VARCHAR(256)
 );
@@ -31,9 +31,9 @@
 CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
 CREATE TABLE cross_section_tracks (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    km       NUMERIC NOT NULL,
-    z        NUMERIC NOT NULL DEFAULT 0,
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    km       FLOAT8 NOT NULL,
+    z        FLOAT8 NOT NULL DEFAULT 0,
     name     VARCHAR(64),
     path     VARCHAR(256)
 );
@@ -41,28 +41,11 @@
 ALTER TABLE cross_section_tracks ALTER COLUMN id SET DEFAULT NEXTVAL('CROSS_SECTION_TRACKS_ID_SEQ');
 
 
--- Geodaesie/Linien/rohre-und-spreen
-CREATE SEQUENCE LINES_ID_SEQ;
-CREATE TABLE lines (
-    id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    kind     VARCHAR(16) NOT NULL,
-    z        NUMERIC DEFAULT 0,
-    name     VARCHAR(64),
-    path     VARCHAR(256)
-);
-SELECT AddGeometryColumn('lines', 'geom', 31467, 'LINESTRING', 3);
-ALTER TABLE lines ALTER COLUMN id SET DEFAULT NEXTVAL('LINES_ID_SEQ');
--- 'kind':
--- 0: ROHR1
--- 1: DAMM
-
-
 -- Geodaesie/Bauwerke/Wehre.shp
 CREATE SEQUENCE BUILDINGS_ID_SEQ;
 CREATE TABLE buildings (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
     name     VARCHAR(256),
     path     VARCHAR(256)
 );
@@ -74,10 +57,10 @@
 CREATE SEQUENCE FIXPOINTS_ID_SEQ;
 CREATE TABLE fixpoints (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    x        int,
-    y        int,
-    km       NUMERIC NOT NULL,
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    x        FLOAT8,
+    y        FLOAT8,
+    km       FLOAT8 NOT NULL,
     HPGP     VARCHAR(2),
     name     VARCHAR(64),
     path     VARCHAR(256)
@@ -90,7 +73,7 @@
 CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
 CREATE TABLE floodplain (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
     name     VARCHAR(64),
     path     VARCHAR(256)
 );
@@ -102,50 +85,104 @@
 CREATE SEQUENCE DEM_ID_SEQ;
 CREATE TABLE dem (
     id       int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
     -- XXX Should we use the ranges table instead?
     name            VARCHAR(64),
-    lower           NUMERIC,
-    upper           NUMERIC,
-    year_from       VARCHAR(32) NOT NULL,
-    year_to         VARCHAR(32) NOT NULL,
-    projection      VARCHAR(32) NOT NULL,
+    lower           FLOAT8,
+    upper           FLOAT8,
+    year_from       VARCHAR(32),
+    year_to         VARCHAR(32),
+    projection      VARCHAR(32),
+    srid            int NOT NULL,
     elevation_state VARCHAR(32),
     format          VARCHAR(32),
     border_break    BOOLEAN NOT NULL DEFAULT FALSE,
     resolution      VARCHAR(16),
     description     VARCHAR(256),
-    path            VARCHAR(256)
+    path            VARCHAR(256) NOT NULL
 );
 ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ');
 
 
--- Hydrologie/Einzugsgebiete/EZG.shp
-CREATE SEQUENCE CATCHMENT_ID_SEQ;
-CREATE TABLE catchment (
+-- Static lookup tables for Hochwasserschutzanlagen
+CREATE TABLE hws_kinds (
     id int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    area NUMERIC,
-    name VARCHAR(256),
-    path     VARCHAR(256)
+    kind VARCHAR(64) NOT NULL
 );
-SELECT AddGeometryColumn('catchment','geom',31467,'POLYGON',2);
-ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ');
-
+INSERT INTO hws_kinds (id, kind) VALUES (1, 'Durchlass');
+INSERT INTO hws_kinds (id, kind) VALUES (2, 'Damm');
+INSERT INTO hws_kinds (id, kind) VALUES (3, 'Graben');
 
---Hydrologie/HW-Schutzanlagen/hws.shp
-CREATE SEQUENCE HWS_ID_SEQ;
-CREATE TABLE hws (
+CREATE TABLE fed_states (
     id int PRIMARY KEY NOT NULL,
-    river_id int REFERENCES rivers(id),
-    hws_facility VARCHAR(256),
-    type VARCHAR(256),
-    name VARCHAR(64),
-    path     VARCHAR(256)
+    name VARCHAR(23) NOT NULL
 );
-SELECT AddGeometryColumn('hws','geom',31467,'LINESTRING',2);
-ALTER TABLE hws ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_ID_SEQ');
+INSERT INTO fed_states (id, name) VALUES (1, 'Bayern');
+INSERT INTO fed_states (id, name) VALUES (2, 'Hessen');
+INSERT INTO fed_states (id, name) VALUES (3, 'Niedersachsen');
+INSERT INTO fed_states (id, name) VALUES (4, 'Nordrhein-Westfalen');
+INSERT INTO fed_states (id, name) VALUES (5, 'Rheinland-Pfalz');
+INSERT INTO fed_states (id, name) VALUES (6, 'Saarland');
+INSERT INTO fed_states (id, name) VALUES (7, 'Schleswig-Holstein');
+INSERT INTO fed_states (id, name) VALUES (8, 'Brandenburg');
+INSERT INTO fed_states (id, name) VALUES (9, 'Mecklenburg-Vorpommern');
+INSERT INTO fed_states (id, name) VALUES (10, 'Thüringen');
+INSERT INTO fed_states (id, name) VALUES (11, 'Baden-Württemberg');
+INSERT INTO fed_states (id, name) VALUES (12, 'Sachsen-Anhalt');
+INSERT INTO fed_states (id, name) VALUES (13, 'Sachsen');
+INSERT INTO fed_states (id, name) VALUES (14, 'Berlin');
+INSERT INTO fed_states (id, name) VALUES (15, 'Bremen');
+INSERT INTO fed_states (id, name) VALUES (16, 'Hamburg');
 
+--Hydrologie/HW-Schutzanlagen/*Linien.shp
+CREATE SEQUENCE HWS_LINES_ID_SEQ;
+CREATE TABLE hws_lines (
+    id int PRIMARY KEY NOT NULL,
+    ogr_fid int,
+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
+    fed_state_id int REFERENCES fed_states(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    name VARCHAR(256),
+    path VARCHAR(256),
+    official INT DEFAULT 0,
+    agency VARCHAR(256),
+    range VARCHAR(256),
+    shore_side INT DEFAULT 0,
+    source VARCHAR(256),
+    status_date TIMESTAMP,
+    description VARCHAR(256)
+);
+SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 3);
+-- TODO: clarify whether dike_km_from and dike_km_to are geometries.
+
+ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ');
+
+--Hydrologie/HW-Schutzanlagen/*Punkte.shp
+CREATE SEQUENCE HWS_POINTS_ID_SEQ;
+CREATE TABLE hws_points (
+    id int PRIMARY KEY NOT NULL,
+    ogr_fid int,
+    kind_id int REFERENCES hws_kinds(id) DEFAULT 2,
+    fed_state_id int REFERENCES fed_states(id),
+    river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+    name VARCHAR,
+    path VARCHAR,
+    official INT DEFAULT 0,
+    agency VARCHAR,
+    range VARCHAR,
+    shore_side INT DEFAULT 0,
+    source VARCHAR,
+    status_date VARCHAR,
+    description VARCHAR,
+    freeboard FLOAT8,
+    dike_km FLOAT8,
+    z FLOAT8,
+    z_target FLOAT8,
+    rated_level FLOAT8
+);
+SELECT AddGeometryColumn('hws_points', 'geom', 31467, 'POINT', 2);
+
+ALTER TABLE hws_points ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_POINTS_ID_SEQ');
 
 --
 --Hydrologie/UeSG
@@ -160,13 +197,13 @@
 CREATE SEQUENCE FLOODMAPS_ID_SEQ;
 CREATE TABLE floodmaps (
     id         int PRIMARY KEY NOT NULL,
-    river_id   int REFERENCES rivers(id),
+    river_id   int REFERENCES rivers(id) ON DELETE CASCADE,
     name       varchar(64) NOT NULL,
     kind       int NOT NULL,
-    diff       real,
+    diff       FLOAT8,
     count      int,
-    area       real,
-    perimeter  real,
+    area       FLOAT8,
+    perimeter  FLOAT8,
     path     VARCHAR(256)
 );
 SELECT AddGeometryColumn('floodmaps', 'geom', 31467, 'MULTIPOLYGON', 2);
@@ -174,13 +211,40 @@
 ALTER TABLE floodmaps ADD CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POLYGON'::text OR geometrytype(geom) = 'MULTIPOLYGON'::text);
 ALTER TABLE floodmaps ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODMAPS_ID_SEQ');
 
+CREATE TABLE sectie_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
+INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
+INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
+
+CREATE TABLE sobek_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
+INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
+
+CREATE TABLE boundary_kinds (
+    id int PRIMARY KEY NOT NULL,
+    name VARCHAR(64) NOT NULL
+);
+INSERT INTO boundary_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO boundary_kinds (id, name) VALUES (1, 'BfG');
+INSERT INTO boundary_kinds (id, name) VALUES (2, 'Land');
+INSERT INTO boundary_kinds (id, name) VALUES (3, 'Sonstige');
 
 CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
 CREATE TABLE hydr_boundaries (
     id         int PRIMARY KEY NOT NULL,
-    river_id   int REFERENCES rivers(id),
+    river_id   int REFERENCES rivers(id) ON DELETE CASCADE,
     name       VARCHAR(255),
-    kind       int,
+    kind       int REFERENCES boundary_kinds(id),
+    sectie     int REFERENCES sectie_kinds(id),
+    sobek      int REFERENCES sobek_kinds(id),
     path       VARCHAR(256)
 );
 SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'LINESTRING',3);
@@ -190,9 +254,11 @@
 CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
 CREATE TABLE hydr_boundaries_poly (
     id         int PRIMARY KEY NOT NULL,
-    river_id   int REFERENCES rivers(id),
+    river_id   int REFERENCES rivers(id) ON DELETE CASCADE,
     name       VARCHAR(255),
-    kind       int,
+    kind       int REFERENCES boundary_kinds(id),
+    sectie     int REFERENCES sectie_kinds(id),
+    sobek      int REFERENCES sobek_kinds(id),
     path       VARCHAR(256)
 );
 SELECT AddGeometryColumn('hydr_boundaries_poly','geom',31467,'POLYGON',3);
@@ -202,7 +268,7 @@
 CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ;
 CREATE TABLE gauge_location (
     id         int PRIMARY KEY NOT NULL,
-    river_id   int REFERENCES rivers(id),
+    river_id   int REFERENCES rivers(id) ON DELETE CASCADE,
     name       VARCHAR(255),
     path       VARCHAR(256)
 );
--- a/flys-backend/doc/schema/postgresql.sql	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/doc/schema/postgresql.sql	Thu Feb 28 12:47:24 2013 +0100
@@ -31,7 +31,7 @@
 
 CREATE TABLE ranges (
     id       int PRIMARY KEY NOT NULL,
-    river_id int             NOT NULL REFERENCES rivers(id),
+    river_id int             NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     a        NUMERIC         NOT NULL,
     b        NUMERIC,
     UNIQUE (river_id, a, b)
@@ -68,7 +68,7 @@
 
 CREATE TABLE annotations (
     id           int PRIMARY KEY NOT NULL,
-    range_id     int             NOT NULL REFERENCES ranges(id),
+    range_id     int             NOT NULL REFERENCES ranges(id) ON DELETE CASCADE,
     attribute_id int             NOT NULL REFERENCES attributes(id),
     position_id  int REFERENCES positions(id),
     edge_id      int REFERENCES edges(id),
@@ -81,7 +81,7 @@
 CREATE TABLE gauges (
     id              int PRIMARY KEY NOT NULL,
     name            VARCHAR(256)    NOT NULL,
-    river_id        int             NOT NULL REFERENCES rivers(id),
+    river_id        int             NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     station         NUMERIC         NOT NULL UNIQUE,
     aeo             NUMERIC         NOT NULL,
     official_number int8                     UNIQUE,
@@ -89,7 +89,7 @@
     -- Pegelnullpunkt
     datum    NUMERIC NOT NULL,
     -- Streckengueltigkeit
-    range_id int REFERENCES ranges (id),
+    range_id int REFERENCES ranges (id) ON DELETE CASCADE,
 
     UNIQUE (name, river_id),
     UNIQUE (river_id, station)
@@ -128,7 +128,7 @@
 
 CREATE TABLE main_values (
     id             int PRIMARY KEY NOT NULL,
-    gauge_id       int NOT NULL REFERENCES gauges(id),
+    gauge_id       int NOT NULL REFERENCES gauges(id) ON DELETE CASCADE,
     named_value_id int NOT NULL REFERENCES named_main_values(id),
     value          NUMERIC NOT NULL,
 
@@ -143,7 +143,7 @@
 
 CREATE TABLE discharge_tables (
     id               int PRIMARY KEY NOT NULL,
-    gauge_id         int NOT NULL REFERENCES gauges(id),
+    gauge_id         int NOT NULL REFERENCES gauges(id) ON DELETE CASCADE,
     description      VARCHAR(256) NOT NULL,
     bfg_id           VARCHAR(50),
     kind             int NOT NULL DEFAULT 0,
@@ -158,7 +158,7 @@
 
 CREATE TABLE discharge_table_values (
     id       int PRIMARY KEY NOT NULL,
-    table_id int NOT NULL REFERENCES discharge_tables(id),
+    table_id int NOT NULL REFERENCES discharge_tables(id) ON DELETE CASCADE,
     q        NUMERIC NOT NULL,
     w        NUMERIC NOT NULL,
 
@@ -170,7 +170,7 @@
 
 CREATE TABLE wsts (
     id          int PRIMARY KEY NOT NULL,
-    river_id    int NOT NULL REFERENCES rivers(id),
+    river_id    int NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     description VARCHAR(256) NOT NULL,
     kind        int NOT NULL DEFAULT 0,
     -- TODO: more meta infos
@@ -182,7 +182,7 @@
 
 CREATE TABLE wst_columns (
     id          int PRIMARY KEY NOT NULL,
-    wst_id      int NOT NULL REFERENCES wsts(id),
+    wst_id      int NOT NULL REFERENCES wsts(id) ON DELETE CASCADE,
     name        VARCHAR(256) NOT NULL,
     description VARCHAR(256),
     position    int NOT NULL DEFAULT 0,
@@ -198,7 +198,7 @@
 
 CREATE TABLE wst_column_values (
     id            int PRIMARY KEY NOT NULL,
-    wst_column_id int NOT NULL REFERENCES wst_columns(id),
+    wst_column_id int NOT NULL REFERENCES wst_columns(id) ON DELETE CASCADE,
     position      NUMERIC NOT NULL,
     w             NUMERIC NOT NULL,
 
@@ -211,7 +211,7 @@
 
 CREATE TABLE wst_q_ranges (
     id       int PRIMARY KEY NOT NULL,
-    range_id int NOT NULL REFERENCES ranges(id),
+    range_id int NOT NULL REFERENCES ranges(id) ON DELETE CASCADE,
     q        NUMERIC NOT NULL
 );
 
@@ -220,8 +220,8 @@
 
 CREATE TABLE wst_column_q_ranges (
     id             int PRIMARY KEY NOT NULL,
-    wst_column_id  int NOT NULL REFERENCES wst_columns(id),
-    wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id),
+    wst_column_id  int NOT NULL REFERENCES wst_columns(id) ON DELETE CASCADE,
+    wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id) ON DELETE CASCADE,
 
     UNIQUE (wst_column_id, wst_q_range_id)
 );
@@ -277,7 +277,7 @@
 
 CREATE TABLE cross_sections (
     id               int PRIMARY KEY NOT NULL,
-    river_id         int             NOT NULL REFERENCES rivers(id),
+    river_id         int             NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     time_interval_id int                      REFERENCES time_intervals(id),
     description      VARCHAR(256)
 );
@@ -287,7 +287,7 @@
 CREATE TABLE cross_section_lines (
     id               int PRIMARY KEY NOT NULL,
     km               NUMERIC         NOT NULL,
-    cross_section_id int             NOT NULL REFERENCES cross_sections(id),
+    cross_section_id int             NOT NULL REFERENCES cross_sections(id) ON DELETE CASCADE,
     UNIQUE (km, cross_section_id)
 );
 
@@ -295,7 +295,7 @@
 
 CREATE TABLE cross_section_points (
     id                    int PRIMARY KEY NOT NULL,
-    cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id),
+    cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id) ON DELETE CASCADE,
     col_pos               int             NOT NULL,
     x                     NUMERIC         NOT NULL,
     y                     NUMERIC         NOT NULL,
@@ -314,7 +314,7 @@
 
 CREATE TABLE hyks (
     id          int PRIMARY KEY NOT NULL,
-    river_id    int             NOT NULL REFERENCES rivers(id),
+    river_id    int             NOT NULL REFERENCES rivers(id) ON DELETE CASCADE,
     description VARCHAR(256)    NOT NULL
 );
 
@@ -322,7 +322,7 @@
 
 CREATE TABLE hyk_entries (
     id          int PRIMARY KEY NOT NULL,
-    hyk_id      int             NOT NULL REFERENCES hyks(id),
+    hyk_id      int             NOT NULL REFERENCES hyks(id) ON DELETE CASCADE,
     km          NUMERIC         NOT NULL,
     measure     TIMESTAMP,
     UNIQUE (hyk_id, km)
@@ -333,7 +333,7 @@
 CREATE TABLE hyk_formations (
     id            int PRIMARY KEY NOT NULL,
     formation_num int             NOT NULL DEFAULT 0,
-    hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id),
+    hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id) ON DELETE CASCADE,
     top           NUMERIC         NOT NULL,
     bottom        NUMERIC         NOT NULL,
     distance_vl   NUMERIC         NOT NULL,
@@ -354,7 +354,7 @@
 
 CREATE TABLE hyk_flow_zones (
     id           int PRIMARY KEY NOT NULL,
-    formation_id int             NOT NULL REFERENCES hyk_formations(id),
+    formation_id int             NOT NULL REFERENCES hyk_formations(id) ON DELETE CASCADE,
     type_id      int             NOT NULL REFERENCES hyk_flow_zone_types(id),
     a            NUMERIC         NOT NULL,
     b            NUMERIC         NOT NULL,
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/pom-oracle.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,142 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>de.intevation.flys</groupId>
+  <artifactId>flys-backend</artifactId>
+  <version>1.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <name>flys-backend</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
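+
+  <!--
+    Usage sketch (assumption, not prescribed by this file): select this POM
+    explicitly, e.g. "mvn -f pom-oracle.xml clean package". The ojdbc5
+    dependency below is not served by the repositories configured here and
+    presumably has to be installed into the local Maven repository by hand.
+  -->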
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>hibernate3-maven-plugin</artifactId>
+        <version>2.2</version>
+        <!--
+        <configuration>
+            <componentProperties>
+                <propertyfile>src/main/config/hbm.properties</propertyfile>
+            </componentProperties>
+        </configuration>
+        -->
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>de.intevation.flys.importer.Importer</mainClass>
+              <packageName>de.intevation.flys.importer</packageName>
+            </manifest>
+          </archive>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>de.intevation.flys.importer.Importer</mainClass>
+            </manifest>
+          </archive>
+          <descriptorRefs>
+            <descriptorRef>jar-with-dependencies</descriptorRef>
+          </descriptorRefs>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>de.intevation.artifacts.common</groupId>
+      <artifactId>artifacts-common</artifactId>
+      <version>1.0-SNAPSHOT</version>
+    </dependency>    
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.sf.opencsv</groupId>
+      <artifactId>opencsv</artifactId>
+      <version>2.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-core</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-entitymanager</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.14</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-dbcp</groupId>
+      <artifactId>commons-dbcp</artifactId>
+      <version>1.4</version>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-postgis</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-oracle</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.postgis</groupId>
+        <artifactId>postgis-jdbc</artifactId>
+        <version>1.3.3</version>
+    </dependency>
+    <dependency>
+       <groupId>ojdbc5.jar</groupId>
+       <artifactId>ojdbc5</artifactId>
+       <version>0</version>
+    </dependency>
+  </dependencies>
+
+  <repositories>
+    <repository>
+      <id>repository.jboss.org/nexus</id>
+      <name>JBoss Repository - Nexus</name>
+      <url>http://repository.jboss.org/nexus/content/groups/public/</url>
+    </repository>
+    <repository>
+        <id>OSGEO GeoTools repo</id>
+        <url>http://download.osgeo.org/webdav/geotools</url>
+    </repository>
+    <repository>
+        <id>Hibernate Spatial repo</id>
+        <url>http://www.hibernatespatial.org/repository</url>
+    </repository>
+  </repositories>
+</project>
--- a/flys-backend/pom.xml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/pom.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -37,6 +37,31 @@
           <target>1.6</target>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>de.intevation.flys.importer.Importer</mainClass>
+              <packageName>de.intevation.flys.importer</packageName>
+            </manifest>
+          </archive>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <archive>
+            <manifest>
+              <mainClass>de.intevation.flys.importer.Importer</mainClass>
+            </manifest>
+          </archive>
+          <descriptorRefs>
+            <descriptorRef>jar-with-dependencies</descriptorRef>
+          </descriptorRefs>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 
@@ -89,11 +114,6 @@
         <version>1.1</version>
     </dependency>
     <dependency>
-        <groupId>org.hibernatespatial</groupId>
-        <artifactId>hibernate-spatial-oracle</artifactId>
-        <version>1.1</version>
-    </dependency>
-    <dependency>
         <groupId>org.postgis</groupId>
         <artifactId>postgis-jdbc</artifactId>
         <version>1.3.3</version>
--- a/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java	Thu Feb 28 12:47:24 2013 +0100
@@ -11,7 +11,7 @@
 import de.intevation.flys.model.BedHeightSingleValue;
 import de.intevation.flys.model.BedHeightType;
 import de.intevation.flys.model.Building;
-import de.intevation.flys.model.Catchment;
+import de.intevation.flys.model.BoundaryKind;
 import de.intevation.flys.model.CrossSection;
 import de.intevation.flys.model.CrossSectionLine;
 import de.intevation.flys.model.CrossSectionPoint;
@@ -23,6 +23,7 @@
 import de.intevation.flys.model.DischargeZone;
 import de.intevation.flys.model.Edge;
 import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.FedState;
 import de.intevation.flys.model.Fixpoint;
 import de.intevation.flys.model.Floodmaps;
 import de.intevation.flys.model.Floodplain;
@@ -33,15 +34,15 @@
 import de.intevation.flys.model.Gauge;
 import de.intevation.flys.model.GaugeLocation;
 import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.HWSKind;
+import de.intevation.flys.model.HWSLine;
 import de.intevation.flys.model.HYK;
 import de.intevation.flys.model.HYKEntry;
 import de.intevation.flys.model.HYKFlowZone;
 import de.intevation.flys.model.HYKFlowZoneType;
 import de.intevation.flys.model.HYKFormation;
-import de.intevation.flys.model.Hws;
 import de.intevation.flys.model.HydrBoundary;
 import de.intevation.flys.model.HydrBoundaryPoly;
-import de.intevation.flys.model.Line;
 import de.intevation.flys.model.LocationSystem;
 import de.intevation.flys.model.MainValue;
 import de.intevation.flys.model.MainValueType;
@@ -56,6 +57,8 @@
 import de.intevation.flys.model.RiverAxisKm;
 import de.intevation.flys.model.SQRelation;
 import de.intevation.flys.model.SQRelationValue;
+import de.intevation.flys.model.SectieKind;
+import de.intevation.flys.model.SobekKind;
 import de.intevation.flys.model.SedimentDensity;
 import de.intevation.flys.model.SedimentDensityValue;
 import de.intevation.flys.model.SedimentYield;
@@ -123,7 +126,7 @@
         BedHeightSingleValue.class,
         BedHeightType.class,
         Building.class,
-        Catchment.class,
+        BoundaryKind.class,
         CrossSection.class,
         CrossSectionLine.class,
         CrossSectionPoint.class,
@@ -135,6 +138,7 @@
         DischargeZone.class,
         Edge.class,
         ElevationModel.class,
+        FedState.class,
         Fixpoint.class,
         Floodplain.class,
         Floodmaps.class,
@@ -145,7 +149,8 @@
         Gauge.class,
         GaugeLocation.class,
         GrainFraction.class,
-        Hws.class,
+        HWSKind.class,
+        HWSLine.class,
         HydrBoundary.class,
         HydrBoundaryPoly.class,
         HYK.class,
@@ -153,7 +158,6 @@
         HYKFormation.class,
         HYKFlowZoneType.class,
         HYKFlowZone.class,
-        Line.class,
         LocationSystem.class,
         MainValueType.class,
         MeasurementStation.class,
@@ -166,6 +170,8 @@
         River.class,
         RiverAxis.class,
         RiverAxisKm.class,
+        SectieKind.class,
+        SobekKind.class,
         SedimentDensity.class,
         SedimentDensityValue.class,
         SedimentYield.class,
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Thu Feb 28 12:47:24 2013 +0100
@@ -10,9 +10,9 @@
 import de.intevation.flys.model.Building;
 import de.intevation.flys.model.CrossSectionTrack;
 import de.intevation.flys.model.Fixpoint;
-import de.intevation.flys.model.Line;
 import de.intevation.flys.model.River;
 import de.intevation.flys.model.RiverAxis;
+import de.intevation.flys.model.HWSLine;
 
 
 public class SpatialInfo {
@@ -42,7 +42,6 @@
             logger.info("Spatial information of River '" + RIVERNAME + "'");
             spatial.doRiverAxisInfo(river);
             spatial.doCrossSectionTracksInfo(river);
-            spatial.doLinesInfo(river);
             spatial.doBuildingsInfo(river);
             spatial.doFixpointsInfo(river);
         }
@@ -112,23 +111,6 @@
     }
 
 
-    protected void doLinesInfo(River river) {
-        Query query = session.createQuery(
-            "from Line where river =:river");
-        query.setParameter("river", river);
-
-        List<Line> list = query.list();
-
-        if (list == null || list.size() == 0) {
-            logger.warn("No Lines for '" + river.getName() + "' found!");
-            return;
-        }
-        else {
-            logger.info("River contains " + list.size() + " Lines.");
-        }
-    }
-
-
     protected void doBuildingsInfo(River river) {
         Query query = session.createQuery(
             "from Building where river =:river");
@@ -161,5 +143,26 @@
             logger.info("River contains " + list.size() + " Fixpoints.");
         }
     }
+
+    @Deprecated
+    protected void doLinesInfo(River river) {
+        doHWSLinesInfo(river);
+    }
+
+    protected void doHWSLinesInfo(River river) {
+        Query query = session.createQuery(
+            "from hws_lines where river =:river");
+        query.setParameter("river", river);
+
+        List<HWSLine> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Lines for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Lines.");
+        }
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/flys-backend/src/main/java/de/intevation/flys/importer/Config.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Config.java	Thu Feb 28 12:47:24 2013 +0100
@@ -17,6 +17,9 @@
     public static final String SKIP_GAUGES =
         "flys.backend.importer.skip.gauges";
 
+    public static final String SKIP_BWASTR =
+        "flys.backend.importer.skip.bwastr";
+
     public static final String SKIP_HISTORICAL_DISCHARGE_TABLES =
         "flys.backend.importer.skip.historical.discharge.tables";
 
@@ -119,6 +122,10 @@
         return getFlag(SKIP_HISTORICAL_DISCHARGE_TABLES);
     }
 
+    public boolean skipBWASTR() {
+        return getFlag(SKIP_BWASTR);
+    }
+
     public boolean skipAnnotations() {
         return getFlag(SKIP_ANNOTATIONS);
     }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java	Thu Feb 28 12:47:24 2013 +0100
@@ -25,9 +25,9 @@
             }
         };
 
-    protected Double         km;
+    protected Double km;
     protected ImportCrossSection crossSection;
-    protected List<XY>           points;
+    protected List<XY> points;
 
     protected CrossSectionLine peer;
 
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Thu Feb 28 12:47:24 2013 +0100
@@ -46,6 +46,7 @@
 /** Import all river-related data (files) that can be found. */
 public class ImportRiver
 {
+    /** Private logger. */
     private static Logger log = Logger.getLogger(ImportRiver.class);
 
     public static final String PEGEL_GLT = "PEGEL.GLT";
@@ -105,9 +106,11 @@
 
     protected String name;
 
-    protected File   wstFile;
+    protected Long officialNumber;
 
-    protected File   bbInfoFile;
+    protected File wstFile;
+
+    protected File bbInfoFile;
 
     protected List<ImportGauge> gauges;
 
@@ -155,6 +158,7 @@
 
     protected AnnotationClassifier annotationClassifier;
 
+    /** Database-mapped River instance. */
     protected River peer;
 
 
@@ -233,6 +237,14 @@
         this.name = name;
     }
 
+    public Long getOfficialNumber() {
+        return this.officialNumber;
+    }
+
+    public void setOfficialNumber(Long officialNumber) {
+        this.officialNumber = officialNumber;
+    }
+
     public File getWstFile() {
         return wstFile;
     }
@@ -329,6 +341,13 @@
         }
     }
 
+    public void storeOfficialNumber() {
+        if (Config.INSTANCE.skipBWASTR()) {
+            log.info("skip storing official number.");
+            return;
+        }
+        getPeer().setOfficialNumber(officialNumber);
+    }
 
     public void parseBedHeight() throws IOException {
         File minfoDir     = getMinfoDir();
@@ -541,7 +560,12 @@
             parser.parse(file);
         }
 
-        waterlevels = parser.getWaterlevels();
+        // The parsed ImportWaterlevels are converted into 'fixation' wsts
+        // and stored along with the other fixations.
+        for(ImportWst iw: parser.exportWsts()) {
+            //iw.setDescription("CSV" + iw.getDescription());
+            fixations.add(iw);
+        }
     }
 
     protected void parseMeasurementStations() throws IOException {
@@ -1058,6 +1082,7 @@
         storeWaterlevelDifferences();
         storeMeasurementStations();
         storeSQRelations();
+        storeOfficialNumber();
     }
 
     public void storeWstUnit() {
@@ -1097,8 +1122,8 @@
     }
 
     public void storeFixations() {
-        if (!Config.INSTANCE.skipFixations()) {
-            log.info("store fixation wsts");
+        if (!Config.INSTANCE.skipFixations() || !Config.INSTANCE.skipWaterlevels()) {
+            log.info("store fixation wsts and/or csvs");
             River river = getPeer();
             for (ImportWst wst: fixations) {
                 log.debug("name: " + wst.getDescription());
@@ -1435,13 +1460,19 @@
             Session session = ImporterSession.getInstance().getDatabaseSession();
             Query query = session.createQuery("from River where name=:name");
 
-            Unit u = wstUnit.getPeer();
+            Unit u = null;
+            if (wstUnit != null) {
+                u = wstUnit.getPeer();
+            }
 
             query.setString("name", name);
             List<River> rivers = query.list();
             if (rivers.isEmpty()) {
                 log.info("Store new river '" + name + "'");
                 peer = new River(name, u);
+                if (!Config.INSTANCE.skipBWASTR()) {
+                    peer.setOfficialNumber(officialNumber);
+                }
                 session.save(peer);
             }
             else {
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -31,14 +31,26 @@
         this.description = description;
     }
 
+    public String getDescription() {
+        return this.description;
+    }
+
     public void setUnit(ImportUnit unit) {
         this.unit = unit;
     }
 
+    public ImportUnit getUnit() {
+        return this.unit;
+    }
+
     public void addValue(ImportWaterlevelQRange qRange) {
         this.qRanges.add(qRange);
     }
 
+    public List<ImportWaterlevelQRange> getQRanges() {
+        return this.qRanges;
+    }
+
     public void storeDependencies(River river) {
         log.info("store dependencies");
 
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java	Thu Feb 28 12:47:24 2013 +0100
@@ -12,12 +12,12 @@
 import de.intevation.flys.model.WaterlevelQRange;
 
 
+/** Holds a Q value and a list of (station, W) values. */
 public class ImportWaterlevelQRange {
 
     private static final Logger log =
         Logger.getLogger(ImportWaterlevelQRange.class);
 
-
     private Double q;
 
     private List<ImportWaterlevelValue> values;
@@ -34,6 +34,13 @@
         this.values.add(value);
     }
 
+    public Double getQ() {
+        return this.q;
+    }
+
+    public List<ImportWaterlevelValue> getValues() {
+        return values;
+    }
 
     public void storeDependencies(Waterlevel waterlevel) {
         log.info("store dependencies");
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java	Thu Feb 28 12:47:24 2013 +0100
@@ -9,6 +9,7 @@
 import de.intevation.flys.model.WaterlevelValue;
 
 
+/** A W value at a given station. */
 public class ImportWaterlevelValue {
 
     private Double station;
@@ -28,6 +29,15 @@
     }
 
 
+    public Double getStation() {
+        return this.station;
+    }
+
+
+    public Double getW() {
+        return this.w;
+    }
+
     public WaterlevelValue getPeer(WaterlevelQRange qRange) {
         if (peer == null) {
             Session session = ImporterSession.getInstance().getDatabaseSession();
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Thu Feb 28 12:47:24 2013 +0100
@@ -23,6 +23,7 @@
 
     protected ImportUnit unit;
 
+    /** Wst as in db. */
     protected Wst peer;
 
     public ImportWst() {
@@ -87,6 +88,7 @@
         session.flush();
     }
 
+    /** Get corresponding mapped wst (from database). */
     public Wst getPeer(River river) {
         if (peer == null) {
             Session session = ImporterSession.getInstance().getDatabaseSession();
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Thu Feb 28 12:47:24 2013 +0100
@@ -15,6 +15,8 @@
 
 import org.apache.log4j.Logger;
 
+
+/** Unmapped column of a WST. */
 public class ImportWstColumn
 {
     private static Logger log = Logger.getLogger(ImportWstColumn.class);
@@ -112,6 +114,7 @@
         this.timeInterval = timeInterval;
     }
 
+    /** Get corresponding mapped wst-column (from database). */
     public WstColumn getPeer(River river) {
         if (peer == null) {
             Wst w = wst.getPeer(river);
--- a/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Thu Feb 28 12:47:24 2013 +0100
@@ -2,13 +2,16 @@
 
 import de.intevation.artifacts.common.utils.XMLUtils;
 
+import de.intevation.flys.importer.parsers.AnnotationClassifier;
+import de.intevation.flys.importer.parsers.BundesWasserStrassenParser;
 import de.intevation.flys.importer.parsers.InfoGewParser;
-import de.intevation.flys.importer.parsers.AnnotationClassifier;
 
 import java.io.File;
 import java.io.IOException;
 
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import java.sql.SQLException;
 
@@ -19,12 +22,16 @@
 
 import org.w3c.dom.Document;
 
+import de.intevation.flys.utils.StringUtil;
 
-/** Data Importer. */
+/** Data Importer. Further processing happens per-river. */
 public class Importer
 {
+    /** Private logger. */
     private static Logger log = Logger.getLogger(Importer.class);
 
+    private static final String BWASTR_ID_CSV_FILE = "BWASTR_ID.csv";
+
     protected List<ImportRiver> rivers;
 
     public Importer() {
@@ -42,6 +49,7 @@
         this.rivers = rivers;
     }
 
+    /** Write rivers and their dependencies/dependants to db. */
     public void writeRivers() {
         log.debug("write rivers started");
 
@@ -121,27 +129,66 @@
 
         log.info("Start parsing rivers...");
 
+        File bwastrFile = null;
+
         for (String gew: args) {
             log.info("parsing info gew file: " + gew);
+            File gewFile = new File(gew);
+            if (bwastrFile == null) {
+                bwastrFile = new File(gewFile.getParentFile(), BWASTR_ID_CSV_FILE);
+            }
             try {
-                infoGewParser.parse(new File(gew));
+                infoGewParser.parse(gewFile);
             }
             catch (IOException ioe) {
-                log.error("error while parsing gew: " + gew);
+                log.error("error while parsing gew: " + gew, ioe);
+                System.exit(1);
             }
         }
 
         String gew = Config.INSTANCE.getInfoGewFile();
         if (gew != null && gew.length() > 0) {
             log.info("parsing info gew file: " + gew);
+            File gewFile = new File(gew);
+            if (bwastrFile == null) {
+                bwastrFile = new File(gewFile.getParentFile(), BWASTR_ID_CSV_FILE);
+            }
             try {
-                infoGewParser.parse(new File(gew));
+                infoGewParser.parse(gewFile);
             }
             catch (IOException ioe) {
-                log.error("error while parsing gew: " + gew);
+                log.error("error while parsing gew: " + gew, ioe);
+                System.exit(1);
             }
         }
 
+        // Look for official numbers.
+        BundesWasserStrassenParser bwastrIdParser =
+            new BundesWasserStrassenParser();
+
+        // Read bwastrFile (river dir + BWASTR_ID_CSV_FILE).
+        if (!Config.INSTANCE.skipBWASTR()) {
+            try {
+                bwastrIdParser.parse(bwastrFile);
+                HashMap<String,Long> map = bwastrIdParser.getMap();
+
+                // Now link rivers with official numbers.
+                for(ImportRiver river: infoGewParser.getRivers()) {
+                    for(Map.Entry<String, Long> entry: map.entrySet()) {
+                        if (StringUtil.containsIgnoreCase(entry.getKey(), river.getName())) {
+                            river.setOfficialNumber(entry.getValue());
+                            log.debug(river.getName() + " is mapped to bwastr " + entry.getValue());
+                        }
+                    }
+                }
+            } catch (IOException ioe) {
+                log.warn("BWASTR-file could not be loaded.");
+            }
+        }
+        else {
+            log.debug("skip reading BWASTR_ID.csv");
+        }
+
         if (!Config.INSTANCE.dryRun()) {
             new Importer(infoGewParser.getRivers()).writeToDatabase();
         }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BundesWasserStrassenParser.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,89 @@
+package de.intevation.flys.importer.parsers;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportMorphWidth;
+import de.intevation.flys.importer.ImportMorphWidthValue;
+import de.intevation.flys.importer.ImportUnit;
+
+/** Parse CSV file that contains official numbers for rivers. */
+public class BundesWasserStrassenParser extends LineParser {
+
+    /** Private logger. */
+    private static final Logger logger =
+        Logger.getLogger(BundesWasserStrassenParser.class);
+
+    /** Map from river names to official numbers. */
+    private HashMap<String,Long> numberMap;
+
+
+    public BundesWasserStrassenParser() {
+        numberMap = new HashMap<String,Long>();
+    }
+
+
+    /** No need to reset. */
+    @Override
+    protected void reset() {
+    }
+
+
+    /** No action needed on eof. */
+    @Override
+    protected void finish() {
+    }
+
+
+    /** Handle a line of the bwastr-id file. */
+    @Override
+    protected void handleLine(int lineNum, String line) {
+        // Try both "," and ";" as separators.
+        String[] vals = line.split(",");
+        if (vals.length != 2) {
+            vals = line.split(";");
+            if (vals.length != 2) {
+                logger.warn("Invalid bwastr-id line:\n" + line);
+                return;
+            }
+        }
+        try {
+            String name = unwrap(vals[0].toLowerCase());
+            String numberStr = unwrap(vals[1]);
+            Long number = Long.valueOf(numberStr);
+            numberMap.put(name, number);
+        }
+        catch (NumberFormatException e) {
+            logger.warn("Invalid number in bwastr-id line:\n" + line);
+        }
+    }
+
+
+    /** Get river->official number mapping. */
+    public HashMap<String,Long> getMap() {
+        return numberMap;
+    }
+
+
+    /** Remove leading and trailing quotes. */
+    protected String unwrap(String input) {
+        if (input.startsWith("\"")) {
+            input = input.substring(1);
+        }
+        if (input.endsWith("\"")) {
+            input = input.substring(0, input.length() - 1);
+        }
+        return input;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
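A minimal usage sketch for the new parser. The file path and sample rows are hypothetical; LineParser.parse(File) is used here the same way Importer uses it above.

    import java.io.File;
    import java.io.IOException;
    import java.util.HashMap;

    import de.intevation.flys.importer.parsers.BundesWasserStrassenParser;

    public class BwastrParseSketch {
        public static void main(String[] args) throws IOException {
            // Hypothetical rows; both separators and quoting are accepted:
            //   "Rhein";"39"
            //   Elbe,52
            BundesWasserStrassenParser parser = new BundesWasserStrassenParser();
            parser.parse(new File(args[0]));            // path to a BWASTR_ID.csv
            HashMap<String, Long> numbers = parser.getMap();
            System.out.println(numbers.get("rhein"));   // keys are lower-cased names
        }
    }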
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java	Thu Feb 28 12:47:24 2013 +0100
@@ -57,7 +57,7 @@
         return f.replace("\\", "/").replace("/", File.separator);
     }
 
-    /* Handle a gew, wst, or bb_info file. */
+    /** Handle a gew, wst, or bb_info file. */
     public void parse(File file) throws IOException {
 
         LineNumberReader in = null;
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java	Thu Feb 28 12:47:24 2013 +0100
@@ -22,14 +22,11 @@
     private static final Logger log =
         Logger.getLogger(MorphologicalWidthParser.class);
 
-
     public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
 
-
     public static final Pattern META_UNIT =
         Pattern.compile("^Einheit: \\[(.*)\\].*");
 
-
     protected List<ImportMorphWidth> morphWidths;
 
     protected ImportMorphWidth current;
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Thu Feb 28 12:47:24 2013 +0100
@@ -2,6 +2,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.text.NumberFormat;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -16,7 +17,19 @@
 import de.intevation.flys.importer.ImportWaterlevelQRange;
 import de.intevation.flys.importer.ImportWaterlevelValue;
 
+import de.intevation.flys.importer.ImportWstColumn;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportWst;
+import de.intevation.flys.importer.ImportWstQRange;
 
+
+/**
+ * Parse CSV waterlevel files.
+ * As these waterlevels are probably only used in fixation
+ * analyses, functionality to export them to "fixation" WSTs
+ * has been added (the ImportWaterlevel* classes are not
+ * strictly needed for that).
+ */
 public class WaterlevelParser extends LineParser {
 
     private static final Logger log = Logger.getLogger(WaterlevelParser.class);
@@ -30,7 +43,6 @@
     public static final Pattern META_UNIT =
         Pattern.compile("^Einheit: \\[(.*)\\].*");
 
-
     private List<ImportWaterlevel> waterlevels;
 
     private ImportWaterlevel current;
@@ -45,6 +57,56 @@
     }
 
 
+    /**
+     * Create ImportWst objects from ImportWaterlevel
+     * objects.
+     */
+    public List<ImportWst> exportWsts() {
+        List<ImportWst> wsts = new ArrayList<ImportWst>();
+        for(ImportWaterlevel waterlevel: getWaterlevels()) {
+            String description = waterlevel.getDescription();
+            ImportWst wst = new ImportWst();
+            wsts.add(wst);
+            wst.setDescription(description);
+            // Fixation kind.
+            wst.setKind(2);
+            wst.setUnit(waterlevel.getUnit());
+
+            // The fake WST has only one column.
+            wst.setNumberColumns(1);
+            ImportWstColumn column = wst.getColumn(0);
+            column.setDescription(description);
+            column.setName(description);
+            column.setPosition(0);
+
+            // Build Q Range.
+            List<ImportWaterlevelQRange> qRanges = waterlevel.getQRanges();
+            for(ImportWaterlevelQRange range: qRanges) {
+                List<ImportWaterlevelValue> values = range.getValues();
+                if (values.size() < 2) {
+                    log.warn("Not enough values to build a valid QRange");
+                    continue;
+                }
+                ImportRange iRange = new ImportRange(
+                   BigDecimal.valueOf(values.get(0).getStation()),
+                   BigDecimal.valueOf(values.get(values.size() - 1).getStation()));
+                column.addColumnQRange(
+                     new ImportWstQRange(iRange, BigDecimal.valueOf(range.getQ())));
+            }
+
+            // The other W/KM values.
+            for(ImportWaterlevelQRange range: qRanges) {
+                for(ImportWaterlevelValue value: range.getValues()) {
+                    column.addColumnValue(BigDecimal.valueOf(value.getStation()),
+                                          BigDecimal.valueOf(value.getW()));
+                }
+            }
+            // TODO: Maybe set a time interval.
+        }
+        return wsts;
+    }
+
+
     public List<ImportWaterlevel> getWaterlevels() {
         return waterlevels;
     }
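A minimal sketch of the new export path, assuming a hypothetical waterlevel CSV and that LineParser.parse(File) is available as in the other parsers; it only shows how exportWsts() turns parsed waterlevels into single-column fixation WSTs.

    import java.io.File;
    import java.io.IOException;
    import java.util.List;

    import de.intevation.flys.importer.ImportWst;
    import de.intevation.flys.importer.parsers.WaterlevelParser;

    public class WaterlevelExportSketch {
        public static void main(String[] args) throws IOException {
            WaterlevelParser parser = new WaterlevelParser();
            parser.parse(new File(args[0]));    // a waterlevel CSV file
            // Each waterlevel becomes one ImportWst of kind 2 with a single column.
            List<ImportWst> wsts = parser.exportWsts();
            System.out.println(wsts.size() + " fixation WST(s) prepared");
        }
    }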
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BoundaryKind.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "boundary_kinds")
+public class BoundaryKind implements Serializable {
+
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the kind of boundary as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- a/flys-backend/src/main/java/de/intevation/flys/model/Catchment.java	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,107 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.math.BigDecimal;
-import java.util.List;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-import org.hibernate.annotations.Type;
-
-import com.vividsolutions.jts.geom.Geometry;
-
-import de.intevation.flys.backend.SessionHolder;
-
-
-@Entity
-@Table(name = "catchment")
-public class Catchment
-implements   Serializable
-{
-    private Integer    id;
-    private BigDecimal area;
-    private String     name;
-    private River      river;
-    private Geometry    geom;
-
-    public Catchment() {
-    }
-
-
-    @Id
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-
-    @OneToOne
-    @JoinColumn(name = "river_id")
-    public River getRiver() {
-        return river;
-    }
-
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-
-    @Column(name = "name")
-    public String getName() {
-        return name;
-    }
-
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-
-    @Column(name = "area")
-    public BigDecimal getArea() {
-        return area;
-    }
-
-
-    public void setArea(BigDecimal area) {
-        this.area = area;
-    }
-
-
-    @Column(name = "geom")
-    @Type(type = "org.hibernatespatial.GeometryUserType")
-    public Geometry getGeom() {
-        return geom;
-    }
-
-
-    public void setGeom(Geometry geom) {
-        this.geom = geom;
-    }
-
-
-    public static List<Catchment> getCatchments(int riverId, String name) {
-        Session session = SessionHolder.HOLDER.get();
-
-        Query query = session.createQuery(
-            "from Catchment where river.id =:river_id AND name=:name");
-        query.setParameter("river_id", riverId);
-        query.setParameter("name", name);
-
-        return query.list();
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Thu Feb 28 12:47:24 2013 +0100
@@ -6,10 +6,13 @@
 
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
 
 import org.hibernate.Session;
 import org.hibernate.Query;
@@ -22,6 +25,7 @@
 public class DGM implements Serializable {
 
     private Integer    id;
+    private Integer    srid;
 
     private River      river;
 
@@ -40,6 +44,13 @@
     }
 
     @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DEM_ID_SEQ",
+        sequenceName   = "DEM_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DEM_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
         return id;
@@ -82,6 +93,14 @@
         return path;
     }
 
+    public void setSrid(Integer srid) {
+        this.srid = srid;
+    }
+
+    @Column(name = "srid")
+    public Integer getSrid() {
+        return srid;
+    }
 
     public static DGM getDGM(int id) {
         Session session = SessionHolder.HOLDER.get();
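A minimal sketch of what the new id generation means when storing a DGM, assuming a session obtained via SessionHolder as elsewhere in the backend; the SRID value is an assumption and other required fields are omitted.

    import org.hibernate.Session;

    import de.intevation.flys.backend.SessionHolder;
    import de.intevation.flys.model.DGM;

    public class StoreDgmSketch {
        public static void main(String[] args) {
            DGM dgm = new DGM();
            dgm.setSrid(31467);    // assumed SRID (Gauss-Krueger zone 3)
            // No id is set by hand: Hibernate draws it from DEM_ID_SEQ
            // through the SEQUENCE_DEM_ID_SEQ generator declared above.
            Session session = SessionHolder.HOLDER.get();
            session.save(dgm);
            session.flush();
        }
    }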
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FedState.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "fed_states")
+public class FedState implements Serializable {
+
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the Federal State as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSKind.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,44 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "hws_kinds")
+public class HWSKind implements Serializable {
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return The name of the flood protection facility kind (Hochwasserschutzanlagenart) as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,336 @@
+package de.intevation.flys.model;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.model.HWSKind;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import de.intevation.flys.backend.SessionHolder;
+
+@Entity
+@Table(name = "hws_lines")
+public class HWSLine implements Serializable {
+
+    private Integer  id;
+
+    private Integer  ogrFid;
+    private HWSKind  kind;
+    private FedState fedState;
+    private River    river;
+    private Integer  offical;
+    private Integer  shoreSide;
+    private String   name;
+    private String   path;
+    private String   agency;
+    private String   range;
+    private String   source;
+    private String   status_date;
+    private String   description;
+    private Geometry geom;
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HWS_LINES_ID_SEQ",
+        sequenceName   = "HWS_LINES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HWS_LINES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+    /**
+     * Get ogrFid.
+     *
+     * @return ogrFid as Integer.
+     */
+    @Column(name = "ogr_fid")
+    public Integer getOgrFid() {
+        return ogrFid;
+    }
+
+    /**
+     * Set ogrFid.
+     *
+     * @param ogrFid the value to set.
+     */
+    public void setOgrFid(Integer ogrFid) {
+        this.ogrFid = ogrFid;
+    }
+
+
+    /**
+     * Get offical.
+     *
+     * @return offical as Integer.
+     */
+    @Column(name = "offical")
+    public Integer getOffical() {
+        return offical;
+    }
+
+    /**
+     * Set offical.
+     *
+     * @param offical the value to set.
+     */
+    public void setOffical(Integer offical) {
+        this.offical = offical;
+    }
+
+    /**
+     * Get shoreSide.
+     *
+     * @return shoreSide as Integer.
+     */
+    @Column(name = "shore_side")
+    public Integer getShoreSide() {
+        return shoreSide;
+    }
+
+    /**
+     * Set shoreSide.
+     *
+     * @param shoreSide the value to set.
+     */
+    public void setShoreSide(Integer shoreSide) {
+        this.shoreSide = shoreSide;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    /**
+     * Get path.
+     *
+     * @return path as String.
+     */
+    @Column(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * Set path.
+     *
+     * @param path the value to set.
+     */
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    /**
+     * Get agency.
+     *
+     * @return agency as String.
+     */
+    @Column(name = "agency")
+    public String getAgency() {
+        return agency;
+    }
+
+    /**
+     * Set agency.
+     *
+     * @param agency the value to set.
+     */
+    public void setAgency(String agency) {
+        this.agency = agency;
+    }
+
+    /**
+     * Get range.
+     *
+     * @return range as String.
+     */
+    @Column(name = "range")
+    public String getRange() {
+        return range;
+    }
+
+    /**
+     * Set range.
+     *
+     * @param range the value to set.
+     */
+    public void setRange(String range) {
+        this.range = range;
+    }
+
+    /**
+     * Get source.
+     *
+     * @return source as String.
+     */
+    @Column(name = "source")
+    public String getSource() {
+        return source;
+    }
+
+    /**
+     * Set source.
+     *
+     * @param source the value to set.
+     */
+    public void setSource(String source) {
+        this.source = source;
+    }
+
+    /**
+     * Get status_date.
+     *
+     * @return status_date as String.
+     */
+    @Column(name = "status_date")
+    public String getStatusDate() {
+        return status_date;
+    }
+
+    /**
+     * Set status_date.
+     *
+     * @param status_date the value to set.
+     */
+    public void setStatusDate(String status_date) {
+        this.status_date = status_date;
+    }
+
+    /**
+     * Get description.
+     *
+     * @return description as String.
+     */
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    /**
+     * Set description.
+     *
+     * @param description the value to set.
+     */
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as HWSKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind_id")
+    public HWSKind getKind() {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(HWSKind kind) {
+        this.kind = kind;
+    }
+
+    /**
+     * Get fedState.
+     *
+     * @return fedState as FedState.
+     */
+    @OneToOne
+    @JoinColumn(name = "fed_state_id")
+    public FedState getFedState() {
+        return fedState;
+    }
+
+    /**
+     * Set fedState.
+     *
+     * @param fedState the value to set.
+     */
+    public void setFedState(FedState fedState) {
+        this.fedState = fedState;
+    }
+
+    /**
+     * Get river.
+     *
+     * @return river as River.
+     */
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    /**
+     * Set river.
+     *
+     * @param river the value to set.
+     */
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    public static List<HWSLine> getLines(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+                    "from HWSLine where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,441 @@
+package de.intevation.flys.model;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
+
+import org.hibernate.annotations.Type;
+
+@Entity
+@Table(name = "hws_points")
+public class HWSPoint implements Serializable {
+
+    private Integer    id;
+
+    private Integer    ogrFid;
+    private HWSKind    kind;
+    private FedState   fedState;
+    private River      river;
+    private Integer    offical;
+    private Integer    shoreSide;
+    private String     name;
+    private String     path;
+    private String     agency;
+    private String     range;
+    private String     source;
+    private String     statusDate;
+    private String     description;
+    private BigDecimal freeboard;
+    private BigDecimal dikeKm;
+    private BigDecimal z;
+    private BigDecimal zTarget;
+    private BigDecimal ratedLevel;
+    private Geometry   geom;
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HWS_POINTS_ID_SEQ",
+        sequenceName   = "HWS_POINTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HWS_POINTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+    /**
+     * Get ogrFid.
+     *
+     * @return ogrFid as Integer.
+     */
+    @Column(name = "ogr_fid")
+    public Integer getOgrFid() {
+        return ogrFid;
+    }
+
+    /**
+     * Set ogrFid.
+     *
+     * @param ogrFid the value to set.
+     */
+    public void setOgrFid(Integer ogrFid) {
+        this.ogrFid = ogrFid;
+    }
+
+
+    /**
+     * Get offical.
+     *
+     * @return offical as Integer.
+     */
+    @Column(name = "offical")
+    public Integer getOffical() {
+        return offical;
+    }
+
+    /**
+     * Set offical.
+     *
+     * @param offical the value to set.
+     */
+    public void setOffical(Integer offical) {
+        this.offical = offical;
+    }
+
+    /**
+     * Get shoreSide.
+     *
+     * @return shoreSide as Integer.
+     */
+    @Column(name = "shore_side")
+    public Integer getShoreSide() {
+        return shoreSide;
+    }
+
+    /**
+     * Set shoreSide.
+     *
+     * @param shoreSide the value to set.
+     */
+    public void setShoreSide(Integer shoreSide) {
+        this.shoreSide = shoreSide;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    /**
+     * Get path.
+     *
+     * @return path as String.
+     */
+    @Column(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * Set path.
+     *
+     * @param path the value to set.
+     */
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    /**
+     * Get agency.
+     *
+     * @return agency as String.
+     */
+    @Column(name = "agency")
+    public String getAgency() {
+        return agency;
+    }
+
+    /**
+     * Set agency.
+     *
+     * @param agency the value to set.
+     */
+    public void setAgency(String agency) {
+        this.agency = agency;
+    }
+
+    /**
+     * Get range.
+     *
+     * @return range as String.
+     */
+    @Column(name = "range")
+    public String getRange() {
+        return range;
+    }
+
+    /**
+     * Set range.
+     *
+     * @param range the value to set.
+     */
+    public void setRange(String range) {
+        this.range = range;
+    }
+
+    /**
+     * Get source.
+     *
+     * @return source as String.
+     */
+    @Column(name = "source")
+    public String getSource() {
+        return source;
+    }
+
+    /**
+     * Set source.
+     *
+     * @param source the value to set.
+     */
+    public void setSource(String source) {
+        this.source = source;
+    }
+
+    /**
+     * Get statusDate.
+     *
+     * @return statusDate as String.
+     */
+    @Column(name = "status_date")
+    public String getStatusDate() {
+        return statusDate;
+    }
+
+    /**
+     * Set statusDate.
+     *
+     * @param statusDate the value to set.
+     */
+    public void setStatusDate(String statusDate)
+    {
+        this.statusDate = statusDate;
+    }
+
+    /**
+     * Get description.
+     *
+     * @return description as String.
+     */
+    @Column(name = "description")
+    public String getDescription()
+    {
+        return description;
+    }
+
+    /**
+     * Set description.
+     *
+     * @param description the value to set.
+     */
+    public void setDescription(String description)
+    {
+        this.description = description;
+    }
+
+    /**
+     * Get freeboard.
+     *
+     * @return freeboard as BigDecimal.
+     */
+    @Column(name = "freeboard")
+    public BigDecimal getFreeboard()
+    {
+        return freeboard;
+    }
+
+    /**
+     * Set freeboard.
+     *
+     * @param freeboard the value to set.
+     */
+    public void setFreeboard(BigDecimal freeboard)
+    {
+        this.freeboard = freeboard;
+    }
+
+    /**
+     * Get dikeKm.
+     *
+     * @return dikeKm as BigDecimal.
+     */
+    @Column(name = "dike_km")
+    public BigDecimal getDikeKm()
+    {
+        return dikeKm;
+    }
+
+    /**
+     * Set dikeKm.
+     *
+     * @param dikeKm the value to set.
+     */
+    public void setDikeKm(BigDecimal dikeKm)
+    {
+        this.dikeKm = dikeKm;
+    }
+
+    /**
+     * Get z.
+     *
+     * @return z as BigDecimal.
+     */
+    @Column(name = "z")
+    public BigDecimal getZ()
+    {
+        return z;
+    }
+
+    /**
+     * Set z.
+     *
+     * @param z the value to set.
+     */
+    public void setZ(BigDecimal z)
+    {
+        this.z = z;
+    }
+
+    /**
+     * Get zTarget.
+     *
+     * @return zTarget as BigDecimal.
+     */
+    @Column(name = "z_target")
+    public BigDecimal getZTarget()
+    {
+        return zTarget;
+    }
+
+    /**
+     * Set zTarget.
+     *
+     * @param zTarget the value to set.
+     */
+    public void setZTarget(BigDecimal zTarget)
+    {
+        this.zTarget = zTarget;
+    }
+
+    /**
+     * Get ratedLevel.
+     *
+     * @return ratedLevel as BigDecimal.
+     */
+    @Column(name = "rated_level")
+    public BigDecimal getRatedLevel()
+    {
+        return ratedLevel;
+    }
+
+    /**
+     * Set ratedLevel.
+     *
+     * @param ratedLevel the value to set.
+     */
+    public void setRatedLevel(BigDecimal ratedLevel)
+    {
+        this.ratedLevel = ratedLevel;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as HWSKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind_id")
+    public HWSKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(HWSKind kind)
+    {
+        this.kind = kind;
+    }
+
+    /**
+     * Get fedState.
+     *
+     * @return fedState as FedState.
+     */
+    @OneToOne
+    @JoinColumn(name = "fed_state_id")
+    public FedState getFedState()
+    {
+        return fedState;
+    }
+
+    /**
+     * Set fedState.
+     *
+     * @param fedState the value to set.
+     */
+    public void setFedState(FedState fedState)
+    {
+        this.fedState = fedState;
+    }
+
+    /**
+     * Get river.
+     *
+     * @return river as River.
+     */
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver()
+    {
+        return river;
+    }
+
+    /**
+     * Set river.
+     *
+     * @param river the value to set.
+     */
+    public void setRiver(River river)
+    {
+        this.river = river;
+    }
+}
+
--- a/flys-backend/src/main/java/de/intevation/flys/model/Hws.java	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,106 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.util.List;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-import org.hibernate.annotations.Type;
-
-import com.vividsolutions.jts.geom.LineString;
-
-import de.intevation.flys.backend.SessionHolder;
-
-
-@Entity
-@Table(name = "hws")
-public class Hws
-implements   Serializable
-{
-    private Integer    id;
-    private String     facility;
-    private String     type;
-    private River      river;
-    private LineString geom;
-
-    public Hws() {
-    }
-
-
-    @Id
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-
-    @OneToOne
-    @JoinColumn(name = "river_id")
-    public River getRiver() {
-        return river;
-    }
-
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-
-    @Column(name = "hws_facility")
-    public String getFacility() {
-        return facility;
-    }
-
-
-    public void setFacility(String facility) {
-        this.facility = facility;
-    }
-
-
-    @Column(name = "type")
-    public String getType() {
-        return type;
-    }
-
-
-    public void setType(String type) {
-        this.type = type;
-    }
-
-
-    @Column(name = "geom")
-    @Type(type = "org.hibernatespatial.GeometryUserType")
-    public LineString getGeom() {
-        return geom;
-    }
-
-
-    public void setGeom(LineString geom) {
-        this.geom = geom;
-    }
-
-
-    public static List<Hws> getHws(int riverId, String name) {
-        Session session = SessionHolder.HOLDER.get();
-
-        Query query = session.createQuery(
-            "from Hws where river.id =:river_id and name=:name");
-        query.setParameter("river_id", riverId);
-        query.setParameter("name", name);
-
-        return query.list();
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Thu Feb 28 12:47:24 2013 +0100
@@ -5,10 +5,13 @@
 
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
 
 import org.hibernate.Session;
 import org.hibernate.Query;
@@ -25,15 +28,25 @@
 implements   Serializable
 {
     private Integer    id;
+    private SectieKind sectie;
+    private SobekKind  sobek;
     private String     name;
     private River      river;
     private LineString geom;
+    private BoundaryKind kind;
 
     public HydrBoundary() {
     }
 
 
     @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYDR_BOUNDARIES_ID_SEQ",
+        sequenceName   = "HYDR_BOUNDARIES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYDR_BOUNDARIES_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
         return id;
@@ -44,7 +57,6 @@
         this.id = id;
     }
 
-
     @OneToOne
     @JoinColumn(name = "river_id")
     public River getRiver() {
@@ -90,5 +102,71 @@
 
         return query.list();
     }
+
+    /**
+     * Get sectie.
+     *
+     * @return sectie as SectieKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sectie")
+    public SectieKind getSectie()
+    {
+        return sectie;
+    }
+
+    /**
+     * Set sectie.
+     *
+     * @param sectie the value to set.
+     */
+    public void setSectie(SectieKind sectie)
+    {
+        this.sectie = sectie;
+    }
+
+    /**
+     * Get sobek.
+     *
+     * @return sobek as SobekKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sobek")
+    public SobekKind getSobek()
+    {
+        return sobek;
+    }
+
+    /**
+     * Set sobek.
+     *
+     * @param sobek the value to set.
+     */
+    public void setSobek(SobekKind sobek)
+    {
+        this.sobek = sobek;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as BoundaryKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind")
+    public BoundaryKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(BoundaryKind kind)
+    {
+        this.kind = kind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java	Thu Feb 28 12:47:24 2013 +0100
@@ -5,10 +5,13 @@
 
 import javax.persistence.Column;
 import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.Table;
+import javax.persistence.SequenceGenerator;
 
 import org.hibernate.Session;
 import org.hibernate.Query;
@@ -28,12 +31,22 @@
     private String     name;
     private River      river;
     private Geometry   geom;
+    private SectieKind sectie;
+    private SobekKind  sobek;
+    private BoundaryKind kind;
 
     public HydrBoundaryPoly() {
     }
 
 
     @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYDR_BOUNDARIES_POLY_ID_SEQ",
+        sequenceName   = "HYDR_BOUNDARIES_POLY_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYDR_BOUNDARIES_POLY_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
         return id;
@@ -90,5 +103,71 @@
 
         return query.list();
     }
+
+    /**
+     * Get sectie.
+     *
+     * @return sectie as SectieKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sectie")
+    public SectieKind getSectie()
+    {
+        return sectie;
+    }
+
+    /**
+     * Set sectie.
+     *
+     * @param sectie the value to set.
+     */
+    public void setSectie(SectieKind sectie)
+    {
+        this.sectie = sectie;
+    }
+
+    /**
+     * Get sobek.
+     *
+     * @return sobek as SobekKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "sobek")
+    public SobekKind getSobek()
+    {
+        return sobek;
+    }
+
+    /**
+     * Set sobek.
+     *
+     * @param sobek the value to set.
+     */
+    public void setSobek(SobekKind sobek)
+    {
+        this.sobek = sobek;
+    }
+
+    /**
+     * Get kind.
+     *
+     * @return kind as BoundaryKind.
+     */
+    @OneToOne
+    @JoinColumn(name = "kind")
+    public BoundaryKind getKind()
+    {
+        return kind;
+    }
+
+    /**
+     * Set kind.
+     *
+     * @param kind the value to set.
+     */
+    public void setKind(BoundaryKind kind)
+    {
+        this.kind = kind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-backend/src/main/java/de/intevation/flys/model/Line.java	Tue Feb 19 10:46:41 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,108 +0,0 @@
-package de.intevation.flys.model;
-
-import java.io.Serializable;
-import java.math.BigDecimal;
-import java.util.List;
-
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
-import org.hibernate.annotations.Type;
-
-import com.vividsolutions.jts.geom.LineString;
-
-import de.intevation.flys.backend.SessionHolder;
-
-
-@Entity
-@Table(name = "lines")
-public class Line
-implements   Serializable
-{
-    private Integer    id;
-    private String     kind;
-    private River      river;
-    private LineString geom;
-    private BigDecimal z;
-
-    public Line() {
-    }
-
-
-    @Id
-    @Column(name = "id")
-    public Integer getId() {
-        return id;
-    }
-
-
-    public void setId(Integer id) {
-        this.id = id;
-    }
-
-
-    @OneToOne
-    @JoinColumn(name = "river_id")
-    public River getRiver() {
-        return river;
-    }
-
-
-    public void setRiver(River river) {
-        this.river = river;
-    }
-
-
-    @Column(name = "kind")
-    public String getKind() {
-        return kind;
-    }
-
-
-    public void setKind(String kind) {
-        this.kind = kind;
-    }
-
-
-    @Column(name = "geom")
-    @Type(type = "org.hibernatespatial.GeometryUserType")
-    public LineString getGeom() {
-        return geom;
-    }
-
-
-    public void setGeom(LineString geom) {
-        this.geom = geom;
-    }
-
-
-    @Column(name = "z")
-    public BigDecimal getZ() {
-        return z;
-    }
-
-
-    public void setZ(BigDecimal z) {
-        this.z = z;
-    }
-
-
-    public static List<Line> getLines(int riverId, String name) {
-        Session session = SessionHolder.HOLDER.get();
-
-        Query query = session.createQuery(
-            "from Line where river.id =:river_id and name=:name");
-        query.setParameter("river_id", riverId);
-        query.setParameter("name", name);
-
-        return query.list();
-    }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SectieKind.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,44 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "sectie_kinds")
+public class SectieKind implements Serializable {
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the sectie kind as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SobekKind.java	Thu Feb 28 12:47:24 2013 +0100
@@ -0,0 +1,45 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "sobek_kinds")
+public class SobekKind implements Serializable {
+
+    private Integer id;
+    private String name;
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    /**
+     * Get name.
+     *
+     * @return name of the SOBEK kind as String.
+     */
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Set name.
+     *
+     * @param name the value to set.
+     */
+    public void setName(String name) {
+        this.name = name;
+    }
+}
--- a/flys-backend/src/main/java/de/intevation/flys/model/Waterlevel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Waterlevel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -18,7 +18,7 @@
 
 
 
-
+/** Mapped Waterlevel. */
 @Entity
 @Table(name = "waterlevel")
 public class Waterlevel
--- a/flys-backend/src/main/java/de/intevation/flys/utils/StringUtil.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/StringUtil.java	Thu Feb 28 12:47:24 2013 +0100
@@ -819,5 +819,11 @@
         testQuote();
         testStringArray2D();
     }
+
+    /** Check whether needle occurs in hay, converting both to
+     * lower case to ignore case. */
+    public static boolean containsIgnoreCase(String hay, String needle) {
+        return hay.toLowerCase().contains(needle.toLowerCase());
+    }
 }
 // end of file
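A short usage sketch of the new helper; the sample strings are hypothetical.

    import de.intevation.flys.utils.StringUtil;

    public class ContainsIgnoreCaseSketch {
        public static void main(String[] args) {
            // Prints "true": the river name occurs in the BWASTR name, case ignored.
            System.out.println(
                StringUtil.containsIgnoreCase("Rhein, Bodensee bis Lauterbourg", "rhein"));
        }
    }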
--- a/flys-client/pom.xml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/pom.xml	Thu Feb 28 12:47:24 2013 +0100
@@ -227,6 +227,11 @@
         <id>org.mapfish</id>
         <url>http://dev.mapfish.org/maven/repository</url>
       </repository>
+      <repository>
+          <id>osgeo</id>
+          <name>Open Source Geospatial Foundation Repository</name>
+          <url>http://download.osgeo.org/webdav/geotools/</url>
+      </repository>
     </repositories>
 </project>
 
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYS.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYS.java	Thu Feb 28 12:47:24 2013 +0100
@@ -446,11 +446,11 @@
         double km = (gauge.getKmEnd() + gauge.getKmStart())/2d;
         mainValueView.addItem(new WQAutoTabSet(gauge.getRiverName(),
             new double[] {km, km}));
-        mainValueView.setWidth(1010);
-        mainValueView.setHeight(700);
+        mainValueView.setWidth(450);
+        mainValueView.setHeight(600);
 
-        mainValueView.setMaximized(true);
-
+        mainValueView.setMaximized(false);
+        mainValueView.centerInPage();
         mainValueView.setCanDragReposition(true);
         mainValueView.setCanDragResize(true);
         mainValueView.setShowMaximizeButton(true);
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java	Thu Feb 28 12:47:24 2013 +0100
@@ -252,6 +252,8 @@
 
     String wqQ();
 
+    String wqQatGauge();
+
     String wqQGauge();
 
     String wqSingle();
@@ -1048,6 +1050,8 @@
 
     String areatransparency();
 
+    String attribution();
+
     // Manual Points editor
 
     String addpoints();
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -166,6 +166,7 @@
 measurementStationPanelTitle = Measurement Station Information
 wqW = W at Gauge [cm]
 wqQ = Q [m\u00b3/s]
+wqQatGauge = Q at Gauge [m\u00b3/s]
 wqWFree = W free position [m+NHN]
 wqQGauge = Discharge at Gauge
 wqSingle = Single values
@@ -231,11 +232,11 @@
 historical_discharge_export = Historical Discharge Curve Export
 showextramark = Show begin of extrapolation
 extreme_wq_curve = W/Q
-fix_wq_curve = W/Q
+fix_wq_curve = W/Q-Diagram
 fix_deltawt_curve = \u0394 W/t
 fix_longitudinal_section_curve = Longitudinal Section
 fix_derivate_curve = Derivate
-fix_vollmer_wq_curve = W/Q
+fix_vollmer_wq_curve = W/Q-Diagram
 datacage_add_pair = Add difference pair
 load_diameter = Bedload Diameter
 bed_diameter = Bed Diameter
@@ -543,6 +544,7 @@
 wsplgen_cat3 = Fill Color 2.0 <= DIFF < 3
 wsplgen_cat4 = Fill Color 3.0 <= DIFF < 4
 wsplgen_cat5 = Fill Color 4.0 <= DIFF
+attribution = &copy; Intevation GmbH 2012<br>Data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
 
 # Manual Points Editor
 addpoints = Add points
@@ -571,7 +573,7 @@
 
 sq_overview=Overview
 
-gauge_zero = Gauge zero ground
+gauge_zero = GZG
 gauge_q_unit = m\u00b3/s
 gauge_river_info_link = Riverinfo
 gauge_info_link = Gaugeinfo
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -154,6 +154,7 @@
 measurementStationPanelTitle = Gew\u00e4sser/Messstellen-Info
 wqW = W am Pegel [cm]
 wqQ = Q [m\u00b3/s]
+wqQatGauge = Q am Pegel [m\u00b3/s]
 wqWFree = W auf freier Strecke [m+NHN]
 wqQGauge = Kennzeichnender Abfluss am Pegel
 wqSingle = Einzelwerte
@@ -232,11 +233,11 @@
 historical_discharge_export = Historische Abflusskurven Export
 showextramark = Zeige Anfang der Extrapolation
 extreme_wq_curve = W/Q
-fix_wq_curve = W/Q
+fix_wq_curve = W/Q-Diagramm
 fix_deltawt_curve = \u0394 W/t
 fix_longitudinal_section_curve = L\u00e4ngsschnitt
 fix_derivate_curve = Ableitungskurve
-fix_vollmer_wq_curve = W/Q
+fix_vollmer_wq_curve = W/Q-Diagramm
 datacage_add_pair = Differenzenpaar hinzuf\u00fcgen
 load_diameter = Geschiebedurchmesser
 bed_diameter = Sohldurchmesser
@@ -402,6 +403,7 @@
 requireDGM = Sie m\u00fcssen ein DGM ausw\u00e4hlen.
 upload_file = hochladen
 shape_file_upload = Shapedatei hochladen
+attribution = &copy; Intevation GmbH 2012<br>Data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
 
 # data cage
 waterlevels = Wasserst\u00e4nde
@@ -571,7 +573,7 @@
 fix_parameters = CSV
 sq_overview=\u00dcbersicht
 
-gauge_zero = Pegelnullpunkt
+gauge_zero = PNP
 gauge_q_unit = m\u00b3/s
 gauge_river_info_link = Gew\u00e4sserinfo
 gauge_info_link = Pegelinfo
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties	Thu Feb 28 12:47:24 2013 +0100
@@ -167,6 +167,7 @@
 measurementStationPanelTitle = Measurement Station Information
 wqW = W at Gauge [cm]
 wqQ = Q [m\u00b3/s]
+wqQatGauge = Q at Gauge [m\u00b3/s]
 wqWFree = W at free position [m+NHN]
 wqQGauge = Discharge at Gauge
 wqSingle = Single values
@@ -232,11 +233,11 @@
 historical_discharge_export = Historical Discharge Curve Export
 showextramark = Show begin of extrapolation
 extreme_wq_curve = W/Q
-fix_wq_curve = W/Q
+fix_wq_curve = W/Q-Diagram
 fix_deltawt_curve = \u0394 W/t
 fix_longitudinal_section_curve = Longitudinal Section
 fix_derivate_curve = Derivate
-fix_vollmer_wq_curve = W/Q
+fix_vollmer_wq_curve = W/Q-Diagram
 datacage_add_pair = Add difference pair
 load_diameter = Bedload Diameter
 bed_diameter = Bed Diameter
@@ -402,6 +403,7 @@
 requireDGM = You need to choose a DEM.
 upload_file = upload
 shape_file_upload = Upload shapefile
+attribution = &copy; Intevation GmbH 2012<br>Data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
 
 # data cage
 waterlevels = Waterlevels
@@ -571,7 +573,7 @@
 fix_parameters = CSV
 sq_overview=Overview
 
-gauge_zero = Gauge zero ground
+gauge_zero = GZG
 gauge_q_unit = m\u00b3/s
 gauge_river_info_link = Riverinfo
 gauge_info_link = Gaugeinfo
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/DigitizePanel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/DigitizePanel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -2,6 +2,7 @@
 
 import com.google.gwt.core.client.GWT;
 import com.google.gwt.user.client.rpc.AsyncCallback;
+
 import com.smartgwt.client.types.Encoding;
 import com.smartgwt.client.types.VerticalAlignment;
 import com.smartgwt.client.util.SC;
@@ -38,6 +39,7 @@
 import java.util.List;
 
 import org.gwtopenmaps.openlayers.client.Map;
+import org.gwtopenmaps.openlayers.client.control.Attribution;
 import org.gwtopenmaps.openlayers.client.feature.VectorFeature;
 import org.gwtopenmaps.openlayers.client.format.GeoJSON;
 import org.gwtopenmaps.openlayers.client.layer.WMS;
@@ -179,10 +181,13 @@
             combobox.setShowTitle(false);
             form.setItems(combobox);
 
-            HTMLPane frame = new HTMLPane();
-            frame.setWidth("1px");
-            frame.setHeight("1px");
-            frame.setContents("<iframe id='uploadTarget' name='uploadTarget'></iframe>");
+            HTMLPane uploadTargetFrame = new HTMLPane();
+            uploadTargetFrame.setWidth("200px");
+            uploadTargetFrame.setHeight("50px");
+            uploadTargetFrame.setContents(
+                    "<iframe id='uploadTarget' name='uploadTarget' scrolling='no' width=200 height=50 style='border: 0px'></iframe>");
+            uploadTargetFrame.setBorder("0px");
+            uploadTargetFrame.setScrollbarSize(0);
 
             final DynamicForm uploadForm = new DynamicForm();
             uploadForm.setAction("flys/fileupload?uuid=" + artifact.getUuid());
@@ -200,13 +205,16 @@
                     uploadForm.submitForm();
                 }
             });
-            layout.addMember(frame);
+
             layout.addMember(label);
             layout.addMember(form);
             layout.addMember(uploadLabel);
             layout.addMember(uploadForm);
             layout.addMember(submit);
             layout.addMember(getNextButton());
+
+            layout.setMembersMargin(10);
+            layout.addMember(uploadTargetFrame);
         }
 
         form.setValues(initial);
@@ -262,7 +270,7 @@
             VectorFeature[] features = new GeoJSON().read(geojson);
             floodMap.getBarrierLayer().addFeatures(features);
         }
-
+        map.addControl(new Attribution());
         map.zoomToMaxExtent();
     }
 
@@ -278,7 +286,9 @@
         opts.setSingleTile(true);
         opts.setRatio(1);
         opts.setBuffer(0);
-
+        if (layers.equals("OSM-WMS-Dienst")) {
+            opts.setAttribution(MSG.attribution());
+        }
         WMS wms = new WMS(layers, url, params, opts);
         wms.setIsVisible(true);
         wms.setIsBaseLayer(x);
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/DoubleArrayPanel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/DoubleArrayPanel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -22,6 +22,8 @@
 
     protected TextItem ti;
 
+    private String title;
+
     /** The constant input field name. */
     public static final String FIELD_NAME = "doublearray";
 
@@ -50,6 +52,7 @@
         BlurHandler handler,
         TitleOrientation titleOrientation)
     {
+        this.title = title;
         ti                 = new TextItem(FIELD_NAME);
         StaticTextItem sti = new StaticTextItem("staticarray");
 
@@ -263,5 +266,9 @@
     public double[] getInputValues() {
         return getInputValues(ti);
     }
+
+    public String getItemTitle() {
+        return this.title;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/QSegmentedInputPanel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/QSegmentedInputPanel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -180,7 +180,7 @@
             HLayout h = new HLayout();
 
             String[] parts  = gauge.split(GAUGE_PART_SEPARATOR);
-            String[] values = parts[2].split(VALUE_SEPARATOR);
+            String[] values = parts[3].split(VALUE_SEPARATOR);
 
             Label l = new Label(parts[0] + " - " + parts[1] + ": ");
 
@@ -361,10 +361,10 @@
 
             double[] values = dap.getInputValues();
             if (wqvalue == null) {
-                wqvalue = createValueString(key, values);
+                wqvalue = createValueString(key + "; ", values);
             }
             else {
-                wqvalue += GAUGE_SEPARATOR + createValueString(key, values);
+                wqvalue += GAUGE_SEPARATOR + createValueString(key + "; ", values);
             }
         }
 
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -129,7 +129,6 @@
         layout.addMember(widget);
         layout.addMember(submit);
 
-
         return layout;
     }
 
@@ -190,6 +189,7 @@
     }
 
 
+    /** Create area showing previously entered w or q data. */
     protected Canvas createOldWQValues(Data wqData) {
         VLayout layout = new VLayout();
 
@@ -202,9 +202,9 @@
             HLayout h = new HLayout();
 
             String[] parts  = gauge.split(GAUGE_PART_SEPARATOR);
-            String[] values = parts[2].split(VALUE_SEPARATOR);
+            String[] values = parts[3].split(VALUE_SEPARATOR);
 
-            Label l = new Label(parts[0] + " - " + parts[1] + ": ");
+            Label l = new Label(parts[2] + ": ");
 
             StringBuilder sb = new StringBuilder();
             boolean    first = true;
@@ -268,8 +268,7 @@
         }
     }
 
-
-    protected List<String> validateW() {
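+    /** Validate the values entered for each gauge against the given min/max ranges. */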
+    protected List<String> validateRange(Map<String, double[]> ranges) {
         List<String> errors = new ArrayList<String>();
         NumberFormat nf     = NumberFormat.getDecimalFormat();
 
@@ -286,7 +285,7 @@
                 return errors;
             }
 
-            double[] mm  = wranges.get(key);
+            double[] mm  = ranges.get(key);
             if (mm == null) {
                 SC.warn(MSG.error_read_minmax_values());
                 continue;
@@ -326,65 +325,17 @@
     }
 
 
-    protected List<String> validateQ() {
-        List<String> errors = new ArrayList<String>();
-        NumberFormat nf     = NumberFormat.getDecimalFormat();
-
-        Iterator<String> iter = wqranges.keySet().iterator();
-
-        while (iter.hasNext()) {
-            List<String> tmpErrors = new ArrayList<String>();
-
-            String           key = iter.next();
-            DoubleArrayPanel dap = wqranges.get(key);
-
-            if (!dap.validateForm()) {
-                errors.add(MSG.error_invalid_double_value());
-                return errors;
-            }
-
-            double[] mm = qranges.get(key);
-            if (mm == null) {
-                SC.warn(MSG.error_read_minmax_values());
-                continue;
-            }
-
-            double[] values = dap.getInputValues();
-            double[] good   = new double[values.length];
+    protected List<String> validateW() {
+        return validateRange(wranges);
+    }
 
-            int idx = 0;
 
-            for (double value: values) {
-                if (value < mm[0] || value > mm[1]) {
-                    String tmp = MSG.error_validate_range();
-                    tmp = tmp.replace("$1", nf.format(value));
-                    tmp = tmp.replace("$2", nf.format(mm[0]));
-                    tmp = tmp.replace("$3", nf.format(mm[1]));
-                    tmpErrors.add(tmp);
-                }
-                else {
-                    good[idx++] = value;
-                }
-            }
-
-            double[] justGood = new double[idx];
-            for (int i = 0; i < justGood.length; i++) {
-                justGood[i] = good[i];
-            }
-
-            if (!tmpErrors.isEmpty()) {
-                dap.setValues(justGood);
-
-                errors.addAll(tmpErrors);
-            }
-        }
-
-        return errors;
+    protected List<String> validateQ() {
+        return validateRange(qranges);
     }
 
 
     protected void initUserDefaults(DataList dataList) {
-
         initUserWQValues(dataList);
         initUserWQMode(dataList);
     }
@@ -464,9 +415,9 @@
 
         for (DataItem item: items) {
             String title = item.getLabel();
-
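+            // The string value becomes the visible panel title; the item label stays the map key.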
+            String label = item.getStringValue();
             DoubleArrayPanel dap = new DoubleArrayPanel(
-                createLineTitle(title), null, this, TitleOrientation.LEFT);
+                label, null, this, TitleOrientation.LEFT);
 
             wqranges.put(title, dap);
 
@@ -513,7 +464,7 @@
 
         LinkedHashMap wqValues = new LinkedHashMap();
         wqValues.put(FIELD_WQ_W, MSG.wqW());
-        wqValues.put(FIELD_WQ_Q, MSG.wqQ());
+        wqValues.put(FIELD_WQ_Q, MSG.wqQatGauge());
 
         wq.setValueMap(wqValues);
 
@@ -587,13 +538,14 @@
         while (iter.hasNext()) {
             String           key = iter.next();
             DoubleArrayPanel dap = wqranges.get(key);
+            String label = dap.getItemTitle();
 
             double[] values = dap.getInputValues();
             if (wqvalue == null) {
-                wqvalue = createValueString(key, values);
+                wqvalue = createValueString(key + ";" + label, values);
             }
             else {
-                wqvalue += GAUGE_SEPARATOR + createValueString(key, values);
+                wqvalue += GAUGE_SEPARATOR + createValueString(key + ";" + label, values);
             }
         }
 
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/chart/ChartToolbar.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/chart/ChartToolbar.java	Thu Feb 28 12:47:24 2013 +0100
@@ -113,6 +113,7 @@
                 baseUrl + MSG.downloadCSV(),
                 moduleUrl + "export" +
                    "?uuid=" + chartTab.getCollection().identifier() +
+                   "&name=" + chartTab.getMode().getName() +
                    "&mode=" + chartTab.getMode().getName() + "_at_export" +
                    "&type=at" +
                    "&server=" + config.getServerUrl() +
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/map/FloodMap.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/map/FloodMap.java	Thu Feb 28 12:47:24 2013 +0100
@@ -6,6 +6,7 @@
 import org.gwtopenmaps.openlayers.client.MapOptions;
 import org.gwtopenmaps.openlayers.client.MapWidget;
 import org.gwtopenmaps.openlayers.client.Style;
+import org.gwtopenmaps.openlayers.client.control.Attribution;
 import org.gwtopenmaps.openlayers.client.control.ScaleLine;
 import org.gwtopenmaps.openlayers.client.control.ScaleLineOptions;
 import org.gwtopenmaps.openlayers.client.event.VectorFeatureAddedListener;
@@ -56,6 +57,7 @@
                 Integer.toString(height),
                 opts);
         map       = mapWidget.getMap();
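+        // Render the attribution texts of the map layers (e.g. the OSM WMS) inside the map.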
+        map.addControl(new Attribution());
     }
 
 
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/map/MapOutputTab.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/map/MapOutputTab.java	Thu Feb 28 12:47:24 2013 +0100
@@ -454,7 +454,6 @@
         if (url == null || layers == null) {
             return null;
         }
-
         this.wmsUrls.put(name, url);
 
         WMSParams params = new WMSParams();
@@ -466,7 +465,9 @@
         opts.setProjection("EPSG:" + getSrid());
         opts.setSingleTile(true);
         opts.setRatio(1);
-
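+        // Show the attribution text only for the OSM WMS layer.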
+        if (layers.equals("OSM-WMS-Dienst")) {
+            opts.setAttribution(MSG.attribution());
+        }
         WMS wms = new WMS(layers, url, params, opts);
         wms.setIsVisible(at.getActive() == 1);
         wms.setIsBaseLayer(false);
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/GaugeInfoPanel.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/GaugeInfoPanel.java	Thu Feb 28 12:47:24 2013 +0100
@@ -5,7 +5,7 @@
 import com.google.gwt.event.dom.client.ClickHandler;
 import com.google.gwt.i18n.client.NumberFormat;
 import com.google.gwt.user.client.ui.Anchor;
-import com.google.gwt.user.client.ui.Grid;
+import com.smartgwt.client.widgets.layout.HLayout;
 import com.smartgwt.client.widgets.layout.VLayout;
 import com.smartgwt.client.widgets.Label;
 
@@ -24,43 +24,64 @@
     public GaugeInfoPanel(GaugeInfo gauge, FLYS flys) {
         this.flys = flys;
         setStyleName("gaugeinfopanel");
-        setWidth100();
-
-        Grid grid = new Grid(5, 2);
 
         NumberFormat nf = NumberFormat.getDecimalFormat();
 
+        // Key/value pairs are shown as HLayout rows stacked in a VLayout.
+        VLayout grid = new VLayout();
+        HLayout line1 = new HLayout();
+
         Double minw = gauge.getMinW();
         Double maxw = gauge.getMaxW();
         if (minw != null && maxw != null) {
-            grid.setText(0, 0, MSG.wq_value_q());
-            grid.setText(0, 1, nf.format(minw) +
-                    " - " + nf.format(maxw));
+            Label key = new Label(MSG.wq_value_q());
+            Label value = new Label(nf.format(minw) +
+                                    " - " + nf.format(maxw));
+            key.setWidth(150);
+            line1.addMember(key);
+            line1.addMember(value);
         }
 
+        HLayout line2 = new HLayout();
         Double minq = gauge.getMinQ();
         Double maxq = gauge.getMaxQ();
         if (minq != null && maxq != null) {
-            grid.setText(1, 0, MSG.wq_value_w());
-            grid.setText(1, 1, nf.format(minq) +
+            Label key = new Label(MSG.wq_value_w());
+            Label value = new Label(nf.format(minq) +
                     " - " + nf.format(maxq));
+            key.setWidth(150);
+            line2.addMember(key);
+            line2.addMember(value);
         }
 
+        HLayout line3 = new HLayout();
         Double aeo = gauge.getAeo();
         if (aeo != null) {
-            grid.setText(2, 0, "AEO [km²]");
-            grid.setText(2, 1, nf.format(aeo));
+            Label key = new Label("AEO [km²]");
+            Label value = new Label(nf.format(aeo));
+            key.setWidth(150);
+            line3.addMember(key);
+            line3.addMember(value);
         }
 
+        HLayout line4 = new HLayout();
         Double datum = gauge.getDatum();
         if (datum != null) {
-            grid.setText(3, 0, MSG.gauge_zero() + " [" +
+            Label key = new Label(MSG.gauge_zero() + " [" +
                     gauge.getWstUnit() + "]");
-            grid.setText(3, 1, nf.format(datum));
+            Label value = new Label(nf.format(datum));
+            key.setWidth(150);
+            line4.addMember(key);
+            line4.addMember(value);
         }
 
-        grid.setWidget(4,0, new GaugeMainValueAnchor(flys, gauge));
+        HLayout line5 = new HLayout();
+        line5.addMember(new GaugeMainValueAnchor(flys, gauge));
 
+        grid.addMember(line1);
+        grid.addMember(line2);
+        grid.addMember(line3);
+        grid.addMember(line4);
+        grid.addMember(line5);
         addMember(grid);
     }
 
--- a/flys-client/src/main/java/de/intevation/flys/client/server/FileUploadServiceImpl.java	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/server/FileUploadServiceImpl.java	Thu Feb 28 12:47:24 2013 +0100
@@ -7,7 +7,9 @@
 import de.intevation.artifacts.httpclient.http.HttpClientImpl;
 
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.InputStream;
+import java.io.PrintWriter;
 
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -28,11 +30,6 @@
 
     @Override
     protected void doPost(HttpServletRequest req, HttpServletResponse resp) {
-        processPost(req, resp);
-    }
-
-
-    protected void processPost(HttpServletRequest req, HttpServletResponse resp) {
         logger.debug("handling post request.");
 
         String url  = getServletContext().getInitParameter("server-url");
@@ -47,15 +44,31 @@
         try {
             Document result = client.callService(url, "fileupload", request);
 
+            // Answer with a small FLYS-styled HTML page that reports the upload status.
+            PrintWriter respWriter = resp.getWriter();
+            respWriter.write("<html><link href='FLYS.css' rel='stylesheet' type='text/css'>");
+            respWriter.write("<body><div style='font-family: Arial,Verdana,sans-serif; font-size: 11px'>");
+
             if (result == null) {
                 logger.warn("FileUpload service returned no result.");
+                respWriter.write("FileUpload service returned no result");
             }
+            else {
+                String status = result.getElementsByTagName("status")
+                        .item(0).getTextContent();
+                respWriter.write(status);
+            }
+
+            respWriter.write("</div></body></html>");
+            respWriter.flush();
 
             return;
         }
         catch (ConnectionException ce) {
             logger.error(ce, ce);
         }
+        catch (IOException e) {
+            logger.error(e, e);
+        }
     }
 
 
@@ -67,8 +80,6 @@
 
             while (iter.hasNext()) {
                 FileItemStream item = iter.next();
-
-                String name = item.getFieldName();
                 InputStream stream = item.openStream();
 
                 // Process the input stream
--- a/flys-client/src/main/webapp/FLYS.css	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/webapp/FLYS.css	Thu Feb 28 12:47:24 2013 +0100
@@ -235,3 +235,8 @@
       color: #a9c9e6;
       margin-left: 10px;
 }
+
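+/* Keep the OpenLayers attribution control above the bottom edge of the map. */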
+.olControlAttribution {
+    bottom: 1em !important;
+    font-size: 12px !important;
+}
--- a/flys-client/src/main/webapp/WEB-INF/config.yaml	Tue Feb 19 10:46:41 2013 +0100
+++ b/flys-client/src/main/webapp/WEB-INF/config.yaml	Thu Feb 28 12:47:24 2013 +0100
@@ -25,7 +25,7 @@
   - 2000000
   - 4000000
 
-outputFilename: 'flys-${date}.pdf'
+outputFilename: "flys-${date}.pdf"
 
 #===========================================================================
 # the list of allowed hosts
@@ -54,65 +54,12 @@
     port: 80
 
 layouts:
-  #===========================================================================
-  A4 portrait:
-  #===========================================================================
-    metaData:
-      title: '${title}'
-      author: 'Flys'
-      subject: 'Kartendruck A4 Portrait'
-      keywords: 'map,print'
-      creator: 'd4e-river'
-
-    #-------------------------------------------------------------------------
-    mainPage:
-      pageSize: A4
-      landscape: false
-      header:
-        height: 50
-        items:
-          - !image
-            align: left
-            maxWidth: 60
-            maxHeight: 60
-            url: '${configDir}/../images/FLYS_Logo.png'
-      items:
-        - !text
-            font: Helvetica
-            fontSize: 30
-            align: right
-            text: '${mapTitle}'
-            spacingAfter: 30
-        - !map
-          spacingAfter: 30
-          width: 440
-          height: 483
-        - !scalebar
-          type: bar
-          maxSize: 100
-          barBgColor: white
-          fontSize: 8
-          align: right
-        - !text
-          text: '${comment}'
-          spacingAfter: 30
-        - !text
-          font: Helvetica
-          fontSize: 9
-          align: right
-          text: '1:${scale} ${now MM.dd.yyyy}'
-        - !legends
-          align: left
-          maxIconWidth: 32
-          maxIconHeight: 32
-      footer: *commonFooter
-      
 
   #===========================================================================
   A4 landscape:
   #===========================================================================
     metaData:
-      title: '${title}'
+      title: "${title}"
       author: 'Flys'
       subject: 'Kartendruck A4 Quer'
       keywords: 'map,print'
@@ -125,96 +72,27 @@
       header:
         height: 50
         items:
-          - !image
-            align: left
-            maxWidth: 60
-            maxHeight: 60
-            url: '${configDir}/../images/FLYS_Logo.png'
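+          # Logo and map title share the page header as two column cells.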
+          - !columns
+            config:
+              cells:
+                - paddingBottom: 5
+            items:
+            - !image
+              align: left
+              maxWidth: 50
+              maxHeight: 50
+              url: "${configDir}/../images/FLYS_Logo.png"
+            - !text
+              font: Helvetica
+              fontSize: 30
+              align: right
+              text: "${mapTitle}"
+              
       items:
-        - !text
-            font: Helvetica
-            fontSize: 30
-            align: right
-            text: '${mapTitle}'
-            spacingAfter: 30
-        - !map
-          absoluteX: 45
-          absoluteY: 510
-          #spacingAfter: 30
-          width: 540
-          height: 480
-          align: left
-        - !scalebar
-          type: bar
-          maxSize: 100
-          barBgColor: white
-          fontSize: 8
-          align: right
-        - !text
-          text: '${comment}'
-          spacingAfter: 30
-        - !text
-          font: Helvetica
-          fontSize: 9
-          align: right
-          text: '1:${scale} ${now MM.dd.yyyy}'
-        - !legends
-          align: right
-          maxIconWidth: 32
-          maxIconHeight: 32
-#         - !columns
-#           widths: [500, 200]
-#           items:
-#             - !map
-#               width: 500
-#               height: 500
-#             - !legends
-      footer: *commonFooter
-
-
-  #===========================================================================
-  A0 portrait:
-  #===========================================================================
-    metaData:
-      title: '${title}'
-      author: 'MapFish print module'
-      subject: 'Simple layout'
-      keywords: 'map,print'
-      creator: 'MapFish'
-
-    #-------------------------------------------------------------------------
-    mainPage:
-      pageSize: A0
-      rotation: true
-      header:
-        height: 50
-        items:
-          - !text
-            font: Helvetica
-            fontSize: 30
-            align: right
-            text: '${mapTitle}'
-      items:
-        - !map
-          spacingAfter: 30
-          width: 1760
-          height: 1932
-        - !scalebar
-          type: bar
-          maxSize: 100
-          barBgColor: white
-          fontSize: 8
-          align: right
-        - !text
-          text: '${comment}'
-          spacingAfter: 30
-        - !text
-          font: Helvetica
-          fontSize: 9
-          align: right
-          text: '1:${scale} ${now MM.dd.yyyy}'
-        - !legends
-          align: left
-          maxIconWidth: 32
-          maxIconHeight: 32
-      footer: *commonFooter
+      - !map
+        absoluteX: 45
+        absoluteY: 510
+        spacingAfter: 200
+        width: 540
+        height: 480
+        align: left
