Mercurial > dive4elements > river
changeset 5187:eec78b9f73b8
Merge closing commit of the dami branch
author | Andre Heinecke <aheinecke@intevation.de> |
---|---|
date | Thu, 07 Mar 2013 12:02:02 +0100 |
parents | 801866a89ee0 (diff) 538d6c28cd8a (current diff) |
children | 2b7f44c80857 |
files | |
diffstat | 92 files changed, 1964 insertions(+), 1357 deletions(-) [+] |
line wrap: on
line diff
--- a/.hgtags Thu Feb 28 11:49:48 2013 +0100 +++ b/.hgtags Thu Mar 07 12:02:02 2013 +0100 @@ -26,3 +26,6 @@ 859278918eb14a8687fef60f2b33dcf89fe71f90 2.9.9 859278918eb14a8687fef60f2b33dcf89fe71f90 2.9.9 53be7313310416e1f8c3e0ec414684ca9c6c71df 2.9.9 +f459911fdbfbe2b2d23e06faba4e338514dd7b54 2.9.10 +f459911fdbfbe2b2d23e06faba4e338514dd7b54 2.9.10 +8c65acf01adc7083c5936d0f8acf67374c97140b 2.9.10
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/init.d/README.txt Thu Mar 07 12:02:02 2013 +0100 @@ -0,0 +1,15 @@ +SLES-Init-Script fuer Dive4Elements River: + +Installation als root: +- Kopieren nach /etc/init.d/d4e-river +- chmod 755 /etc/init.d/d4e-river +- insserv /etc/init.d/d4e-river +- /etc/init.d/d4e-river start + +Deinstallation als root: +- /etc/init.d/d4e-river stop +- insserv -r /etc/init.d/d4e-river +- rm /var/log/d4e-river.log /var/run/d4e-river.pid /etc/init.d/d4e-river + +TODO: +- ggf. logrotate fuer Logdatei /var/log/d4e-river.log konfigurieren
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/init.d/d4e-river Thu Mar 07 12:02:02 2013 +0100 @@ -0,0 +1,70 @@ +#!/bin/bash +# +### BEGIN INIT INFO +# Provides: d4e-server +# Required-Start: $network $syslog $remote_fs +# Should-Start: $named $syslog $time +# Required-Stop: $network $syslog +# Should-Stop: $named $syslog $time +# Default-Start: 3 5 +# Default-Stop: 0 1 2 6 +# Short-Description: Dive4Elements server +# Description: Start Dive4Elements server +### END INIT INFO + +RUNAS=flys +DIR="/opt/flys/current/server" + +CLASSPATH= +for l in `find "$DIR/bin/lib" -name \*.jar -print`; do + CLASSPATH=$CLASSPATH:$l +done + + +LOGFILE=/var/log/d4e-river.log +PIDFILE=/var/run/d4e-river.pid +ARGS="-Xmx256m \ + -server \ + -Djava.awt.headless=true \ + -Dflys.datacage.recommendations.development=false \ + -Djava.io.tmpdir=\"$DIR/cache\" \ + -Dflys.backend.enablejmx=true \ + -Dflys.uesk.keep.artifactsdir=false \ + -Dwsplgen.bin.path=\"$DIR/bin/wsplgen.exe\" \ + -Dwsplgen.log.output=false \ + -Dartifact.database.dir=\"$DIR/conf\"" +MAINCLASS=de.intevation.artifactdatabase.App + +# For SELinux we need to use 'runuser' not 'su' +if [ -x "/sbin/runuser" ]; then + SU="/sbin/runuser" +else + SU="/bin/su" +fi + +case "$1" in + start) + echo "Starting D4E-river server..." + $SU - $RUNAS -c "/usr/bin/java -classpath $CLASSPATH $ARGS $MAINCLASS" &> $LOGFILE & + PID=$! + echo $PID > $PIDFILE + ;; + stop) + echo "Stopping D4E-river server..." + PID=`cat $PIDFILE` + STOPRES=0 + while [ $STOPRES -le 0 ] + do + kill -15 $PID &> /dev/null + STOPRES=$? + sleep 1 + done + echo "done." + ;; + restart) + $0 stop && $0 start + ;; + *) + echo "Usage: $0 [start|stop|restart]" +esac +
--- a/contrib/make_flys_release/README Thu Feb 28 11:49:48 2013 +0100 +++ b/contrib/make_flys_release/README Thu Mar 07 12:02:02 2013 +0100 @@ -41,18 +41,13 @@ Importer: ========= -Das script um den Importer zu bauen und zu paketieren liegt unter -bin/make-importer-package.sh dies muss man anpassen in dem man Pfade angibt -unter welchem sich weitere pakete befinden. -Um das Paket zu bauen muss rpm2cpio installiert sein. +Das script um den Importer zu bauen und zu paketieren liegt unter +bin/make-importer-package.sh +Dieses muss man anpassen und ein paar pfade setzen -Benötigt werden in den einzustellenden Verzeichnissen: -EXTRAS: - - libgdal1-1.9.0-intevation1.x86\_64.rpm - - gdal-1.9.0-intevation1.x86\_64.rpm - - python-gdal-1.9.0-intevation1.x86\_64.rpm -ORACLE: - - cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm - - instantclient-basic-linux-x86-64-11.2.0.2.0.zip - - instantclient-sdk-linux-x86-64-11.2.0.2.0.zip - - instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip +Wenn man ein "Standalone" Paket bauen möchte kann man diesem script +einen Parameter übergeben an welchem sich ein tarball befindet +der mit ins importer paket gepackt werden soll. Dieser Tarball +kann abhängigkeiten (gdal / proj / oracle) enthalten. +Das skript um diesen tarball für sles zu erstellen ist +bin/make-opt-package.sh
--- a/contrib/make_flys_release/bin/make-importer-package.sh Thu Feb 28 11:49:48 2013 +0100 +++ b/contrib/make_flys_release/bin/make-importer-package.sh Thu Mar 07 12:02:02 2013 +0100 @@ -5,66 +5,55 @@ # The working directory. Resulting tarball will be placed in the directory above. PKG_DIR=/tmp/flys-importer -# Path to oracle zip archives and an oracle_cx rpm -ORACLE=/path/to/oracle/archives # Default conf CONF_DIR=/path/to/conf/dir # Path to the flys checkout FLYS_DIR=/path/to/flys/root -# Extra packages -EXTRAS=/path/to/gdal +# Tarball that will be extracted into flys-imprter/opt +EXTRAS=$1 rm -fr $PKG_DIR mkdir -p $PKG_DIR/hydr_morph mkdir -p $PKG_DIR/geodaesie mkdir -p $PKG_DIR/opt/lib64 mkdir -p $PKG_DIR/schema +mkdir -p $PKG_DIR/conf + +cat > "$PKG_DIR/conf/log4j.properties" << "EOF" +log4j.rootLogger=DEBUG, IMPORTER +log4j.appender.IMPORTER.layout=org.apache.log4j.PatternLayout +log4j.appender.IMPORTER.layout.ConversionPattern=%d [%t] %-5p %c - %m%n +log4j.appender.IMPORTER=org.apache.log4j.RollingFileAppender +log4j.appender.IMPORTER.File=./import.log +log4j.appender.IMPORTER.MaxFileSize=100000KB +log4j.appender.IMPORTER.MaxBackupIndex=10 +EOF cd ${FLYS_DIR}/flys-backend mvn -f pom-oracle.xml clean compile assembly:single cp target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar \ $PKG_DIR/hydr_morph/importer.jar +mvn -f pom.xml clean compile assembly:single +cp target/flys-backend-1.0-SNAPSHOT-jar-with-dependencies.jar \ + $PKG_DIR/hydr_morph/importer_psql.jar cp ${FLYS_DIR}/flys-backend/contrib/shpimporter/*.py $PKG_DIR/geodaesie cp ${FLYS_DIR}/flys-backend/contrib/run_geo.sh \ ${FLYS_DIR}/flys-backend/contrib/run_hydr_morph.sh \ + ${FLYS_DIR}/flys-backend/contrib/import_river.sh \ $PKG_DIR -cp -r ${CONF_DIR} $PKG_DIR -ln -s /usr/lib64/libproj.so.0.6.6 $PKG_DIR/opt/lib64/libproj.so # workaround for bad packaging -rm -rf /tmp/other_rpms -mkdir /tmp/other_rpms -cd /tmp/other_rpms - -rpm2cpio ${EXTRAS}/libgdal1-1.9.0-intevation1.x86\_64.rpm | cpio -i 
--make-directories -rpm2cpio ${EXTRAS}/gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories -rpm2cpio ${EXTRAS}/python-gdal-1.9.0-intevation1.x86\_64.rpm | cpio -i --make-directories -rpm2cpio ${ORACLE}/cx\_Oracle-5.1.2-11g-py26-1.x86\_64.rpm | cpio -i --make-directories -cp -r /tmp/other_rpms/usr/* $PKG_DIR/opt -rm -rf /tmp/other_rpms +cp ${FLYS_DIR}/flys-backend/doc/annotation-types.xml $PKG_DIR/conf +if [ -f $EXTRAS ]; then + cd $PKG_DIR + tar -xzf $EXTRAS +fi cp ${FLYS_DIR}/flys-backend/doc/schema/*.sql $PKG_DIR/schema cp ${FLYS_DIR}/flys-backend/doc/documentation/de/importer-manual.pdf $PKG_DIR -# Oracle (Do not distribute) -unzip ${ORACLE}/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt -unzip ${ORACLE}/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt -unzip ${ORACLE}/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $PKG_DIR//opt - -mkdir $PKG_DIR//opt/instantclient_11_2/lib -cd $PKG_DIR//opt/instantclient_11_2/lib -ln -s ../libclntsh.so.11.1 . -ln -s ../libclntsh.so.11.1 libclntsh.so -ln -s ../libnnz11.so . -ln -s ../libocci.so.11.1 . -ln -s ../libocci.so.11.1 libocci.so -ln -s ../libociei.so . -ln -s ../libocijdbc11.so . -ln -s ../libsqlplusic.so . -ln -s ../libsqlplus.so . -# End Oracle - sed -i 's/shpimporter\/shp/geodaesie\/shp/' $PKG_DIR/run_geo.sh cd $PKG_DIR/.. DATE=$(date +%Y%m%d%H%m) -tar -czf flys-importer${DATE}.tar.gz flys-importer -sha1sum flys-importer${DATE}.tar.gz > flys-importer${DATE}.tar.gz.sha1 +tar -czf flys-importer_${DATE}.tar.gz flys-importer +sha1sum flys-importer_${DATE}.tar.gz > flys-importer_${DATE}.tar.gz.sha1 +echo Package is at: `readlink -f flys-importer_${DATE}.tar.gz`
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/make_flys_release/bin/make-opt-package.sh Thu Mar 07 12:02:02 2013 +0100 @@ -0,0 +1,86 @@ +# Required packages are the build essential stuff make gcc etc. +# and: +# postgresql-devel libexpat-devel python-devel +set -e +# This script is intended to be run on suse enterprise linux + +# Path to the oracle zip archives +ORACLE_LOC=/home/intevation +# Path to the Source tarballs of gdal-1.9.2.tar.gz proj-4.8.0.tar.gz cx_Oracle-5.1.2.tar.gz +SOURCES=/home/intevation/Downloads +#mkdir -p $SOURCES +#cd $SOURCES +#wget http://download.osgeo.org/gdal/gdal-1.9.2.tar.gz +#wget http://download.osgeo.org/proj/proj-4.8.0.tar.gz +#wget http://downloads.sourceforge.net/project/cx-oracle/5.1.2/cx_Oracle-5.1.2.tar.gz + +DEVELDIR=/tmp/gdalbuild +INSTALL_PREFIX=$DEVELDIR/opt +export ORACLE_HOME=$DEVELDIR/opt/instantclient_11_2 +export LD_LIBRARY_PATH=$ORACLE_HOME/lib:$LD_LIBRARY_PATH + +rm -rf $DEVELDIR +mkdir -p $DEVELDIR + +mkdir -p $SOURCES +cd $SOURCES +wget http://download.osgeo.org/gdal/gdal-1.9.2.tar.gz +wget http://download.osgeo.org/proj/proj-4.8.0.tar.gz +wget http://downloads.sourceforge.net/project/cx-oracle/5.1.2/cx_Oracle-5.1.2.tar.gz + + +# Oracle +unzip $ORACLE_LOC/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt +unzip $ORACLE_LOC/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt +unzip $ORACLE_LOC/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d $DEVELDIR/opt +mkdir $ORACLE_HOME/lib +cd $ORACLE_HOME/lib +ln -s ../libclntsh.so.11.1 . +ln -s ../libclntsh.so.11.1 libclntsh.so +ln -s ../libnnz11.so . +ln -s ../libocci.so.11.1 . +ln -s ../libocci.so.11.1 libocci.so +ln -s ../libociei.so . +ln -s ../libocijdbc11.so . +ln -s ../libsqlplusic.so . +ln -s ../libsqlplus.so . 
+cd $ORACLE_HOME +ln -s libclntsh.so.11.1 libclntsh.so + +cd $DEVELDIR +tar -xf $SOURCES/proj-4.8.0.tar.gz +cd proj-4.8.0 +./configure --prefix=$INSTALL_PREFIX && make && make install + + +cd $DEVELDIR +tar -xf $SOURCES/gdal-1.9.2.tar.gz +cd gdal-1.9.2 +patch -l -p0 << "EOF" +Index: ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp +=================================================================== +--- ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp (revision 25700) ++++ ogr/ogrsf_frmts/oci/ogrocitablelayer.cpp (working copy) +@@ -264,7 +264,7 @@ + char **papszResult; + int iDim = -1; + +- oDimCmd.Append( "SELECT COUNT(*) FROM ALL_SDO_GEOM_METADATA u," ); ++ oDimCmd.Append( "SELECT COUNT(*) FROM USER_SDO_GEOM_METADATA u," ); + oDimCmd.Append( " TABLE(u.diminfo) t" ); + oDimCmd.Append( " WHERE u.table_name = '" ); + oDimCmd.Append( osTableName ); +EOF +LDFLAGS="-Wl,--no-as-needed" ./configure --with-python --with-oci=$ORACLE_HOME \ + --prefix=$INSTALL_PREFIX && make && make install + +cd $DEVELDIR +tar -xf $SOURCES/cx_Oracle-5.1.2.tar.gz +cd cx_Oracle-5.1.2 +python setup.py build +python setup.py install --prefix=$INSTALL_PREFIX + +cd $DEVELDIR +tar -czf flys-importer-opt.tar.gz opt +echo "Package is:" +readlink -f flys-importer-opt.tar.gz
--- a/contrib/make_flys_release/make_release.sh Thu Feb 28 11:49:48 2013 +0100 +++ b/contrib/make_flys_release/make_release.sh Thu Mar 07 12:02:02 2013 +0100 @@ -18,6 +18,7 @@ TOMCAT_PORT=${TOMCAT_PORT:-8005} MAPSERVER_URL=${MAPSERVER_URL:-czech-republic.atlas.intevation.de} +WIKI_URL=${WIKI_URL:-https://flys-intern.intevation.de/Flys-3.0} echo "INFO: create server directories" mkdir -p $DIRECTORY/server/bin/lib/own @@ -48,9 +49,13 @@ -e "s@http://localhost:8888@http://localhost:$TOMCAT_PORT@g" \ $FLYS_HG/flys-client/src/main/webapp/WEB-INF/web.xml -sed -i -e "s@/tmp/flys-client.log@/tmp/flys-client-${RELEASE}.log@g" \ +sed -i -e "s@/tmp/flys-client.log@/var/log/flys/client-${RELEASE}.log@g" \ $FLYS_HG/flys-client/src/main/webapp/WEB-INF/log4j.properties +# Fix the Wiki URLs +find $FLYS_HG/flys-artifacts/src/main/resources/ -name messages_\*.properties \ + -exec sed -i "s@https://flys-intern.intevation.de/Flys-3.0@${WIKI_URL}@g" {} \; + echo "INFO: download OpenLayers-2.11 for client" curl -O http://openlayers.org/download/OpenLayers-2.11.tar.gz tar xvfz OpenLayers-2.11.tar.gz @@ -110,8 +115,7 @@ confs/rivermap.xml \ > $DIRECTORY/server/conf/rivermap.xml - -sed "s@/tmp/flys-server-default.log@/tmp/flys-server-${RELEASE}.log" \ +sed "s@/tmp/flys-server-default.log@/var/log/flys/server-${RELEASE}.log@" \ confs/log4j.properties \ > $DIRECTORY/server/conf/log4j.properties
--- a/flys-artifacts/doc/conf/artifacts/fixanalysis.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/artifacts/fixanalysis.xml Thu Mar 07 12:02:02 2013 +0100 @@ -270,6 +270,7 @@ <facet name="w_differences.manualpoints" description="Manuelle Punkte"/> <facet name="longitudinal_section.manualpoints" description="Manuelle Punkte"/> <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/> + <facet name="longitudinal_section.area" description="facet.longitudinal_section.area"/> </facets> </outputmode> <outputmode name="fix_wq_curve" description="output.fix_wq_curve" mine-type="image/png" type="chart">
--- a/flys-artifacts/doc/conf/artifacts/winfo.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/artifacts/winfo.xml Thu Mar 07 12:02:02 2013 +0100 @@ -402,6 +402,8 @@ <facets> <facet name="discharge_longitudinal_section.w"/> <facet name="discharge_longitudinal_section.q"/> + <facet name="discharge_longitudinal_section.q.infolding"/> + <facet name="discharge_longitudinal_section.q.cutting"/> <facet name="discharge_longitudinal_section.c"/> <facet name="discharge_longitudinal_section.manualpoints" description="Manuelle Punkte"/> <facet name="other.wqkms.w"/>
--- a/flys-artifacts/doc/conf/conf.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/conf.xml Thu Mar 07 12:02:02 2013 +0100 @@ -394,9 +394,14 @@ <zoom-scale river="Elbe" range="100" radius="5" /> <zoom-scale river="Elbe" range="500" radius="10" /> </zoom-scales> + <minfo-sq> <!-- valid names: grubbs or std-dev --> <outlier-method name="grubbs"/> </minfo-sq> + + <dgm-path> + /path/to/rivers/ + </dgm-path> </options> </artifact-database>
--- a/flys-artifacts/doc/conf/meta-data.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/meta-data.xml Thu Mar 07 12:02:02 2013 +0100 @@ -1,7 +1,10 @@ <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <dc:template xmlns:dc="http://www.intevation.org/2011/Datacage"> <datacage> -<dc:macro name="SQL-wst_columns_statement"> + <dc:comment> + Statement to load data from wsts. + </dc:comment> + <dc:macro name="SQL-wst_columns_statement"> <dc:statement> SELECT wst_columns.id AS prot_column_id, wst_columns.name AS prot_column_name, @@ -10,10 +13,49 @@ wst_ranges.a AS deffrom, wst_ranges.b AS defto FROM wst_columns, wst_ranges - WHERE wst_columns.wst_id = ${prot_id} AND wst_ranges.wst_column_id = wst_columns.id + WHERE wst_columns.wst_id = ${prot_id} + AND wst_ranges.wst_column_id = wst_columns.id + AND (${fromkm} BETWEEN wst_ranges.a AND wst_ranges.b + OR ${tokm} BETWEEN wst_ranges.a AND wst_ranges.b + OR wst_ranges.a BETWEEN ${fromkm} AND ${tokm} + OR wst_ranges.b BETWEEN ${fromkm} AND ${tokm}) ORDER by wst_columns.position </dc:statement> -</dc:macro> + </dc:macro> + + <dc:comment> + Load user specific distance information from artifact. 
+ </dc:comment> + <dc:macro name="user-range"> + <dc:choose> + <dc:when test="dc:contains($parameters, 'user-id')"> + <dc:context connection="user"> + <dc:statement> + SELECT COALESCE(ld_mode, '') AS ldm, + COALESCE(ld_locations, '') AS ldl, + COALESCE(ld_from, '') AS ldf, + COALESCE(ld_to, '') AS ldt + FROM master_artifacts_range + WHERE gid = CAST(${artifact-id} as uuid) + </dc:statement> + <dc:elements> + <dc:variable name="fromkm" type="number" expr="dc:fromValue($ldm, $ldl, $ldf)"/> + <dc:variable name="tokm" type="number" expr="dc:toValue($ldm, $ldl, $ldt)"/> + <dc:macro-body/> + </dc:elements> + </dc:context> + </dc:when> + <dc:otherwise> + <dc:variable name="fromkm" type="number" expr="dc:fromValue('', '', '')"/> + <dc:variable name="tokm" type="number" expr="dc:toValue('', '', '')"/> + <dc:macro-body/> + </dc:otherwise> + </dc:choose> + </dc:macro> + + <dc:comment> + System part. Load data for the given river. + </dc:comment> <dc:macro name="load-system"> <dc:context connection="system"> <dc:statement> @@ -21,13 +63,15 @@ WHERE lower(name) LIKE lower(${river}) </dc:statement> <dc:elements> + <dc:comment> - Base-data macros (mostly data imported from wst-files) + Base-data macros (mostly data imported from wst-files). 
</dc:comment> <dc:macro name="basedata_0"> + <dc:call-macro name="user-range"> <dc:comment comment=" BASEDATA ---------------------------"/> <basedata> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -51,11 +95,14 @@ </dc:elements> </dc:context> </basedata> + </dc:call-macro> </dc:macro> + <dc:macro name="basedata_0_wq"> + <dc:call-macro name="user-range"> <dc:comment comment=" BASEDATA ---------------------------"/> <basedata> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -79,12 +126,14 @@ </dc:elements> </dc:context> </basedata> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_1_additionals_marks"> + <dc:call-macro name="user-range"> <dc:comment comment=".ZUS -------------------------------"/> <additionals> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -108,12 +157,14 @@ </dc:elements> </dc:context> </additionals> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_1_additionals"> + <dc:call-macro name="user-range"> <dc:comment comment=".ZUS -------------------------------"/> <additionals> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -137,12 +188,14 @@ </dc:elements> </dc:context> </additionals> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_1_additionals-relative_point"> + <dc:call-macro name="user-range"> <dc:comment comment=".ZUS -------------------------------"/> <additionals> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -166,125 +219,135 @@ </dc:elements> </dc:context> </additionals> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_2_fixations_wst"> - <fixations> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM 
wsts WHERE kind = 2 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <fixation> - <dc:attribute name="name" value="${prot_description}"/> - <!--dc:attribute name="ids" value="fixations-wstv-A-${prot_id}"/--> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="wqinterpol"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </fixation> - </dc:elements> - </dc:context> - </fixations> + <dc:call-macro name="user-range"> + <fixations> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 2 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <fixation> + <dc:attribute name="name" value="${prot_description}"/> + <!--dc:attribute name="ids" value="fixations-wstv-A-${prot_id}"/--> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="wqinterpol"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </fixation> + </dc:elements> + </dc:context> + </fixations> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_2_fixations_wqkms"> - <fixations> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 2 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <fixation> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" 
value="${prot_column_name}"/> - <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="wqinterpol"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </fixation> - </dc:elements> - </dc:context> - </fixations> + <dc:call-macro name="user-range"> + <fixations> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 2 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <fixation> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="wqinterpol"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </fixation> + </dc:elements> + </dc:context> + </fixations> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_2_fixations"> - <fixations> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 2 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <fixation> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </fixation> - </dc:elements> - </dc:context> - </fixations> + <dc:call-macro name="user-range"> + <fixations> + <dc:context 
connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 2 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <fixation> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </fixation> + </dc:elements> + </dc:context> + </fixations> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_2_fixations_relative_point"> - <fixations> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 2 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <relativepoint> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </relativepoint> - </dc:elements> - </dc:context> - </fixations> + <dc:call-macro name="user-range"> + <fixations> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 2 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <relativepoint> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" 
value="${prot_column_name}"/> + <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </relativepoint> + </dc:elements> + </dc:context> + </fixations> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_3_officials"> + <dc:call-macro name="user-range"> <dc:comment comment=".wst -------------------------------"/> <officiallines> - <dc:context> + <dc:context connection="system"> <dc:statement> SELECT id AS prot_id, description AS prot_description @@ -308,107 +371,149 @@ </dc:elements> </dc:context> </officiallines> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_4_heightmarks-points-relative_points"> - <heightmarks> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 4 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <relativepoint> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </relativepoint> - </dc:elements> - </dc:context> - </heightmarks> + <dc:call-macro name="user-range"> + <heightmarks> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 4 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <relativepoint> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute 
name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </relativepoint> + </dc:elements> + </dc:context> + </heightmarks> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_4_heightmarks-points"> - <heightmarks> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 4 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <heightmark> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </heightmark> - </dc:elements> - </dc:context> - </heightmarks> + <dc:call-macro name="user-range"> + <heightmarks> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 4 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <heightmark> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </heightmark> + </dc:elements> + </dc:context> + </heightmarks> + 
</dc:call-macro> </dc:macro> <dc:macro name="basedata_4_heightmarks-wq"> - <heightmarks> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 4 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <heightmark> - <dc:attribute name="name" value="${prot_description}"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="heightmarks_annotations-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="wqinterpol"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </heightmark> - </dc:elements> - </dc:context> - </heightmarks> + <dc:call-macro name="user-range"> + <heightmarks> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 4 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <heightmark> + <dc:attribute name="name" value="${prot_description}"/> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="heightmarks_annotations-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="wqinterpol"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </heightmark> + </dc:elements> + </dc:context> + </heightmarks> + </dc:call-macro> </dc:macro> <dc:macro name="basedata_5_flood-protections_relative_points"> - <flood_protections> - <dc:attribute name="id" value="flood-protections-${river_id}"/> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 5 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - 
<relativepoint> - <dc:attribute name="name" value="${prot_description}"/> - <dc:attribute name="db-id" value="${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <columns> + <dc:call-macro name="user-range"> + <flood_protections> + <dc:attribute name="id" value="flood-protections-${river_id}"/> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 5 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <relativepoint> + <dc:attribute name="name" value="${prot_description}"/> + <dc:attribute name="db-id" value="${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <columns> + <dc:context> + <dc:call-macro name="SQL-wst_columns_statement"/> + <dc:elements> + <column> + <dc:attribute name="name" value="${prot_column_name}"/> + <dc:attribute name="ids" value="flood_protection-wstv-${prot_rel_pos}-${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> + <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> + </column> + </dc:elements> + </dc:context> + </columns> + </relativepoint> + </dc:elements> + </dc:context> + </flood_protections> + </dc:call-macro> + </dc:macro> + + <dc:macro name="basedata_5_flood-protections"> + <dc:call-macro name="user-range"> + <flood_protections> + <dc:attribute name="id" value="flood-protections-${river_id}"/> + <dc:context connection="system"> + <dc:statement> + SELECT id AS prot_id, + description AS prot_description + FROM wsts WHERE kind = 5 AND river_id = ${river_id} + </dc:statement> + <dc:elements> + <flood_protection> + <dc:attribute name="name" value="${prot_description}"/> + <dc:attribute name="db-id" value="${prot_id}"/> + <dc:attribute name="factory" value="staticwkms"/> <dc:context> <dc:call-macro name="SQL-wst_columns_statement"/> <dc:elements> @@ -420,42 +525,11 @@ </column> </dc:elements> </dc:context> - </columns> - </relativepoint> - </dc:elements> - </dc:context> - 
</flood_protections> - </dc:macro> - - <dc:macro name="basedata_5_flood-protections"> - <flood_protections> - <dc:attribute name="id" value="flood-protections-${river_id}"/> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM wsts WHERE kind = 5 AND river_id = ${river_id} - </dc:statement> - <dc:elements> - <flood_protection> - <dc:attribute name="name" value="${prot_description}"/> - <dc:attribute name="db-id" value="${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:context> - <dc:call-macro name="SQL-wst_columns_statement"/> - <dc:elements> - <column> - <dc:attribute name="name" value="${prot_column_name}"/> - <dc:attribute name="ids" value="flood_protection-wstv-${prot_rel_pos}-${prot_id}"/> - <dc:attribute name="factory" value="staticwkms"/> - <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/> - </column> - </dc:elements> - </dc:context> - </flood_protection> - </dc:elements> - </dc:context> - </flood_protections> + </flood_protection> + </dc:elements> + </dc:context> + </flood_protections> + </dc:call-macro> </dc:macro> <dc:macro name="mainvalues"> @@ -512,97 +586,129 @@ </dc:macro> <dc:macro name="cross_sections"> - <cross_sections> - <dc:attribute name="id" value="flood-protections-${river_id}"/> - <dc:context> - <dc:statement> - SELECT id AS prot_id, - description AS prot_description - FROM cross_sections WHERE river_id = ${river_id} - </dc:statement> - <dc:elements> - <cross_section> - <dc:attribute name="name" value="${prot_description}"/> - <dc:attribute name="ids" value="${prot_id}"/> - <dc:attribute name="factory" value="crosssections"/> - </cross_section> - </dc:elements> - </dc:context> - </cross_sections> + <dc:call-macro name="user-range"> + <cross_sections> + <dc:attribute name="id" value="flood-protections-${river_id}"/> + <dc:context connection="system"> + <dc:statement> + SELECT DISTINCT + cs.id AS prot_id, + cs.description AS prot_description + FROM 
cross_sections cs + JOIN cross_section_lines csl ON csl.cross_section_id = cs.id + WHERE cs.river_id = ${river_id} + AND csl.km BETWEEN ${fromkm} AND ${tokm} + </dc:statement> + <dc:elements> + <cross_section> + <dc:attribute name="name" value="${prot_description}"/> + <dc:attribute name="ids" value="${prot_id}"/> + <dc:attribute name="factory" value="crosssections"/> + </cross_section> + </dc:elements> + </dc:context> + </cross_sections> + </dc:call-macro> </dc:macro> <dc:macro name="hyks"> - <hyks> - <dc:attribute name="id" value="hyk-${river_id}"/> - <dc:context> - <dc:statement> - SELECT id AS hyk_id, - description AS hyk_description - FROM hyks WHERE river_id = ${river_id} - </dc:statement> - <dc:elements> - <hyk> - <dc:attribute name="name" value="${hyk_description}"/> - <dc:attribute name="ids" value="${hyk_id}"/> - <dc:attribute name="factory" value="hyk"/> - </hyk> - </dc:elements> - </dc:context> - </hyks> + <dc:call-macro name="user-range"> + <hyks> + <dc:attribute name="id" value="hyk-${river_id}"/> + <dc:context connection="system"> + <dc:statement> + SELECT DISTINCT + h.id AS hyk_id, + h.description AS hyk_description + FROM hyks h + JOIN hyk_entries he ON he.hyk_id = h.id + WHERE river_id = ${river_id} + AND he.km BETWEEN ${fromkm} AND ${tokm} + </dc:statement> + <dc:elements> + <hyk> + <dc:attribute name="name" value="${hyk_description}"/> + <dc:attribute name="ids" value="${hyk_id}"/> + <dc:attribute name="factory" value="hyk"/> + </hyk> + </dc:elements> + </dc:context> + </hyks> + </dc:call-macro> </dc:macro> <dc:macro name="flow_velocity_measurements"> - <flowvelocitymeasurement> - <dc:context> - <dc:statement> - SELECT id AS fvmid, - description AS fvmd - FROM flow_velocity_measurements WHERE river_id = ${river_id} - </dc:statement> - <dc:elements> - <flow_velocity_measurement> - <dc:attribute name="name" value="${fvmd}"/> - <dc:attribute name="ids" value="${fvmid}"/> - <dc:attribute name="factory" value="flowvelocity"/> - <dc:context> - 
<dc:statement> - SELECT id, description, station, datetime, v, w, q - FROM flow_velocity_measure_values - WHERE measurements_id = ${fvmid} - </dc:statement> - <dc:elements> - <measurement_value> - <dc:attribute name="name" value="${id}-${description}-${station}-${datetime}"/> - <dc:attribute name="ids" value="${id}"/> - <dc:attribute name="factory" value="flowvelocity"/> - </measurement_value> - </dc:elements> - </dc:context> - </flow_velocity_measurement> + <dc:call-macro name="user-range"> + <flowvelocitymeasurement> + <dc:context connection="system"> + <dc:statement> + SELECT id AS fvmid, + description AS fvmd + FROM flow_velocity_measurements WHERE river_id = ${river_id} + </dc:statement> + <dc:elements> + <flow_velocity_measurement> + <dc:attribute name="name" value="${fvmd}"/> + <dc:attribute name="ids" value="${fvmid}"/> + <dc:attribute name="factory" value="flowvelocity"/> + <dc:context> + <dc:statement> + SELECT id, description, station, datetime, v, w, q + FROM flow_velocity_measure_values + WHERE measurements_id = ${fvmid} + AND station BETWEEN ${fromkm} AND ${tokm} + </dc:statement> + <dc:elements> + <measurement_value> + <dc:attribute name="name" value="${id}-${description}-${station}-${datetime}"/> + <dc:attribute name="ids" value="${id}"/> + <dc:attribute name="factory" value="flowvelocity"/> + </measurement_value> + </dc:elements> + </dc:context> + </flow_velocity_measurement> </dc:elements> </dc:context> </flowvelocitymeasurement> + </dc:call-macro> + </dc:macro> + + <dc:macro name="sounding-width"> + <soundings_width> + <dc:context> + <dc:statement> + SELECT id AS bedh_id, + year AS bedh_year, + description AS bedh_descr + FROM bed_height_single WHERE river_id = ${river_id} + </dc:statement> + <dc:elements> + <height> + <dc:attribute name="factory" value="bedheight"/> + <dc:attribute name="ids" value="bedheight-singlevalues-${bedh_id}-${bedh_year}"/> + <dc:attribute name="description" value="${bedh_descr}"/> + </height> + </dc:elements> + 
</dc:context> + </soundings_width> </dc:macro> <dc:macro name="longitudinal-section-prototype"> - <dc:call-macro name="basedata_0"/> - <dc:call-macro name="basedata_1_additionals"/> - <dc:comment comment=" FIXATIONS ---------------------------"/> - <dc:call-macro name="basedata_2_fixations"/> - <dc:comment comment=" HOEHENMARKEN ---------------------------"/> - <dc:call-macro name="basedata_4_heightmarks-points"/> - <dc:comment comment=" AMTL LINIEN ---------------------------"/> - <dc:call-macro name="basedata_3_officials"/> - <dc:call-macro name="basedata_5_flood-protections"/> - <dc:call-macro name="annotations_per_type"/> + <dc:call-macro name="basedata_0"/> + <dc:call-macro name="basedata_1_additionals"/> + <dc:comment comment=" FIXATIONS ---------------------------"/> + <dc:call-macro name="basedata_2_fixations"/> + <dc:comment comment=" HOEHENMARKEN ---------------------------"/> + <dc:call-macro name="basedata_4_heightmarks-points"/> + <dc:comment comment=" AMTL LINIEN ---------------------------"/> + <dc:call-macro name="basedata_3_officials"/> + <dc:call-macro name="basedata_5_flood-protections"/> + <dc:call-macro name="annotations_per_type"/> </dc:macro> <dc:comment> - + River-Node - </dc:comment> - <river> <dc:attribute name="name" value="${river_name}"/> @@ -631,6 +737,9 @@ <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')"> <dc:call-macro name="qsectors"/> </dc:if> + <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')"> + <dc:call-macro name="annotations"/> + </dc:if> <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')"> <dc:call-macro name="annotations"/> </dc:if> @@ -709,23 +818,7 @@ MINFO bedheight middle </dc:comment> <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')"> - <soundings_width> - <dc:context> - <dc:statement> - SELECT id AS bedh_id, - year AS bedh_year, - description AS bedh_descr - FROM bed_height_single WHERE river_id = ${river_id} - </dc:statement> - <dc:elements> - <height> - 
<dc:attribute name="factory" value="bedheight"/> - <dc:attribute name="ids" value="bedheight-singlevalues-${bedh_id}-${bedh_year}"/> - <dc:attribute name="description" value="${bedh_descr}"/> - </height> - </dc:elements> - </dc:context> - </soundings_width> + <dc:call-macro name="sounding-width"/> </dc:if> <dc:comment comment="--- non-recommendations---"/> </dc:otherwise> @@ -1281,18 +1374,45 @@ <dc:when test="dc:contains($parameters, 'user-id')"> - <old_calculations> - <dc:context connection="user"> - <dc:comment> - Get the user and collection-id. - </dc:comment> - <dc:statement> + <old_calculations> + <!-- <dc:macro name="load-user">--> + <dc:call-macro name="user-range"> + <dc:context connection="user"> + <dc:comment> + Get the user and collection-id. + </dc:comment> + <dc:statement> SELECT u.id AS user_id, c.id AS collection_id, c.name as collection_name FROM collections c JOIN users u ON c.user_id = u.id WHERE u.gid = CAST(${user-id} AS uuid) ORDER BY c.creation DESC </dc:statement> + + <dc:macro name="range-filter"> + <dc:statement> + SELECT m.id AS a_id, + m.state AS a_state, + m.gid AS a_gid, + m.creation AS a_creation, + COALESCE(ld_mode, '') AS ld_m, + COALESCE(ld_locations, '') AS ld_l, + COALESCE(ld_from, '') AS ld_f, + COALESCE(ld_to, '') AS ld_t + FROM master_artifacts_range m + WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) + AND EXISTS ( + SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) + </dc:statement> + <dc:elements> + <dc:variable name="from" type="number" expr="dc:fromValue($ld_m, $ld_l, $ld_f)"/> + <dc:variable name="to" type="number" expr="dc:toValue($ld_m, $ld_l, $ld_t)"/> + <dc:if test="($from >= $fromkm and $from <= $tokm) or ($to <= $tokm and $to >= $fromkm) or ($from <= $fromkm and $to >= $tokm)"> + <dc:macro-body/> + </dc:if> + </dc:elements> + </dc:macro> + <!-- OFFICIAL LINES --> <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')"> 
<dc:comment comment=".wst -------------------------------"/> @@ -1300,11 +1420,27 @@ <dc:elements> <dc:context> <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation, ardg.v as gaugy, arv.v as wqsingle - FROM master_artifacts m, artifact_data ardg, artifact_data arv - WHERE m.collection_id = ${collection_id} AND m.gid = CAST(${artifact-id} AS uuid) AND ardg.artifact_id = m.id AND ardg.k = 'ld_gaugename' AND arv.artifact_id = m.id AND arv.k = 'wq_single' - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) + SELECT m.id AS a_id, + m.state AS a_state, + m.gid AS a_gid, + m.creation AS a_creation, + ardg.v AS gaugy, + arv.v AS wqsingle + FROM master_artifacts m, + artifact_data ardg, + artifact_data arv + WHERE m.collection_id = ${collection_id} + AND m.gid = CAST(${artifact-id} AS uuid) + AND ardg.artifact_id = m.id + AND ardg.k = 'ld_gaugename' + AND arv.artifact_id = m.id + AND arv.k = 'wq_single' + AND EXISTS ( + SELECT id + FROM artifact_data ad + WHERE ad.artifact_id = m.id + AND k = 'river' + AND v = ${river}) </dc:statement> <dc:elements> <dc:context connection="system"> @@ -1332,18 +1468,11 @@ SHOW W-DIFFERENCES </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section')))"> + <dc:macro name="differences"> <differences> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS 
facet_num, f.description as facet_description @@ -1360,28 +1489,22 @@ </dc:element> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </differences> - </dc:if> + </dc:macro> <dc:comment> SHOW REFERENCE CURVE </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'reference_curve')"> + + <dc:macro name="reference-curves"> <reference_curves> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="user-range"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1398,28 +1521,21 @@ </dc:element> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </reference_curves> - </dc:if> + </dc:macro> <dc:comment> SHOW COMPUTED DISCHARGE CURVES </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')"> + <dc:macro name="computed-discharge-curve"> <computed_discharge_curves> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1436,43 +1552,37 @@ </dc:element> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> 
</dc:context> </dc:elements> </computed_discharge_curves> - </dc:if> + </dc:macro> <dc:comment> CROSS SECTION </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'cross_section')"> + + <dc:macro name="waterlevels"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - <dc:context> - <dc:statement> - SELECT id AS out_id - FROM outs - WHERE artifact_id = ${a_id} AND name = 'cross_section' - </dc:statement> - <dc:elements> - <dc:context> - <dc:statement> - SELECT name AS facet_name, num as facet_num, description AS facet_description - FROM facets - WHERE out_id = ${out_id} - ORDER BY num ASC, name DESC - </dc:statement> - <longitudinal_section_columns> + <dc:call-macro name="range-filter"> + <dc:context> + <dc:statement> + SELECT id AS out_id + FROM outs + WHERE artifact_id = ${a_id} AND name = 'cross_section' + </dc:statement> + <dc:elements> + <dc:context> + <dc:statement> + SELECT name AS facet_name, num as facet_num, description AS facet_description + FROM facets + WHERE out_id = ${out_id} + ORDER BY num ASC, name DESC + </dc:statement> + <longitudinal_section_columns> <dc:attribute name="description" value="${river} ${a_creation}"/> <dc:elements> <dc:element name="${facet_name}"> @@ -1483,30 +1593,23 @@ <dc:attribute name="out" value="cross_section"/> </dc:element> </dc:elements> - </longitudinal_section_columns> - </dc:context> - </dc:elements> - </dc:context> - </dc:elements> + </longitudinal_section_columns> + </dc:context> + </dc:elements> + </dc:context> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or 
(dc:contains($artifact-outs, 'discharge_longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')))"> + <dc:macro name="longitudinal"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - <dc:context> + <dc:call-macro name="range-filter"> + <dc:context> <dc:statement> SELECT id AS out_id FROM outs @@ -1535,25 +1638,18 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')"> + + <dc:macro name="longitudinal-section"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT id AS out_id @@ -1584,25 +1680,17 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')"> + <dc:macro name="delta-wt"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM 
artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT id AS out_id @@ -1632,25 +1720,18 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'fix_derivate_curve')"> + + <dc:macro name="fix-derivate-curve"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT id AS out_id @@ -1680,25 +1761,18 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')"> + + <dc:macro name="fix-wq-curve"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT id AS out_id @@ -1728,24 +1802,18 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> - <dc:if test="dc:contains($artifact-outs, 'duration_curve')"> + + <dc:macro name="duration-curve"> 
<computed_discharge_curves> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1762,29 +1830,23 @@ </dc:element> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </computed_discharge_curves> - </dc:if> + </dc:macro> + <dc:comment> WATERLEVELS - ONLY SHOW Ws </dc:comment> <!-- TODO doesnt work nicely for fix/wq-diags. --> - <dc:if test="dc:contains($artifact-outs, 'waterlevels') or (dc:contains($artifact-outs, 'fix_wq_curve'))"> + + <dc:macro name="waterlevels-fix"> <waterlevels> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> - + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT id AS out_id @@ -1814,28 +1876,22 @@ </dc:context> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </waterlevels> - </dc:if> + </dc:macro> <dc:comment> SHOW FLOODMAPS </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'map')"> + + <dc:macro name="flood-map"> <floodmap> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - 
FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1852,28 +1908,21 @@ </dc:element> </dc:elements> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </floodmap> - </dc:if> + </dc:macro> <dc:comment> MINFO bedheight difference </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'bed_difference_year') or dc:contains($artifact-outs, 'bed_difference_height_year')"> + <dc:macro name="bed-difference"> <fix_longitudinal_section_curve> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1895,28 +1944,21 @@ </dc:elements> </fix_longitudinal_section_curve> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </fix_longitudinal_section_curve> - </dc:if> + </dc:macro> <dc:comment> MINFO bedheight middle </dc:comment> - <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')"> + <dc:macro name="bed-height"> <fix_vollmer_wq_curve> <dc:elements> <dc:context> - <dc:statement> - SELECT m.id AS a_id, m.state AS a_state, m.gid AS a_gid, m.creation AS a_creation - FROM master_artifacts m - WHERE m.collection_id = ${collection_id} AND m.gid 
<> CAST(${artifact-id} AS uuid) - AND EXISTS ( - SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river}) - </dc:statement> - <dc:elements> + <dc:call-macro name="range-filter"> <dc:context> <dc:statement> SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description @@ -1937,13 +1979,58 @@ </dc:elements> </fix_vollmer_wq_curve> </dc:context> - </dc:elements> + </dc:call-macro> </dc:context> </dc:elements> </fix_vollmer_wq_curve> + </dc:macro> + + <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'discharge_longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')))"> + <dc:call-macro name="longitudinal"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')"> + <dc:call-macro name="delta-wt"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section')))"> + <dc:call-macro name="differences"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'reference_curve')"> + <dc:call-macro name="reference-curves"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')"> + <dc:call-macro name="computed-discharge-curve"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'cross_section')"> + <dc:call-macro name="waterlevels"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')"> + <dc:call-macro name="longitudinal-section"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'fix_derivate_curve')"> + <dc:call-macro name="fix-derivate-curve"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')"> + <dc:call-macro name="fix-wq-curve"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'duration_curve')"> + <dc:call-macro name="duration-curve"/> + </dc:if> + <dc:if 
test="dc:contains($artifact-outs, 'waterlevels') or (dc:contains($artifact-outs, 'fix_wq_curve'))"> + <dc:call-macro name="waterlevels-fix"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'map')"> + <dc:call-macro name="flood-map"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'bed_difference_year') or dc:contains($artifact-outs, 'bed_difference_height_year')"> + <dc:call-macro name="bed-difference"/> + </dc:if> + <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')"> + <dc:call-macro name="bed-height"/> </dc:if> </dc:context> + </dc:call-macro> + </old_calculations>
--- a/flys-artifacts/doc/conf/themes.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes.xml Thu Mar 07 12:02:02 2013 +0100 @@ -173,6 +173,8 @@ <mapping from="longitudinal_section.q" pattern="(NQ)(\D.*)*" to="LongitudinalSectionQ_NQ" /> <mapping from="longitudinal_section.q" to="LongitudinalSection" /> + <mapping from="discharge_longitudinal_section.q.infolding" to="LongitudinalSectionQInfolding" /> + <mapping from="discharge_longitudinal_section.q.cutting" to="LongitudinalSectionQInfoldCut" /> <mapping from="discharge_curve.curve" to="DischargeCurve" /> <mapping from="historical_discharge.historicalq" to="HistoricalDischargeCurveQ" /> @@ -219,9 +221,9 @@ <mapping from="other.wqkms.q" to="WQKms" /> <mapping from="heightmarks_points" to="heightmarks_points" /> <mapping from="area" to="Area" /> - <mapping from="cross_section.area" to="Area" /> + <mapping from="cross_section.area" to="CrossSectionArea" /> <mapping from="hyk" to="Hyk" /> - <mapping from="longitudinal_section.area" to="Area" /> + <mapping from="longitudinal_section.area" to="LongitudinalSectionArea" /> <mapping from="longitudinal_section.manualpoints" to="ManualPoints" /> <mapping from="cross_section.manualpoints" to="ManualPoints" /> <mapping from="cross_section.manualline" to="CrossSectionWaterLine" />
--- a/flys-artifacts/doc/conf/themes/default/cross-section.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/default/cross-section.xml Thu Mar 07 12:02:02 2013 +0100 @@ -48,4 +48,22 @@ default="true" /> </fields> </theme> + + <theme name="CrossSectionArea"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup-->
--- a/flys-artifacts/doc/conf/themes/default/general.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/default/general.xml Thu Mar 07 12:02:02 2013 +0100 @@ -124,5 +124,22 @@ </fields> </theme> + <theme name="Area"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup-->
--- a/flys-artifacts/doc/conf/themes/default/longitudinal-section.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/default/longitudinal-section.xml Thu Mar 07 12:02:02 2013 +0100 @@ -656,6 +656,26 @@ </fields> </theme> + <theme name="LongitudinalSectionQInfoldCut"> + <inherits> + <inherit from="LongitudinalSection" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Linienfarbe" + default="102, 102, 102" /> + </fields> + </theme> + + <theme name="LongitudinalSectionQInfolding"> + <inherits> + <inherit from="LongitudinalSection" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Linienfarbe" + default="51, 51, 51" /> + </fields> + </theme> + <!-- MIDDLE BED HEIGHT --> <theme name="MiddleBedHeightSingle"> <inherits> @@ -925,5 +945,21 @@ </fields> </theme> - + <theme name="LongitudinalSectionArea"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup>-->
--- a/flys-artifacts/doc/conf/themes/second/cross-section.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/second/cross-section.xml Thu Mar 07 12:02:02 2013 +0100 @@ -48,4 +48,22 @@ default="true" /> </fields> </theme> + + <theme name="CrossSectionArea"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup-->
--- a/flys-artifacts/doc/conf/themes/second/general.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/second/general.xml Thu Mar 07 12:02:02 2013 +0100 @@ -124,5 +124,22 @@ </fields> </theme> + <theme name="Area"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup-->
--- a/flys-artifacts/doc/conf/themes/second/longitudinal-section.xml Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/doc/conf/themes/second/longitudinal-section.xml Thu Mar 07 12:02:02 2013 +0100 @@ -925,5 +925,21 @@ </fields> </theme> - + <theme name="LongitudinalSectionArea"> + <inherits> + <inherit from="Areas" /> + </inherits> + <fields> + <field name="areashowbg" type="boolean" display="Hintergrund anzeigen" + default="true" hints="hidden" /> + <field name="areashowborder" type="boolean" display="Begrenzung" + default="false" hints="hidden" /> + <field name="areabordercolor" type="Color" default="0, 0, 0" + display="Begrenzungslinienfarbe" hints="hidden" /> + <field name="showarea" type="boolean" display="Flaeche anzeigen" + default="true" hints="hidden" /> + <field name="showarealabel" type="boolean" + display="Flächenbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> <!--/themegroup>-->
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/context/FLYSContextFactory.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/context/FLYSContextFactory.java Thu Mar 07 12:02:02 2013 +0100 @@ -83,6 +83,9 @@ private static final String XPATH_ZOOM_SCALES = "/artifact-database/options/zoom-scales/zoom-scale"; + private static final String XPATH_DGM_PATH = "/artifact-database/options/dgm-path/text()"; + + /** * Creates a new FLYSArtifactContext object and initialize all * components required by the application. @@ -102,11 +105,22 @@ configureFloodmapWMS(config, context); configureModules(config, context); configureZoomScales(config, context); + configureDGMPath(config, context); return context; } + private void configureDGMPath(Document config, FLYSContext context) { + String dgmPath = (String) XMLUtils.xpath( + config, + XPATH_DGM_PATH, + XPathConstants.STRING); + + context.put("dgm-path", dgmPath); + } + + protected void configureZoomScales(Document config, FLYSContext context) { NodeList list = (NodeList)XMLUtils.xpath( config,
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/FunctionResolver.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/FunctionResolver.java Thu Mar 07 12:02:02 2013 +0100 @@ -93,17 +93,17 @@ Object from = args.get(2); if (!(mode instanceof String)){ - return -Double.MAX_VALUE; + return -99999d; } if (mode.equals("locations")) { if (!(locations instanceof String)) { - return -Double.MAX_VALUE; + return -99999d; } String loc = ((String)locations).replace(" ", ""); String[] split = loc.split(","); if (split.length < 1) { - return -Double.MAX_VALUE; + return -99999d; } try { double min = Double.parseDouble(split[0]); @@ -116,23 +116,23 @@ return min; } catch (NumberFormatException nfe) { - return -Double.MAX_VALUE; + return -99999d; } } else if (mode.equals("distance")) { if (!(from instanceof String)) { - return -Double.MAX_VALUE; + return -99999d; } String f = (String)from; try { return Double.parseDouble(f); } catch(NumberFormatException nfe) { - return -Double.MAX_VALUE; + return -99999d; } } else { - return -Double.MAX_VALUE; + return -99999d; } } }); @@ -149,18 +149,18 @@ Object to = args.get(2); if (!(mode instanceof String)){ - return Double.MAX_VALUE; + return 99999d; } if (mode.equals("locations")) { if (!(locations instanceof String)) { - return Double.MAX_VALUE; + return 99999d; } try { String loc = ((String)locations).replace(" ", ""); String[] split = loc.split(","); if (split.length < 1) { - return Double.MAX_VALUE; + return 99999d; } double max = Double.parseDouble(split[0]); for (int i = 1; i < split.length; ++i) { @@ -172,12 +172,12 @@ return max; } catch (NumberFormatException nfe) { - return Double.MAX_VALUE; + return 99999d; } } else if (mode.equals("distance")) { if (!(to instanceof String)) { - return Double.MAX_VALUE; + return 99999d; } else { String t = (String)to; @@ -185,12 +185,12 @@ return Double.parseDouble(t); } 
catch(NumberFormatException nfe) { - return Double.MAX_VALUE; + return 99999d; } } } else { - return Double.MAX_VALUE; + return 99999d; } } });
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Calculation4.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Calculation4.java Thu Mar 07 12:02:02 2013 +0100 @@ -323,9 +323,7 @@ int numProblemsBefore = numProblems(); double [] qs = qf.findQs(kms, this); - // TODO: i18n - String name = "Umh\u00fcllende " + columns[i].getName(); - + String name = columns[i].getName(); ConstantWQKms infolding = new ConstantWQKms(kms, qs, ws, name); if (numProblems() > numProblemsBefore) { @@ -335,6 +333,19 @@ infoldings.add(infolding); } + for (int i = 0; i < infoldings.size(); i++) { + String name = infoldings.get(i).getName(); + // TODO: i18n + if (i == 0) { + infoldings.get(i).setName("untere Umh\u00fcllende " + name); + } + else if (i == infoldings.size() - 1) { + infoldings.get(i).setName("obere Umh\u00fcllende " + name); + } + else { + infoldings.get(i).setName("geschnitten " + name); + } + } return infoldings.toArray(new ConstantWQKms[infoldings.size()]); }
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FacetTypes.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FacetTypes.java Thu Mar 07 12:02:02 2013 +0100 @@ -175,6 +175,8 @@ String DISCHARGE_LONGITUDINAL_W = "discharge_longitudinal_section.w"; String DISCHARGE_LONGITUDINAL_Q = "discharge_longitudinal_section.q"; + String DISCHARGE_LONGITUDINAL_Q_INFOLD = "discharge_longitudinal_section.q.infolding"; + String DISCHARGE_LONGITUDINAL_Q_INFOLD_CUT = "discharge_longitudinal_section.q.cutting"; String DISCHARGE_LONGITUDINAL_C = "discharge_longitudinal_section.c"; String LONGITUDINAL_W = "longitudinal_section.w";
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FixingsOverview.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FixingsOverview.java Thu Mar 07 12:02:02 2013 +0100 @@ -644,11 +644,11 @@ public boolean accept(Fixing.Column column) { for (SectorRange s: column.getSectors()) { int v = s.getSector(); - if (v >= min && v <= max) { - return true; + if (v < min || v > max) { + return false; } } - return false; + return true; } } // class SectorRangeFilter
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Segment.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/Segment.java Thu Mar 07 12:02:02 2013 +0100 @@ -15,6 +15,7 @@ import org.apache.log4j.Logger; +/** A Range with values and a reference point. */ public class Segment implements Serializable { @@ -178,18 +179,21 @@ DischargeTable dt = gauge.fetchMasterDischargeTable(); + //TODO: Change scale from 100 to 1 immediately after + // discharge table import changed to cm! double [][] table = - DischargeTables.loadDischargeTableValues(dt, 1); + DischargeTables.loadDischargeTableValues(dt, 100); // need the original values for naming segment.backup(); for (int i = 0; i < values.length; ++i) { - double w = values[i] / 100.0; + //TODO: s.o. + double w = values[i]; /* / 100.0; */ double [] qs = DischargeTables.getQsForW(table, w); if (qs.length == 0) { log.warn("No Qs found for W = " + values[i]); - report.addProblem("cannot.find.w.for.q", values[i]); + report.addProblem("cannot.find.q.for.w", values[i]); values[i] = Double.NaN; success = false; }
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/fixings/FixCalculation.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/fixings/FixCalculation.java Thu Mar 07 12:02:02 2013 +0100 @@ -32,6 +32,7 @@ import org.apache.log4j.Logger; +/** Calculation base class for fix. */ public abstract class FixCalculation extends Calculation {
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/fixings/FixRealizingCalculation.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/fixings/FixRealizingCalculation.java Thu Mar 07 12:02:02 2013 +0100 @@ -17,6 +17,7 @@ import org.apache.log4j.Logger; +/** Calculation for FixRealize (german: ausgel. WSPL). */ public class FixRealizingCalculation extends FixCalculation { @@ -128,7 +129,7 @@ } } - // name the curves + // Name the curves. for (int i = 0; i < results.length; ++i) { results[i].setName(createName(i)); }
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/DischargeLongitudinalSection.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/DischargeLongitudinalSection.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,7 +1,9 @@ package de.intevation.flys.artifacts.states; import de.intevation.artifactdatabase.state.Facet; +import de.intevation.artifactdatabase.state.FacetActivity; +import de.intevation.artifacts.Artifact; import de.intevation.artifacts.CallContext; import de.intevation.flys.artifacts.ChartArtifact; @@ -32,6 +34,26 @@ private static Logger log = Logger.getLogger(DischargeLongitudinalSection.class); + static { + // Active/deactivate facets. + FacetActivity.Registry.getInstance().register( + "winfo", + new FacetActivity() { + @Override + public Boolean isInitialActive( + Artifact artifact, + Facet facet, + String output + ) { + String name = facet.getName(); + if (name.equals(DISCHARGE_LONGITUDINAL_Q_INFOLD_CUT)) { + return Boolean.FALSE; + } + return Boolean.TRUE; + } + }); + } + @Override public Object computeAdvance( FLYSArtifact artifact, @@ -80,14 +102,24 @@ Facet s = new CrossSectionWaterLineFacet(i, nameW); + Facet q = new WaterlevelFacet( + i, DISCHARGE_LONGITUDINAL_Q, nameQ); facets.add(s); facets.add(w); + facets.add(q); } - - Facet q = new WaterlevelFacet( - i, DISCHARGE_LONGITUDINAL_Q, nameQ); - - facets.add(q); + else { + Facet q; + if (nameQ.contains("geschnitten")) { + q = new WaterlevelFacet( + i, DISCHARGE_LONGITUDINAL_Q_INFOLD_CUT, nameQ); + } + else { + q = new WaterlevelFacet( + i, DISCHARGE_LONGITUDINAL_Q_INFOLD, nameQ); + } + facets.add(q); + } if (wqkms[i] instanceof WQCKms) { // TODO DO i18n
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/FloodMapState.java Thu Mar 07 12:02:02 2013 +0100 @@ -10,6 +10,7 @@ import de.intevation.artifacts.CallContext; import de.intevation.artifacts.CallMeta; import de.intevation.artifacts.GlobalContext; +import de.intevation.artifacts.common.utils.Config; import de.intevation.artifacts.common.utils.FileTools; import de.intevation.flys.artifacts.access.RangeAccess; import de.intevation.flys.artifacts.FLYSArtifact; @@ -43,10 +44,13 @@ import java.util.ArrayList; import java.util.List; +import javax.naming.Context; + import org.apache.log4j.Logger; import org.geotools.feature.FeatureCollection; import org.geotools.feature.FeatureCollections; import org.geotools.feature.simple.SimpleFeatureBuilder; +import org.hibernate.HibernateException; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; @@ -307,7 +311,7 @@ setUserShape(artifact, facetCreator, artifactDir, job); setAxis(artifact, artifactDir, job); setPro(artifact, artifactDir, job); - setDgm(artifact, job); + setDgm(artifact, job, context); setArea(artifact, artifactDir, job); setOutFile(artifact, job); setWsp(artifact, context, artifactDir, job); // WSP @@ -613,8 +617,15 @@ String river = artifact.getDataAsString("river"); String srid = FLYSUtils.getRiverDGMSrid(river); String srs = "EPSG:" + srid; -logger.debug("srs: " + srs); - List<RiverAxis> axes = RiverAxis.getRiverAxis(river); + + List<RiverAxis> axes = null; + try { + axes = RiverAxis.getRiverAxis(river); + } + catch (HibernateException iae) { + logger.warn("No valid river axis found for " + river); + return; + } if (axes == null || axes.isEmpty()) { logger.warn("Could not find river axis for: '" + river + "'"); return; @@ -691,7 +702,11 @@ } - protected void setDgm(FLYSArtifact artifact, WSPLGENJob job) { + protected void 
setDgm( + FLYSArtifact artifact, + WSPLGENJob job, + CallContext context + ) { String dgm_id = artifact.getDataAsString("dgm"); int id = -1; @@ -708,7 +723,15 @@ return; } - job.setDgm(dgm.getPath()); + File dgmPath = new File (dgm.getPath()); + if (dgmPath.isAbsolute()) { + job.setDgm(dgm.getPath()); + } + else { + FLYSContext fc = (FLYSContext)context.globalContext(); + String prefix = (String) fc.get("dgm-path"); + job.setDgm(prefix.trim() + dgm.getPath().trim()); + } }
--- a/flys-artifacts/src/main/java/de/intevation/flys/collections/AttributeWriter.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/collections/AttributeWriter.java Thu Mar 07 12:02:02 2013 +0100 @@ -164,7 +164,7 @@ throws ArtifactDatabaseException { if (compatibleFacets == null) { - logger.warn("No compatible facets, not generating out."); + logger.warn("No compatible facets, not generating out " + outputName + "."); return false; }
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/AbstractExporter.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/AbstractExporter.java Thu Mar 07 12:02:02 2013 +0100 @@ -74,6 +74,12 @@ /** The master artifact. */ protected Artifact master; + private NumberFormat kmFormat; + + private NumberFormat wFormat; + + private NumberFormat qFormat; + /** * Concrete subclasses need to use this method to write their special data @@ -240,6 +246,10 @@ return Resources.getMsg(context.getMeta(), key, def); } + protected String msg(String key, String def, Object[] args) { + return Resources.getMsg(context.getMeta(), key, def, args); + } + /** * This method starts CSV creation. It makes use of writeCSVData() which has @@ -301,7 +311,10 @@ * @return the number formatter for kilometer values. */ protected NumberFormat getKmFormatter() { - return Formatter.getWaterlevelKM(context); + if (kmFormat == null) { + kmFormat = Formatter.getWaterlevelKM(context); + } + return kmFormat; } @@ -311,7 +324,10 @@ * @return the number formatter for W values. */ protected NumberFormat getWFormatter() { - return Formatter.getWaterlevelW(context); + if (wFormat == null) { + wFormat = Formatter.getWaterlevelW(context); + } + return wFormat; } @@ -321,7 +337,10 @@ * @return the number formatter for Q values. */ protected NumberFormat getQFormatter() { - return Formatter.getWaterlevelQ(context); + if (qFormat == null) { + qFormat = Formatter.getWaterlevelQ(context); + } + return qFormat; } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/CrossSectionGenerator.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/CrossSectionGenerator.java Thu Mar 07 12:02:02 2013 +0100 @@ -17,6 +17,7 @@ import de.intevation.artifactdatabase.state.ArtifactAndFacet; import de.intevation.artifacts.DataProvider; +import de.intevation.flys.artifacts.FLYSArtifact; import de.intevation.flys.artifacts.geom.Lines; import de.intevation.flys.artifacts.model.CrossSectionFacet; import de.intevation.flys.artifacts.model.FacetTypes; @@ -28,6 +29,7 @@ import de.intevation.flys.themes.LineStyle; import de.intevation.flys.themes.TextStyle; import de.intevation.flys.themes.ThemeAccess; +import de.intevation.flys.utils.FLYSUtils; import de.intevation.flys.utils.Formatter; import de.intevation.flys.utils.ThemeUtil; @@ -232,7 +234,13 @@ @Override protected String getDefaultYAxisLabel(int pos) { - return msg(I18N_YAXIS_LABEL, I18N_YAXIS_LABEL_DEFAULT); + FLYSArtifact flys = (FLYSArtifact) master; + + String unit = FLYSUtils.getRiver(flys).getWstUnit().getName(); + + return msg(I18N_YAXIS_LABEL, + I18N_YAXIS_LABEL_DEFAULT, + new Object[] { unit }); } @@ -341,7 +349,11 @@ if (ThemeUtil.parseShowLevel(theme) && lines.points.length > 1 && lines.points[1].length > 0) { NumberFormat nf = Formatter.getMeterFormat(this.context); - String labelAdd = "W=" + nf.format(lines.points[1][0]) + "NN+m"; + FLYSArtifact flys = (FLYSArtifact) master; + + String unit = FLYSUtils.getRiver(flys).getWstUnit().getName(); + + String labelAdd = "W=" + nf.format(lines.points[1][0]) + unit; if (series.getLabel().length() == 0) { series.setLabel(labelAdd); }
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/DischargeLongitudinalSectionExporter.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/DischargeLongitudinalSectionExporter.java Thu Mar 07 12:02:02 2013 +0100 @@ -61,7 +61,8 @@ msg(CSV_KM_HEADER, DEFAULT_CSV_KM_HEADER), msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER), msg(CSV_CW_HEADER, DEFAULT_CSV_CW_HEADER), - msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER) + msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER), + msg(CSV_Q_DESC_HEADER, DEFAULT_CSV_Q_DESC_HEADER) }); } @@ -70,8 +71,7 @@ CSVWriter writer, WQKms wqkms, boolean atGauge, - boolean isQ, - boolean isRange + boolean isQ ) { logger.debug("WaterlevelExporter.wQKms2CSV"); @@ -85,6 +85,7 @@ for (int i = 0; i < size; i ++) { result = wqkms.get(i, result); + String name = wqkms.getName(); String wc = ""; if (wqkms instanceof WQCKms) { wc = wf.format(result[3]); @@ -94,7 +95,8 @@ kmf.format(result[2]), wf.format(result[0]), wc, - qf.format(result[1]) + qf.format(result[1]), + name }); } }
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/DischargeLongitudinalSectionGenerator.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/DischargeLongitudinalSectionGenerator.java Thu Mar 07 12:02:02 2013 +0100 @@ -54,7 +54,7 @@ Facet facet = artifactFacet.getFacet(); - if (name.equals(DISCHARGE_LONGITUDINAL_Q)) { + if (name.contains(DISCHARGE_LONGITUDINAL_Q)) { doQOut( (WQKms) artifactFacet.getData(context), artifactFacet,
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java Thu Mar 07 12:02:02 2013 +0100 @@ -185,7 +185,7 @@ * @param wqkms A WQKms object that should be prepared. */ protected String getColumnTitle(WINFOArtifact winfo, WQKms wqkms) { - logger.debug("WaterlevelExporter.prepareNamedValue"); + logger.debug("WaterlevelExporter.getColumnTitle"); String name = wqkms.getName(); @@ -426,10 +426,12 @@ ) { logger.info("WaterlevelExporter.writeCSVHeader"); + String unit = FLYSUtils.getRiver((FLYSArtifact) master).getWstUnit().getName(); + if (atGauge) { writer.writeNext(new String[] { msg(CSV_KM_HEADER, DEFAULT_CSV_KM_HEADER), - msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER), + msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }), msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER), (isQ ? msg(CSV_Q_DESC_HEADER, DEFAULT_CSV_Q_DESC_HEADER) @@ -441,7 +443,8 @@ else { writer.writeNext(new String[] { msg(CSV_KM_HEADER, DEFAULT_CSV_KM_HEADER), - msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER), + // TODO flys/issue1128 (unit per river) + msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }), msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER), msg(CSV_LOCATION_HEADER, DEFAULT_CSV_LOCATION_HEADER) }); @@ -449,6 +452,50 @@ } + /** Linearly search for gauge which is valid at km. 
*/ + private Gauge findGauge(double km, List<Gauge> gauges) { + for (Gauge gauge: gauges) { + if (km >= gauge.getRange().getA().doubleValue() + && km <= gauge.getRange().getB().doubleValue()) { + return gauge; + } + } + return null; + } + + + private void writeRow4(CSVWriter writer, double wqkm[], FLYSArtifact flys) { + NumberFormat kmf = getKmFormatter(); + NumberFormat wf = getWFormatter(); + NumberFormat qf = getQFormatter(); + + writer.writeNext(new String[] { + kmf.format(wqkm[2]), + wf.format(wqkm[0]), + qf.format(wqkm[1]), + FLYSUtils.getLocationDescription(flys, wqkm[2]) + }); + } + + + /** Write an csv-row at gauge location. */ + private void writeRow6(CSVWriter writer, double wqkm[], String wOrQDesc, + FLYSArtifact flys, String gaugeName) { + NumberFormat kmf = getKmFormatter(); + NumberFormat wf = getWFormatter(); + NumberFormat qf = getQFormatter(); + + writer.writeNext(new String[] { + kmf.format(wqkm[2]), + wf.format(wqkm[0]), + qf.format(wqkm[1]), + wOrQDesc, + FLYSUtils.getLocationDescription(flys, wqkm[2]), + gaugeName + }); + } + + /** * Write "rows" of csv data from wqkms with writer. */ @@ -473,6 +520,7 @@ double[] result = new double[3]; FLYSArtifact flys = (FLYSArtifact) master; + List<Gauge> gauges = FLYSUtils.getGauges(flys); Gauge gauge = FLYSUtils.getGauge(flys); String gaugeName = gauge.getName(); String desc = ""; @@ -496,6 +544,7 @@ String colDesc = desc; List<Segment> segments = null; + boolean isFixRealize = false; if (flys instanceof WINFOArtifact) { if (wqkms != null && wqkms.getRawValue() != null) { WINFOArtifact winfo = (WINFOArtifact) flys; @@ -506,6 +555,9 @@ // Get W/Q input per gauge for this case. 
FixRealizingAccess fixAccess = new FixRealizingAccess(flys, getCallContext()); segments = fixAccess.getSegments(); + if (segments != null && segments.size() > 0) { + isFixRealize = true; + } } for (int i = 0; i < size; i ++) { @@ -521,25 +573,21 @@ } if (atGauge) { - writer.writeNext(new String[] { - kmf.format(result[2]), - wf.format(result[0]), - qf.format(result[1]), - colDesc, - FLYSUtils.getLocationDescription(flys, result[2]), + String gaugeN; + // TODO issue1131, name gauge + if (isFixRealize) { + gaugeN = findGauge(result[2], gauges).getName(); + } + else { // TODO issue1114: Take correct gauge - result[2] >= a && result[2] <= b + gaugeN = result[2] >= a && result[2] <= b ? gaugeName - : notinrange - }); + : notinrange; + } + writeRow6(writer, result, colDesc, flys, gaugeN); } else { - writer.writeNext(new String[] { - kmf.format(result[2]), - wf.format(result[0]), - qf.format(result[1]), - FLYSUtils.getLocationDescription(flys, result[2]) - }); + writeRow4(writer, result, flys); } }
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/fixings/FixATExport.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/fixings/FixATExport.java Thu Mar 07 12:02:02 2013 +0100 @@ -14,7 +14,6 @@ import de.intevation.flys.artifacts.model.CalculationResult; import de.intevation.flys.artifacts.model.Parameters; -import de.intevation.flys.artifacts.model.fixings.FixAnalysisResult; import de.intevation.flys.artifacts.model.fixings.FixResult; import de.intevation.flys.exports.AbstractExporter;
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/DoubleUtil.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/DoubleUtil.java Thu Mar 07 12:02:02 2013 +0100 @@ -16,6 +16,9 @@ public static final double DEFAULT_STEP_PRECISION = 1e6; + /** EPSILON for comparison of double precision values. */ + public static final double EPSILON = 1e-4; + private DoubleUtil() { } @@ -31,6 +34,10 @@ return Math.round(x * DEFAULT_STEP_PRECISION)/DEFAULT_STEP_PRECISION; } + /** + * Returns array with values from parameter from to to with given step width. + * from and to are included. + */ public static final double [] explode( double from, double to, @@ -56,7 +63,7 @@ double max = Math.max(from, to); for (int idx = 0; idx < num; idx++) { - if (lower > max) { + if (lower - max > EPSILON) { return Arrays.copyOfRange(values, 0, idx); }
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java Thu Mar 07 12:02:02 2013 +0100 @@ -30,6 +30,7 @@ import org.geotools.geometry.jts.JTS; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.CRS; +import org.hibernate.HibernateException; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.referencing.FactoryException; @@ -50,24 +51,30 @@ } public static Envelope getRiverBoundary(String rivername) { - List<RiverAxis> axes = RiverAxis.getRiverAxis(rivername); - if (axes != null && axes.size() > 0) { - Envelope max = null; - - for (RiverAxis axis: axes) { - // TODO Take the correct EPSG into account. Maybe, we need to - // reproject the geometry. - Envelope env = axis.getGeom().getEnvelopeInternal(); + try { + List<RiverAxis> axes = RiverAxis.getRiverAxis(rivername); + if (axes != null && axes.size() > 0) { + Envelope max = null; - if (max == null) { - max = env; + for (RiverAxis axis: axes) { + // TODO Take the correct EPSG into account. Maybe, we need to + // reproject the geometry. + Envelope env = axis.getGeom().getEnvelopeInternal(); + + if (max == null) { + max = env; + } + else { + max.expandToInclude(env); + } } - else { - max.expandToInclude(env); - } + + return max; } - - return max; + } + catch(HibernateException iae) { + logger.warn("No vaild river axis forund for " + rivername); + return null; } return null;
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java Thu Mar 07 12:02:02 2013 +0100 @@ -14,10 +14,10 @@ private static final Logger logger = Logger.getLogger(MapUtils.class); public static final Pattern DB_URL_PATTERN = - Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z]+)"); + Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z_-]+)"); public static final Pattern DB_PSQL_URL_PATTERN = - Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z0-9]+)"); + Pattern.compile("(.*)\\/\\/(.*):([0-9]+)\\/([a-zA-Z0-9_-]+)"); /** * This method returns a connection string for databases used by @@ -46,8 +46,11 @@ logger.debug("Groups for connection string: " + m.groupCount()); int groups = m.groupCount(); - for (int i = 0; i <= groups; i++) { - logger.debug("Group " + i + ": " + m.group(i)); + + if (logger.isDebugEnabled()) { + for (int i = 0; i <= groups; i++) { + logger.debug("Group " + i + ": " + m.group(i)); + } } String connection = null; @@ -73,19 +76,30 @@ String port = m.group(3); String db = m.group(4); - StringBuilder sb = new StringBuilder(); - sb.append("dbname=" + db); - sb.append("host='" + host + "'"); - sb.append("port=" + port); - sb.append("password='" + pass + "'"); - sb.append("sslmode=disable"); - - connection = sb.toString(); + connection = createConnectionString(user, pass, host, db, port); } return connection; } + public static String createConnectionString( + String user, + String pass, + String host, + String db, + String port + ) { + StringBuilder sb = new StringBuilder(); + sb.append("dbname=").append(db); + sb.append(" host='").append(host).append("'"); + sb.append(" user=").append(user); + sb.append(" port=").append(port); + // XXX: We need to escape this somehow. 
+ sb.append(" password='").append(pass).append("'"); + sb.append(" sslmode=disable"); + return sb.toString(); + } + protected static String getPostgreSQLConnection() { SessionFactoryImpl sf = (SessionFactoryImpl) SessionFactoryProvider.getSessionFactory(); @@ -120,15 +134,7 @@ String port = m.group(3); String db = m.group(4); - StringBuilder sb = new StringBuilder(); - sb.append("dbname=" + db); - sb.append(" host='" + host + "'"); - sb.append(" port=" + port); - sb.append(" user=" + user); - sb.append(" password='" + pass + "'"); - sb.append(" sslmode=disable"); - - connection = sb.toString(); + connection = createConnectionString(user, pass, host, db, port); logger.debug("Created connection: '" + connection + "'");
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,7 +1,7 @@ package de.intevation.flys.utils; import com.vividsolutions.jts.geom.Envelope; -import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.MultiLineString; import de.intevation.flys.artifacts.model.LayerInfo; import de.intevation.flys.artifacts.model.RiverFactory; @@ -20,6 +20,7 @@ import org.apache.log4j.Logger; import org.apache.velocity.Template; +import org.hibernate.HibernateException; public class RiverMapfileGenerator extends MapfileGenerator { @@ -66,12 +67,25 @@ // We expect that every river has only one RiverAxis. // This is not correct but currently the case here, see // RiverAxis.java. - List<RiverAxis> riverAxis = RiverAxis.getRiverAxis(river.getName()); + List<RiverAxis> riverAxis = null; + try { + riverAxis = RiverAxis.getRiverAxis(river.getName()); + } + catch (HibernateException iae) { + logger.error("No valid riveraxis found for " + river.getName()); + continue; + } + if (riverAxis == null) { logger.warn("River " + river.getName() + " has no river axis!"); continue; } - LineString geom = riverAxis.get(0).getGeom(); + if (riverAxis.get(0).getGeom() == null) { + logger.warn("River " + river.getName() + + " has no riveraxis geometry!"); + continue; + } + MultiLineString geom = riverAxis.get(0).getGeom(); Envelope extent = geom.getEnvelopeInternal(); createRiverAxisLayer(
--- a/flys-artifacts/src/main/resources/messages.properties Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/resources/messages.properties Thu Mar 07 12:02:02 2013 +0100 @@ -151,7 +151,7 @@ chart.cross_section.title = Cross Section for river {0} chart.cross_section.subtitle = {0}-km: {1,number,#.###} chart.cross_section.xaxis.label = Distance [m] -chart.cross_section.yaxis.label = W [NN + m] +chart.cross_section.yaxis.label = W [{0}] chart.discharge.curve.title = Discharge Curve chart.discharge.curve.xaxis.label = Q [m\u00b3/s] @@ -301,7 +301,7 @@ export.waterlevel.csv.header.km = River-Km -export.waterlevel.csv.header.w = W [NN + m] +export.waterlevel.csv.header.w = W [{0}] export.waterlevel.csv.header.q = Q [m\u00b3/s] export.waterlevel.csv.header.q.desc = Description export.waterlevel.csv.header.location = Location
--- a/flys-artifacts/src/main/resources/messages_de.properties Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/resources/messages_de.properties Thu Mar 07 12:02:02 2013 +0100 @@ -144,7 +144,7 @@ chart.cross_section.title = Querprofildiagramm f\u00fcr Gew\u00e4sser {0} chart.cross_section.subtitle = {0}-km: {1,number,#.###} chart.cross_section.xaxis.label = Abstand [m] -chart.cross_section.yaxis.label = W [NN + m] +chart.cross_section.yaxis.label = W [{0}] chart.longitudinal.section.title = W-L\u00e4ngsschnitt chart.longitudinal.section.subtitle = Bereich: {0}-km {1,number,#.###} - {2,number,#.###} @@ -291,7 +291,7 @@ chart.beddifference.yaxis.label.heights = Absolute H\u00f6he [m] export.waterlevel.csv.header.km = Fluss-Km -export.waterlevel.csv.header.w = W [NN + m] +export.waterlevel.csv.header.w = W [{0}] export.waterlevel.csv.header.q = Q [m\u00b3/s] export.waterlevel.csv.header.q.desc = Bezeichnung export.waterlevel.csv.header.location = Lage
--- a/flys-artifacts/src/main/resources/messages_de_DE.properties Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/resources/messages_de_DE.properties Thu Mar 07 12:02:02 2013 +0100 @@ -145,7 +145,7 @@ chart.cross_section.title = Querprofildiagramm f\u00fcr Gew\u00e4sser {0} chart.cross_section.subtitle = {0}-km: {1,number,#.###} chart.cross_section.xaxis.label = Abstand [m] -chart.cross_section.yaxis.label = W [NN + m] +chart.cross_section.yaxis.label = W [{0}] chart.longitudinal.section.title = W-L\u00e4ngsschnitt chart.longitudinal.section.subtitle = Bereich: {0}-km {1,number,#.###} - {2,number,#.###} @@ -300,7 +300,7 @@ chart.beddifference.yaxis.label.heights = Absolute H\u00f6he [m] export.waterlevel.csv.header.km = Fluss-Km -export.waterlevel.csv.header.w = W [NN + m] +export.waterlevel.csv.header.w = W [{0}] export.waterlevel.csv.header.q = Q [m\u00b3/s] export.waterlevel.csv.header.q.desc = Bezeichnung export.waterlevel.csv.header.location = Lage
--- a/flys-artifacts/src/main/resources/messages_en.properties Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-artifacts/src/main/resources/messages_en.properties Thu Mar 07 12:02:02 2013 +0100 @@ -143,7 +143,7 @@ chart.cross_section.title = Cross Section for river {0} chart.cross_section.subtitle = {0}-km: {1,number,#.###} chart.cross_section.xaxis.label = Distance [m] -chart.cross_section.yaxis.label = W [NN + m] +chart.cross_section.yaxis.label = W [{0}] chart.longitudinal.section.title = W-Longitudinal Section chart.longitudinal.section.subtitle = Range: {0}-km {1,number,#.###} - {2,number,#.###} @@ -303,7 +303,7 @@ chart.beddifference.yaxis.label.heights = Absolute Height [m] export.waterlevel.csv.header.km = River-Km -export.waterlevel.csv.header.w = W [NN + m] +export.waterlevel.csv.header.w = W [{0}] export.waterlevel.csv.header.q = Q [m\u00b3/s] export.waterlevel.csv.header.q.desc = Description export.waterlevel.csv.header.location = Location
--- a/flys-backend/contrib/import-gew.py Thu Feb 28 11:49:48 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,223 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import sys -import os -import codecs -import re - -HAUPTWERT = re.compile(r"\s*([^\s]+)\s+([^\s+]+)\s+([QWDT-])") -WHITESPACE = re.compile(r"\s+") - -class KM(object): - - def __init__(self, filename): - self.filename = filename - self.load_values() - - def load_values(self): - with codecs.open(self.filename, "r", "latin-1") as f: - for line in f: - line = line.strip() - if not line or line.startswith("*"): - parts = [s.strip() for s in line.split(";")] - # TODO: Use code from import-kms.py - -class AbflussTafel(object): - - def __init__(self, filename): - self.filename = filename - self.name = "" - self.values = [] - self.load_values() - - def load_values(self): - with codecs.open(self.filename, "r", "latin-1") as f: - first = True - for line in f: - line = line.strip() - if not line: continue - if line.startswith("#! 
name="): - self.name = line[8:] - continue - if line.startswith("#") or line.startswith("*"): - continue - line = line.replace(",", ".") - splits = WHITESPACE.split(line) - - if len(splits) < 2 or len(splits) > 11: - continue - - w = float(splits[0]) - - shift = 0 - - if len(splits) != 11 and first: - shift = 11 - len(splits) - - for idx, q in enumerate(splits[1:]): - i_w = w + shift + idx - i_q = float(q) - w_q = (i_w/100.0, i_q/100.0) - self.values.append(w_q) - - first = False - - -class Hauptwert(object): - def __init__(self, name, value, kind): - self.name = name - self.extra = value - self.kind = kind - -class Pegel(object): - def __init__(self, name, start, stop, sta, at, html): - self.name = name - self.start = start - self.stop = stop - self.sta = sta - self.at = at - self.html = html - self.aeo = 0.0 - self.nullpunkt = 0.0 - self.km = 0.0 - self.hauptwerte = [] - self.load_hauptwerte() - self.at_data = AbflussTafel(self.at) - - def load_hauptwerte(self): - with codecs.open(self.sta, "r", "latin-1") as f: - for line_no, line in enumerate(f): - line = line.rstrip() - if line_no == 0: - first = False - name = line[16:37].strip() - line = [s.replace(",", ".") for s in line[37:].split()] - self.aeo = float(line[0]) - self.nullpunkt = float(line[1]) - print >> sys.stderr, "pegel name: '%s'" % name - print >> sys.stderr, "pegel aeo: '%f'" % self.aeo - print >> sys.stderr, "pegel nullpunkt: '%f'" % self.nullpunkt - elif line_no == 1: - self.km = float(line[29:36].strip().replace(",", ".")) - print >> sys.stderr, "km: '%f'" % self.km - else: - if not line: continue - line = line.replace(",", ".") - m = HAUPTWERT.match(line) - if not m: continue - self.hauptwerte.append(Hauptwert( - m.group(1), float(m.group(2)), m.group(3))) - -class Gewaesser(object): - - def __init__(self, name=None, b_b=None, wst=None): - self.name = name - self.b_b = b_b - self.wst = wst - self.pegel = [] - - def load_pegel(self): - dir_name = os.path.dirname(self.wst) - pegel_glt = 
find_file(dir_name, "PEGEL.GLT") - if not pegel_glt: - print >> sys.stderr, "Missing PEGEL.GLT for %r" % self.name - return - - print >> sys.stderr, "pegel_glt: %r" % pegel_glt - - with codecs.open(pegel_glt, "r", "latin-1") as f: - for line in f: - line = line.strip() - if not line or line.startswith("#"): - continue - # using re to cope with quoted columns, - # shlex has unicode problems. - parts = [p for p in re.split("( |\\\".*?\\\"|'.*?')", line) - if p.strip()] - if len(parts) < 7: - print >> sys.stderr, "too less colums (need 7): %r" % line - continue - - print >> sys.stderr, "%r" % parts - self.pegel.append(Pegel( - parts[0], - min(float(parts[2]), float(parts[3])), - max(float(parts[2]), float(parts[3])), - norm_path(parts[4], dir_name), - norm_path(parts[5], dir_name), - parts[6])) - - - def __repr__(self): - return u"Gewaesser(name=%r, b_b=%r, wst=%r)" % ( - self.name, self.b_b, self.wst) - -def norm_path(path, ref): - if not os.path.isabs(path): - path = os.path.normpath(os.path.join(ref, path)) - return path - -def find_file(path, what): - what = what.lower() - for filename in os.listdir(path): - p = os.path.join(path, filename) - if os.path.isfile(p) and filename.lower() == what: - return p - return None - - -def read_gew(filename): - - gewaesser = [] - - current = Gewaesser() - - filename = os.path.abspath(filename) - dirname = os.path.dirname(filename) - - with codecs.open(filename, "r", "latin-1") as f: - for line in f: - line = line.strip() - if not line or line.startswith("*"): - continue - - if line.startswith(u"Gewässer:"): - if current.name: - gewaesser.append(current) - current = Gewaesser() - current.name = line[len(u"Gewässer:"):].strip() - elif line.startswith(u"B+B-Info:"): - current.b_b = norm_path(line[len(u"B+B-Info:"):].strip(), - dirname) - elif line.startswith(u"WSTDatei:"): - current.wst = norm_path(line[len(u"WSTDatei:"):].strip(), - dirname) - - if current.name: - gewaesser.append(current) - - return gewaesser - -def main(): - - 
if len(sys.argv) < 2: - print >> sys.stderr, "missing gew file" - sys.exit(1) - - gew_filename = sys.argv[1] - - if not os.path.isfile(gew_filename): - print >> sys.stderr, "'%s' is not a file" % gew_filename - sys.exit(1) - - gewaesser = read_gew(gew_filename) - - for gew in gewaesser: - gew.load_pegel() - - - -if __name__ == '__main__': - main() -# vim: set fileencoding=utf-8 :
--- a/flys-backend/contrib/import-kms.py Thu Feb 28 11:49:48 2013 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,213 +0,0 @@ -#!/usr/bin/env python - -import sys -import logging -import re -import os - -import sqlite3 as db -import locale -import codecs - -from optparse import OptionParser - -log = logging.getLogger(__name__) -log.setLevel(logging.WARNING) -log.addHandler(logging.StreamHandler(sys.stderr)) - -RANGE = re.compile("([^#]*)#(.*)") -DEFAULT_DATABASE = "flys.db" - -SQL_NEXT_ID = "SELECT coalesce(max(id), -1) + 1 FROM %s" -SQL_SELECT_ID = "SELECT id FROM %s WHERE %s = ?" -SQL_INSERT_ID = "INSERT INTO %s (id, %s) VALUES (?, ?)" - -SQL_SELECT_RANGE_ID = """ -SELECT id FROM ranges WHERE river_id = ? AND a = ? AND b = ? -""" -SQL_INSERT_RANGE_ID = """ -INSERT INTO ranges (id, river_id, a, b) VALUES (?, ?, ?, ?) -""" -SQL_SELECT_ANNOTATION_ID = """ -SELECT id FROM annotations -WHERE range_id = ? AND attribute_id = ? AND position_id = ? -""" -SQL_INSERT_ANNOTATION_ID = """ -INSERT INTO annotations (id, range_id, attribute_id, position_id) -VALUES (?, ?, ?, ?) 
-""" - -def encode(s): - try: - return unicode(s, "latin-1") - except UnicodeDecodeError: - return unicode.encode(s, locale.getpreferredencoding()) - -class hashabledict(dict): - def __key(self): - return tuple((k, self[k]) for k in sorted(self)) - def __hash__(self): - return hash(self.__key()) - def __eq__(self, other): - return self.__key() == other.__key() - -def cache(f): - def func(*args, **kw): - key = (args, hashabledict(kw)) - try: - return f.__cache__[key] - except KeyError: - value = f(*args, **kw) - f.__cache__[key] = value - return value - f.__cache__ = {} - return func - -NEXT_IDS = {} -def next_id(cur, relation): - idx = NEXT_IDS.get(relation) - if idx is None: - cur.execute(SQL_NEXT_ID % relation) - idx = cur.fetchone()[0] - NEXT_IDS[relation] = idx + 1 - return idx - -def get_id(cur, relation, attribute, value): - select_stmt = SQL_SELECT_ID % (relation, attribute) - #log.debug(select_stmt) - cur.execute(select_stmt, (value,)) - row = cur.fetchone() - if row: return row[0] - idx = next_id(cur, relation) - insert_stmnt = SQL_INSERT_ID % (relation, attribute) - #log.debug(insert_stmnt) - cur.execute(insert_stmnt, (idx, value)) - cur.connection.commit() - log.debug("insert %s '%s' id: '%d'" % (relation, value, idx)) - return idx - -#@cache -def get_river_id(cur, name): - return get_id(cur, "rivers", "name", name) - -#@cache -def get_attribute_id(cur, value): - return get_id(cur, "attributes", "value", value) - -#@cache -def get_position_id(cur, value): - return get_id(cur, "positions", "value", value) - -#@cache -def get_range_id(cur, river_id, a, b): - cur.execute(SQL_SELECT_RANGE_ID, (river_id, a, b)) - row = cur.fetchone() - if row: return row[0] - idx = next_id(cur, "ranges") - cur.execute(SQL_INSERT_RANGE_ID, (idx, river_id, a, b)) - cur.connection.commit() - return idx - -#@cache -def get_annotation_id(cur, range_id, attribute_id, position_id): - cur.execute(SQL_SELECT_ANNOTATION_ID, ( - range_id, attribute_id, position_id)) - row = 
cur.fetchone() - if row: return row[0] - idx = next_id(cur, "annotations") - cur.execute(SQL_INSERT_ANNOTATION_ID, ( - idx, range_id, attribute_id, position_id)) - cur.connection.commit() - return idx - -def files(root, accept=lambda x: True): - if os.path.isfile(root): - if accept(root): yield root - elif os.path.isdir(root): - stack = [ root ] - while stack: - cur = stack.pop() - for f in os.listdir(cur): - p = os.path.join(cur, f) - if os.path.isdir(p): - stack.append(p) - elif os.path.isfile(p) and accept(p): - yield p - -def feed_km(cur, river_id, km_file): - - log.info("processing: %s" % km_file) - - for line in codecs.open(km_file, "r", "latin-1"): - line = line.strip() - if not line or line.startswith('*'): - continue - parts = [x.strip() for x in line.split(';')] - if len(parts) < 3: - log.error("cannot process: '%s'" % line) - continue - m = RANGE.match(parts[2]) - try: - if m: - x = [float(x.replace(",", ".")) for x in m.groups()] - a, b = min(x), max(x) - if a == b: b = None - else: - a, b = float(parts[2].replace(",", ".")), None - except ValueError: - log.error("cannot process: '%s'" % line) - continue - - attribute = parts[0] - position = parts[1] - attribute_id = get_attribute_id(cur, attribute) if attribute else None - position_id = get_position_id(cur, position) if position else None - - range_id = get_range_id(cur, river_id, a, b) - - get_annotation_id(cur, range_id, attribute_id, position_id) - -def main(): - - usage = "usage: %prog [options] river km-file ..." 
- parser = OptionParser(usage=usage) - parser.add_option( - "-v", "--verbose", action="store_true", - dest="verbose", - help="verbose output") - parser.add_option( - "-r", "--recursive", action="store_true", - dest="recursive", default=False, - help="recursive") - parser.add_option( - "-d", "--database", action="store", - dest="database", - help="database to connect with", - default=DEFAULT_DATABASE) - - options, args = parser.parse_args() - - if options.verbose: - log.setLevel(logging.INFO) - - if len(args) < 1: - log.error("missing river argument") - sys.exit(1) - - river = unicode(args[0], locale.getpreferredencoding()) - - with db.connect(options.database) as con: - cur = con.cursor() - river_id = get_river_id(cur, river) - - for arg in args[1:]: - if options.recursive: - for km_file in files( - arg, lambda x: x.lower().endswith(".km")): - feed_km(cur, river_id, km_file) - else: - feed_km(cur, river_id, arg) - - -if __name__ == '__main__': - main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/contrib/import_river.sh Thu Mar 07 12:02:02 2013 +0100 @@ -0,0 +1,352 @@ +#!/bin/bash +# Import script for rivers +# +# Authors: +# Andre Heinecke <aheinecke@intevation.de> +# +# Copyright: +# Copyright (C) 2013 Intevation GmbH +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +set -e + +# Default settings +DEFAULT_HOST=localhost +DEFAULT_PORT=1521 +DEFAULT_USER=flys_dami +DEFAULT_PASS=flys_dami +DEFAULT_LOG=$PWD/logs +DEFAULT_BACKEND_NAME="XE" +JAR="hydr_morph/importer.jar" +IMPORTER_DRY_RUN=false +IMPORTER_MAINVALUE_TYPES=QWTD +IMPORTER_ANNOTATION_TYPES="conf/annotation-types.xml" + + +MIN_MEMORY="8024m" + +if [ -z "$OPTIONAL_LIBS" ]; then + OPTIONAL_LIBS="${DIR}"/../opt +fi + +if [ -d "$OPTIONAL_LIBS" ]; then + export PATH="$OPTIONAL_LIBS/bin:$PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH" + export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH" + export PYTHONPATH="$OPTIONAL_LIBS/lib/python2.6/site-packages:$PYTHONPATH" + export PYTHONPATH="$OPTIONAL_LIBS/lib64/python2.6/site-packages:$PYTHONPATH" + export GDAL_DATA="$OPTIONAL_LIBS/share/gdal" +fi + +usage(){ + cat << EOF + +usage: $0 [options] gew_file + +Import a river described by the gew_file + +OPTIONS: + -?, --help Show this message + -u, --username=<username> Database username. Default: $DEFAULT_USER + -w, --password=<password> Database password. Default: $DEFAULT_PASS + -h, --host=<host> Connect to database on host <host>. + Default: $DEFAULT_HOST + -p, --port=<number> Use port number <number>. Default: $DEFAULT_PORT + -d, --db-name=<database_name> Name of the database / backend. Default: $DEFAULT_BACKEND_NAME + -l, --log-dir=<directory> Directory in which to create the log files. + Default: $LOG_DIR + --postgres Database is PostgreSQL + --skip-hydro Skip import of hydrological data + --skip-morpho Skip import of morphological data + --skip-geo Skip import of geographic data + --skip-wst Skip import of wst data +EOF +exit 0 +} + +OPTS=`getopt -o ?u:w:h:p:d: \ + -l help,username:,password:,host:,port:,db-name:,skip-hydro,skip-morpho,skip-geo,skip-wst \ + -n $0 -- "$@"` +if [ $? 
!= 0 ] ; then usage; fi +eval set -- "$OPTS" +while true ; do + case "$1" in + "-?"|"--help") + usage;; + "--") + shift + break;; + "-u"|"--username") + USER=$2 + shift 2;; + "-w"|"--password") + PASS=$2 + shift 2;; + "-h"|"--host") + HOST=$2 + shift 2;; + "-p"|"--port") + PORT=$2 + shift 2;; + "-l"|"--log-dir") + LOG=$2 + shift 2;; + "-d"|"--db-name") + BACKEND_NAME=$2 + shift 2;; + "--skip-hydro") + SKIP_HYDRO="TRUE" + shift;; + "--skip-morpho") + SKIP_MORPHO="TRUE" + shift;; + "--skip-wst") + SKIP_WST="TRUE" + shift;; + "--skip-geo") + SKIP_GEO="TRUE" + shift;; + "--postgres") + POSTGRES="TRUE" + shift;; + *) + echo "Unknown Option $1" + usage;; + esac +done + +if [ -z $USER ]; then + USER=$DEFAULT_USER +fi +if [ -z $PASS ]; then + PASS=$DEFAULT_PASS +fi +if [ -z $PORT ]; then + PORT=$DEFAULT_PORT +fi +if [ -z $HOST ]; then + HOST=$DEFAULT_HOST +fi +if [ -z $BACKEND_NAME ]; then + BACKEND_NAME=$DEFAULT_BACKEND_NAME +fi +if [ -z $LOGDIR ]; then + LOG=$DEFAULT_LOG +fi + +if [ $# != 1 ]; then + usage +fi + +if [ ! -r $1 ]; then + echo "Could not open $1 please ensure it exists and is readable" +fi + +GEW_FILE="$1" +RIVER_NAME=$(grep "Gew.sser" "$1" | awk '{print $2}') +DATE=$(date +%Y.%m.%d_%H%M) +LOG_DIR=${LOG}/${RIVER_NAME}-$DATE +mkdir -p ${LOG_DIR} + +if [ "POSTGRES" = "TRUE" ]; then + JAR=$(echo "$JAR" | sed 's/importer/importer_psql/') + if [ ! -r "$JAR" ]; then + echo "Could not find Postgres importer $JAR" + exit 1 + fi + OGR_CONNECTION="PG:dbname=$BACKEND_NAME host=$HOST port=$PORT \ + user=$USER password=$PASS" + BACKEND_DB_PREFIX="jdbc:postgresql:" + BACKEND_DB_DRIVER="org.postgresql.Driver" + BACKEND_DB_DIALECT="org.hibernate.dialect.PostgreSQLDialect" +else + BACKEND_DB_PREFIX="jdbc:oracle:thin:@" + BACKEND_DB_DRIVER="oracle.jdbc.OracleDriver" + BACKEND_DB_DIALECT="org.hibernate.dialect.OracleDialect" +fi + +BACKEND_URL=$BACKEND_DB_PREFIX//$HOST:$PORT/$BACKEND_NAME + +echo "Importing $RIVER_NAME into $BACKEND_URL." 
+ +import_hydro(){ + LOG_FILE=${LOG_DIR}/hydro.log + echo Importing Hydrological data. + echo Logging into: $LOG_FILE + sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties + java -jar \ + -Xmx$MIN_MEMORY \ + -server \ + -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \ + -Dflys.backend.user=$USER \ + -Dflys.backend.password=$PASS \ + -Dflys.backend.url=$BACKEND_URL \ + -Dflys.backend.driver=$BACKEND_DB_DRIVER \ + -Dflys.backend.dialect=$BACKEND_DB_DIALECT \ + -Dflys.backend.importer.infogew.file="$GEW_FILE" \ + -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \ + -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \ + -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \ + -Dflys.backend.importer.skip.annotations=false \ + -Dflys.backend.importer.skip.bwastr=false \ + -Dflys.backend.importer.skip.da50s=false \ + -Dflys.backend.importer.skip.da66s=false \ + -Dflys.backend.importer.skip.extra.wsts=false \ + -Dflys.backend.importer.skip.fixations=false \ + -Dflys.backend.importer.skip.flood.water=false \ + -Dflys.backend.importer.skip.flood.protection=false \ + -Dflys.backend.importer.skip.gauges=false \ + -Dflys.backend.importer.skip.historical.discharge.tables=false \ + -Dflys.backend.importer.skip.hyks=false \ + -Dflys.backend.importer.skip.official.lines=false \ + -Dflys.backend.importer.skip.prfs=false \ + -Dflys.backend.importer.skip.w80s=false \ + -Dflys.backend.importer.skip.wst=true \ + -Dflys.backend.importer.skip.waterlevel.differences=true \ + -Dflys.backend.importer.skip.waterlevels=true \ + -Dflys.backend.importer.skip.sq.relation=true \ + -Dflys.backend.importer.skip.sediment.density=true \ + -Dflys.backend.importer.skip.sediment.yield=true \ + -Dflys.backend.importer.skip.morphological.width=true \ + -Dflys.backend.importer.skip.flow.velocity=true \ + -Dflys.backend.importer.skip.bed.height.single=true \ + -Dflys.backend.importer.skip.bed.height.epoch=true \ + $JAR +} + +import_morpho(){ + 
LOG_FILE=${LOG_DIR}/morpho.log + echo Importing Morphological data. + echo Logging into: $LOG_FILE + sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties + java -jar \ + -Xmx$MIN_MEMORY \ + -server \ + -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \ + -Dflys.backend.user=$USER \ + -Dflys.backend.password=$PASS \ + -Dflys.backend.url=$BACKEND_URL \ + -Dflys.backend.driver=$BACKEND_DB_DRIVER \ + -Dflys.backend.dialect=$BACKEND_DB_DIALECT \ + -Dflys.backend.importer.infogew.file="$GEW_FILE" \ + -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \ + -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \ + -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \ + -Dflys.backend.importer.skip.annotations=true \ + -Dflys.backend.importer.skip.bwastr=true \ + -Dflys.backend.importer.skip.da50s=true \ + -Dflys.backend.importer.skip.da66s=true \ + -Dflys.backend.importer.skip.extra.wsts=true \ + -Dflys.backend.importer.skip.fixations=true \ + -Dflys.backend.importer.skip.flood.water=true \ + -Dflys.backend.importer.skip.flood.protection=true \ + -Dflys.backend.importer.skip.gauges=true \ + -Dflys.backend.importer.skip.historical.discharge.tables=true \ + -Dflys.backend.importer.skip.hyks=true \ + -Dflys.backend.importer.skip.official.lines=true \ + -Dflys.backend.importer.skip.prfs=true \ + -Dflys.backend.importer.skip.w80s=true \ + -Dflys.backend.importer.skip.wst=true \ + -Dflys.backend.importer.skip.waterlevel.differences=false \ + -Dflys.backend.importer.skip.waterlevels=false \ + -Dflys.backend.importer.skip.sq.relation=false \ + -Dflys.backend.importer.skip.sediment.density=false \ + -Dflys.backend.importer.skip.sediment.yield=false \ + -Dflys.backend.importer.skip.morphological.width=false \ + -Dflys.backend.importer.skip.flow.velocity=false \ + -Dflys.backend.importer.skip.bed.height.single=false \ + -Dflys.backend.importer.skip.bed.height.epoch=false \ + $JAR +} + +import_wst(){ + 
LOG_FILE=${LOG_DIR}/wst.log + echo Importing WST data. + echo Logging into: $LOG_FILE + sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties + java -jar \ + -Xmx$MIN_MEMORY \ + -server \ + -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \ + -Dflys.backend.user=$USER \ + -Dflys.backend.password=$PASS \ + -Dflys.backend.url=$BACKEND_URL \ + -Dflys.backend.driver=$BACKEND_DB_DRIVER \ + -Dflys.backend.dialect=$BACKEND_DB_DIALECT \ + -Dflys.backend.importer.infogew.file="$GEW_FILE" \ + -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \ + -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \ + -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \ + -Dflys.backend.importer.skip.annotations=true \ + -Dflys.backend.importer.skip.bwastr=true \ + -Dflys.backend.importer.skip.da50s=true \ + -Dflys.backend.importer.skip.da66s=true \ + -Dflys.backend.importer.skip.extra.wsts=true \ + -Dflys.backend.importer.skip.fixations=true \ + -Dflys.backend.importer.skip.flood.water=true \ + -Dflys.backend.importer.skip.flood.protection=true \ + -Dflys.backend.importer.skip.gauges=true \ + -Dflys.backend.importer.skip.historical.discharge.tables=true \ + -Dflys.backend.importer.skip.hyks=true \ + -Dflys.backend.importer.skip.official.lines=true \ + -Dflys.backend.importer.skip.prfs=true \ + -Dflys.backend.importer.skip.w80s=true \ + -Dflys.backend.importer.skip.wst=false \ + -Dflys.backend.importer.skip.waterlevel.differences=true \ + -Dflys.backend.importer.skip.waterlevels=true \ + -Dflys.backend.importer.skip.sq.relation=true \ + -Dflys.backend.importer.skip.sediment.density=true \ + -Dflys.backend.importer.skip.sediment.yield=true \ + -Dflys.backend.importer.skip.morphological.width=true \ + -Dflys.backend.importer.skip.flow.velocity=true \ + -Dflys.backend.importer.skip.bed.height.single=true \ + -Dflys.backend.importer.skip.bed.height.epoch=true \ + $JAR +} + +import_geo(){ + LOG_FILE=${LOG_DIR}/geo.log + echo Importing 
Geographic data. + echo Logging into: $LOG_FILE + + RIVER_PATH=$(grep "WSTDatei:" "$GEW_FILE" | awk '{print $2}') + RIVER_PATH=$(dirname "$RIVER_PATH")/../.. + RIVER_PATH=$(readlink -f "$RIVER_PATH") + + exec python $(dirname $0)/geodaesie/shpimporter.py \ + --directory $RIVER_PATH \ + --river_name $RIVER_NAME \ + --ogr_connection "$OGR_CONNECTION" \ + --host $HOST \ + --user $USER \ + --password $PASS \ + --verbose 1 \ + > "$LOG_FILE" 2>&1 +} + + +if [ "$SKIP_HYDRO" != "TRUE" ]; then +import_hydro +fi +if [ "$SKIP_WST" != "TRUE" ]; then +import_wst +fi +if [ "$SKIP_MORPHO" != "TRUE" ]; then +import_morpho +fi +if [ "$SKIP_GEO" != "TRUE" ]; then +import_geo +fi
--- a/flys-backend/contrib/shpimporter/axis.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/axis.py Thu Mar 07 12:02:02 2013 +0100 @@ -4,6 +4,7 @@ import ogr from importer import Importer +import utils NAME="Axis" TABLE_NAME="river_axes" @@ -25,16 +26,20 @@ def isGeometryValid(self, geomType): - return geomType == 2 - + return geomType in [ogr.wkbLineString, + ogr.wkbLineString25D, + ogr.wkbMultiLineString25D, + ogr.wkbMultiLineString] def isShapeRelevant(self, name, path): - return "achse" in name.lower() + return "km.shp" not in path.lower() def createNewFeature(self, featureDef, feat, **args): newFeat = ogr.Feature(featureDef) - newFeat.SetGeometry(feat.GetGeometryRef()) + geometry = feat.GetGeometryRef() + geometry.SetCoordinateDimension(3) + newFeat.SetGeometry(geometry) newFeat.SetField("name", args['name']) if self.IsFieldSet(feat, "river_id"): @@ -42,13 +47,10 @@ else: riverId = self.river_id - if self.IsFieldSet(feat, "kind"): - kind = feat.GetField("kind") + newFeat.SetField("river_id", riverId) + if args.get("name", "").lower() == "achse": + newFeat.SetField("kind_id", 1) # 1 is Current else: - kind = 0 + newFeat.SetField("kind_id", 2) # 2 Is Other - newFeat.SetField("river_id", riverId) - newFeat.SetField("kind", kind) - - return newFeat - + return utils.convertToMultiLine(newFeat)
--- a/flys-backend/contrib/shpimporter/boundaries.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/boundaries.py Thu Mar 07 12:02:02 2013 +0100 @@ -4,6 +4,7 @@ import ogr from importer import Importer +import utils TABLE_NAME="hydr_boundaries" TABLE_NAME_POLY="hydr_boundaries_poly" @@ -23,7 +24,10 @@ return NAME def isGeometryValid(self, geomType): - return geomType == ogr.wkbLineString + return geomType in [ogr.wkbLineString, + ogr.wkbLineString25D, + ogr.wkbMultiLineString25D, + ogr.wkbMultiLineString] def isShapeRelevant(self, name, path): shp = ogr.Open(path) @@ -48,7 +52,7 @@ newFeat = ogr.Feature(featureDef) geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(2) + geometry.SetCoordinateDimension(3) newFeat.SetGeometry(geometry) newFeat.SetField("name", args['name']) @@ -64,7 +68,7 @@ else: newFeat.SetField("river_id", self.river_id) - return newFeat + return utils.convertToMultiLine(newFeat) class HydrBoundaryPoly(HydrBoundary):
--- a/flys-backend/contrib/shpimporter/dgm.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/dgm.py Thu Mar 07 12:02:02 2013 +0100 @@ -8,8 +8,8 @@ # <dbfield> : (<csvfield>, conversion function) DGM_MAP = { - "lower" : ("km_von", lambda x: int(x)), - "upper" : ("km_bis", lambda x: int(x)), + "lower" : ("km_von", lambda x: float(x)), + "upper" : ("km_bis", lambda x: float(x)), "year_from" : "Jahr_von", "year_to" : "Jahr_bis", "projection" : "Projektion",
--- a/flys-backend/contrib/shpimporter/hws.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/hws.py Thu Mar 07 12:02:02 2013 +0100 @@ -16,6 +16,7 @@ NAME="HWS" # Keep in sync with hws_kinds table: +# strings need to be lowercase HWS_KIND = { "durchlass" : 1, "damm" : 2, @@ -23,9 +24,12 @@ "hochufer" : 2, "graben" : 3, "rohr1" : 1, + "hauptdeich" : 2, + "sommerdeich" : 2 } # Keep in sync with fed_states table: +# strings need to be lowercase FED_STATES = { "bayern" : 1, "hessen" : 2, @@ -65,6 +69,8 @@ "H[oeö]{0,2}he_{0,1}IST$" : "z", } + printedforpath=[] + def getPath(self, base): return "%s/%s" % (base, PATH) @@ -81,6 +87,17 @@ shp = ogr.Open(path) return self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) + def getFedStateIDfromPath(self, path): + """ + Tries to get extract a bundesland from the path + """ + for state in sorted(FED_STATES.keys(), key = len, reverse = True): + if state in path.lower(): + if not path in self.printedforpath: + logger.info("Extracted federal state from path: %s" % state) + self.printedforpath.append(path) + return FED_STATES[state] + def createNewFeature(self, featureDef, feat, **args): newFeat = ogr.Feature(featureDef) geometry = feat.GetGeometryRef() @@ -107,9 +124,14 @@ if not fed_id: logger.warn("Unknown Bundesland: %s" % \ - feat.GetField("Bundesland")) + feat.GetField(fname)) else: newFeat.SetField("fed_state_id", fed_id) + else: + # Try to get the bundesland from path + fed_id = self.getFedStateIDfromPath(args['path']) + if fed_id: + newFeat.SetField("fed_state_id", fed_id) fname = self.searchField("(ufer$)|(flussseite$)") if self.IsFieldSet(feat, fname): @@ -180,7 +202,10 @@ return "HWS_LINES" def isGeometryValid(self, geomType): - return geomType == ogr.wkbLineString or geomType == ogr.wkbLineString25D + return geomType in [ogr.wkbLineString, + ogr.wkbLineString25D, + ogr.wkbMultiLineString25D, + ogr.wkbMultiLineString] def isShapeRelevant(self, name, path): shp = ogr.Open(path) @@ -189,9 
+214,14 @@ def createNewFeature(self, featureDef, feat, **args): newFeat = HWSPoints.createNewFeature(self, featureDef, feat, **args) geometry = feat.GetGeometryRef() - geometry.SetCoordinateDimension(3) + if geometry.GetCoordinateDimension() == 2: + geometry.SetCoordinateDimension(3) + for i in range(0, geometry.GetPointCount()): + x,y,z = geometry.GetPoint(i) + z = 9999 + geometry.SetPoint(i, x, y, z) newFeat.SetGeometry(geometry) - return newFeat + return utils.convertToMultiLine(newFeat)
--- a/flys-backend/contrib/shpimporter/importer.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/importer.py Thu Mar 07 12:02:02 2013 +0100 @@ -30,7 +30,13 @@ raise NotImplementedError("Importer.getTablename is abstract!") def getName(self): - raise NotImplementedError("Importer.getTablename is abstract!") + raise NotImplementedError("Importer.getName is abstract!") + + def isGeometryValid(self, geomType): + raise NotImplementedError("Importer.isGeometryValid is abstract!") + + def createNewFeature(self, featureDef, feat, **args): + raise NotImplementedError("Importer.createNewFeature is abstract!") def IsFieldSet(self, feat, name): if not name: @@ -43,7 +49,7 @@ """ Searches for a field in the current src layer that matches the expression regex. - Throws an exception if more then one field matches + Throws an exception if more than one field matches @param feat: The feature to search for attributes @param regex: The regex to look for @@ -61,7 +67,7 @@ match = re.match(regex, name, re.IGNORECASE) if match: if result: - raise Exception("More then one field matches: %s" % regex) + raise Exception("More than one field matches: %s" % regex) else: result = match.group(0) return result @@ -233,7 +239,7 @@ if self.dry_run: return geomType destLayer.CommitTransaction() - except e: + except: logger.error("Exception while committing transaction.") return geomType
--- a/flys-backend/contrib/shpimporter/utils.py Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/contrib/shpimporter/utils.py Thu Mar 07 12:02:02 2013 +0100 @@ -97,3 +97,14 @@ def getWkbString(type): return WKB_MAP.get(type) or "Unknown" +def convertToMultiLine(feature): + """ + Converts a feature to a multiline feature. + """ + geometry = feature.GetGeometryRef() + # SRS information is lost while forcing to multiline + srs = geometry.GetSpatialReference() + geometry = ogr.ForceToMultiLineString(geometry) + geometry.AssignSpatialReference(srs) + feature.SetGeometry(geometry) + return feature
--- a/flys-backend/doc/schema/oracle-drop-spatial.sql Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/doc/schema/oracle-drop-spatial.sql Thu Mar 07 12:02:02 2013 +0100 @@ -71,4 +71,5 @@ DROP TABLE sectie_kinds; DROP TABLE sobek_kinds; DROP TABLE fed_states; +DROP TABLE axis_kinds; DROP TABLE boundary_kinds;
--- a/flys-backend/doc/schema/oracle-spatial.sql Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/doc/schema/oracle-spatial.sql Thu Mar 07 12:02:02 2013 +0100 @@ -1,11 +1,20 @@ WHENEVER SQLERROR EXIT; + +CREATE TABLE axis_kinds( + id NUMBER PRIMARY KEY NOT NULL, + name VARCHAR(64) +); +INSERT INTO axis_kinds(id, name) VALUES (0, 'Unbekannt'); +INSERT INTO axis_kinds(id, name) VALUES (1, 'Aktuell'); +INSERT INTO axis_kinds(id, name) VALUES (2, 'Sonstige'); + -- Geodaesie/Flussachse+km/achse CREATE SEQUENCE RIVER_AXES_ID_SEQ; CREATE TABLE river_axes( OGR_FID NUMBER(38), GEOM MDSYS.SDO_GEOMETRY, river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE, - kind NUMBER(38) DEFAULT 0 NOT NULL, + kind_id NUMBER(38) REFERENCES axis_kinds(id) NOT NULL, name VARCHAR(64), path VARCHAR(256), ID NUMBER PRIMARY KEY NOT NULL @@ -141,7 +150,7 @@ border_break NUMBER(1) DEFAULT 0 NOT NULL, resolution VARCHAR(16), description VARCHAR(256), - path VARCHAR(256) NOT NULL + path VARCHAR(256) NOT NULL UNIQUE ); CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW BEGIN
--- a/flys-backend/doc/schema/oracle-spatial_idx.sql Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/doc/schema/oracle-spatial_idx.sql Thu Mar 07 12:02:02 2013 +0100 @@ -22,10 +22,10 @@ -- TODO: index prevents importing on 11g. -- Error: "Ebenendimensionalitat stimmt nicht mit Geometrie-Dimensionen uberein" --- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); +-- CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTILINE'); CREATE INDEX hws_points_spatial_idx ON hws_points(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); -CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE'); +CREATE INDEX hws_lines_spatial_idx ON hws_lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTILINE'); CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON'); CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT'); CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
--- a/flys-backend/doc/schema/oracle.sql	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-backend/doc/schema/oracle.sql	Thu Mar 07 12:02:02 2013 +0100
@@ -238,7 +238,7 @@
 CREATE TABLE ranges (
     id          NUMBER(38,0) NOT NULL,
-    a           NUMBER(38,10),
+    a           NUMBER(38,10) NOT NULL,
     b           NUMBER(38,10),
     river_id    NUMBER(38,0),
     PRIMARY KEY (id)
@@ -261,11 +261,12 @@
 -- TIME_INTERVALS
 CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
 CREATE TABLE time_intervals (
     id          NUMBER(38,0) NOT NULL,
-    start_time  TIMESTAMP,
+    start_time  TIMESTAMP NOT NULL,
     stop_time   TIMESTAMP,
-    PRIMARY KEY (id)
+    PRIMARY KEY (id),
+    CHECK (start_time <= stop_time)
 );
--- a/flys-backend/doc/schema/postgresql-drop-spatial.sql Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql Thu Mar 07 12:02:02 2013 +0100 @@ -44,5 +44,6 @@ DROP TABLE sobek_kinds; DROP TABLE sectie_kinds; DROP TABLE boundary_kinds; +DROP TABLE axis_kinds; COMMIT;
--- a/flys-backend/doc/schema/postgresql-spatial.sql Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/doc/schema/postgresql-spatial.sql Thu Mar 07 12:02:02 2013 +0100 @@ -1,15 +1,23 @@ BEGIN; +CREATE TABLE axis_kinds( + id int PRIMARY KEY NOT NULL, + name VARCHAR(64) +); +INSERT INTO axis_kinds(id, name) VALUES (0, 'Unbekannt'); +INSERT INTO axis_kinds(id, name) VALUES (1, 'Aktuell'); +INSERT INTO axis_kinds(id, name) VALUES (2, 'Sonstige'); + -- Geodaesie/Flussachse+km/achse CREATE SEQUENCE RIVER_AXES_ID_SEQ; CREATE TABLE river_axes ( id int PRIMARY KEY NOT NULL, river_id int REFERENCES rivers(id) ON DELETE CASCADE, - kind int NOT NULL DEFAULT 0, + kind_id int REFERENCES axis_kinds(id) NOT NULL DEFAULT 0, name VARCHAR(64), path VARCHAR(256) ); -SELECT AddGeometryColumn('river_axes', 'geom', 31467, 'LINESTRING', 2); +SELECT AddGeometryColumn('river_axes', 'geom', 31467, 'MULTILINESTRING', 2); ALTER TABLE river_axes ALTER COLUMN id SET DEFAULT NEXTVAL('RIVER_AXES_ID_SEQ'); @@ -99,7 +107,7 @@ border_break BOOLEAN NOT NULL DEFAULT FALSE, resolution VARCHAR(16), description VARCHAR(256), - path VARCHAR(256) NOT NULL + path VARCHAR(256) NOT NULL UNIQUE ); ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ'); @@ -152,7 +160,7 @@ status_date TIMESTAMP, description VARCHAR(256) ); -SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'LINESTRING', 3); +SELECT AddGeometryColumn('hws_lines', 'geom', 31467, 'MULTILINESTRING', 3); -- TODO: dike_km_from dike_km_to, are they geometries? ALTER TABLE hws_lines ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_LINES_ID_SEQ'); @@ -247,7 +255,7 @@ sobek int REFERENCES sobek_kinds(id), path VARCHAR(256) ); -SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'LINESTRING',3); +SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'MULTILINESTRING',3); ALTER TABLE hydr_boundaries ALTER COLUMN id SET DEFAULT NEXTVAL('HYDR_BOUNDARIES_ID_SEQ');
--- a/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Thu Mar 07 12:02:02 2013 +0100 @@ -5,6 +5,7 @@ import de.intevation.flys.model.Annotation; import de.intevation.flys.model.AnnotationType; import de.intevation.flys.model.Attribute; +import de.intevation.flys.model.AxisKind; import de.intevation.flys.model.BedHeightEpoch; import de.intevation.flys.model.BedHeightEpochValue; import de.intevation.flys.model.BedHeightSingle; @@ -120,6 +121,7 @@ Annotation.class, AnnotationType.class, Attribute.class, + AxisKind.class, BedHeightEpoch.class, BedHeightEpochValue.class, BedHeightSingle.class,
--- a/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Thu Mar 07 12:02:02 2013 +0100
@@ -4,6 +4,7 @@
 
 import org.apache.log4j.Logger;
 
+import org.hibernate.HibernateException;
 import org.hibernate.Query;
 import org.hibernate.Session;
 
@@ -84,13 +85,20 @@
 
 
     protected void doRiverAxisInfo(River river) {
-        List<RiverAxis> axis = RiverAxis.getRiverAxis(river.getName());
-        if (axis != null && axis.size() > 0) {
-            logger.debug("TODO: Compute length and boundary.");
+        try {
+            List<RiverAxis> axis = RiverAxis.getRiverAxis(river.getName());
+            if (axis != null && axis.size() > 0) {
+                logger.debug("TODO: Compute length and boundary.");
+            }
+            else {
+                logger.warn("River has no RiverAxis.");
+            }
         }
-        else {
-            logger.warn("River has no RiverAxis.");
+        catch(HibernateException iae) {
+            logger.warn("No valid river axis found for " + river.getName());
+            return;
         }
+
     }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java Thu Mar 07 12:02:02 2013 +0100 @@ -194,7 +194,7 @@ addCrossSections(parser); } - } // ImportRiverCrossSectionParserCallback + } // ImportRiverCrossSectionParserCallback public ImportRiver() {
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BundesWasserStrassenParser.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BundesWasserStrassenParser.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,22 +1,9 @@ package de.intevation.flys.importer.parsers; -import java.math.BigDecimal; - -import java.text.NumberFormat; -import java.text.ParseException; - -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import org.apache.log4j.Logger; -import de.intevation.flys.importer.ImportMorphWidth; -import de.intevation.flys.importer.ImportMorphWidthValue; -import de.intevation.flys.importer.ImportUnit; - /** Parse CSV file that contains official numbers for rivers. */ public class BundesWasserStrassenParser extends LineParser { @@ -57,7 +44,7 @@ return; } } - try{ + try { String name = unwrap(vals[0].toLowerCase()); String numberStr = unwrap(vals[1]); Long number = Long.valueOf(numberStr); @@ -69,7 +56,7 @@ } - /** Get river->official number mapping. */ + /** Get river -> official number mapping. */ public HashMap<String,Long> getMap() { return numberMap; }
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java Thu Mar 07 12:02:02 2013 +0100 @@ -33,28 +33,28 @@ public static final String FRAKTION_START = "Fraktion:"; public static final String FRACTION_COARSE_STR = - "_Grobkorn.csv"; + ".*Grobkorn.*"; public static final String FRACTION_FINE_MIDDLE_STR = - "_Fein-Mittel-Kies.csv"; + ".*Fein-Mittel-Kies.*"; public static final String FRACTION_SAND = - "_Sand.csv"; + ".*Sand.*"; public static final String FRACTION_SUSP_SAND = - "_susp_Sand.csv"; + ".*susp_Sand.*"; public static final String FRACTION_SUSP_SAND_BED = - "_bettbild_Anteil_susp_Sand.csv"; + ".*bettbild_Anteil_susp_Sand.*"; public static final String FRACTION_SUSP_SAND_BED_EPOCH = - "_susp_Sand_bettbildAnteil.csv"; + ".*susp_Sand_bettbildAnteil.*"; public static final String FRACTION_SUSPENDED_SEDIMENT = - "_Schwebstoff.csv"; + ".*Schwebstoff.*"; public static final String FRACTION_TOTAL = - "_gesamt.csv"; + ".*gesamt.*"; public static final Pattern TIMEINTERVAL_SINGLE = @@ -357,35 +357,33 @@ } } - log.warn("SYP: Unknow grain fraction: '" + gfStr + "'"); + log.warn("SYP: Unknown grain fraction: '" + gfStr + "'"); return null; } public static String getGrainFractionTypeName(String filename) { - if (filename.endsWith(FRACTION_COARSE_STR)) { - return GrainFraction.COARSE; + if (Pattern.matches(FRACTION_COARSE_STR, filename)) { + return GrainFraction.COARSE; } - else if (filename.endsWith(FRACTION_FINE_MIDDLE_STR)) { + else if (Pattern.matches(FRACTION_FINE_MIDDLE_STR, filename)) { return GrainFraction.FINE_MIDDLE; } - else if (filename.endsWith(FRACTION_SAND) && - !filename.endsWith(FRACTION_SUSP_SAND)) { + else if (Pattern.matches(FRACTION_SUSP_SAND_BED, filename) || + Pattern.matches(FRACTION_SUSP_SAND_BED_EPOCH, filename)) { + return GrainFraction.SUSP_SAND_BED; + } + else if 
(Pattern.matches(FRACTION_SUSP_SAND, filename)) { + return GrainFraction.SUSP_SAND; + } + else if (Pattern.matches(FRACTION_SAND, filename)) { return GrainFraction.SAND; } - else if (filename.endsWith(FRACTION_SUSP_SAND) && - !filename.endsWith(FRACTION_SUSP_SAND_BED)) { - return GrainFraction.SUSP_SAND; - } - else if (filename.endsWith(FRACTION_SUSP_SAND_BED) || - filename.endsWith(FRACTION_SUSP_SAND_BED_EPOCH)) { - return GrainFraction.SUSP_SAND_BED; - } - else if (filename.endsWith(FRACTION_SUSPENDED_SEDIMENT)) { + else if (Pattern.matches(FRACTION_SUSPENDED_SEDIMENT, filename)) { return GrainFraction.SUSPENDED_SEDIMENT; } - else if (filename.endsWith(FRACTION_TOTAL)) { + else if (Pattern.matches(FRACTION_TOTAL, filename)) { return GrainFraction.TOTAL; } else {
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java Thu Mar 07 12:02:02 2013 +0100 @@ -30,8 +30,8 @@ public static final String TYPES = System.getProperty("flys.backend.main.value.types", "QWTD"); - public static final boolean PARSE_GAUGE_NUMBERS = - Boolean.getBoolean("flys.backend.sta.parse.gauge.numbers"); + public static final boolean NOT_PARSE_GAUGE_NUMBERS = + Boolean.getBoolean("flys.backend.sta.not.parse.gauge.numbers"); public static final Pattern QWTD_ = Pattern.compile("\\s*([^\\s]+)\\s+([^\\s]+)\\s+([" + @@ -68,7 +68,7 @@ Long gaugeNumber = null; - if (PARSE_GAUGE_NUMBERS) { + if (!NOT_PARSE_GAUGE_NUMBERS) { String gaugeNumberString = line.substring(0, 16).trim(); try {
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/flys-backend/src/main/java/de/intevation/flys/model/AxisKind.java Thu Mar 07 12:02:02 2013 +0100 @@ -0,0 +1,44 @@ +package de.intevation.flys.model; + +import java.io.Serializable; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name = "axis_kinds") +public class AxisKind implements Serializable { + private Integer id; + private String name; + + @Id + @Column(name = "id") + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + /** + * Get name. + * + * @return The display Name of the kind as String. + */ + @Column(name = "name") + public String getName() { + return name; + } + + /** + * Set name. + * + * @param name the value to set. + */ + public void setName(String name) { + this.name = name; + } +}
--- a/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSLine.java	Thu Mar 07 12:02:02 2013 +0100
@@ -33,7 +33,7 @@
     private HWSKind kind;
     private FedState fedState;
     private River river;
-    private Integer offical;
+    private Integer official;
     private Integer shoreSide;
     private String name;
     private String path;
@@ -93,22 +93,22 @@
 
 
     /**
-     * Get offical.
+     * Get official.
      *
-     * @return offical as Integer.
+     * @return official as Integer.
      */
-    @Column(name = "offical")
-    public Integer getOffical() {
-        return offical;
+    @Column(name = "official")
+    public Integer getOfficial() {
+        return official;
     }
 
     /**
-     * Set offical.
+     * Set official.
      *
-     * @param offical the value to set.
+     * @param official the value to set.
      */
-    public void setOffical(Integer offical) {
-        this.offical = offical;
+    public void setOfficial(Integer official) {
+        this.official = official;
     }
 
     /**
--- a/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HWSPoint.java	Thu Mar 07 12:02:02 2013 +0100
@@ -28,7 +28,7 @@
     private HWSKind kind;
     private FedState fedState;
    private River river;
-    private Integer offical;
+    private Integer official;
     private Integer shoreSide;
     private String name;
     private String path;
@@ -94,22 +94,22 @@
 
 
     /**
-     * Get offical.
+     * Get official.
      *
-     * @return offical as Integer.
+     * @return official as Integer.
      */
-    @Column(name = "offical")
-    public Integer getOffical() {
-        return offical;
+    @Column(name = "official")
+    public Integer getOfficial() {
+        return official;
    }
 
     /**
-     * Set offical.
+     * Set official.
      *
-     * @param offical the value to set.
+     * @param official the value to set.
      */
-    public void setOffical(Integer offical) {
-        this.offical = offical;
+    public void setOfficial(Integer official) {
+        this.official = official;
     }
 
     /**
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Thu Mar 07 12:02:02 2013 +0100 @@ -17,7 +17,7 @@ import org.hibernate.Query; import org.hibernate.annotations.Type; -import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.MultiLineString; import de.intevation.flys.backend.SessionHolder; @@ -32,7 +32,7 @@ private SobekKind sobek; private String name; private River river; - private LineString geom; + private MultiLineString geom; private BoundaryKind kind; public HydrBoundary() { @@ -82,12 +82,12 @@ @Column(name = "geom") @Type(type = "org.hibernatespatial.GeometryUserType") - public LineString getGeom() { + public MultiLineString getGeom() { return geom; } - public void setGeom(LineString geom) { + public void setGeom(MultiLineString geom) { this.geom = geom; }
--- a/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java Thu Mar 07 12:02:02 2013 +0100 @@ -10,21 +10,21 @@ import javax.persistence.OneToOne; import javax.persistence.Table; +import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Query; import org.hibernate.annotations.Type; -import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.MultiLineString; import de.intevation.flys.backend.SessionHolder; +import de.intevation.flys.model.AxisKind; /** - * There is a modeling problem with the RiverAxis. The initial idea was, that a - * river can have a riveraxis that consist of exact one geometry. Now, it has - * turned out, that a single geometry is not enough for a riveraxis (arm of a - * river, inflows, ...). As workaround, we now expect, that a river can just - * have a single riveraxis. + * A river has one axis that is used for calculation. + * Additional axes of a river can be used to be painted int maps etc. + * which one is the main river axis can be determined over the axis kind. */ @Entity @Table(name = "river_axes") @@ -32,14 +32,13 @@ implements Serializable { private Integer id; - private Integer kind; + private AxisKind kind; private River river; - private LineString geom; + private MultiLineString geom; - public static final int DEFAULT_KIND = 0; - - public static final int KIND_OFFICIAL = 1; - public static final int KIND_OUTSOURCED = 2; + public static final int KIND_UNKOWN = 0; + public static final int KIND_CURRENT = 1; + public static final int KIND_OTHER = 2; public RiverAxis() { } @@ -69,43 +68,53 @@ } - @Column(name = "kind") - public Integer getKind() { + /** + * Get kind. + * + * @return kind as AxisKind. + */ + @OneToOne + @JoinColumn(name = "kind_id") + public AxisKind getKind() { return kind; } - - public void setKind(Integer kind) { + /** + * Set kind. 
+ * + * @param kind the value to set. + */ + public void setKind(AxisKind kind) { this.kind = kind; } @Column(name = "geom") @Type(type = "org.hibernatespatial.GeometryUserType") - public LineString getGeom() { + public MultiLineString getGeom() { return geom; } - public void setGeom(LineString geom) { + public void setGeom(MultiLineString geom) { this.geom = geom; } - public static List<RiverAxis> getRiverAxis(String river) { - return getRiverAxis(river, DEFAULT_KIND); + public static List<RiverAxis> getRiverAxis(String river) + throws IllegalArgumentException { + return getRiverAxis(river, KIND_CURRENT); } - public static List<RiverAxis> getRiverAxis(String river, int kind) { + public static List<RiverAxis> getRiverAxis(String river, int kind) + throws HibernateException { Session session = SessionHolder.HOLDER.get(); - Query query = session.createQuery( - "from RiverAxis where river.name =:river AND kind =:kind"); + "from RiverAxis where river.name =:river AND kind.id =:kind"); query.setParameter("river", river); query.setParameter("kind", kind); List<RiverAxis> list = query.list(); - return list.isEmpty() ? null : list; } }
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java Thu Mar 07 12:02:02 2013 +0100 @@ -286,6 +286,10 @@ String footerImpressum(); + String projectListMin(); + + String projectListAdd(); + String buttonNext(); String imageBack(); @@ -326,6 +330,10 @@ String downloadCSV(); + String downloadAT(); + + String downloadWST(); + String loadingImg(); String cancelCalculation();
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties	Thu Mar 07 12:02:02 2013 +0100
@@ -98,7 +98,9 @@
 downloadPNG = images/png_export.png
 downloadPDF = images/pdf_export.png
 downloadSVG = images/svg_export.png
-downloadCSV = images/save.png
+downloadCSV = images/save_csv.png
+downloadAT = images/save_at.png
+downloadWST = images/save_wst.png
 loadingImg = images/loading.gif
 cancelCalculation = images/cancelCalculation.png
 markerRed = images/marker_red.png
@@ -186,13 +188,15 @@
 footerContact = Contact
 footerImpressum = Legal info
 
+projectListMin = format-indent-less.png
+projectListAdd = list-add.png
 buttonNext = Next
 imageBack = images/back_en.png
 imageSave = images/save.png
-theme_top = images/arrow_first.png
-theme_up = images/arrow_up.png
-theme_down = images/arrow_down.png
-theme_bottom = images/arrow_last.png
+theme_top = images/go-first.png
+theme_up = images/go-up.png
+theme_down = images/go-down.png
+theme_bottom = images/go-bottom.png
 zoom_all = images/mag_100.png
 zoom_in = images/mag_zoom_box.png
 zoom_out = images/mag_zoom_minus.png
@@ -266,7 +270,7 @@
 gauge_class = Gauge Class
 eventselect = Eventselection
 events = Events
-kmchart = Chart
+kmchart = W/Q Preview
 
 chart_themepanel_header_themes = Theme
 chart_themepanel_header_actions = Actions
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties Thu Mar 07 12:02:02 2013 +0100 @@ -99,7 +99,9 @@ downloadPNG = images/png_export.png downloadPDF = images/pdf_export.png downloadSVG = images/svg_export.png -downloadCSV = images/save.png +downloadCSV = images/save_csv.png +downloadAT = images/save_at.png +downloadWST = images/save_wst.png loadingImg = images/loading.gif cancelCalculation = images/cancelCalculation.png markerRed = images/marker_red.png @@ -174,13 +176,15 @@ footerContact = Kontakt footerImpressum = Impressum +projectListMin = format-indent-less.png +projectListAdd = list-add.png buttonNext = \u00dcbernehmen imageBack = images/back_de.png imageSave = images/save.png -theme_top = images/arrow_first.png -theme_up = images/arrow_up.png -theme_down = images/arrow_down.png -theme_bottom = images/arrow_last.png +theme_top = images/go-first.png +theme_up = images/go-up.png +theme_down = images/go-down.png +theme_bottom = images/go-bottom.png zoom_all = images/mag_100.png zoom_in = images/mag_zoom_box.png zoom_out = images/mag_zoom_minus.png @@ -265,7 +269,7 @@ gauge_class = Abflussklasse eventselect = Ereignisauswahl events = Ereignisse -kmchart = Diagramm +kmchart = W/Q Vorschau exportATTooltip = Daten als AT Datei exportieren
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties	Thu Feb 28 11:49:48 2013 +0100
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties	Thu Mar 07 12:02:02 2013 +0100
@@ -100,6 +100,8 @@
 downloadPDF = images/pdf_export.png
 downloadSVG = images/svg_export.png
 downloadCSV = images/save.png
+downloadAT = images/save_at.png
+downloadWST = images/save_wst.png
 loadingImg = images/loading.gif
 cancelCalculation = images/cancelCalculation.png
 markerRed = images/marker_red.png
@@ -187,13 +189,15 @@
 footerContact = Contact
 footerImpressum = Legal info
 
+projectListMin = format-indent-less.png
+projectListAdd = list-add.png
 buttonNext = Next
 imageBack = images/back_en.png
 imageSave = images/save.png
-theme_top = images/arrow_first.png
-theme_up = images/arrow_up.png
-theme_down = images/arrow_down.png
-theme_bottom = images/arrow_last.png
+theme_top = images/go-first.png
+theme_up = images/go-up.png
+theme_down = images/go-down.png
+theme_bottom = images/go-bottom.png
 zoom_all = images/mag_100.png
 zoom_in = images/mag_zoom_box.png
 zoom_out = images/mag_zoom_minus.png
@@ -267,7 +271,7 @@
 gauge_class = Gauge Class
 eventselect = Eventselection
 events = Events
-kmchart = Chart
+kmchart = W/Q Preview
 
 chart_themepanel_header_themes = Theme
 chart_themepanel_header_actions = Actions
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/ExportPanel.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/ExportPanel.java Thu Mar 07 12:02:02 2013 +0100 @@ -101,9 +101,23 @@ String filename ) { String url = getExportUrl(name, facet, filename); - String iUrl = GWT.getHostPageBaseURL() + MSG.imageSave(); - - ImgLink link = new ImgLink(iUrl, url, 30, 30); + String imgUrl = GWT.getHostPageBaseURL(); + if (facet.equals("pdf")) { + imgUrl += MSG.downloadPDF(); + } + else if (facet.equals("at")) { + imgUrl += MSG.downloadAT(); + } + else if (facet.equals("wst")) { + imgUrl += MSG.downloadWST(); + } + else if (facet.equals("csv")) { + imgUrl += MSG.downloadCSV(); + } + else { + imgUrl += MSG.imageSave(); + } + ImgLink link = new ImgLink(imgUrl, url, 30, 30); link.setTooltip(getTooltipText(name, facet)); return link;
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/ProjectList.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/ProjectList.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,11 +1,9 @@ package de.intevation.flys.client.client.ui; import com.google.gwt.core.client.GWT; -import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.i18n.client.DateTimeFormat; import com.google.gwt.user.client.Timer; import com.google.gwt.user.client.rpc.AsyncCallback; -import com.google.gwt.user.client.ui.Button; import com.smartgwt.client.types.Alignment; import com.smartgwt.client.types.Autofit; @@ -19,7 +17,9 @@ import com.smartgwt.client.util.BooleanCallback; import com.smartgwt.client.util.SC; import com.smartgwt.client.widgets.Canvas; +import com.smartgwt.client.widgets.IconButton; import com.smartgwt.client.widgets.Label; +import com.smartgwt.client.widgets.events.ClickEvent; import com.smartgwt.client.widgets.events.VisibilityChangedEvent; import com.smartgwt.client.widgets.events.VisibilityChangedHandler; import com.smartgwt.client.widgets.grid.CellFormatter; @@ -383,14 +383,14 @@ HLayout buttonWrapper = new HLayout(); - Button addButton = new Button("+"); - addButton.setStyleName("projectsAddButton"); - addButton.setTitle(messages.new_project()); + IconButton addButton = new IconButton(""); + addButton.setIcon(messages.projectListAdd()); + addButton.setTooltip(messages.new_project()); addButton.setWidth("30px"); - Button closeButton = new Button("X"); - closeButton.setStyleName("projectsCloseButton"); - closeButton.setTitle(messages.projectlist_close()); + IconButton closeButton = new IconButton(""); + closeButton.setIcon(messages.projectListMin()); + closeButton.setTooltip(messages.projectlist_close()); closeButton.setWidth("30px"); buttonWrapper.addMember(addButton); @@ -420,7 +420,7 @@ addMember(filterpanel); addButton.addClickHandler( - new com.google.gwt.event.dom.client.ClickHandler() { + new 
com.smartgwt.client.widgets.events.ClickHandler() { @Override public void onClick(ClickEvent ev) { @@ -429,7 +429,7 @@ }); closeButton.addClickHandler( - new com.google.gwt.event.dom.client.ClickHandler() { + new com.smartgwt.client.widgets.events.ClickHandler() { @Override public void onClick(ClickEvent ev) {
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java Thu Mar 07 12:02:02 2013 +0100 @@ -129,7 +129,6 @@ layout.addMember(widget); layout.addMember(submit); - return layout; } @@ -161,7 +160,7 @@ List<Data> all = dataList.getAll(); Data wqData = getData(all, "wq_values"); Data wqMode = getData(all, "wq_isq"); - + boolean isQ = wqMode.getItems()[0].getStringValue().equals("true"); Canvas back = getBackButton(dataList.getState()); HLayout valLayout = new HLayout(); @@ -179,7 +178,8 @@ modeLabel.setWidth(200); valLayout.addMember(wqLabel); - valLayout.addMember(createOldWQValues(wqData)); + valLayout.addMember(createOldWQValues(wqData, isQ)); + valLayout.addMember(back); modeLayout.addMember(modeLabel); @@ -190,7 +190,8 @@ } - protected Canvas createOldWQValues(Data wqData) { + /** Create area showing previously entered w or q data. */ + protected Canvas createOldWQValues(Data wqData, boolean isQ) { VLayout layout = new VLayout(); DataItem item = wqData.getItems()[0]; @@ -198,6 +199,8 @@ String[] gauges = value.split(GAUGE_SEPARATOR); + String unit = isQ ? 
"m³/s" : "cm"; + for (String gauge: gauges) { HLayout h = new HLayout(); @@ -215,6 +218,8 @@ } sb.append(v); + sb.append(" "); + sb.append(unit); first = false; } @@ -268,8 +273,7 @@ } } - - protected List<String> validateW() { + protected List<String> validateRange(Map<String, double[]> ranges) { List<String> errors = new ArrayList<String>(); NumberFormat nf = NumberFormat.getDecimalFormat(); @@ -286,7 +290,7 @@ return errors; } - double[] mm = wranges.get(key); + double[] mm = ranges.get(key); if (mm == null) { SC.warn(MSG.error_read_minmax_values()); continue; @@ -326,65 +330,17 @@ } - protected List<String> validateQ() { - List<String> errors = new ArrayList<String>(); - NumberFormat nf = NumberFormat.getDecimalFormat(); - - Iterator<String> iter = wqranges.keySet().iterator(); - - while (iter.hasNext()) { - List<String> tmpErrors = new ArrayList<String>(); - - String key = iter.next(); - DoubleArrayPanel dap = wqranges.get(key); - - if (!dap.validateForm()) { - errors.add(MSG.error_invalid_double_value()); - return errors; - } - - double[] mm = qranges.get(key); - if (mm == null) { - SC.warn(MSG.error_read_minmax_values()); - continue; - } - - double[] values = dap.getInputValues(); - double[] good = new double[values.length]; + protected List<String> validateW() { + return validateRange(wranges); + } - int idx = 0; - for (double value: values) { - if (value < mm[0] || value > mm[1]) { - String tmp = MSG.error_validate_range(); - tmp = tmp.replace("$1", nf.format(value)); - tmp = tmp.replace("$2", nf.format(mm[0])); - tmp = tmp.replace("$3", nf.format(mm[1])); - tmpErrors.add(tmp); - } - else { - good[idx++] = value; - } - } - - double[] justGood = new double[idx]; - for (int i = 0; i < justGood.length; i++) { - justGood[i] = good[i]; - } - - if (!tmpErrors.isEmpty()) { - dap.setValues(justGood); - - errors.addAll(tmpErrors); - } - } - - return errors; + protected List<String> validateQ() { + return validateRange(qranges); } protected void 
initUserDefaults(DataList dataList) { - initUserWQValues(dataList); initUserWQMode(dataList); } @@ -488,6 +444,7 @@ } + /** Get items which are not WQ_MODE. */ protected DataItem[] getWQItems(DataList dataList) { List<Data> data = dataList.getAll(); @@ -505,6 +462,10 @@ } + /** + * Create radio button for switching w and q input. + * Radiobutton-change also triggers helper panel tab selection. + */ protected Canvas createMode(DataList dataList) { RadioGroupItem wq = new RadioGroupItem(FIELD_WQ_MODE); wq.setShowTitle(false); @@ -715,12 +676,5 @@ ArtifactDescription adesc = artifact.getArtifactDescription(); return adesc.getRiver(); } - - - protected void updatePanels(boolean isQ) { - - } - - } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/GaugePanel.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/GaugePanel.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,8 +1,5 @@ package de.intevation.flys.client.client.ui.stationinfo; -import com.smartgwt.client.util.SC; -import com.smartgwt.client.widgets.Label; - import com.google.gwt.core.client.GWT; import com.google.gwt.user.client.rpc.AsyncCallback;
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationRecord.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationRecord.java Thu Mar 07 12:02:02 2013 +0100 @@ -137,7 +137,7 @@ private void setOperator(String value) { this.setAttribute("operator", value); } - + @Override public Date getStartTime() { return this.getAttributeAsDate("starttime"); @@ -168,7 +168,7 @@ public String getLink() { return this.getAttributeAsString("link"); } - + public void setLink(String link) { this.setAttribute("link", link); }
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/wq/WQAutoTabSet.java Thu Feb 28 11:49:48 2013 +0100 +++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/wq/WQAutoTabSet.java Thu Mar 07 12:02:02 2013 +0100 @@ -1,7 +1,6 @@ package de.intevation.flys.client.client.ui.wq; import com.google.gwt.core.client.GWT; -import com.google.gwt.i18n.client.NumberFormat; import com.smartgwt.client.util.SC; import com.smartgwt.client.widgets.tab.Tab;