changeset 3800:69d19995bc3c 2.9.1

merged flys-backend/2.9.1
author Thomas Arendsen Hein <thomas@intevation.de>
date Fri, 28 Sep 2012 12:14:48 +0200
parents 4adc35aa655c (current diff) b9a99fcc78c3 (diff)
children 22cd60315e08
files
diffstat 270 files changed, 47304 insertions(+), 0 deletions(-) [+]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/ChangeLog	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,3627 @@
+2012-09-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpoch.java:
+	  Corrected broken loop to find epochs for river and km range.
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingle.java:
+	  Corrected broken loop to find singles for river and km range.
+
+2012-09-12	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java:
+	  Made code more robust.
+
+2012-09-12	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java:
+	  Add system property 'flys.backend.importer.skip.default'
+	  which allows switching the default of the skip flags.
+	  Very useful if you want to import only some subsystems:
+	  set this property to 'true' and the desired subsystems to 'false'.
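+
+	  For example (this particular combination is only an illustration,
+	  not taken from an actual setup): skip everything by default and
+	  re-enable only the sediment density import:
+
+	    -Dflys.backend.importer.skip.default=true
+	    -Dflys.backend.importer.skip.sediment.density=false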
+
+2012-09-12	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Indices on cross sections
+	  were created too early (before the referenced tables
+	  were created).
+
+2012-09-12	Björn Ricks <bjoern.ricks@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Add new method determineMinMaxQ to fetch the minimum and maximum q values
+	  from the database.
+
+2012-09-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* postgresql-minfo.sql: Added missing tables.
+	* oracle-minfo.sql: Fixed small typos.
+
+2012-09-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Java 1.6 -> 1.6
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  Replaced german comment (with an ill encoded Umlaut).
+
+2012-09-10	Christian Lins	<christian.lins@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Remove obsolete method (#851).
+
+2012-09-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  We need min/max Q, too.
+
+2012-09-10	Björn Ricks <bjoern.ricks@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/MinMaxWQ.java,
+	  src/main/java/de/intevation/flys/model/Gauge.java:
+	  Add method fetchMinMaxWQ to Gauge. This method returns a new MinMaxWQ
+	  instance that contains the fetched values for the gauge overview info.
+
+2012-09-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Moved some code over from flys-artifacts.
+
+2012-09-07  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged module as '2.9'.
+
+2012-09-07	Björn Ricks <bjoern.ricks@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Rename getDurationCurveData to fetchDurationCurveData to satisfy
+	  hibernate.
+
+2012-09-06	Björn Ricks <bjoern.ricks@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Move static getDurationCurveData method from flys-artifacts
+	  MainValuesFactory class to an instance method in the Gauge class.
+
+2012-08-30  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	Attempted fix for issue821 (cross sections only shown up to +/-500m).
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  Set max value for cross sections to 2500 instead of 500.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/run_geo.sh: New run script for the python based geo importer.
+
+	* contrib/run_hydr_morph.sh: New run script for the java based importer
+	  for hydrological and morphological data.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/overview.tex: Fixed typo.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-geodaesie.tex,
+	  doc/documentation/de/importer-hydr-morph.tex: Corrected name of run
+	  scripts.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-geodaesie.tex,
+	  doc/documentation/de/importer-hydr-morph.tex,
+	  doc/documentation/de/importer-manual.tex: Bugfixes and new geo error
+	  description.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/importer.py: Fixed broken method call.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/overview.tex: Hint to root permissions.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/importer.py: Set 'path' attribute for each feature.
+
+	* contrib/shpimporter/axis.py: Also accept shapes with 'achse' in their names.
+
+	* doc/schema/oracle-spatial.sql: Added 'path' attribute to each db
+	  relation.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-geodaesie.tex: Bugfixes and added hint.
+
+	* doc/documentation/de/importer-manual.tex: Set document revision and
+	  date.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-hydr-morph.tex: Added manual line breaks.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-hydr-morph.tex: Fixed bug during PDF
+	  creation.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-hydr-morph.tex: Bugfixes.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/overview.tex: Improved description of database
+	  schema creation. Describe command to unpack importer tarball.
+
+2012-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-geodaesie.tex: Added further error message
+	  if no connection to Oracle database could be established.
+
+2012-08-29  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-geodaesie.tex: Documented install steps
+	  for Oracle Instantclient and python and gdal.
+
+2012-08-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-hydr-morph.tex: Describe Log4J
+	  configuration; some smaller bugfixes.
+
+2012-08-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/overview.tex: Added comment to directory structure
+	  of a river.
+
+	* doc/documentation/de/importer-hydr-morph.tex: Describe import of
+	  morphological files.
+
+2012-08-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/title.tex,
+	  doc/documentation/de/overview.tex,
+	  doc/documentation/de/importer-hydr-morph.tex,
+	  doc/documentation/de/importer-manual.tex: Smaller fixes and structural
+	  changes.
+
+2012-08-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/documentation/de/importer-hydr-morph.tex: Added documentation from
+	  README.
+
+2012-08-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Documented which hydrological files are taken into account.
+
+2012-08-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Documented the classification of the annotations.
+
+2012-08-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Documented the db credentials.
+
+2012-08-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Completed the error messages (Phew!)
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/AnnotationClassifier.java,
+	  src/main/java/de/intevation/flys/importer/parsers/WstParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportSQRelation.java:
+	  Made error messages identifiable.
+
+2012-08-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/shapeimporter,
+	  doc/shapeimporter/documentation.txt: Removed. The documentation is now
+	  written in LaTeX.
+
+	* doc/documentation,
+	  doc/documentation/de: New. The place where the german documentation is
+	  located.
+
+	* doc/documentation/de/title.tex,
+	  doc/documentation/de/importer-geodaesie.tex,
+	  doc/documentation/de/figures/bfg_logo.png,
+	  doc/documentation/de/figures/intevation-logo.pdf,
+	  doc/documentation/de/overview.tex,
+	  doc/documentation/de/importer-hydr-morph.tex,
+	  doc/documentation/de/importer-manual.tex: German documentation of the
+	  importer and shape importer.
+
+	* doc/documentation/de/Makefile: Makefile to generate the documentation.
+
+	* doc/documentation/de/README: Instructions to generate the documentation.
+
+2012-08-23	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Added warning messages. TODO: More warnings.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/AtFileParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Made warnings identifiable.
+
+2012-08-23	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Added error messages.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java,
+	  src/main/java/de/intevation/flys/importer/parsers/PRFParser.java
+	  src/main/java/de/intevation/flys/importer/parsers/HYKParser.java
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Made errors identifiable.
+
+2012-08-23	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/README.txt: Documentation for the importer (German).
+	  TODO: db credentials, warnings & errors, annotation types.
+
+2012-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/run.sh: Moved 'SKIP_XXX' variables to the top of
+	  the script.
+
+2012-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/axis.py: Use log methods of shpimporter and
+	  removed print() calls.
+
+	* contrib/shpimporter/importer.py: Evaluate command line option 'dry_run'.
+	  Do not commit database transaction if it is activated.
+
+	* contrib/shpimporter/shpimporter.py: Added new command line option
+	  'dry_run' to suppress database transactions.
+
+2012-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/shpimporter.py: Use OptionParser to read command line
+	  options. Now it is no longer necessary to adjust user-specific settings
+	  in python files. Users should adapt the run.sh script for specific settings.
+
+	* contrib/shpimporter/catchments.py,
+	  contrib/shpimporter/boundaries.py,
+	  contrib/shpimporter/hws.py,
+	  contrib/shpimporter/importer.py,
+	  contrib/shpimporter/uesg.py,
+	  contrib/shpimporter/axis.py,
+	  contrib/shpimporter/km.py,
+	  contrib/shpimporter/floodplains.py,
+	  contrib/shpimporter/lines.py,
+	  contrib/shpimporter/gauges.py,
+	  contrib/shpimporter/buildings.py,
+	  contrib/shpimporter/fixpoints.py,
+	  contrib/shpimporter/crosssectiontracks.py: Added new method getName().
+
+	* contrib/shpimporter/utils.py: Use shpimporter functions to print debug
+	  messages.
+
+	* contrib/shpimporter/run.sh: New shell script to run the shape importer
+	  with a default configuration.
+
+2012-08-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  Added toString() to Annotation.
+
+2012-07-27  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged module as '2.8.1'.
+
+2012-07-18	Felix Wolfsteller	<felix.wolfstellre@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/oracle.sql:
+	  Adjusted the official_lines views to include wst column pos.
+
+2012-07-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* Tagged module as '2.8'.
+
+2012-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java,
+	  src/main/java/de/intevation/flys/backend/SpatialInfo.java:
+	  Removed same package imports.
+
+2012-07-12	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/hibernate/MapResultTransformer.java:
+	  Added static INSTANCE because it's stateless.
+
+2012-07-12	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/hibernate/MapResultTransformer.java:
+	  Strategy to directly transform native SQL results into Maps.
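+
+	  As an independent illustration of the idea (not the project's
+	  MapResultTransformer itself; only Hibernate's
+	  org.hibernate.transform.ResultTransformer interface is assumed),
+	  a stateless transformer with a shared INSTANCE could look like this:
+
+	    import java.util.HashMap;
+	    import java.util.List;
+	    import java.util.Map;
+
+	    import org.hibernate.transform.ResultTransformer;
+
+	    /** Maps each native SQL result row to a Map keyed by column alias. */
+	    public final class AliasToMapTransformer implements ResultTransformer {
+
+	        public static final AliasToMapTransformer INSTANCE =
+	            new AliasToMapTransformer();
+
+	        private AliasToMapTransformer() {
+	            // Stateless, so one shared instance is enough.
+	        }
+
+	        @Override
+	        public Object transformTuple(Object[] tuple, String[] aliases) {
+	            Map<String, Object> row = new HashMap<String, Object>();
+	            for (int i = 0; i < aliases.length; i++) {
+	                row.put(aliases[i], tuple[i]);
+	            }
+	            return row;
+	        }
+
+	        @Override
+	        public List transformList(List collection) {
+	            // Each tuple is already a Map; nothing left to do.
+	            return collection;
+	        }
+	    }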
+
+2012-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Fix for flys/issue359
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java:
+	  Removed extension from PRF descriptions.
+
+2012-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/oracle.sql:
+	  Added indices for faster access to cross section points.
+
+2012-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Partial fix for flys/issue697
+
+	* doc/schema/oracle.sql: Increased the decimal places of Ws, Qs and Kms to 5.
+
+2012-07-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Set correct connection provider class (its package has changed in the
+	  last commit).
+
+2012-07-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  Fixed broken package declaration.
+
+2012-06-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/oracle.sql:
+	  Added views to access the 'Amtlichen Linien'.
+
+2012-06-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/**/*.java: Removed trailing whitespace.
+
+2012-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	 Make access to SedDB configurable via conf.xml and provide
+	 access to sessions.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Moved FLYS specific stuff out into the concept of credential classes
+	  which provides the user, password, dialect, driver, url and the
+	  Hibernate classes to bind.
+
+	* src/main/java/de/intevation/flys/backend/Credentials.java: New.
+	  Abstract class to provide the data needed for the SessionFactoryProvider.
+
+	* src/main/java/de/intevation/flys/backend/FLYSCredentials.java: New.
+	  Extends Credentials to serve the FLYS specific stuff.
+
+	* src/main/java/de/intevation/flys/backend/SedDBCredentials.java: New.
+	  Extends Credentials to serve the SedDB specific stuff. The parameters
+	  are drawn from conf.xml in the form:
+
+	    <artifact-database>
+	      ...
+	      <seddb-database>
+	        <user>USER</user>
+	        <password>PASSWORD</password>
+	        <driver>DRIVER</driver>
+	        <url>URL</url>
+	        <dialect>DIALECT</dialect>
+	      </seddb-database>
+	      ...
+	    </artifact-database>
+
+	    If absent, the defaults are:
+	    - user:     seddb
+	    - password: seddb
+	    - driver:   org.postgresql.Driver
+	    - url:      jdbc:postgresql://localhost:5432/seddb
+	    - dialect:  org.hibernate.dialect.PostgreSQLDialect
+
+	   Defaults can be overwritten with system properties:
+	   - user:      flys.seddb.user
+	   - password:  flys.seddb.password
+	   - driver:    flys.seddb.driver
+	   - url:       flys.seddb.url
+	   - dialect:   flys.seddb.dialect
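+
+	   For example (the host name below is only a placeholder), single
+	   values can be overridden on the JVM command line:
+
+	     -Dflys.seddb.url=jdbc:postgresql://dbhost:5432/seddb
+	     -Dflys.seddb.user=seddb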
+
+	* src/main/java/de/intevation/flys/backend/SedDBSessionHolder.java: New.
+	  Use this if you want to establish a session to the SedDB!
+	  Works like the SessionHolder.
+
+	* src/main/java/de/intevation/flys/backend/SessionHolder.java:
+	  Add Override annotation. This holder gives you a session 
+	  to the FLYS database.
+
+	* src/main/java/de/intevation/flys/App.java: Adjusted.
+
+2012-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/seddb/model/*.java: Hibernate access to
+	  SedDB generated by Eclipse (ugly formatting, but it compiles).
+
+2012-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java:
+	  Re-established cross platform compatibility.
+
+2012-06-14  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java: Added the
+	  missing 'name' property and added a function that returns all
+	  CrossSectionTracks of a specific river with a specific name.
+
+2012-06-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added missing river_id column to
+	  sq_relation table and reordered the drop statements.
+
+	* src/main/java/de/intevation/flys/model/SQRelationValue.java,
+	  src/main/java/de/intevation/flys/model/SQRelation.java: Added missing
+	  constructors and fixed some minor bugs that occurred during the import test.
+
+	* src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSQRelation.java:
+	  Implemented code to store sq relations and values into db.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java:
+	  Override parse() to retrieve the filename.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  code to store sq relations into db.
+
+2012-06-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSQRelation.java:
+	  New classes used during the import process of MINFO sq relations.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java:
+	  New line parser that reads MINFO specific sq relation files.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added system
+	  property to skip parsing sq relations.
+
+	    "-Dflys.backend.importer.skip.sq.relation"
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Parse MINFO
+	  sq relations.
+
+2012-06-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Cleared some
+	  irritating debug statements.
+
+2012-06-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/annotation-types.xml: Added regex to match "Geschiebemesstellen" as
+	  "Messstelle" type.
+
+	* src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java:
+	  Parse KM files in "../Morphologie/Streckendaten/". On Windows Systems,
+	  this won't work!
+
+2012-06-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  Added Override annotation to new NameFilter.
+
+2012-06-11  Raimund Renkert  <raimund.renkert@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  Added filter for annotation names.
+
+2012-06-08	Felix Wolfsteller	<felix.wolfstellre@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  Fix comment.
+
+2012-06-08	Felix Wolfsteller	<felix.wolfstellre@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/SQRelationValue.java:
+	  Fix hibernate annotation problem (make it run again).
+
+2012-06-07  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Schema additions for MINFO s/q
+	  relation.
+
+	* src/main/java/de/intevation/flys/model/SQRelationValue.java,
+	  src/main/java/de/intevation/flys/model/SQRelation.java: New model
+	  classes for MINFO s/q relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-06-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/GaugeLocation.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Removed superfluous imports.
+
+2012-05-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* Tagged module as '2.7'.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: Added lower
+	  and upper km to function that returns all singles and epochs for a given
+	  river.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java: Fixed
+	  a typo.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightEpochValue.java: Added
+	  functions to retrieve single and epoch values based on their owner and km
+	  range.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: Added
+	  functions to get singles and epochs by river and by id.
+
+2012-05-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityModel.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeZone.java: Added static
+	  functions to retrieve data from database.
+
+2012-05-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeZone.java: Added a
+	  function getDischargeZones() that returns all DischargeZones for a given
+	  river.
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/gauges.py: Search for field 'MPNAAM' and use its
+	  value as name for the geometry.
+
+	* src/main/java/de/intevation/flys/model/GaugeLocation.java: New model
+	  class for storing locations of gauges.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered GaugeLocation class.
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HydrBoundary.java,
+	  src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java: New.
+	  Hydrological boundaries.
+
+	* src/main/java/de/intevation/flys/model/Line.java: Modified signature of
+	  Line.getLines(). It now also takes the name of a line to retrieve more
+	  specific lines.
+
+	* src/main/java/de/intevation/flys/model/Building.java: Modified signature
+	  of Building.getBuildings(). It now also takes the name of a building to
+	  retrieve more specific buildings.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered HydrBoundary and HydrBoundaryPoly classes.
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/crosssectiontracks.py: Search for 'STATION' field in
+	  shapefile to extract the current km.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Hws.java: The getHws() function
+	  got a further parameter 'name' to retrieve specific hws only.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: The
+	  getCatchments() function got a further parameter 'name' to retrieve
+	  specific catchments only.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial_idx.sql: Set the geometry type of 'catchments'
+	  to 'multipolygon'.
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: The geometry
+	  attribute in such instances is now of type 'Geometry'.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/import-dems.sql: New. Insert statements to load DEMs into the
+	  database.
+
+2012-05-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql,
+	  doc/schema/oracle-spatial.sql: New relation 'gauge_location' and some
+	  bugfixes in the drop schema.
+
+	* contrib/shpimporter/catchments.py,
+	  contrib/shpimporter/hws.py,
+	  contrib/shpimporter/gauges.py: New importers.
+
+	* contrib/shpimporter/importer.py: Added a debug statement to improve the
+	  visibility of the log output.
+
+	* contrib/shpimporter/shpimporter.py: Make use of the new importers.
+
+2012-05-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql,
+	  doc/schema/oracle-spatial.sql: Added new relations for hydrological
+	  boundaries and appended a 'name' field to relations that had no such
+	  field yet.
+
+	* contrib/shpimporter/floodplains.py,
+	  contrib/shpimporter/boundaries.py: New importers for floodplain and
+	  hydrological boundaries.
+
+	* contrib/shpimporter/lines.py,
+	  contrib/shpimporter/buildings.py,
+	  contrib/shpimporter/uesg.py,
+	  contrib/shpimporter/fixpoints.py,
+	  contrib/shpimporter/axis.py,
+	  contrib/shpimporter/crosssectiontracks.py,
+	  contrib/shpimporter/km.py: Set the 'name' attribute for new features.
+
+	* contrib/shpimporter/importer.py: Some bugfixes and improvements:
+	  geometries are transformed into a destination coordinate system now.
+
+	* contrib/shpimporter/shpimporter.py: Use all importers and defined the
+	  destination srs.
+
+2012-05-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastCrossSectionLine.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionLine.java,
+	  src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Store measurement points of profiles as Doubles, not as BigDecimals.
+	  This should save a lot of memory during the import.
+
+2012-04-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java:
+	  Removed superfluous imports.
+
+2012-04-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-minfo.sql: Adjusted PostgreSQL port
+	  to match the Oracle schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/annotation-types.xml: Adapted annotation types based on BfG wishes.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop.sql: New SQL file to drop WINFO specific db
+	  schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle.sql: Made SQL instructions upper case.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql: New statements to drop the whole
+	  spatial schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/lines.py,
+	  contrib/shpimporter/buildings.py,
+	  contrib/shpimporter/importer.py,
+	  contrib/shpimporter/fixpoints.py,
+	  contrib/shpimporter/axis.py,
+	  contrib/shpimporter/crosssectiontracks.py,
+	  contrib/shpimporter/km.py: New classes for importing specific
+	  shapefiles. Each of these importers defines its target db table name and a
+	  directory path to the shapefiles it should import.
+
+	* contrib/shpimporter/uesg.py: Some modifications necessary to streamline
+	  the import process of shapefiles.
+
+	* contrib/shpimporter/shpimporter.py: Use all available importers for the
+	  import process.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Repaired broken schema.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing MINFO waterlevel differences:
+
+	    -Dflys.backend.importer.skip.waterlevel.differences=True
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Parse and
+	  store MINFO specific waterlevel differences.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java:
+	  New parser for MINFO specific waterlevel differences.
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java:
+	  New importer classes used during the import process of MINFO specific
+	  waterlevel differences.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added a missing constraint to
+	  'waterlevel_difference' relation. Remove that constraint in
+	  oracle-drop-minfo.sql.
+
+2012-04-25  Felix Wolfsteller <felix@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java:
+	  (setValue, setValues): Change setter name to allow hibernate to recognize
+	  it.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelDifferenceColumn.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelDifference.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java:
+	  New model classes for MINFO specific waterlevel differences.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Improved the schema to store MINFO
+	  specific waterlevel differences.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelValue.java: Renamed a
+	  property ('qRange' -> 'qrange') because there have been problems during the
+	  import.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java:
+	  Filled the stub with code.
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevel.java: Some
+	  adaptions and missing methods which are required during the import.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Use the
+	  correct directory to search for waterlevel files.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Small type adaptions in the waterlevel_values
+	  relation.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java:
+	  First stub of a parser for MINFO specific waterlevel values.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a new config
+	  option to skip parsing MINFO specific waterlevel_values:
+
+	    -Dflys.backend.importer.skip.waterlevels=True
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Added code
+	  path to start parsing and storing MINFO specific waterlevel values.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevel.java: New
+	  importer classes used for MINFO specific waterlevel import.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelValue.java,
+	  src/main/java/de/intevation/flys/model/Waterlevel.java: New model
+	  classes for MINFO specific waterlevel data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added relations for waterlevels
+	  specific to MINFO. Note: those waterlevel values are not stored in the
+	  WINFO specific relations!
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYield.java,
+	  src/main/java/de/intevation/flys/model/SedimentYield.java: Added a
+	  column 'description' to the sediment_yield relation.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java:
+	  New parser for sediment yield data.
+
+	* src/main/java/de/intevation/flys/model/GrainFraction.java: Added constants
+	  that represent the names of the grain fraction types.
+
+	* src/main/java/de/intevation/flys/importer/ImportGrainFraction.java: New
+	  constructor that takes a name only.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Improved the
+	  process of parsing sediment yield files.
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added new config
+	  option to skip parsing sediment yield data:
+
+	     -Dflys.backend.importer.skip.sediment.yield=true
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared the
+	  importer to parse sediment yield data.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentYield.java:
+	  storeDependencies() now throws SQLExceptions and
+	  ConstraintViolationExceptions.
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportGrainFraction.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYieldValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYield.java: New
+	  importer classes for importing sediment yield data.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/SedimentYield.java,
+	  src/main/java/de/intevation/flys/model/SedimentYieldValue.java,
+	  src/main/java/de/intevation/flys/model/GrainFraction.java: New model
+	  classes for sediment yield data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added missing river_id column to
+	  sediment_yield relation.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java,
+	  src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeZone.java:
+	  Removed superfluous imports.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added relations for storing sediment
+	  yield values.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: The discharge_zone relation no longer
+	  has a reference to a named main value but stores the lower and upper
+	  discharge as strings.
+
+	* src/main/java/de/intevation/flys/model/DischargeZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeZone.java:
+	  Adapted the code to the changes in the db schema.
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java:
+	  This parser now reads the meta information properly.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java:
+	  Repaired broken HQL statement.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Log the number of flow velocity model values that have been written into
+	  database.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java:
+	  New parser for flow velocity measurements.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Fixed broken HQL statements.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Store flow
+	  velocity measurements into database after parsing them.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes for flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java:
+	  New temp classes used during the import process of flow velocity
+	  measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityMeasurement.java: New
+	  model classes for storing flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java:
+	  New. Parser for model files of MINFO specific flow velocity data.
+
+	* doc/schema/oracle-minfo.sql: Added a missing q column to
+	  flow_velocity_model_values relation.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java:
+	  Added missing q column.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Added setter methods for meta data and an addValue() for adding new
+	  ImportFlowVelocityModelValues.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Use
+	  FlowVelocityModelParser for parsing model data of flow velocity files.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Temp classes used during the import process of flow velocity data.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  the method that stores flow velocity model data.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared for
+	  parsing flow velocity files.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing flow velocity files:
+
+	    -Dflys.backend.importer.skip.flow.velocity=true
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityModel.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeZone.java: New model
+	  classes for MINFO specific database relations.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  flow velocity values.
+
+2012-02-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Fixed column unit_id in table depths
+	  to match type of column id in table units.
+
+	* doc/schema/postgresql-minfo.sql: oracle-minfo.sql for a better DBMS.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidth.java: Added
+	  annotation for values.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java:
+	  Removed debug output in getPeer() and storeDependencies().
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java:
+	  New parser for morphological widths files.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidth.java: Throw
+	  constraint violation exceptions.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Parse and
+	  store morphological widths.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing morphological widths:
+
+	    -Dflys.backend.importer.skip.morphological.width=true
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a description field to morphological
+	  width values.
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java:
+	  Added new instance variable for descriptions.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMorphWidth.java: New
+	  temp classes used to store morphological width values during the import.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  morphological width.
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidth.java,
+	  src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java: New
+	  model classes for morphological width.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a description field to table
+	  sediment_density.
+
+	* src/main/java/de/intevation/flys/model/SedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/model/SedimentDensity.java: Some
+	  modifications based on the schema changes in the last commit.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  Override parse() of parent class to get the filename.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java,
+	  src/main/java/de/intevation/flys/importer/ImportDepth.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java:
+	  Implemented the methods storeDependencies() and getPeer() to save new
+	  instances into database.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Save all
+	  ImportSedimentDensity objects to database.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Modified the db schema specific to
+	  MINFO; replaced some columns.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  Implemented the method stubs: parse meta data and data values.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java:
+	  Added and replaced some instance variables because the db schema has
+	  changed.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/LineParser.java: New.
+	  An abstract parser that might be used to read a file and handle each
+	  line contained in the file separately (see the sketch below).
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  New. Subclasses LineParser. It is able to parse MINFO specific sediment
+	  density files. NOTE: currently just a stub.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared to
+	  read MINFO specific sediment density files.
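+
+	  The following is a rough, independent sketch of that line-handling
+	  pattern, not the actual LineParser class; class and method names here
+	  are assumptions for illustration only:
+
+	    import java.io.BufferedReader;
+	    import java.io.File;
+	    import java.io.FileReader;
+	    import java.io.IOException;
+
+	    /** Reads a file line by line and delegates each line to a subclass. */
+	    public abstract class SimpleLineParser {
+
+	        /** Called once for every line of the parsed file. */
+	        protected abstract void handleLine(String line);
+
+	        public void parse(File file) throws IOException {
+	            BufferedReader in = new BufferedReader(new FileReader(file));
+	            try {
+	                String line;
+	                while ((line = in.readLine()) != null) {
+	                    handleLine(line);
+	                }
+	            }
+	            finally {
+	                in.close();
+	            }
+	        }
+	    }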
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a new
+	  config option to skip parsing MINFO sediment density values:
+
+	    -Dflys.backend.importer.skip.sediment.density=true
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDepth.java: New importer
+	  classes used during MINFO sediment density import.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Improved the MINFO schema to store
+	  sediment density values specific to a river and depth.
+
+	* src/main/java/de/intevation/flys/model/SedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/model/SedimentDensity.java,
+	  src/main/java/de/intevation/flys/model/Depth.java: New model classes
+	  used to store sediment density values specific to a river and depth.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java:
+	  New. An abstract super class for BedHeightSingleParser and
+	  BedHeightEpochParser. It implements methods for parsing meta data.
+	  Concrete subclasses need to implement the method for parsing data rows
+	  only.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeight.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightValue.java: New.
+	  Interfaces that define some major methods which enable the BedHeightParser
+	  to parse both single and epoch bed heights.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Moved the code for parsing meta data to BedHeightParser which is now the
+	  parent class.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java:
+	  New. A further subclass of BedHeightParser for parsing MINFO bed heights
+	  for epochs.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java:
+	  Made them subclasses of ImportBedHeightValue.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java: Made
+	  them subclasses of ImportBedHeight.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Some
+	  adjustments to store ImportBedHeight and ImportBedHeightValue instances
+	  instead of concrete subclasses.
+
+2012-04-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a link to the river to the
+	  bed_height_epoch table.
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpochValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightEpoch.java: New model
+	  classes for MINFO bed height epochs.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Split up the
+	  config option to skip parsing bed heights. Now, we are able to skip single
+	  and epoch bed heights using the following options:
+
+	    -Dflys.backend.importer.skip.bed.height.single=true  (skip singles)
+	    -Dflys.backend.importer.skip.bed.height.epoch=true   (skip epochs)
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  everything needed to parse this data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Schema adaptions specific to MINFO bed
+	  heights.
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightType.java,
+	  src/main/java/de/intevation/flys/model/ElevationModel.java,
+	  src/main/java/de/intevation/flys/model/LocationSystem.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: New model
+	  classes for MINFO bed heights.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Some logging adjustments and a little bugfix: add BedHeightSingle objects
+	  after they were parsed; otherwise they are not saved to the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportElevationModel.java,
+	  src/main/java/de/intevation/flys/importer/ImportLocationSystem.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightType.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  storeDependencies() and getPeer().
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered model classes.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Finished work on parsing meta information and data specific to single bed
+	  heights.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportElevationModel.java,
+	  src/main/java/de/intevation/flys/importer/ImportLocationSystem.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightType.java: Some
+	  new and modified temp storages used during MINFO import.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Some schema adaptions specific to bed
+	  heights in MINFO.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added new command
+	  line option to skip parsing bed heights (German "Sohlhoehen").
+	  Set "-Dflys.backend.importer.skip.bed.height=true" to skip parsing this
+	  file type.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Initial checkin of parsers for bed heights (single and epoch).
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java: Temp
+	  storage for bed heights data used during the import.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java: Added an INFO
+	  statement that signals the start of parsing rivers.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Collect and
+	  trigger parsing of bed heights files (placed in 'Morphologie/Sohlhoehen').
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: MINFO specific DB schema and sql statements
+	  to drop MINFO specific stuff.
+
+2012-03-29  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option "flys.backend.importer.infogew.file" to set the path of an
+	  INFO.gew file.
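+
+	  For example (the path below is only a placeholder):
+
+	    -Dflys.backend.importer.infogew.file=/path/to/INFO.gew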
+
+	* src/main/java/de/intevation/flys/importer/Importer.java: Read the path
+	  to the INFO.gew from the new config option and try to parse it.
+
+2012-03-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Set the type of the 'lower' and 'upper'
+	  columns of relation 'dem' to NUMBER(19,5).
+
+2012-03-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java: Set geometry type
+	  to 'Geometry', because Oracle can save POLYGONS and MULTIPOLYGONS in the
+	  same relation; Hibernate seems unable to load both types.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered missing Floodmaps.
+
+2012-03-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* Tagged module as 'pre2.7-2012-03-16'.
+
+2012-03-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Repaired broken oracle db schema for
+	  relation 'floodmaps'.
+
+2012-03-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java: New model class for
+	  'floodmaps'.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Floodmaps model class.
+
+2012-03-07  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/shpimporter.py,
+	  contrib/shpimporter/utils.py,
+	  contrib/shpimporter/uesg.py: A Python based tool for importing
+	  shapefiles into a database. The tool is written in Python because it
+	  makes use of GDAL/OGR to read shapefiles and write features into the
+	  database.
+
+2012-03-07  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Adapted the PostgreSQL schema for
+	  floodmaps.
+
+2012-03-06  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql,
+	  doc/schema/oracle-spatial.sql: Added new relations for existing
+	  floodmaps (currently tested for PostgreSQL only!).
+
+2012-03-06  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Adapted method call of
+	  Log4J logger 'warning()' -> 'warn()'.
+
+2012-03-02  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	Fix or workaround for flys/issue632.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Avoid NPE when
+	  trying to get min/max q values.
+
+2012-02-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeTable.java:
+	  The list of DischargeTableValue is now sorted by Q.
+
+2012-02-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Added a method
+	  getMasterDischargeTable() to retrieve the discharge table with kind 0.
+
+2012-02-03  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java
+	  (getGaugeDatumsKMs, queryGaugeDatumsKMs): renamed to avoid hibernate
+	  running into trouble finding db-mapping for type Map for
+	  what looks like a 'getter' of GaugeDatumsKMs.
+
+2012-02-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java(getGaugeDatumsKMs):
+	  New method to build a map of gauge station kms to
+	  the datums (PNP) of the gauges. Useful to look up whether a km
+	  is a gauge station.
+
+2012-01-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  New. Fetches all information about the annotations of a river
+	  in one go. Useful to speed up annotation handling.
+
+2012-01-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Added a function that
+	  returns a Gauge based on its official number.
+
+2012-01-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastCrossSectionLine.java:
+	  New. Cacheable representation of the cross section line.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  Add a new isValid() method.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java:
+	  Added method getFastLines() to fetch the lines (FastCrossSectionLines)
+	  directly with a single SQL statement and without expensive
+	  intermediate representations.
+
+2012-01-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java(getLines):
+	  Added method to fetch the lines of a cross section in a given interval.
+	  Useful to have chunked access to the lines.
+
+2012-01-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java
+	  (fetchCrossSectionLinesPoints): Simplified and prevent reallocations.
+
+2012-01-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java:
+	  Introduced boolean system property 'flys.backend.sta.parse.gauge.numbers',
+	  default: false. If set, the official number is parsed out of
+	  the first line of the STA files. This leads to problems with the
+	  data of the Elbe river.
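+
+	  For example, to enable parsing of the official numbers:
+
+	    -Dflys.backend.sta.parse.gauge.numbers=true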
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Removed
+	  superfluous import.
+
+2012-01-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Fixed table hws.
+
+2012-01-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxis.java: The method
+	  getRiverAxis() now returns a list of RiverAxis objects. There is a
+	  modeling problem (see comment in the header of the class) which should
+	  be fixed!
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Adapted the
+	  code based on the modified signature in RiverAxis.
+
+2012-01-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Added a method
+	  determineMinMaxQFree() that determines the min/max Qs at a given
+	  kilometer.
+
+2012-01-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Remove 'NOT NULL' constraint from
+	  gauges.range_id because there are gauges which don't have
+	  a 'Gueltigkeitsbereich'.
+	
+	  To adjust existing PostgreSQL databases use:
+	
+	      ALTER TABLE gauges ALTER COLUMN range_id DROP NOT NULL;
+
+	* src/main/java/de/intevation/flys/model/River.java: Handle
+	  null references to 'Gueltigkeitsbereiche'.
+
+2011-12-28  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Added a method
+	  determineMinMaxQ(double, double) to be able to determine the Q range of a
+	  WST without having a Range object.
+
+2011-12-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java:
+	  Parse the official 'Pegelnummer' out of the STA files, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Adjusted import model.
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Make the
+	  official gauge number accessible via Hibernate.
+
+	* doc/schema/postgresql.sql, doc/schema/oracle.sql: Added
+	  an official_number to the gauges table.
+
+2011-11-30	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle-spatial.sql: Adjust extent of Germany to EPSG:31467
+	  (GK3) coordinates.
+
+2011-11-30	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle-spatial.sql: Adjust the extent to that of Germany
+	  and the SRS to 31467 in USER_SDO_GEOM_METADATA.
+
+2011-11-29	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle_create_user.sql: Fixed notation of the table in the
+	  alter statement.
+
+2011-11-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	Fixed flys/issue415
+
+	* doc/schema/oracle.sql: Increased precision of a and b in ranges.
+
+2011-11-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Building.java,
+	  src/main/java/de/intevation/flys/model/Fixpoint.java: Added functions
+	  that return a list of Buildings/Fixpoints for a given river.
+
+2011-11-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql,
+	  doc/schema/postgresql-spatial.sql: Adapted the "kind" field of "lines"
+	  relation (Int -> Varchar).
+
+	* doc/schema/oracle-spatial_idx.sql: Added missing spatial index for
+	  "lines" relation.
+
+	* src/main/java/de/intevation/flys/model/Line.java: Added a function that
+	  returns all lines of a given river.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: New. A model for
+	  the 'catchment' relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Catchment model.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Synced "hws" relation with oracle
+	  schema.
+
+	* src/main/java/de/intevation/flys/model/Hws.java: New. A model for the
+	  "hws" relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Hws model.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added methods that return information about the database connection used
+	  by a concrete SessionFactoryImpl.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodplain.java: Changed the
+	  geometry type from MultiPolygon to Polygon.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added a function that returns the db driver of a given
+	  SessionFactoryImpl instance.
+
+2011-11-01  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxisKm.java: New. Model class
+	  for 'river_axes_km' relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added mapping for RiverAxisKm.
+
+2011-10-25  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle_create_user.sql:
+	  Added explicit TABLESPACE definition for the user.
+
+2011-10-25  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle_create_user.sql: 
+	  Adjusted oracle_create_user.sql script. Added USER SQL snippet to change
+	  tablespace to users.
+
+2011-10-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* src/main/java/de/intevation/flys/utils/DateGuesser.java:
+	  New. Date guesser from desktop FLYS.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WstParser.java:
+	  Try to parse the name of a WST column as a date and store
+	  the date in the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Added code to store the date of the column in the database.
+
+2011-10-24  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: 
+	  Adjusted geometry type for floodplain and hws in AddGeometryColumn.
+
+2011-10-19  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Added missing views (wst_value_table,wst_w_values,wst_q_values).
+
+2011-10-18  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Removed all CREATE OR REPLACE TRIGGER statements.
+
+2011-10-17  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Adjusted NUMBER format for units, positions and cross_section_points tables.
+
+2011-10-10  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Fix errors.
+
+2011-10-10  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Adjust oracle schema [I382].
+
+2011-10-10  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportUnit.java:
+	  Removed obsolete imports.
+
+2011-10-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* ChangeLog: Added database update statements.
+
+	To update existing databases:
+
+	    BEGIN;
+	      CREATE SEQUENCE UNITS_ID_SEQ;
+	      CREATE TABLE units (
+	        id   int PRIMARY KEY NOT NULL,
+	        name VARCHAR(32)     NOT NULL UNIQUE
+	      );
+	    ALTER TABLE rivers ADD COLUMN wst_unit_id int REFERENCES units(id);
+	    INSERT INTO units (id, name) VALUES (nextval('UNITS_ID_SEQ'), 'NN + m');
+	    INSERT INTO units (id, name) VALUES (nextval('UNITS_ID_SEQ'), 'NHN + m');
+	    UPDATE rivers SET wst_unit_id = (SELECT id FROM units WHERE name = 'NHN + m') WHERE name = 'Elbe';
+	    UPDATE rivers SET wst_unit_id = (SELECT id FROM units WHERE name = 'NN + m') WHERE name <> 'Elbe';
+	    ALTER TABLE rivers ALTER COLUMN wst_unit_id SET NOT NULL;
+	    COMMIT;
+
+2011-10-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/dump-schema.sh: New. A small shell script that dumps the
+	  database schema of a given database to SQL.
+
+	* pom.xml: Added dependency for hibernate-spatial-oracle stuff.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql.sql: Removed unit column from wsts; added a
+	  wst_unit_id column to rivers. We decided to support a single elevation
+	  model for the whole river.
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a WstUnit
+	  column.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Removed the Unit
+	  column.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: Provide a
+	  getUnit() method that allows querying the elevation unit for this wst.
+
+	* src/main/java/de/intevation/flys/importer/ImportUnit.java: Removed
+	  storeDependencies().
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Save the
+	  Unit of the wst file (or a default one) to the database before all
+	  other dependencies and the river itself are saved.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a "units" table.
+
+	* src/main/java/de/intevation/flys/model/Unit.java: New. Model class that
+	  represents a unit.
+
+	* src/main/java/de/intevation/flys/importer/ImportUnit.java: New. Model
+	  class that is used to import units.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: A Wst stores a Unit
+	  which references the "units" table now.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WstParser.java: Set
+	  units which have already been parsed on Wsts.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: Added setter
+	  method for Units and call storeDependencies() for ImportUnits.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model class Unit.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	flys/issue333 (W-INFO / Berechnung Wasserspiegellage, Zuordnung Bezugspegel)
+
+	* src/main/java/de/intevation/flys/model/River.java: Modified
+	  determination of gauge based on a km range. River.determineGauge(double,
+	  double) will now return the gauge which matches the start km.
+
+2011-09-28  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  (wWrap): New method (extracted from WaterlevelSelectState).
+
+2011-09-28  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  (unbracket): New method (extracted from WaterlevelSelectState).
+
+2011-09-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  Moved some logic from cross section demo app to this model.
+	
+2011-09-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java:
+	  Added a new function that returns the nearest CrossSectionTrack of a
+	  river to a given kilometer.
+
+2011-09-23  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: 
+	  Added missing river_axes_km schema.
+	* doc/schema/oracle-spatial.sql:
+	  Added missing dem and lines schema.
+
+2011-09-22  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql, doc/schema/oracle-spatial.sql:
+	  Harmonized oracle and postgresql spatial schema.
+
+2011-09-22  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Harmonized Extent for Saar and Mosel. Some cleanups.
+
+2011-09-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added the possibility to enable JMX (/MBean) support for hibernate. By
+	  default, this support is NOT enabled. To enable JMX support for
+	  hibernate, set the system property "flys.backend.enablejmx=true".
+
+2011-09-20  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle_create_user.sql: Added right to create views.
+
+2011-09-20  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Generalized oracle spatial schema.
+
+2011-09-19  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.5
+
+	* Changes: Prepared changes for release.
+
+2011-09-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DGM.java: Added function to query
+	  a DGM by Id.
+
+2011-09-15  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  pom.xml: Removed oracle dependency.
+
+2011-09-15  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle.sql: Added initial oracle schema.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial_idx.sql: Added initial oracle-spatial_idx.sql
+	   script.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Deactivated spatial indexes.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle_create_user.sql: Added initial oracle_create_user.sql
+	   script.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema floodplain.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema hws.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema cross_section_tracks.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema river_axes.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema fixpoints.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema buildings.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Fixed table river_axes_km.
+
+2011-09-13  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added initial oracle schemas.
+
+2011-09-12  Bjoern Schilberg <bjoern@intevation.de>
+	
+	* doc/schema/postgresql-spatial.sql: Added initial schemas for
+	  Hydrologie/Einzugsgebiete, Hydrologie/HW-Schutzanlagen, Hydrologie/Hydr.
+	  Grenzen/Linien, BfG/hauptoeff_*.shp, BfG/MNQ-*.shp,
+	  BfG/modellgrenze*.shp,  BfG/uferlinie.shp, BfG/vorland_*.shp,
+	  Hydrologie/Streckendaten, Hydrologie/UeSG/Berechnung,
+	  Hydrologie/UeSG/Messung
+
+2011-09-02  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Removed SERIALs from schema.
+	  Auto-increment is realized through sequences now.
+
+	* src/main/java/de/intevation/flys/model/DGM.java: The table for dem has
+	  been renamed to 'dem' (before 'dgm').
+
+2011-09-01  Hans Plum <hans@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql:
+	  Hint for unifying table names: dgm -> dem.
+
+2011-09-01  Hans Plum <hans@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql:
+	  Added mappings to existing data in the file system (based on
+	  river Saar); added TODOs for missing tables/mappings.
+
+2011-08-31  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: New relation for floodplains (german
+	  "Talaue").
+
+	* src/main/java/de/intevation/flys/model/Floodplain.java: New. Model class
+	  that represents a floodplain.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Floodplain mapping.
+
+2011-08-31  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Added a relation for DGMs. Note that
+	  no data of the DGM is stored in this relation, but only the file path to
+	  the DGM file.
+
+	* src/main/java/de/intevation/flys/model/DGM.java: New. This class provides
+	  information for a DGM (km range and file path).
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the DGM.
+
+2011-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java: New
+	  static function to retrieve all CrossSectionTracks of a specific river.
+
+2011-08-25  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Moved to
+	  artifacts-common module.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/PRFParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/HYKParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java: Adjusted
+	  imports of FileTools.
+
+2011-08-25  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java (getMainValues)
+	  (setMainValues):
+	  New method and mapping to get/set MainValues of a Gauge. Essentially
+	  developed by Sascha L. Teichmann.
+
+2011-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Reverted the default db dialect to PostgreSQL - this has been changed to
+	  PostGIS by accident.
+	  Configure the dialect "org.hibernatespatial.postgis.PostgisDialect" to
+	  use the PostGIS stuff.
+
+2011-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxis.java: Added a function
+	  that returns the RiverAxis for a given River.
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Use the new
+	  function of RiverAxis to retrieve the RiverAxis.
+
+2011-08-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: The PostGIS database schema for
+	  FLYS spatial data.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java,
+	  src/main/java/de/intevation/flys/model/RiverAxis.java,
+	  src/main/java/de/intevation/flys/model/Line.java,
+	  src/main/java/de/intevation/flys/model/Building.java,
+	  src/main/java/de/intevation/flys/model/Fixpoint.java: Model classes that
+	  represent FLYS spatial data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered model classes.
+
+	* pom.xml: Added dependencies to HibernateSpatial and PostGIS.
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java,
+	  contrib/spatial-info.sh: A small demo application that prints out some
+	  information about spatial data of specific rivers. Modify the system
+	  property -Dflys.backend.spatial.river to get information of a river of
+	  your choice.
+
+2011-07-31	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Re-enabled Hibernate schema dumps.
+
+	* src/main/java/de/intevation/flys/App.java: Removed old code
+	  and use new SessionFactoryProvider infrastructure.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added methods to create db configurations without opening them.
+	  Useful for introspection only purposes.
+
+2011-07-31	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Bumped Apache DBCP up to 1.4 to use the same version
+	  as artifact database.
+
+2011-07-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Make use of the LRU cache from artifacts common.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added missing foreign key contraint on hyks table.
+	  To update existing databases:
+
+	  ALTER TABLE hyks ADD CONSTRAINT hyks_river_id_fkey FOREIGN KEY (river_id) REFERENCES rivers(id);
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Check if zone coordinates in HYKs are swapped and warn the user.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYK.java
+	  src/main/java/de/intevation/flys/importer/parsers/HYKParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java:
+	  Various small fixes and some extra logging.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: New.
+	  Central singleton to configure the Importer.
+	  Uses system properties for now:
+
+	  flys.backend.importer.dry.run: boolean
+	      default: false. true: don't write to database.
+
+	  flys.backend.importer.annotation.types: String
+	      default unset. Filename of annotation type classifications.
+
+	  flys.backend.importer.skip.gauges: boolean
+	      default: false. true: don't parse/store *.glt, *.sta files
+
+	  flys.backend.importer.skip.annotations: boolean
+	      default: false. true: don't parse/store *.km files
+
+	  flys.backend.importer.skip.prfs: boolean
+	      default: false. true: don't parse/store *.prf files
+
+	  flys.backend.importer.skip.hyks: boolean
+	      default: false. true: don't parse/store *.hyk files
+
+	  flys.backend.importer.skip.wst: boolean
+	      default: false. true: don't parse/store river wst files
+
+	  flys.backend.importer.skip.extra.wsts: boolean
+	      default: false. true: don't parse/store extra *.zus, *.wst files
+
+	  flys.backend.importer.skip.fixations: boolean
+	      default: false. true: don't parse/store fixation *.wst files
+
+	  flys.backend.importer.skip.official.lines: boolean
+	      default: false. true: don't parse/store 'amtliche Linien' *.wst files
+
+	  flys.backend.importer.skip.flood.water: boolean
+	      default: false. true: don't parse/store 'HW-Marken' *.wst files
+
+	  flys.backend.importer.skip.flood.protection: boolean
+	      default: false. true: don't parse/store 'HW-Schutzanlagen' *.wst files
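+
+	  For illustration only (this is not the actual Config.java code):
+	  boolean flags like these are typically read via Boolean.getBoolean(),
+	  which is true only if the property is set to "true", e.g.:
+
+	      boolean dryRun   = Boolean.getBoolean("flys.backend.importer.dry.run");
+	      boolean skipPrfs = Boolean.getBoolean("flys.backend.importer.skip.prfs");
+
+	      if (!dryRun && !skipPrfs) {
+	          // parse and store *.prf files
+	      }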
+
+2011-07-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYKEntry.java: Fixed OrderBy
+	  clause.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java:
+	  Store HYK data structures to database. Needs testing.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the HYKs from the importer. TODO: Store them in database.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Create data structures while parsing.
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java: Added
+	  getters/setters to ease model wiring.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java: New.
+	  Importer models for HYKs.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Each entry in a HYK can have
+	  an optional 'Peilungsjahr' (measure), not only the whole HYK.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyks DROP COLUMN measure;
+	    ALTER TABLE hyk_entries ADD COLUMN measure TIMESTAMP;
+	  COMMIT;
+
+	  * src/main/java/de/intevation/flys/model/HYKEntry.java,
+	    src/main/java/de/intevation/flys/model/HYK.java:
+	    Adjusted Hibernate models.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Added callback mechanism and recursive file search like in the PRF parser.
+	  All BfG-HYK files seem to parse correctly now. TODO: Build the data structures.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java:
+	  Added x.canRead() before accepting files for parsing.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Initial version of the HYK parser. Not ready, yet.
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFlowZoneType.java:
+	  Importer model for HYK flow zone types.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Argh! Added distance_{vl|hf|vr} to
+	  wrong table.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_vl;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_hf;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_vr;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_vl NUMERIC NOT NULL;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_hf NUMERIC NOT NULL;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_vr NUMERIC NOT NULL;
+	  COMMIT;
+
+	* src/main/java/de/intevation/flys/model/HYKFormation.java,
+	  src/main/java/de/intevation/flys/model/HYKEntry.java:
+	  Adjusted Hibernate models.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Added method
+	  walkTree() to traverse a directory tree. To be reused in HYK parser.
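+
+	  Rough sketch of the traversal (illustration only, not the actual
+	  FileTools code; the callback interface is made up):
+
+	      interface FileCallback { void found(File file); }
+
+	      static void walkTree(File dir, FileCallback callback) {
+	          File[] entries = dir.listFiles();
+	          if (entries == null) return; // not a directory or not readable
+	          for (File entry: entries) {
+	              if (entry.isDirectory()) walkTree(entry, callback);
+	              else                     callback.found(entry);
+	          }
+	      }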
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java:
+	  Uses the FileTools.walkTree() method now to find all PRF files.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added missing columns.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyks ADD COLUMN measure TIMESTAMP;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_vl NUMERIC NOT NULL;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_hf NUMERIC NOT NULL;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_vr NUMERIC NOT NULL;
+	  COMMIT;
+
+	* src/main/java/de/intevation/flys/model/HYKEntry.java,
+	  src/main/java/de/intevation/flys/model/HYK.java:
+	  Adjusted Hibernate models.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/*.java:
+	  New package. Moved the file parsers (*.gew, *.sta, *.at, *.glt, *.prf, *.km, *.wst)
+	  into this package.
+
+	* src/main/java/de/intevation/flys/importer/*.java: Adjusted the imports.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYKFormation.java,
+	  src/main/java/de/intevation/flys/model/HYKEntry.java,
+	  src/main/java/de/intevation/flys/model/HYKFlowZone.java,
+	  src/main/java/de/intevation/flys/model/HYKFlowZoneType.java,
+	  src/main/java/de/intevation/flys/model/HYK.java: New. The hibernate models
+	  for the HYK structures.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new models.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java: Added
+	  'order by' annotation for fetching the cross section lines.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added structures for HYKs "Hydraulische Kenngroessen"
+	  To update existing databases:
+
+	  BEGIN;
+	    CREATE SEQUENCE HYKS_ID_SEQ;
+	    CREATE TABLE hyks (
+	        id          int PRIMARY KEY NOT NULL,
+	        river_id    int             NOT NULL,
+	        description VARCHAR(256)    NOT NULL
+	    );
+	    
+	    CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+	    CREATE TABLE hyk_entries (
+	        id     int PRIMARY KEY NOT NULL,
+	        hyk_id int             NOT NULL REFERENCES hyks(id),
+	        km     NUMERIC         NOT NULL,
+	        UNIQUE (hyk_id, km)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+	    CREATE TABLE hyk_formations (
+	        id            int PRIMARY KEY NOT NULL,
+	        formation_num int             NOT NULL DEFAULT 0,
+	        hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id),
+	        top           NUMERIC         NOT NULL,
+	        bottom        NUMERIC         NOT NULL,
+	        UNIQUE (hyk_entry_id, formation_num)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+	    CREATE TABLE hyk_flow_zone_types (
+	        id          int PRIMARY KEY NOT NULL,
+	        name        VARCHAR(50)     NOT NULL UNIQUE,
+	        description VARCHAR(256)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+	    CREATE TABLE hyk_flow_zones (
+	        id           int PRIMARY KEY NOT NULL,
+	        formation_id int             NOT NULL REFERENCES hyk_formations(id),
+	        type_id      int             NOT NULL REFERENCES hyk_flow_zone_types(id),
+	        a            NUMERIC         NOT NULL,
+	        b            NUMERIC         NOT NULL,
+	        CHECK (a <= b)
+	    );
+	  COMMIT;
+
+2011-07-13	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Bumped Hibernate up to 3.6.5.
+
+2011-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Argh!
+	  Forgot to call the file hashing, so only the file lengths were
+	  compared.
+
+2011-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java:
+	  Added a class HashedFile to compare files by their length
+	  and a message digest. Digest can be set with system property
+	  "flys.backend.file.cmp.digest" and defaults to MD5. Useful to
+	  detect file duplicates.
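+
+	  Minimal sketch of the duplicate check (illustration only, not the
+	  actual HashedFile code; uses java.security.MessageDigest):
+
+	      static byte[] fileDigest(File file) throws Exception {
+	          String algo = System.getProperty("flys.backend.file.cmp.digest", "MD5");
+	          MessageDigest md = MessageDigest.getInstance(algo);
+	          InputStream in = new BufferedInputStream(new FileInputStream(file));
+	          try {
+	              byte[] buf = new byte[8192];
+	              for (int r; (r = in.read(buf)) >= 0;) {
+	                  md.update(buf, 0, r);
+	              }
+	          }
+	          finally {
+	              in.close();
+	          }
+	          return md.digest(); // duplicates: equal length and equal digest
+	      }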
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java: Added
+	  method prfAccept(File) to the callback to check whether a found PRF file
+	  should be parsed. Useful to prevent parsing file duplicates.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Use the HashedFile and the PRFParser.Callback to prevent
+	  parsing of PRF duplicates.
+
+2011-07-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Misspelled sequence.
+	  To update existing databases:
+
+	    DROP SEQUENCE CROSS_SECTION_LINES_SEQ;
+	    CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSection.java:
+	  Added some logging because importing is slow.
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java:
+	  Prevent NPE if a cross section line from db has no points.
+
+2011-07-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Parse all PRFs in all subfolders of a river and store them
+	as cross sections into the database. Needs testing!
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSection.java,
+	  src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java:
+	  New. Importer models for cross sections.
+
+	* src/main/java/de/intevation/flys/importer/XY.java:
+	  New. Made top level class from inner PRFParser.XY.
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Moved out XY class. Renamed callback.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionLine.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java,
+	  src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store cross sections into database.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Introduced a new table cross_section_lines
+	  holding the km of a set of points.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  New. Model for a single line of a "Querprofil".
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java: Removed
+	  'km' and 'points' they are part of the line now.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  They reference the containing line now.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model.
+
+	  To update existing databases:
+	  BEGIN;
+	      DROP SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	      DROP SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	      DROP TABLE cross_section_points;
+	      DROP TABLE cross_sections;
+	      CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	      CREATE TABLE cross_sections (
+	          id               int PRIMARY KEY NOT NULL,
+	          river_id         int             NOT NULL REFERENCES rivers(id),
+	          time_interval_id int                      REFERENCES time_intervals(id),
+	          description      VARCHAR(256)
+	      );
+	      CREATE SEQUENCE CROSS_SECTION_LINES_SEQ;
+	      CREATE TABLE cross_section_lines (
+	          id               int PRIMARY KEY NOT NULL,
+	          km               NUMERIC         NOT NULL,
+	          cross_section_id int             NOT NULL REFERENCES cross_sections(id),
+	          UNIQUE (km, cross_section_id)
+	      );
+	      CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	      CREATE TABLE cross_section_points (
+	          id                    int PRIMARY KEY NOT NULL,
+	          cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id),
+	          col_pos               int             NOT NULL,
+	          x                     NUMERIC         NOT NULL,
+	          y                     NUMERIC         NOT NULL,
+	          UNIQUE (cross_section_line_id, col_pos)
+	      );
+	  COMMIT;
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint that enforces the
+	  uniqueness of km and river. This is violated because there can be
+	  more than one sounding from different years at the same km of a river.
+	  Added column 'description' to the cross section table to make it
+	  human readable.
+
+	  To update existing databases:
+
+		ALTER TABLE cross_sections DROP CONSTRAINT cross_sections_km_key;
+		ALTER TABLE cross_sections ADD COLUMN description VARCHAR(256);
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java:
+	  Added the description column to the Hibernate model.
+	
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint that enforces the
+	  uniqueness of x in a "Querprofil-Spur". There are vertical lines
+	  in the soundings so this constraint is violated.
+
+	  To update existing databases:
+	
+		ALTER TABLE cross_section_points DROP CONSTRAINT cross_section_points_cross_section_id_key2;
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Added a callback to be called from parsePRFs() if
+	  a PRF was parsed successfully. Useful to scan whole
+	  sub directories for PRF files.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extract the year of the sounding from file names. If not found
+	  there, take it from the name of the containing directory. The
+	  description is made of the file name and the parent directory name.
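+
+	  A sketch of such a heuristic (illustration only, not the actual
+	  PRFParser code; the file name is a made-up example):
+
+	      String filename = "1995.prf"; // made-up example
+	      Matcher m = Pattern.compile("(\\d{4})").matcher(filename);
+	      Integer year = m.find() ? Integer.valueOf(m.group(1)) : null;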
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extracted the data. All BfG PRFs are parsed correctly, now.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extract km from lines. TODO: extract data.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java: New.
+	  Parser for PRF files. TODO: extract data and station from data lines.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Point3d.java: Deleted.
+	  Not needed (braindead).
+
+	* src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  Directly store the x/y values now.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Removed registration of Point3d.
+
+	To update existing databases:
+
+	BEGIN;
+	  ALTER TABLE cross_section_points DROP COLUMN point3d_id;
+	  DROP SEQUENCE POINTS3D_ID_SEQ;
+	  DROP TABLE points3d;
+	  ALTER TABLE cross_section_points ADD COLUMN x NUMERIC NOT NULL;
+	  ALTER TABLE cross_section_points ADD COLUMN y NUMERIC NOT NULL;
+	  ALTER TABLE cross_section_points ADD CONSTRAINT
+		cross_section_points_cross_section_id_key2 UNIQUE (cross_section_id, x);
+	COMMIT;
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  New. Hibernate models for cross-sections and their forming points.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new models.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Point3d.java: New. Point3d model.
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Point3d model.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Add relations for cross sections.
+	  To update existing databases:
+	     BEGIN;
+	     	
+	     CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	     CREATE TABLE cross_sections (
+	         id               int PRIMARY KEY NOT NULL,
+	         km               NUMERIC         NOT NULL,
+	         river_id         int             NOT NULL REFERENCES rivers(id),
+	         time_interval_id int             REFERENCES time_intervals(id),
+	         UNIQUE (km, river_id)
+	     );
+	     
+	     CREATE SEQUENCE POINTS3D_ID_SEQ;
+	     CREATE TABLE points3d (
+	         id int     PRIMARY KEY NOT NULL,
+	         x  NUMERIC NOT NULL,
+	         y  NUMERIC NOT NULL,
+	         z  NUMERIC NOT NULL
+	     );
+	     
+	     CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	     CREATE TABLE cross_section_points (
+	         id               int PRIMARY KEY NOT NULL,
+	         cross_section_id int NOT NULL REFERENCES cross_sections(id),
+	         point3d_id       int NOT NULL REFERENCES points3d(id),
+	         col_pos          int NOT NULL,
+	         UNIQUE (cross_section_id, point3d_id, col_pos),
+	         UNIQUE (cross_section_id, col_pos)
+	     );
+	     
+	     COMMIT;
+
+2011-06-28  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.4
+
+2011-06-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* Changes: Prepared changes for the upcoming release.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/AnnotationClassifier.java,
+	  src/main/java/de/intevation/flys/importer/AtFileParser.java,
+	  src/main/java/de/intevation/flys/importer/ValueKey.java,
+	  src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed trailing whitespace.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  Removed superfluous imports.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a method maxOverlap to determine the gauge which has
+	  the max common length to a given interval. This is for
+	  numerical stability in slightly overlapping gauge ranges.
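+
+	  The criterion is essentially the common length of two intervals;
+	  as a sketch (illustration only, not the actual model code):
+
+	      static double overlap(double a1, double b1, double a2, double b2) {
+	          return Math.max(0d, Math.min(b1, b2) - Math.max(a1, a2));
+	      }
+	      // choose the gauge whose range maximizes overlap(from, to, range.a, range.b)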
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: When
+	  looking up a gauge by km containment, use a precision of 1e-6.
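+
+	  Sketch of the tolerant containment test (illustration only; the
+	  actual epsilon handling in the model may differ):
+
+	      static final double EPSILON = 1e-6;
+
+	      static boolean contains(double a, double b, double km) {
+	          return km >= Math.min(a, b) - EPSILON
+	              && km <= Math.max(a, b) + EPSILON;
+	      }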
+
+2011-06-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge by its name.
+
+2011-06-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/annotation-types.xml: Fixed some rules.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Call the right constructors now.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotationType.java:
+	  Added getter/setter for name property.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  Print duplicates at WARN level, not at DEBUG.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  Fixed XPath expression to build the internal lookup structures
+	  correctly.
+
+2011-06-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Added classification of annotation types. Needs testing!
+
+	* doc/annotation-types.xml: New. Rules to classify the different
+	  types of annotations. The classification works like this:
+
+	  There are unique types like 'Bruecke', 'Pegel' and so on.
+	  They are defined in the /annotation/type section and
+	  identified by their name. One of the types can be set 
+	  as the default type if no rule applies.
+
+	  In the /annotation/pattern section are two types of pattern.
+
+	  1 - file pattern: If a KM file is opened its filename is
+	      matched against the regular expressions of these
+	      patterns. If a match is found the corresponding type
+	      is used as the default type in the open file.
+	      If no match is found the global default type is used
+	      as the default type.
+
+	  2 - line patterns: For each line of an open KM file these
+	      patterns are applied to find a match. If a match is
+	      found the corresponding type is used as the type of
+	      the annotation. If no match is found the file default
+	      is assumed to be the right type. For the file default
+	      see 1.
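+
+	  In pseudo code the lookup boils down to (sketch only; the helper
+	  and field names are made up, not the AnnotationClassifier API):
+
+	      String type = matchLinePatterns(line);      // 2: line patterns
+	      if (type == null) type = fileDefaultType;   // 1: per-file default
+	      if (type == null) type = globalDefaultType; // global default type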
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  To activate the annotation type classification set
+	  the system property
+
+	      'flys.backend.importer.annotation.types'
+
+	  to the path of an XML file that looks like annotation-types.xml.
+	  If the system property is not set, no classification is done.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  New. Implements the classification.
+	  
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Looped through the annotation type classification.
+
+2011-06-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge by a position lying in its range.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	First part of flys/issue18
+
+	* doc/schema/postgresql.sql: Add new table 'annotation_types'.
+	  To update existing databases:
+	    BEGIN;
+	      CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+	      CREATE TABLE annotation_types (
+	          id    int PRIMARY KEY NOT NULL,
+	          name  VARCHAR(256)    NOT NULL UNIQUE
+	      );
+	      ALTER TABLE annotations ADD COLUMN type_id int REFERENCES annotation_types(id);
+	    COMMIT;
+
+	* doc/schema/postgresql-cleanup.sql: Removed. Hopelessly outdated.
+
+	* src/main/java/de/intevation/flys/model/AnnotationType.java:
+	  New. Hibernate model to access the type.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Register the new backend type.
+
+	* src/main/java/de/intevation/flys/model/Annotation.java:
+	  References the annotation type.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotationType.java:
+	  New. Model to help import the annotation type.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  Uses the import type.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  Set the annotation type to 'null'. TODO: Do the classification!
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fix for flys/issue110. start index was shifted by one.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  Forgot to store reference to edge.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Register backend model.
+
+	* src/main/java/de/intevation/flys/importer/ImportEdge.java: New. Model
+	  for importing the edges.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  Parses the edges of an annotation, too.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added table edges to model 'Kanten' of an annotation.
+	  To update existing databases:
+	    BEGIN;
+	      CREATE SEQUENCE EDGES_ID_SEQ;
+	      CREATE TABLE edges (
+	        id     int PRIMARY KEY NOT NULL,
+	        top    NUMERIC,
+	        bottom NUMERIC);
+	      ALTER TABLE annotations ADD COLUMN edge_id int REFERENCES edges(id);
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/Edge.java: New. A 'Kanten'-Model.
+	* src/main/java/de/intevation/flys/model/Annotation.java: References the
+	  edges.
+	
+2011-06-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge only by station position.
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Make search for gauges independent of from/to order.
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a new view to select qs of a WST.
+	  To update existing databases:
+
+	    CREATE VIEW wst_q_values AS
+	        SELECT wc.position AS column_pos,
+	               wqr.q       AS q, 
+	               r.a         AS a, 
+	               r.b         AS b,
+	               wc.wst_id   AS wst_id
+	        FROM wst_column_q_ranges wcqr
+	        JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+	        JOIN ranges r         ON wqr.range_id        = r.id
+	        JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+	        ORDER BY wc.position, wcqr.wst_column_id, r.a;	  
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a new view to select ws of a WST.
+	  To update existing databases:
+
+	    CREATE VIEW wst_w_values AS
+	        SELECT wcv."position" AS km, 
+	               wcv.w          AS w,  
+	               wc."position"  AS column_pos, 
+	               w.id           AS wst_id
+	            FROM wst_column_values wcv
+	            JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+	            JOIN wsts w         ON wc.wst_id = w.id
+	        ORDER BY wcv."position", wc."position";
+
+2011-05-23	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	 flys/issue76
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Close gaps between q ranges.
+
+2011-05-20  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.3.1
+
+2011-05-13  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.3
+
+2011-05-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* Changes: Changes for release 2.3 - see Changes file to get to know about
+	  the version numbers of this software.
+
+2011-05-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a column which tells
+	  whether a river counts its km upwards or downwards.
+	  To update existing databases:
+
+	    ALTER TABLE rivers ADD COLUMN km_up BOOLEAN NOT NULL DEFAULT true;
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Adjust Hibernate mapping of new column.
+
+2011-05-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fixed flys/issue11 and flys/issue51.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ValueKey.java:
+	  Add some unsharp comparison (eps = 1e-6) to avoid 
+	  numerical problems.
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Removed. Not needed any longer.
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Use unsharp lookup.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Flush more often. Hopefully this reduces hibernate sync
+	  problems?!
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Fixed silly bug. Set flush mode back to auto because
+	  manual flushing causes some undetermined problems.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Do not load _all_ values from discharge tables and
+	  wst columns. This is extremely slow and will lead
+	  to OOM if more rivers are imported. Now only the
+	  last 20 columns and discharge tables are cached.
+
+	* src/main/java/de/intevation/flys/importer/ValueKey.java:
+	  New. Key for caching discharge table values and wst
+	  column values.
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Fixed bug in equals().
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Removed overly verbose debug output.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Use BigDecimals as representation for the numeric components
+	  which prevents running into unique constraint problems
+	  caused by imprecision.
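+
+	  Illustration of the idea (not the actual key class; the scale is
+	  chosen arbitrarily here):
+
+	      double value = 23.0999999999; // raw value from a parsed file
+	      BigDecimal key = new BigDecimal(value).setScale(6, BigDecimal.ROUND_HALF_UP);
+	      // -> 23.100000; nearly equal doubles map to the same key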
+
+	* src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Ranges are now cached globally, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Improved logging.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java:
+	  Removed superfluous imports.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Only accept main value types 'Q', 'W', 'D' and 'T' by default.
+	  '-' is not handled any more because it causes too many constraint
+	  problems. You can override the accepted types with the
+	  system property "flys.backend.main.value.types" (default "QWTD").
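+
+	  Reading that filter could look like this (sketch only, not the
+	  actual StaFileParser code):
+
+	      String types = System.getProperty("flys.backend.main.value.types", "QWTD");
+	      char   type  = 'Q'; // main value type read from the STA file
+	      boolean accept = types.indexOf(type) >= 0;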
+	  
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Set session flush mode to manual. Hopefully this improves the
+	  performance a bit.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Improved logging.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstColumnValueKey.java: Deleted
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java: Reinserted
+	  here in a more generalized form.
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Cache the discharge table value, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java:
+	  Use the global cache.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  New. Centralized caching in the thread local context. Importing
+	  the Elbe leads to OOM because the column values of the
+	  WST files were loaded separately for every file.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/Importer.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValueType.java,
+	  src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java,
+	  src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Adjusted to use the new global context.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint which
+	  forces discharge tables to have a unique time interval
+	  for a given gauge and kind.
+
+	  There are AT files (historical Mosel/Perl/Perl/1967-1981.at
+	  and Mosel/Perl/1967-1981-1.at) which violate this
+	  constraint. It's a technical question for the customer
+	  how to handle these cases.
+
+	  To adjust existing databases:
+
+	    ALTER TABLE discharge_tables DROP CONSTRAINT discharge_tables_gauge_id_key;
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  There are wst files where column names are not unique.
+	  Make them unique by appending (1), (2) and so on.
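+
+	  Sketch of the renaming scheme (illustration only, not the actual
+	  WstParser code):
+
+	      Set<String> seen = new HashSet<String>(); // one per WST file
+
+	      String unique = name; // original column name
+	      for (int i = 1; !seen.add(unique); ++i) {
+	          unique = name + " (" + i + ")";
+	      }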
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Import the "HW-Schutzanlagen", too.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Make import of historical discharge tables work.
+
+	* doc/schema/postgresql.sql: Added forgotten column 'description'.
+
+	  !!! You have to drop your database !!!
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  src/main/java/de/intevation/flys/model/DischargeTable.java:
+	  Add the forgotten description property.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fixed problems with date recognition.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Prefix the description of the historical discharge tables 
+	  with "Histor.Abflusstafeln".
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Fixed silly programming error.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Inject 'kind' attribute.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Traverse the "Histor.Abflusstafeln" for the historical
+	  discharge tables, too. TODO: Store them in the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Added convenience constructor to set the kind of the
+	  discharge table.
+	  
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Prefix "Zusätzliche Längsschnitte" with "Zus.Längsschnitte"
+	  in description.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Import 'Hochwasser-Marken', too.
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed superfluous import.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Ignore lines that contain km positions which were found
+	  before in the same file.
+
+2011-05-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a method that
+	  returns all gauges of the river intersected by a given start and end
+	  point.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Forgot to fetch peer from result set.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  New. Importer model to help fetch the database peer.
+
+	* src/main/java/de/intevation/flys/model/TimeInterval.java:
+	  Add convenience constructor with start and stop time.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Attach a time interval to a discharge table if we find one.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Store the reference to the importer model of the 
+	  time interval of the discharge table.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Try to extract time ranges from at files.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store "amtliche Linien" wst files.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store fixation wst files as well.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the "zusaetzliche Laengsschnitte", too.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java:
+	  Add getter/setter for column 'kind'.
+
+2011-05-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Fixed flys/issue19: Do not take km column in wst file as a water level.
+
+2011-05-02  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Introduced a 'scale'
+	  that is used to adjust the range of min/max W values.
+
+2011-05-01	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed superfluous import.
+
+2011-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstColumnValueKey.java:
+	  New. This class is used as distinct key of a WstColumnValue - e.g. as
+	  key in a map.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: A
+	  WstColumnValue cache is built up during initialization. This cache
+	  contains all WstColumnValues that exist in the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java: New
+	  constructor that takes the WstColumnValues cache. It is used to
+	  initialize new ImportWstColumnValue objects.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java:
+	  Speedup: An ImportWstColumnValue has a WstColumnValues cache that
+	  contains all WstColumnValues existing in the database. This makes it
+	  unnecessary to issue an SQL statement for each WstColumnValue to
+	  determine its existence in the database.
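+
+	  Sketch of the caching idea (createAndPersist() is a made-up
+	  placeholder, not an actual method):
+
+	    Map<WstColumnValueKey, WstColumnValue> cache =
+	        new HashMap<WstColumnValueKey, WstColumnValue>();
+	    // ... filled once with all values from the database, then:
+	    WstColumnValue peer = cache.get(key);
+	    if (peer == null) {
+	        peer = createAndPersist(key); // db round trip only for new values
+	        cache.put(key, peer);
+	    }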
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Added a view 'wst_value_table' which aggregates the data
+	  to build w/q value tables.
+
+	  To update existing databases:
+
+	  BEGIN;
+	    CREATE VIEW wst_value_table AS
+	        SELECT wcv.position AS position,
+	               w,
+	               (SELECT q
+	                FROM   wst_column_q_ranges wcqr
+	                       JOIN wst_q_ranges wqr
+	                         ON wcqr.wst_q_range_id = wqr.id
+	                       JOIN ranges r
+	                         ON r.id = wqr.range_id
+	                WHERE  wcqr.wst_column_id = wc.id
+	                       AND wcv.position BETWEEN r.a AND r.b) AS q,
+	               wc.position                                   AS column_pos,
+	               w.id                                          AS wst_id
+	        FROM   wst_column_values wcv
+	               JOIN wst_columns wc
+	                 ON wcv.wst_column_id = wc.id
+	               JOIN wsts w
+	                 ON wc.wst_id = w.id
+	        ORDER  BY wcv.position ASC,
+	              wc.position DESC;
+	  COMMIT;
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java:
+	  Add forgotten one-to-many relation Wst -> WstColumn.
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Import of q ranges of wst files was totally broken. :-/
+	  You have to reimport all your data.
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Forgot kind parameter in peer fetching query.
+	  
+2011-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a new method to determine the gauge based on a given start and end
+	  point of the river.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added 'position' column to wst_columns
+	  to allow ordering them by their column position in the original
+	  wst file.
+	
+	  Update existing database with:
+
+	    BEGIN;
+	      ALTER TABLE wst_columns ADD COLUMN position int;
+	      UPDATE wst_columns w SET 
+	        position = id - (SELECT min(id) FROM wst_columns WHERE wst_id = w.wst_id);
+	      ALTER TABLE wst_columns ADD CONSTRAINT wst_columns_wst_id_position_key
+	        UNIQUE (wst_id, position);
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/model/WstColumn.java:
+	  Adjusted models.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Adjusted importer.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Fixed forgotten ',' in schema.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java (determineMinMaxQ): 
+	  Fixed index problem when an empty list is returned.
+
+2011-04-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: A Wst is now able to
+	  return its min and max Q values.
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: A Gauge is now able
+	  to return its min and max W values.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added new column 'kind' in
+	  discharge tables and wst to distinguish between different
+	  kinds like 'Haupt-WST', 'zusaetzliche Laengsschnitte',
+	  'amtliche Daten' and so on.
+
+	  Update existing databases with:
+	    BEGIN;
+	      ALTER TABLE discharge_tables ADD COLUMN kind int NOT NULL DEFAULT 0;
+	      ALTER TABLE wsts             ADD COLUMN kind int NOT NULL DEFAULT 0;
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/DischargeTable.java
+	  src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Adjusted the models.
+
+2011-04-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionHolder.java: Moved to
+	  this module from flys-artifacts. This is necessary to get access to the
+	  current session in this module as well.
+
+2011-04-14  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a method that
+	  returns the min and max distance of a river.
+
+2011-04-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/**/*.java: Removed trailing whitespace.
+
+2011-03-30  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 0.1
+
+2011-03-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Forgot to save the last change before commit.
+
+2011-03-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Added methods to find out if two ranges intersect.
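+
+	  For reference, a minimal intersection test for two closed
+	  intervals [a1, b1] and [a2, b2] (a sketch using BigDecimal,
+	  not the exact code in Range.java):
+
+	    boolean intersects =
+	        a1.compareTo(b2) <= 0 && a2.compareTo(b1) <= 0;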
+
+2011-03-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Added a one-to-many relation to the discharge tables of a gauge.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Finished import of WSTs.
+
+	TODO 1: Speed it up! It takes over 7(!) minutes on a high end
+	        machine for the data of the Saar alone.
+	TODO 2: Double precision floating point representations produced
+	        by the parsers lead to unique constraint violations
+	        in the backend on a second run. So the import currently
+	        only works on freshly initialized databases.
+	        More consistent use of BigDecimal and some
+	        rounding may help here.
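+
+	        One possible remedy (an assumption, not implemented here):
+	        normalize values to a fixed scale before comparing and
+	        persisting, e.g.
+
+	          BigDecimal normalized =
+	              value.setScale(3, java.math.RoundingMode.HALF_UP);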
+
+	* src/main/java/de/intevation/flys/model/WstColumnValue.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java:
+	  Added getPeer() method.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Add a list of the ImportWstColumnValues produced by the WST parser.
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java: Add
+	  the (km, w) values to the ImportWstColumns.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Build models for wsts, wst columns and q ranges and
+	  store them in the backend. TODO: store the w values.
+
+	* src/main/java/de/intevation/flys/model/WstQRange.java
+	  src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/model/Range.java,
+	  src/main/java/de/intevation/flys/model/WstColumnQRange.java,
+	  src/main/java/de/intevation/flys/model/WstColumn.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java:
+	  Added getPeer() methods.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  The unit extraction in the WST parser of desktop FLYS
+	  is broken! Added a hack here to repair this for our
+	  importer. Desktop FLYS needs a fix, too!
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Ported some stuff over from WSTSource.java of desktop flys to
+	  parse WST files. TODO: create instances of the import models.
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  Copied from desktop flys. Used for some string operations
+	  in WST parser.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Added system property 'flys.backend.importer.dry.run'.
+	  If set to true, only the parsing is done and nothing is
+	  written to the backend. Default: false.
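+
+	  A sketch of how such a flag is typically read (an assumption
+	  about the exact code):
+
+	    boolean dryRun =
+	        Boolean.getBoolean("flys.backend.importer.dry.run");
+
+	  It can be enabled on the java command line with
+	  -Dflys.backend.importer.dry.run=true.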
+
+	* src/main/java/de/intevation/flys/App.java,
+	  src/main/java/de/intevation/flys/model/MainValueType.java:
+	  Removed needless imports.
+
+2011-03-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeTableValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeTable.java: Added new
+	  constructors.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java: New. This
+	  parser is used to parse '*.at' files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java: Added code to
+	  import discharge tables.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java: New.
+	  Helper models for importing discharge tables.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added stub for WST parser.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Added importer helper model stubs for WST imports.
+	
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Second part of parsing/storing main values. Should be finished now.
+
+	* src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValue.java:
+	  New. Helper models for importing main values.
+
+	* src/main/java/de/intevation/flys/model/MainValue.java,
+	  src/main/java/de/intevation/flys/model/NamedMainValue.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Write main values to backend, too.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Build importer models for main values.
+
+	* src/main/java/de/intevation/flys/importer/ImportMainValueType.java:
+	  Data was called 'value'. Now it is 'name' to fit the
+	  schema.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Fixed wrong unique constraint.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added some logging when storing gauges.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Add forgotten column river_id.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Small HQL fix.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Added convenience constructor.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Fixed getPeer() method.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Fixed parsing of STA files.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Propagate river into storing of gauges.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Added code to store rivers, not only annotations.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added stub code to write gauges.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	First part of parsing main values.
+
+	* src/main/java/de/intevation/flys/App.java: Commented out
+	  creation of dummy rivers.
+
+	* src/main/java/de/intevation/flys/model/NamedMainValues.java:
+	  Moved to NamedMainValue.
+
+	* src/main/java/de/intevation/flys/model/NamedMainValue.java:
+	  New. Formerly NamedMainValues.
+
+	* src/main/java/de/intevation/flys/model/MainValue.java:
+	  New. Forgotten part of the model.
+
+	* src/main/java/de/intevation/flys/model/MainValueType.java:
+	  Data is String, not BigDecimal.
+
+	* src/main/java/de/intevation/flys/model/Range.java: Removed
+	  constructor with double arguments. Using BigDecimal now.
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java:
+	  Propagate BigDecimal usage.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Removed needless import. Added a TODO.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the dependencies of the gauges, too.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  New. Parser for STA files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Call STA file parser.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java:
+	  Uses BigDecimal now.
+
+	* src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Fixed wrong type cast in equals.
+
+	* src/main/java/de/intevation/flys/importer/ImportMainValueType.java:
+	  New. Helper model for importing main value types.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Register forgotten MainValue model.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Store annotations in backend.
+
+	* src/main/java/de/intevation/flys/model/Annotation.java:
+	  New convenience constructor.
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added toString() method.
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Fixed nasty mistake in @OneToOne annotation.
+	  New convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Make storing to backend work. It's a bit too slow. :-/
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Fetch peer from backend. Added method to store annotations.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Stored annotations in the backend. More detailed SQL exception
+	  handling.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Attribute.java,
+	  src/main/java/de/intevation/flys/model/Position.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Bound them to their backend peers.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  New. Added parser to read *.KM files.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Adjusted to load the annotations from *.KM files.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  New helper models for import.
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Use new models.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java:
+	  New. Parser for PEGEL.GLT files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  New. Import model for gauges.
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  Removed needless imports.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added method to parse the gauges.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Trigger pegel glt file parsing.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Used thread local pattern to make sharing of session easier.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Fixed error in HQL statement.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  Commented out a debug block because it leaks the db password.
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Expose imported rivers.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Store imported rivers into database. Needs testing!
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Default connection parameters are now overwritable with
+	  system properties (defaults in brackets):
+	  - user name:     flys.backend.user     (flys)
+	  - user password: flys.backend.password (flys)
+	  - db dialect:    flys.backend.dialect  (org.hibernate.dialect.PostgreSQLDialect)
+	  - db driver:     flys.backend.driver   (org.postgresql.Driver)
+	  - db url:        flys.backend.url      (jdbc:postgresql://localhost:5432/flys)
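+
+	  Reading such overridable defaults typically boils down to
+	  (sketch, not the exact code):
+
+	    String user = System.getProperty("flys.backend.user", "flys");
+	    String url  = System.getProperty(
+	        "flys.backend.url",
+	        "jdbc:postgresql://localhost:5432/flys");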
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Expose createSessionFactory() as public to be usable without
+	  an artifact database running.
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: 
+	  Tools for handling filenames. Currently there is
+	  a repair(File) method that repairs letter case errors,
+	  which is useful when reading Windows filenames on a
+	  un*x platform.
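+
+	  A sketch of the letter case repair for a single path element
+	  (the real method handles whole paths; this is an assumption):
+
+	    static File repair(File file) {
+	        File dir = file.getParentFile();
+	        String[] names = dir == null ? null : dir.list();
+	        if (names != null) {
+	            for (String name: names) {
+	                if (name.equalsIgnoreCase(file.getName())) {
+	                    return new File(dir, name);
+	                }
+	            }
+	        }
+	        return file;
+	    }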
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Standalone app to read data from the file system and
+	  store it in a database. Currently it does not store 
+	  anything. It only loads info gew files.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Info gew parser.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Helper model of a river produced by parsing the
+	  info gew files.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  New. SessionFactoryProvider.getSessionFactory() provides a
+	  SessionFactory to use the Hibernate O/R mapper for the FLYS backend.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Added dependency to artifacts-commons to
+	  be able to use the global configuration of the artifact database.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/App.java: Wire all POJOs
+	  to corresponding factory.
+
+	* src/main/java/de/intevation/flys/model/*.java: Generate
+	  all foreign key constraints. TODO: name them correctly
+	  because the machine generated names are ugly and do
+	  not fit the PostgreSQL names.
+
+	* doc/schema/postgresql.sql: Small quantifier fix in descriptions
+	  of wst columns.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Fixed wrongly spelled
+	  column references in foreign keys introduced with
+	  the last change.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/*.java: Added
+	  column annotations for simple fields. TODO: foreign keys.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Fixed inconsistent table names.
+
+	* src/main/java/de/intevation/flys/model/*.java: Added
+	  entity and id annotations.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Added missing sequences.
+
+	* doc/schema/sqlite.sql: Deleted. No longer supported.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-cleanup.sql: Forgot to add.
+
+	* src/main/java/de/intevation/flys/App.java: Use
+	  Apache Commons DBCP as Hibernate connection provider.
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a constructor with string argument. Set the
+	  sequence increment to 1 (it ate up 100 at a time before).
+
+	* pom.xml: Added PostgreSQL 8.4 driver as runtime dependency.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-cleanup.sql: New. Tear down schema
+	  for a postgres database.
+
+	* doc/schema/postgresql.sql: Added sequence for
+	  auto-generating ids in the river table. Cleaned up schema.
+
+	* src/main/java/de/intevation/flys/App.java: Simple
+	  test app to interact with hibernate. Needs to be removed
+	  because it's only a toy.
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  New. Binds Apache Commons DBCP to Hibernate.
+
+	* pom.xml: Added dependencies to log4j, commons dbcp,
+	  JPA of hibernate.
+
+	* src/main/java/de/intevation/flys/model/River.java: Added
+	  JPA annotations.
+
+	* src/main/java/de/intevation/flys/model/*.java: Replaced
+	  Long with Integer because column ids are only four bytes wide.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql, doc/schema/postgresql.sql: Fixed
+	  smaller issues in ddl.
+
+	* src/main/java/de/intevation/flys/model/*.java: Added POJOs
+	  to be mapped. TODO: Map them!
+
+	* pom.xml: Added plugin config for hibernate.
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Added dependency (and corresponding repository) to
+	  Hibernate Core 3.6.1 Final
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml, src/**: Created a new empty maven project:
+	  $ mvn archetype:create         \
+	    -DgroupId=de.intevation.flys \
+	    -DartifactId=flys-backend
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* README: New. Some setup instructions.
+
+	* doc/schema/postgresql.sql: New. Schema converted to PostgreSQL
+
+	* doc/schema/sqlite.sql: Fixed defective foreign key constraints.
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql: Factorized time intervals out into
+	  a separate table.
+
+2011-01-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* contrib/import-kms.py, contrib/import-gew.py: Initial scripts
+	  to import data into an SQLite database. They still need some work.
+
+2011-02-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql: Added initial schema for
+	  the FLYS database.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/Changes	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,109 @@
+2011-09-19      RELEASE 2.5
+
+    NEW:
+
+        * Improved the data model to support:
+          - cross sections
+          - hyks ("Hydraulische Kenngroessen")
+
+        * Improved the importer to read:
+          - PRF files
+          - HYK files
+
+        * Added a central Config to configure the Importer. There are several
+          system properties to adjust what to import (see ChangeLog for
+          details).
+
+        * Added PostGIS and Oracle models for spatial FLYS data:
+          - catchments
+          - river axes
+          - buildings
+          - fixpoints
+          - cross section tracks
+          - hws ("HW-Schutzanlagen")
+          - floodplains
+
+        * Bumped Hibernate up to 3.6.5.
+
+        * Bumped Apache DBCP up to 1.4.
+
+
+
+2011-06-27      RELEASE 2.4
+
+    New:
+
+        * Improved the data model to support:
+          - edges ("Oberkante", "Unterkante")
+          - annotations
+
+        * Improved the importer to read:
+          - edges
+          - annotations
+
+        * Made search for gauges in River independent of from/to
+          kilometer order.
+
+
+    Fixes:
+
+        * flys/issue76 Close gaps between q ranges
+
+        * flys/issue110 Fixed import of ATs that skipped the first value.
+
+
+
+2011-05-13      RELEASE 2.3
+
+    New:
+
+        * Import of:
+          - "zusätzliche Längsschnitte"
+          - "Fixierungen"
+          - "amtliche Linien"
+          - "Hochwassermarken"
+          - "Historische Abflusskurven"
+          - "HW-Schutzanlagen"
+
+        * Improvements in the SQL schema:
+          - Added a 'kind' column to WSTs
+          - Added a 'position' column to WST columns to enable sorting WST columns.
+          - Added a 'km_up' column to rivers to determine the flow direction of rivers.
+
+        * Rivers are now able to:
+          - determine their min/max kilometer range
+          - determine the selected gauge(s) based on a kilometer range
+
+        * WSTs are able to determine their min/max Q range.
+
+        * Gauges are able to determine their min/max W range.
+
+        * Added a view 'wst_value_table' that aggregates the data to build w/q
+          value tables.
+
+        * Added one-to-many relation Wst -> WstColumn
+
+        * Speedup of the importer by using an internal caching mechanism.
+
+        * The STA parser only accepts the following main value types: 'Q', 'W', 'D' and 'T'.
+
+
+    Fixes:
+
+        * Fixed import of Q ranges in wst files.
+
+        * Fixed flys/issue19: Do not take km column in wst file as a water level.
+
+        * Fixed flys/issue11 (Diagramm: Fehlerhafte Werte in Abflusskurven)
+
+        * Fixed flys/issue51 (WINFO: Fachdaten am Pegel Perl enthält Sprünge)
+
+
+    !!!
+
+    The version number of this release depends on an existing desktop variant of
+    this software that is in version 2.1.
+
+    !!!
+
+2011-03-30      RELEASE 0.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/README	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,10 @@
+For the artifact database
+# su - postgres
+$ createuser --no-createrole --no-superuser --pwprompt --no-createdb artifacts
+$ createdb --encoding=UTF-8 --owner artifacts artifactsdb
+
+For the flys database
+
+# su - postgres
+$ createuser --no-createrole --no-superuser --pwprompt --no-createdb flys
+$ createdb --encoding=UTF-8 --owner flys flystest1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/dump-schema.sh	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+DB_URL=jdbc:postgresql://czech-republic.atlas.intevation.de:5432/flys3
+DB_USER=flys
+DB_PASS=flys
+DB_DIALECT=org.hibernatespatial.oracle.OracleSpatial10gDialect
+
+mvn compile package -e \
+    -Dlog4j.configuration=file://`readlink -f contrib/log4j.properties` \
+    -Dflys.backend.user=$DB_USER \
+    -Dflys.backend.password=$DB_PASS \
+    -Dflys.backend.dialect=$DB_DIALECT \
+    -Dflys.backend.url=$DB_URL \
+    -Dexec.mainClass=de.intevation.flys.App \
+    exec:java
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/import-gew.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+import codecs
+import re
+
+HAUPTWERT  = re.compile(r"\s*([^\s]+)\s+([^\s+]+)\s+([QWDT-])")
+WHITESPACE = re.compile(r"\s+")
+
+class KM(object):
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.load_values()
+
+    def load_values(self):
+        with codecs.open(self.filename, "r", "latin-1") as f:
+            for line in f:
+                line = line.strip()
+                if not line or line.startswith("*"):
+                    continue
+                parts = [s.strip() for s in line.split(";")]
+                # TODO: Use code from import-kms.py
+
+class AbflussTafel(object):
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.name     = ""
+        self.values = []
+        self.load_values()
+
+    def load_values(self):
+        with codecs.open(self.filename, "r", "latin-1") as f:
+            first = True
+            for line in f:
+                line = line.strip()
+                if not line: continue
+                if line.startswith("#! name="):
+                    self.name = line[8:]
+                    continue
+                if line.startswith("#") or line.startswith("*"):
+                    continue
+                line = line.replace(",", ".")
+                splits = WHITESPACE.split(line)
+
+                if len(splits) < 2 or len(splits) > 11:
+                    continue
+
+                w = float(splits[0])
+
+                shift = 0
+
+                if len(splits) != 11 and first:
+                    shift = 11 - len(splits)
+
+                for idx, q in enumerate(splits[1:]):
+                    i_w = w + shift + idx
+                    i_q = float(q)
+                    w_q = (i_w/100.0, i_q/100.0)
+                    self.values.append(w_q)
+
+                first = False
+
+
+class Hauptwert(object):
+    def __init__(self, name, value, kind):
+        self.name  = name
+        self.extra = value
+        self.kind  = kind
+
+class Pegel(object):
+    def __init__(self, name, start, stop, sta, at, html):
+        self.name       = name
+        self.start      = start
+        self.stop       = stop
+        self.sta        = sta
+        self.at         = at
+        self.html       = html
+        self.aeo        = 0.0
+        self.nullpunkt  = 0.0
+        self.km         = 0.0
+        self.hauptwerte = []
+        self.load_hauptwerte()
+        self.at_data = AbflussTafel(self.at)
+
+    def load_hauptwerte(self):
+        with codecs.open(self.sta, "r", "latin-1") as f:
+            for line_no, line in enumerate(f):
+                line = line.rstrip()
+                if line_no == 0:
+                    first = False
+                    name = line[16:37].strip()
+                    line = [s.replace(",", ".") for s in line[37:].split()]
+                    self.aeo = float(line[0])
+                    self.nullpunkt = float(line[1])
+                    print >> sys.stderr, "pegel name: '%s'" % name
+                    print >> sys.stderr, "pegel aeo: '%f'" % self.aeo
+                    print >> sys.stderr, "pegel nullpunkt: '%f'" % self.nullpunkt
+                elif line_no == 1:
+                    self.km = float(line[29:36].strip().replace(",", "."))
+                    print >> sys.stderr, "km: '%f'" % self.km
+                else:
+                    if not line: continue
+                    line = line.replace(",", ".")
+                    m = HAUPTWERT.match(line)
+                    if not m: continue
+                    self.hauptwerte.append(Hauptwert(
+                        m.group(1), float(m.group(2)), m.group(3)))
+
+class Gewaesser(object):
+
+    def __init__(self, name=None, b_b=None, wst=None):
+        self.name = name
+        self.b_b = b_b
+        self.wst = wst
+        self.pegel = []
+
+    def load_pegel(self):
+        dir_name = os.path.dirname(self.wst)
+        pegel_glt = find_file(dir_name, "PEGEL.GLT")
+        if not pegel_glt:
+            print >> sys.stderr, "Missing PEGEL.GLT for %r" % self.name
+            return
+
+        print >> sys.stderr, "pegel_glt: %r" % pegel_glt
+
+        with codecs.open(pegel_glt, "r", "latin-1") as f:
+            for line in f:
+                line = line.strip()
+                if not line or line.startswith("#"):
+                    continue
+                # using re to cope with quoted columns,
+                # shlex has unicode problems.
+                parts = [p for p in re.split("( |\\\".*?\\\"|'.*?')", line) 
+                         if p.strip()]
+                if len(parts) < 7:
+                    print >> sys.stderr, "too few columns (need 7): %r" % line
+                    continue
+
+                print >> sys.stderr, "%r" % parts
+                self.pegel.append(Pegel(
+                    parts[0],
+                    min(float(parts[2]), float(parts[3])),
+                    max(float(parts[2]), float(parts[3])),
+                    norm_path(parts[4], dir_name),
+                    norm_path(parts[5], dir_name),
+                    parts[6]))
+
+
+    def __repr__(self):
+        return u"Gewaesser(name=%r, b_b=%r, wst=%r)" % (
+            self.name, self.b_b, self.wst)
+
+def norm_path(path, ref):
+    if not os.path.isabs(path):
+        path = os.path.normpath(os.path.join(ref, path))
+    return path
+
+def find_file(path, what):
+    what = what.lower()
+    for filename in os.listdir(path):
+        p = os.path.join(path, filename)
+        if os.path.isfile(p) and filename.lower() == what:
+            return p
+    return None
+    
+
+def read_gew(filename):
+
+    gewaesser = []
+
+    current = Gewaesser()
+
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+
+    with codecs.open(filename, "r", "latin-1") as f:
+        for line in f:
+            line = line.strip()
+            if not line or line.startswith("*"):
+                continue
+
+            if line.startswith(u"Gewässer:"):
+                if current.name:
+                    gewaesser.append(current)
+                    current = Gewaesser()
+                current.name = line[len(u"Gewässer:"):].strip()
+            elif line.startswith(u"B+B-Info:"):
+                current.b_b = norm_path(line[len(u"B+B-Info:"):].strip(),
+                                        dirname)
+            elif line.startswith(u"WSTDatei:"):
+                current.wst = norm_path(line[len(u"WSTDatei:"):].strip(),
+                                        dirname)
+
+        if current.name:
+            gewaesser.append(current)
+
+    return gewaesser
+
+def main():
+
+    if len(sys.argv) < 2:
+        print >> sys.stderr, "missing gew file"
+        sys.exit(1)
+
+    gew_filename = sys.argv[1]
+
+    if not os.path.isfile(gew_filename):
+        print >> sys.stderr, "'%s' is not a file" % gew_filename
+        sys.exit(1)
+
+    gewaesser = read_gew(gew_filename)
+
+    for gew in gewaesser:
+        gew.load_pegel()
+
+    
+
+if __name__ == '__main__':
+    main()
+# vim: set fileencoding=utf-8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/import-kms.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+
+import sys
+import logging
+import re
+import os
+
+import sqlite3 as db
+import locale
+import codecs
+
+from optparse import OptionParser
+
+log = logging.getLogger(__name__) 
+log.setLevel(logging.WARNING)
+log.addHandler(logging.StreamHandler(sys.stderr))
+
+RANGE = re.compile("([^#]*)#(.*)")
+DEFAULT_DATABASE = "flys.db"
+
+SQL_NEXT_ID   = "SELECT coalesce(max(id), -1) + 1 FROM %s"
+SQL_SELECT_ID = "SELECT id FROM %s WHERE %s = ?"
+SQL_INSERT_ID = "INSERT INTO %s (id, %s) VALUES (?, ?)"
+
+SQL_SELECT_RANGE_ID = """
+SELECT id FROM ranges WHERE river_id = ? AND a = ? AND b = ?
+"""
+SQL_INSERT_RANGE_ID = """
+INSERT INTO ranges (id, river_id, a, b) VALUES (?, ?, ?, ?)
+"""
+SQL_SELECT_ANNOTATION_ID = """
+SELECT id FROM annotations
+WHERE range_id = ? AND attribute_id = ? AND position_id = ?
+"""
+SQL_INSERT_ANNOTATION_ID = """
+INSERT INTO annotations (id, range_id, attribute_id, position_id) 
+VALUES (?, ?, ?, ?)
+"""
+
+def encode(s):
+    try:
+        return unicode(s, "latin-1")
+    except UnicodeDecodeError:
+        return unicode.encode(s, locale.getpreferredencoding())
+
+class hashabledict(dict):
+    def __key(self):
+        return tuple((k, self[k]) for k in sorted(self))
+    def __hash__(self):
+        return hash(self.__key())
+    def __eq__(self, other):
+        return self.__key() == other.__key()
+
+def cache(f):
+    def func(*args, **kw):
+        key = (args, hashabledict(kw))
+        try:
+            return f.__cache__[key]
+        except KeyError:
+            value = f(*args, **kw)
+            f.__cache__[key] = value
+            return value
+    f.__cache__ = {}
+    return func
+
+NEXT_IDS = {}
+def next_id(cur, relation):
+    idx = NEXT_IDS.get(relation)
+    if idx is None:
+        cur.execute(SQL_NEXT_ID % relation)
+        idx = cur.fetchone()[0]
+    NEXT_IDS[relation] = idx + 1
+    return idx
+
+def get_id(cur, relation, attribute, value):
+    select_stmt = SQL_SELECT_ID % (relation, attribute)
+    #log.debug(select_stmt)
+    cur.execute(select_stmt, (value,))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, relation)
+    insert_stmnt = SQL_INSERT_ID % (relation, attribute)
+    #log.debug(insert_stmnt)
+    cur.execute(insert_stmnt, (idx, value))
+    cur.connection.commit()
+    log.debug("insert %s '%s' id: '%d'" % (relation, value, idx))
+    return idx
+
+#@cache
+def get_river_id(cur, name):
+    return get_id(cur, "rivers", "name", name)
+
+#@cache
+def get_attribute_id(cur, value):
+    return get_id(cur, "attributes", "value", value)
+
+#@cache
+def get_position_id(cur, value):
+    return get_id(cur, "positions", "value", value)
+
+#@cache
+def get_range_id(cur, river_id, a, b):
+    cur.execute(SQL_SELECT_RANGE_ID, (river_id, a, b))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "ranges")
+    cur.execute(SQL_INSERT_RANGE_ID, (idx, river_id, a, b))
+    cur.connection.commit()
+    return idx
+
+#@cache
+def get_annotation_id(cur, range_id, attribute_id, position_id):
+    cur.execute(SQL_SELECT_ANNOTATION_ID, (
+        range_id, attribute_id, position_id))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "annotations")
+    cur.execute(SQL_INSERT_ANNOTATION_ID, (
+        idx, range_id, attribute_id, position_id))
+    cur.connection.commit()
+    return idx
+
+def files(root, accept=lambda x: True):
+    if os.path.isfile(root):
+        if accept(root): yield root
+    elif os.path.isdir(root):
+        stack = [ root ]
+        while stack:
+            cur = stack.pop()
+            for f in os.listdir(cur):
+                p = os.path.join(cur, f)
+                if os.path.isdir(p):
+                    stack.append(p)
+                elif os.path.isfile(p) and accept(p):
+                    yield p
+
+def feed_km(cur, river_id, km_file):
+
+    log.info("processing: %s" % km_file)
+
+    for line in codecs.open(km_file, "r", "latin-1"):
+        line = line.strip()
+        if not line or line.startswith('*'):
+            continue
+        parts = [x.strip() for x in line.split(';')]
+        if len(parts) < 3:
+            log.error("cannot process: '%s'" % line)
+            continue
+        m = RANGE.match(parts[2])
+        try:
+            if m:
+                x = [float(x.replace(",", ".")) for x in m.groups()]
+                a, b = min(x), max(x)
+                if a == b: b = None
+            else:
+                a, b = float(parts[2].replace(",", ".")), None
+        except ValueError:
+            log.error("cannot process: '%s'" % line)
+            continue
+
+        attribute = parts[0]
+        position  = parts[1]
+        attribute_id = get_attribute_id(cur, attribute) if attribute else None
+        position_id  = get_position_id(cur, position) if position else None
+
+        range_id = get_range_id(cur, river_id, a, b)
+
+        get_annotation_id(cur, range_id, attribute_id, position_id)
+
+def main():
+
+    usage = "usage: %prog [options] river km-file ..."
+    parser = OptionParser(usage=usage)
+    parser.add_option(
+        "-v", "--verbose", action="store_true",
+        dest="verbose",
+        help="verbose output")
+    parser.add_option(
+        "-r", "--recursive", action="store_true",
+        dest="recursive", default=False,
+        help="recursive")
+    parser.add_option(
+        "-d", "--database", action="store",
+        dest="database",
+        help="database to connect with",
+        default=DEFAULT_DATABASE)
+
+    options, args = parser.parse_args()
+
+    if options.verbose:
+        log.setLevel(logging.INFO)
+    
+    if len(args) < 1:
+        log.error("missing river argument")
+        sys.exit(1)
+
+    river = unicode(args[0], locale.getpreferredencoding())
+
+    with db.connect(options.database) as con:
+        cur = con.cursor()
+        river_id = get_river_id(cur, river)
+
+        for arg in args[1:]:
+            if options.recursive:
+                for km_file in files(
+                    arg, lambda x: x.lower().endswith(".km")):
+                    feed_km(cur, river_id, km_file)
+            else:
+                feed_km(cur, river_id, arg)
+        
+
+if __name__ == '__main__':
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/run_geo.sh	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,50 @@
+#!/bin/sh
+
+# Required
+RIVER_PATH="/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar"
+RIVER_ID=1
+TARGET_SRS=31467
+HOST=localhost
+USER=flys28
+PASS=flys28
+
+# Optional
+VERBOSE=1
+SKIP_AXIS=0
+SKIP_KMS=0
+SKIP_CROSSSECTIONS=0
+SKIP_LINES=0
+SKIP_FIXPOINTS=0
+SKIP_BUILDINGS=0
+SKIP_FLOODPLAINS=0
+SKIP_HYDR_BOUNDARIES=0
+SKIP_HWS=0
+SKIP_GAUGE_LOCATION=0
+SKIP_CATCHMENTS=0
+SKIP_UESG=0
+
+
+DIR=`dirname $0`
+DIR=`readlink -f "$DIR"`
+
+exec python $DIR/shpimporter/shpimporter.py \
+    --directory $RIVER_PATH \
+    --river_id $RIVER_ID \
+    --target_srs $TARGET_SRS \
+    --host $HOST \
+    --user $USER \
+    --password $PASS \
+    --verbose $VERBOSE \
+    --skip_axis $SKIP_AXIS \
+    --skip_kms $SKIP_KMS \
+    --skip_crosssections $SKIP_CROSSSECTIONS \
+    --skip_lines $SKIP_LINES \
+    --skip_fixpoints $SKIP_FIXPOINTS \
+    --skip_buildings $SKIP_BUILDINGS \
+    --skip_floodplains $SKIP_FLOODPLAINS \
+    --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \
+    --skip_hws $SKIP_HWS \
+    --skip_gauge_locations $SKIP_GAUGE_LOCATION \
+    --skip_catchments $SKIP_CATCHMENTS \
+    --skip_uesgs $SKIP_UESG
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/run_hydr_morph.sh	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+######################### CONFIG OPTIONS ############################
+INFO_GEW="/vol1/projects/Geospatial/flys-3.0/testdaten/saar.gew"
+BACKEND_USER="flys28"
+BACKEND_PASS="flys28"
+BACKEND_HOST="czech-republic.atlas.intevation.de"
+BACKEND_PORT="1521"
+BACKEND_NAME="XE"
+LOG4J_CONFIG="conf/log4j.properties"
+#####################################################################
+
+
+########################## Oracle Settings ##########################
+BACKEND_DB_PREFIX="jdbc:oracle:thin:@"
+BACKEND_DB_DRIVER="oracle.jdbc.OracleDriver"
+BACKEND_DB_DIALECT="org.hibernate.dialect.OracleDialect"
+BACKEND_URL=$BACKEND_DB_PREFIX//$BACKEND_HOST:$BACKEND_PORT/$BACKEND_NAME
+#####################################################################
+
+
+######################## Custom Importer Settings ###################
+IMPORTER_DRY_RUN=false
+IMPORTER_MAINVALUE_TYPES=QWTD
+IMPORTER_ANNOTATION_TYPES="conf/annotation-types.xml"
+
+IMPORTER_SKIP_GAUGES=false
+IMPORTER_SKIP_ANNOTATIONS=false
+IMPORTER_SKIP_WST=false
+IMPORTER_SKIP_PRFS=false
+IMPORTER_SKIP_HYKS=false
+IMPORTER_SKIP_EXTRA_WST=false
+IMPORTER_SKIP_FIXATIONS=false
+IMPORTER_SKIP_OFFICIAL_LINES=false
+IMPORTER_SKIP_FLOOD_WATER=false
+IMPORTER_SKIP_FLOOD_PROTECTION=false
+
+IMPORTER_SKIP_BED_HEIGHT_SINGLE=false
+IMPORTER_SKIP_BED_HEIGHT_EPOCH=false
+IMPORTER_SKIP_SEDIMENT_DENSITY=false
+IMPORTER_SKIP_MORPHOLOGICAL_WIDTH=false
+IMPORTER_SKIP_FLOW_VELOCITY=false
+IMPORTER_SKIP_SEDIMENT_YIELD=false
+IMPORTER_SKIP_WATERLEVELS=false
+IMPORTER_SKIP_WATERLEVEL_DIFFERENCES=false
+IMPORTER_SKIP_SQ_RELATION=false
+#####################################################################
+
+#MIN_MEMORY="8192m"
+MIN_MEMORY="1024m"
+
+
+########################## Importer Settings ########################
+APP="de.intevation.flys.importer.Importer"
+DIR=`dirname $0`
+DIR=`readlink -f "$DIR/.."`
+#####################################################################
+
+
+########################## Collect required libraries ###############
+CLASSPATH=
+for l in `find "$DIR/lib" -name \*.jar -print`; do
+   CLASSPATH=$CLASSPATH:$l
+done
+
+export CLASSPATH
+#####################################################################
+
+
+######################### Run Importer ##############################
+exec java \
+    -Xmx$MIN_MEMORY \
+    -server \
+    -Dlog4j.configuration=file://`readlink -f $LOG4J_CONFIG` \
+    -Dflys.backend.importer.infogew.file=$INFO_GEW \
+    -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \
+    -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \
+    -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \
+    -Dflys.backend.importer.skip.gauges=$IMPORTER_SKIP_GAUGES \
+    -Dflys.backend.importer.skip.annotations=$IMPORTER_SKIP_ANNOTATIONS \
+    -Dflys.backend.importer.skip.prfs=$IMPORTER_SKIP_PRFS \
+    -Dflys.backend.importer.skip.hyks=$IMPORTER_SKIP_HYKS \
+    -Dflys.backend.importer.skip.wst=$IMPORTER_SKIP_WST \
+    -Dflys.backend.importer.skip.extra.wsts=$IMPORTER_SKIP_EXTRA_WST \
+    -Dflys.backend.importer.skip.fixations=$IMPORTER_SKIP_FIXATIONS \
+    -Dflys.backend.importer.skip.official.lines=$IMPORTER_SKIP_OFFICIAL_LINES \
+    -Dflys.backend.importer.skip.flood.water=$IMPORTER_SKIP_FLOOD_WATER \
+    -Dflys.backend.importer.skip.flood.protection=$IMPORTER_SKIP_FLOOD_PROTECTION \
+    -Dflys.backend.importer.skip.bed.height.single=$IMPORTER_SKIP_BED_HEIGHT_SINGLE \
+    -Dflys.backend.importer.skip.bed.height.epoch=$IMPORTER_SKIP_BED_HEIGHT_EPOCH \
+    -Dflys.backend.importer.skip.sediment.density=$IMPORTER_SKIP_SEDIMENT_DENSITY \
+    -Dflys.backend.importer.skip.morphological.width=$IMPORTER_SKIP_MORPHOLOGICAL_WIDTH \
+    -Dflys.backend.importer.skip.flow.velocity=$IMPORTER_SKIP_FLOW_VELOCITY \
+    -Dflys.backend.importer.skip.sediment.yield=$IMPORTER_SKIP_SEDIMENT_YIELD \
+    -Dflys.backend.importer.skip.waterlevels=$IMPORTER_SKIP_WATERLEVELS \
+    -Dflys.backend.importer.skip.waterlevel.differences=$IMPORTER_SKIP_WATERLEVEL_DIFFERENCES \
+    -Dflys.backend.importer.skip.sq.relation=$IMPORTER_SKIP_SQ_RELATION \
+    -Dflys.backend.user=$BACKEND_USER \
+    -Dflys.backend.password=$BACKEND_PASS \
+    -Dflys.backend.url=$BACKEND_URL \
+    -Dflys.backend.driver=$BACKEND_DB_DRIVER \
+    -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
+     $APP
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/axis.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,52 @@
+import ogr
+
+from importer import Importer
+import shpimporter
+
+NAME="Axis"
+TABLE_NAME="river_axes"
+PATH="Geodaesie/Flussachse+km"
+
+
+class Axis(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return name == "achse" or name.find("achse") >= 0
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            riverId = feat.GetField("river_id")
+        else:
+            riverId = self.river_id
+
+        if self.IsFieldSet(feat, "kind"):
+            kind = feat.GetField("kind")
+        else:
+            kind = 0
+
+        newFeat.SetField("river_id", riverId)
+        newFeat.SetField("kind", kind)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/boundaries.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,91 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="hydr_boundaries"
+TABLE_NAME_POLY="hydr_boundaries_poly"
+PATH="Hydrologie/Hydr.Grenzen/Linien"
+NAME="Hydr. Boundaries"
+
+
+class HydrBoundary(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def getKind(self, path):
+        if path.find("BfG") > 0:
+            return 1
+        else:
+            return 2
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+        newFeat.SetField("kind", kind)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
+
+
+class HydrBoundaryPoly(HydrBoundary):
+
+    def getTablename(self):
+        return TABLE_NAME_POLY
+
+
+    def getName(self):
+        return "%s (Polygons)" % NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+        newFeat.SetField("kind", kind)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/buildings.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,47 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="buildings"
+PATH="Geodaesie/Bauwerke"
+NAME="Buildings"
+
+
+class Building(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("Name"))
+        elif self.IsFieldSet(feat, "KWNAAM"):
+            newFeat.SetField("name", feat.GetField("KWNAAM"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/catchments.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,53 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="catchment"
+PATH="Hydrologie/Einzugsgebiet"
+NAME="Catchments"
+
+
+class Catchment(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("name"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "AREA"):
+            newFeat.SetField("area", feat.GetField("area"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/crosssectiontracks.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,57 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="cross_section_tracks"
+PATH="Geodaesie/Querprofile"
+NAME="Crosssections"
+
+
+class CrosssectionTrack(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "KILOMETER"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KILOMETER"))
+        elif self.IsFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        elif self.IsFieldSet(feat, "STATION"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("STATION"))
+        else:
+            return None
+
+        if self.IsFieldSet(feat, "ELEVATION"):
+            newFeat.SetField("z", feat.GetFieldAsDouble("ELEVATION"))
+        else:
+            newFeat.SetField("z", 0)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/fixpoints.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,61 @@
+import ogr, osr
+
+from importer import Importer
+
+TABLE_NAME="fixpoints"
+PATH="Geodaesie/Festpunkte"
+NAME="Fixpoints"
+
+
+class Fixpoint(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        elif self.IsFieldSet(feat, "ELBE_KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("ELBE_KM"))
+        else:
+            return None
+
+        if self.IsFieldSet(feat, "X"):
+            newFeat.SetField("x", feat.GetFieldAsDouble("X"))
+
+        if self.IsFieldSet(feat, "Y"):
+            newFeat.SetField("y", feat.GetFieldAsDouble("Y"))
+
+        if self.IsFieldSet(feat, "HPGP"):
+            newFeat.SetField("HPGP", feat.GetField("HPGP"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/floodplains.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,45 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="floodplain"
+PATH="Hydrologie/Hydr.Grenzen"
+NAME="Floodplains"
+
+
+class Floodplain(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def isShapeRelevant(self, name, path):
+        return name.find("talaue") >= 0
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/gauges.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,52 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="gauge_location"
+PATH="Hydrologie/Streckendaten"
+NAME="Gauge locations"
+
+
+class GaugeLocation(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("Name"))
+        elif self.IsFieldSet(feat, "MPNAAM"):
+            newFeat.SetField("name", feat.GetField("MPNAAM"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/hws.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,56 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="hws"
+PATH="Hydrologie/HW-Schutzanlagen"
+NAME="HWS"
+
+
+class HWS(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "TYP"):
+            newFeat.SetField("type", feat.GetField("TYP"))
+
+        if self.IsFieldSet(feat, "Bauart"):
+            newFeat.SetField("hws_facility", feat.GetField("Bauart"))
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("Name"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/importer.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,147 @@
+import ogr, osr
+import shpimporter
+
+class Importer:
+
+    def __init__(self, config):
+        self.config = config
+        self.dbconn   = 'OCI:%s/%s@%s' % (config.user, config.password, config.host)
+        self.river_id = config.river_id
+        self.dest_srs = osr.SpatialReference()
+        self.dest_srs.ImportFromEPSG(config.target_srs)
+
+
+    def getKind(self, path):
+        raise NotImplementedError("Importer.getKind is abstract!")
+
+
+    def getPath(self, base):
+        raise NotImplementedError("Importer.getPath is abstract!")
+
+
+    def getTablename(self):
+        raise NotImplementedError("Importer.getTablename is abstract!")
+
+
+    def getName(self):
+        raise NotImplementedError("Importer.getName is abstract!")
+
+
+    def IsFieldSet(self, feat, name):
+        try:
+            isset = feat.GetField(name)
+            return isset is not None
+        except:
+            return False
+
+
+    def IsDoubleFieldSet(self, feat, name):
+        try:
+            isset = feat.GetFieldAsDouble(name)
+            return isset is not None
+        except:
+            return False
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def walkOverShapes(self, shape):
+        (name, path) = shape
+        if not self.isShapeRelevant(name, path):
+            shpimporter.INFO("Skip shapefile '%s'" % path)
+            return
+
+        shp = ogr.Open(shape[1])
+        if shp is None:
+            shpimporter.ERROR("Shapefile '%s' could not be opened!" % path)
+            return
+
+        shpimporter.INFO("Processing shapefile '%s'" % path)
+        srcLayer = shp.GetLayerByName(name)
+
+        if srcLayer is None:
+            shpimporter.ERROR("Layer '%s' was not found!" % name)
+            return
+
+        return self.shape2Database(srcLayer, name, path)
+
+
+    def transform(self, feat):
+        geometry = feat.GetGeometryRef()
+        src_srs  = geometry.GetSpatialReference()
+
+        if src_srs is None:
+            shpimporter.ERROR("No source SRS given! No transformation possible!")
+            return feat
+
+        transformer = osr.CoordinateTransformation(src_srs, self.dest_srs)
+        geometry.Transform(transformer)
+
+        return feat
+
+
+    def shape2Database(self, srcLayer, name, path):
+        table     = ogr.Open(self.dbconn)
+        destLayer = table.GetLayerByName(self.getTablename())
+
+        if srcLayer is None:
+            shpimporter.ERROR("Shapefile is None!")
+            return -1
+
+        if destLayer is None:
+            shpimporter.ERROR("No destination layer given!")
+            return -1
+
+        count = srcLayer.GetFeatureCount()
+        shpimporter.DEBUG("Try to add %i features to database." % count)
+
+        srcLayer.ResetReading()
+
+        geomType    = -1
+        success     = 0
+        unsupported = 0
+        creationFailed = 0
+        featureDef  = destLayer.GetLayerDefn()
+
+        for feat in srcLayer:
+            geom     = feat.GetGeometryRef()
+
+            if geom is None:
+                continue
+
+            geomType = geom.GetGeometryType()
+
+            if self.isGeometryValid(geomType):
+                newFeat = self.createNewFeature(featureDef,
+                                                feat,
+                                                name=name,
+                                                path=path)
+
+                if newFeat is not None:
+                    newFeat.SetField("path", path)
+                    newFeat = self.transform(newFeat)
+                    res = destLayer.CreateFeature(newFeat)
+                    if res is None or res > 0:
+                        shpimporter.ERROR("Unable to insert feature: %r" % res)
+                    else:
+                        success = success + 1
+                else:
+                    creationFailed = creationFailed + 1
+            else:
+                unsupported = unsupported + 1
+
+        shpimporter.INFO("Inserted %i features" % success)
+        shpimporter.INFO("Failed to create %i features" % creationFailed)
+        shpimporter.INFO("Found %i unsupported features" % unsupported)
+
+        try:
+            if self.config.dry_run > 0:
+                return geomType
+            destLayer.CommitTransaction()
+        except Exception, e:
+            shpimporter.ERROR("Exception while committing transaction: %s" % e)
+
+        return geomType
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/km.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,50 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="river_axes_km"
+PATH="Geodaesie/Flussachse+km"
+NAME="KMS"
+
+
+class KM(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return name == "km"
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsDoubleFieldSet(feat, "km"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("km"))
+        elif self.IsDoubleFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        else:
+            return None
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/lines.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,56 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="lines"
+PATH="Geodaesie/Linien"
+NAME="Lines"
+
+
+class Line(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2 or geomType == -2147483646
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "TYP"):
+            newFeat.SetField("kind", feat.GetFieldAsDouble("TYP"))
+        else:
+            newFeat.SetField("kind", "DAMM")
+
+        if self.IsFieldSet(feat, "Z"):
+            newFeat.SetField("z", feat.GetFieldAsDouble("Z"))
+        else:
+            newFeat.SetField("z", 9999)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/run.sh	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+# Required
+RIVER_PATH="/path/to/rivers/river"
+RIVER_ID=1
+TARGET_SRS=31466
+HOST=localhost
+USER=the_user
+PASS=the_pass
+
+# Optional
+VERBOSE=1
+SKIP_AXIS=0
+SKIP_KMS=0
+SKIP_CROSSSECTIONS=0
+SKIP_LINES=0
+SKIP_FIXPOINTS=0
+SKIP_BUILDINGS=0
+SKIP_FLOODPLAINS=0
+SKIP_HYDR_BOUNDARIES=0
+SKIP_HWS=0
+SKIP_GAUGE_LOCATION=0
+SKIP_CATCHMENTS=0
+SKIP_UESG=0
+
+exec python shpimporter.py \
+    --directory $RIVER_PATH \
+    --river_id $RIVER_ID \
+    --target_srs $TARGET_SRS \
+    --host $HOST \
+    --user $USER \
+    --password $PASS \
+    --verbose $VERBOSE \
+    --skip_axis $SKIP_AXIS \
+    --skip_kms $SKIP_KMS \
+    --skip_crosssections $SKIP_CROSSSECTIONS \
+    --skip_lines $SKIP_LINES \
+    --skip_fixpoints $SKIP_FIXPOINTS \
+    --skip_buildings $SKIP_BUILDINGS \
+    --skip_floodplains $SKIP_FLOODPLAINS \
+    --skip_hydr_boundaries $SKIP_HYDR_BOUNDARIES \
+    --skip_hws $SKIP_HWS \
+    --skip_gauge_locations $SKIP_GAUGE_LOCATION \
+    --skip_catchments $SKIP_CATCHMENTS \
+    --skip_uesgs $SKIP_UESG
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/shpimporter.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,171 @@
+import ogr
+
+import utils, optparse
+
+from uesg  import UESG
+from axis  import Axis
+from km    import KM
+from lines import Line
+from fixpoints import Fixpoint
+from buildings import Building
+from crosssectiontracks import CrosssectionTrack
+from floodplains import Floodplain
+from boundaries import HydrBoundary, HydrBoundaryPoly
+from hws import HWS
+from gauges import GaugeLocation
+from catchments import Catchment
+
+
+VERBOSE_DEBUG=2
+VERBOSE_INFO=1
+
+
+def DEBUG(msg):
+    config = getConfig()
+    if config.verbose >= VERBOSE_DEBUG:
+        print "DEBUG: %s" % msg
+
+def INFO(msg):
+    config = getConfig()
+    if config.verbose >= VERBOSE_INFO:
+        print "INFO: %s" % msg
+
+def ERROR(msg):
+    # Errors are always printed regardless of the verbosity level.
+    # Do not call getConfig() here: getConfig() itself reports invalid
+    # arguments via ERROR(), which would recurse.
+    print "ERROR: %s" % msg
+
+
+def getImporters(config):
+    return [
+        Axis(config),
+        KM(config),
+        CrosssectionTrack(config),
+        Line(config),
+        Fixpoint(config),
+        Building(config),
+        Floodplain(config),
+        HydrBoundary(config),
+        HydrBoundaryPoly(config),
+        HWS(config),
+        GaugeLocation(config),
+        Catchment(config),
+        UESG(config)
+        ]
+
+
+def getConfig():
+    parser = optparse.OptionParser()
+    parser.add_option("--directory", type="string")
+    parser.add_option("--target_srs", type="int")
+    parser.add_option("--host", type="string")
+    parser.add_option("--user", type="string")
+    parser.add_option("--password", type="string")
+    parser.add_option("--river_id", type="int")
+    parser.add_option("--verbose", type="int", default=1)
+    parser.add_option("--dry_run", type="int", default=0)
+    parser.add_option("--skip_axis", type="int")
+    parser.add_option("--skip_hydr_boundaries", type="int")
+    parser.add_option("--skip_buildings", type="int")
+    parser.add_option("--skip_crosssections", type="int")
+    parser.add_option("--skip_lines", type="int")
+    parser.add_option("--skip_fixpoints", type="int")
+    parser.add_option("--skip_floodplains", type="int")
+    parser.add_option("--skip_hws", type="int")
+    parser.add_option("--skip_gauge_locations", type="int")
+    parser.add_option("--skip_catchments", type="int")
+    parser.add_option("--skip_kms", type="int")
+    parser.add_option("--skip_uesgs", type="int")
+    (config, args) = parser.parse_args()
+
+    if config.directory == None:
+        ERROR("No river directory specified!")
+        raise Exception("Invalid config")
+    elif config.host == None:
+        ERROR("No database host specified!")
+        raise Exception("Invalid config")
+    elif config.user == None:
+        ERROR("No database user specified!")
+        raise Exception("Invalid config")
+    elif config.password == None:
+        ERROR("No password specified!")
+        raise Exception("Invalid config")
+    elif config.river_id == None:
+        ERROR("No river id specified!")
+        raise Exception("Invalid config")
+
+    return config
+
+
+def skip_importer(config, importer):
+    if config.skip_axis == 1 and isinstance(importer, Axis):
+        return True
+    elif config.skip_hydr_boundaries == 1 and isinstance(importer, HydrBoundary):
+        return True
+    elif config.skip_hydr_boundaries == 1 and isinstance(importer, HydrBoundaryPoly):
+        return True
+    elif config.skip_buildings == 1 and isinstance(importer, Building):
+        return True
+    elif config.skip_crosssections == 1 and isinstance(importer, CrosssectionTrack):
+        return True
+    elif config.skip_lines == 1 and isinstance(importer, Line):
+        return True
+    elif config.skip_fixpoints == 1 and isinstance(importer, Fixpoint):
+        return True
+    elif config.skip_floodplains == 1 and isinstance(importer, Floodplain):
+        return True
+    elif config.skip_hws == 1 and isinstance(importer, HWS):
+        return True
+    elif config.skip_gauge_locations == 1 and isinstance(importer, GaugeLocation):
+        return True
+    elif config.skip_catchments == 1 and isinstance(importer, Catchment):
+        return True
+    elif config.skip_kms == 1 and isinstance(importer, KM):
+        return True
+    elif config.skip_uesgs == 1 and isinstance(importer, UESG):
+        return True
+
+    return False
+
+
+def parse():
+    config=None
+    try:
+        config = getConfig()
+    except:
+        return
+
+    if config == None:
+        ERROR("Unable to read config from command line!")
+        return
+
+    if config.dry_run > 0:
+        INFO("You enabled 'dry_run'. No database transactions will take place!")
+
+    importers = getImporters(config)
+    types = {}
+
+    for importer in importers:
+        if skip_importer(config, importer):
+            INFO("Skip import of '%s'" % importer.getName())
+            continue
+
+        INFO("Start import of '%s'" % importer.getName())
+
+        shapes = utils.findShapefiles(importer.getPath(config.directory))
+        DEBUG("Found %i Shapefiles" % len(shapes))
+
+        for shpTuple in shapes:
+            geomType = importer.walkOverShapes(shpTuple)
+            try:
+                if geomType is not None:
+                    num = types[geomType]
+                    types[geomType] = num+1
+            except:
+                types[geomType] = 1
+
+    for key in types:
+        DEBUG("%i x geometry type %s" % (types[key], key))
+
+
+if __name__ == '__main__':
+    parse()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/uesg.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,94 @@
+import ogr
+
+from importer import Importer
+
+
+TABLE_NAME="floodmaps"
+PATH="Hydrologie/UeSG/Berechnung"
+NAME="UESG"
+
+
+class UESG(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def getName(self):
+        return NAME
+
+
+    def isGeometryValid(self, geomType):
+        if geomType == 3 or geomType == 6:
+            return True
+        else:
+            return False
+
+
+    def getKind(self, path):
+        kind = 0
+        if path.find("Berechnung") > 0:
+            kind = kind + 100
+
+            if path.find("Aktuell") > 0:
+                kind = kind + 10
+            else:
+                kind = kind + 20
+
+            if path.find("Land") > 0:
+                kind = kind + 2
+            else:
+                kind = kind + 1
+        else:
+            kind = kind + 200
+
+        return kind
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+
+        if self.IsFieldSet(feat, "river_id"):
+            riverId = feat.GetField("river_id")
+        else:
+            riverId = self.river_id
+
+        if self.IsFieldSet(feat, "diff"):
+            diff = feat.GetFieldAsDouble("diff")
+        else:
+            diff = 0
+
+        if self.IsFieldSet(feat, "count"):
+            count = feat.GetFieldAsInteger("count")
+        else:
+            count = 0
+
+        if self.IsFieldSet(feat, "area"):
+            area = feat.GetFieldAsDouble("area")
+        else:
+            area = 0
+
+        if self.IsFieldSet(feat, "perimeter"):
+            perimeter = feat.GetFieldAsDouble("perimeter")
+        else:
+            perimeter = 0
+
+        groupId = 2
+
+        newFeat.SetField("river_id", riverId)
+        newFeat.SetField("diff", diff)
+        newFeat.SetField("count", count)
+        newFeat.SetField("area", area)
+        newFeat.SetField("perimeter", perimeter)
+        newFeat.SetField("kind", kind)
+        newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/utils.py	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,21 @@
+import os
+from shpimporter import DEBUG, INFO, ERROR
+
+SHP='.shp'
+
+def findShapefiles(path):
+    shapes = []
+
+    for root, dirs, files in os.walk(path):
+        if len(files) == 0:
+            continue
+
+        DEBUG("Processing directory '%s' with %i files " % (root, len(files)))
+
+        for f in files:
+            if f.endswith(SHP):
+                shapes.append((f[:-len(SHP)], os.path.join(root, f)))
+
+    return shapes
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/spatial-info.sh	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+mvn -e \
+    -Dlog4j.configuration=file://`readlink -f contrib/log4j.properties` \
+    -Dflys.backend.user=USER \
+    -Dflys.backend.password=PASSWORD \
+    -Dflys.backend.url=jdbc:postgresql://THE.DATABASE.HOST:PORT/DBNAME \
+    -Dflys.backend.spatial.river=Saar \
+    -Dexec.mainClass=de.intevation.flys.backend.SpatialInfo \
+    exec:java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/README.txt	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,527 @@
+FLYS3 Importer
+
+The FLYS3 importer is used to import hydrological and morphological
+river data from the file system into the FLYS3 database.
+The tool follows the file hierarchy as it is also read by
+Desktop-FLYS.
+
+As its start argument, the importer is given the path to a
+GEW file.
+
+Within this file, the lines starting with "WSTDatei:" are the
+important ones for the importer. They specify the path to the
+central WST file of the respective river. The location of all other
+imported files is interpreted relative to the location of this file.
+
+The behaviour of the import can be controlled with so-called
+system properties. These are generally given in the form
+-Dkey=value.
+
+If, for example, you only want to perform a simulated import,
+this can be done by specifying
+'-Dflys.backend.importer.dry.run=true'.
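+
+A minimal sketch of such a call (the classpath and the main class are
+placeholders that depend on the local installation; only the system
+property and the GEW argument are taken from this document):
+
+  java -cp <IMPORTER_CLASSPATH> \
+      -Dflys.backend.importer.dry.run=true \
+      <IMPORTER_MAIN_CLASS> \
+      /path/to/river/river.gew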
+
+!!! Like Desktop-FLYS, the importer assumes that the files are
+!!! encoded in Latin-1.
+
+For the importer, the import of a single river forms one
+transactional unit. If the import of a river is aborted, all
+changes regarding this river are rolled back.
+
+Imported data:
+
+The importer imports the following types of data:
+
+- Track favourites / annotations ("Streckenfavoriten", *.km files)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.annotations=true'.
+
+  For the classification of track favourites,
+  -Dflys.backend.importer.annotation.types=FILE
+  can be used to give the path to an XML file in which rules
+  define how this classification is to be done.
+  Details are given in the annex 'Classification of track favourites'.
+
+- Gauges, master data (*.glt, *.sta files):
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.gauges=true'.
+  The .glt file located next to the .wst file is read first. Only
+  *.sta files of gauges that are listed in the .glt file are loaded.
+
+  If '-Dflys.backend.sta.parse.gauge.numbers=true' is set, the
+  importer tries to extract the official gauge numbers from the
+  master data.
+  !!! Handle this with care, because most STA files contain
+  !!! invalid gauge numbers.
+
+  The system property "flys.backend.main.value.types" may contain a
+  string with valid types of main values. The default is "QWTD".
+  In practice "QWD" is a sensible setting.
+
+- Base water levels (gewaesser.wst files):
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.wst=true'.
+
+- Additional longitudinal sections (*.zus, *.wst files)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.extra.wsts=true'.
+  The *.zus and *.wst files from the directory
+  "../Zus.Längsschnitte" relative to the gewaesser.wst file are used.
+
+- Fixations (*.wst files)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.fixations=true'.
+  The *.wst files from the directory
+  "../Fixierungen" relative to the gewaesser.wst file are used.
+
+- Official lines (*.wst files)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.official.lines=true'.
+  The "Amtl_Linien.wst" files from the directories
+  "../Basisdaten" and "../Fixierungen" relative to the gewaesser.wst
+  file are used.
+
+- Cross section profiles (*.prf files)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.prfs=true'.
+  All *.prf files found recursively in "../../.." relative to the
+  gewaesser.wst file are used. Before the import, duplicates with
+  identical content are excluded by comparing file lengths and
+  MD5 checksums.
+
+- Hydraulic characteristics (*.hyk)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.hyks=true'.
+  All *.hyk files found recursively in "../../.." relative to the
+  gewaesser.wst file are used. Before the import, duplicates with
+  identical content are excluded by comparing file lengths and
+  MD5 checksums.
+
+- Flood marks (*.zus, *.wst)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.flood.water=true'.
+  The *.zus and *.wst files from the directory
+  "../HW-Marken" relative to the gewaesser.wst file are used.
+
+- Flood protection structures (*.zus)
+  The import can be suppressed with
+  '-Dflys.backend.importer.skip.flood.protection=true'.
+  The *.zus and *.wst files from the directory
+  "../HW-Schutzanlagen" relative to the gewaesser.wst file are used.
+
+  TODO INGO:
+
+  flys.backend.importer.skip.bed.height.single
+  flys.backend.importer.skip.bed.height.epoch
+  flys.backend.importer.skip.sediment.density
+  flys.backend.importer.skip.morphological.width
+  flys.backend.importer.skip.flow.velocity
+  flys.backend.importer.skip.sediment.yield
+  flys.backend.importer.skip.waterlevels
+  flys.backend.importer.skip.waterlevel.differences
+  flys.backend.importer.skip.sq.relation
+
+To connect to the database, the connection parameters have to be
+passed to the importer. This is also done via system properties:
+
+  -Dflys.backend.user=USER
+   Database user
+
+  -Dflys.backend.password=PASSWORD
+   Database password
+
+  -Dflys.backend.url=URL
+   URL of the database. A typical URL for an Oracle XE edition would
+   be, for example:
+   jdbc:oracle:thin:@//HOST:PORT/XE
+   where HOST is the name of the server the database runs on and
+   PORT is the port the database can actually be reached at.
+   Further details at http://www.orafaq.com/wiki/JDBC
+
+  -Dflys.backend.driver=DRIVER
+   where DRIVER is the name of the JDBC driver that speaks the
+   protocol of the database. For an Oracle XE this would be, for
+   example:
+   oracle.jdbc.OracleDriver
+
+  -Dflys.backend.dialect=DIALECT
+   where DIALECT is the Hibernate dialect the database understands.
+   For an Oracle XE this would be, for example:
+   org.hibernate.dialect.OracleDialect
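+
+Put together, the database related part of a call could look like the
+following sketch (user, password, host and port are placeholders;
+driver, dialect and the URL format are the Oracle XE examples from
+above):
+
+  -Dflys.backend.user=USER \
+  -Dflys.backend.password=PASSWORD \
+  -Dflys.backend.url=jdbc:oracle:thin:@//HOST:PORT/XE \
+  -Dflys.backend.driver=oracle.jdbc.OracleDriver \
+  -Dflys.backend.dialect=org.hibernate.dialect.OracleDialect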
+
+
+Operational notes:
+-------------------
+
+    The memory consumption of the importer is very high. It is
+    recommended to assign at least 8 GiB of main memory to the JVM:
+    '-Xmx8192m'.
+    The import of the HYKs and the PRFs is particularly memory
+    intensive. It may be advisable to import them in two or three
+    steps: first the remaining hydrological data (preventing the
+    import of the HYKs and PRFs with flys.backend.importer.skip.hyks=true
+    and flys.backend.importer.skip.prfs=true), then the HYKs (skipping
+    the other data via the corresponding flys.backend.importer.skip.*
+    properties), and in a final step the PRFs.
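+
+    A sketch of the first of these steps (HYKs and PRFs skipped;
+    classpath and main class are again placeholders):
+
+      java -Xmx8192m -cp <IMPORTER_CLASSPATH> \
+          -Dflys.backend.importer.skip.hyks=true \
+          -Dflys.backend.importer.skip.prfs=true \
+          <IMPORTER_MAIN_CLASS> \
+          /path/to/river/river.gew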
+
+Annex 'Classification of track favourites'
+------------------------------------------
+Track favourites are imported from KM files. To assign a category
+(bridges, gauges, etc.) to the individual entries, an XML file can be
+specified in which rules for this classification are defined.
+Schematically this file is divided into the two sections 'types' and
+'patterns':
+
+<annotation>
+    <types> ...  </types>
+    <patterns> ...  </patterns>
+</annotation>
+
+The 'types' section declares the categories into which the entries
+are to be classified. This is done with individual entries like
+
+  <type name="Pegel"/>
+  <type name="Brücke"/>
+  ...
+  <type name="Sonstige" default="true"/>
+
+The attribute 'default' may be set on one type only and states that
+this category is to be chosen whenever no other category can be
+assigned.
+
+The 'patterns' section then defines the rules that assign the
+individual entries to the declared categories. Two kinds of
+definitions can be given:
+
+  <file pattern="^Brücken$" type="Brücke"/>
+
+or
+
+  <line pattern="^Brücke[:\s].*$" type="Brücke"/>
+
+The first variant determines the category that is used as the default
+for a whole KM file. 'pattern' is a regular expression applied to the
+file name. If the name of the file matches the regular expression,
+'type' is taken as the default. If several <file> rules match, the
+first match is applied. If none of the <file> rules applies, the
+category that carries the 'default' attribute in the <types> section
+is chosen.
+
+The second rule variant, <line>, is applied to the label of every
+track favourite entry within a KM file. Here, too, the pattern is a
+regular expression defined via the 'pattern' attribute. In case of a
+match, the category is determined via the 'type' attribute. If
+several rules match, the category of the first match is chosen. If no
+rule matches, the entry is assigned to the category that is the
+default for the containing file.
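+
+To activate such a rule file for an import run, it is passed via the
+system property described above; the path below is only a placeholder
+(doc/annotation-types.xml in this repository is an example of such a
+rule file):
+
+  -Dflys.backend.importer.annotation.types=doc/annotation-types.xml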
+
+Annex 'Errors and warnings':
+============================
+
+Errors:
+-------
+
+- 'error while parsing gew'
+  The GEW file is broken or could not be opened.
+
+- 'File 'XYZ' is broken!'
+  The file XYZ is inconsistent and leads to errors.
+
+- 'Error while parsing file for morph. width.'
+  An error occurred while reading the morphological width.
+
+- 'Error while storing flow velocity model.'
+  An error occurred while writing a flow velocity model.
+
+- 'Error while storing flow velocity measurement.'
+  An error occurred while writing a flow velocity measurement.
+
+- 'Error while storing sediment yield.'
+  An error occurred while writing a sediment yield.
+
+- 'Error while storing waterlevel diff.'
+  An error occurred while writing a waterlevel difference.
+
+- 'Error while storing sq relation.'
+  An error occurred while writing an S(Q) relation.
+
+- 'Error reading PRF file.'
+  An error occurred while reading a PRF file.
+
+- 'Error closing PRF file.'
+  An error occurred while closing a PRF file.
+
+- 'HYK 1: not enough elements in line #'
+- 'HYK 2: not enough elements in line #'
+- 'HYK 5: not enough elements in line #'
+- 'HYK 6: not enough elements in line #'
+  A line in a HYK file does not contain enough elements.
+
+- 'HYK: parsing num zones, bottom or top height failed in line #'
+- 'HYK: HYK: number of flow zones mismatches in line #'
+  The number of zones or the data describing the zones are not correct.
+
+- 'HYK: cannot parse number in line #'
+  A number was expected.
+
+- 'HYK: Error reading file.'
+  An error occurred while reading a HYK file.
+
+- 'HYK: Error closing file.'
+  An error occurred while closing a HYK file.
+
+Warnings:
+---------
+
+- 'annotation type file 'XYZ' is not readable.'
+  The file XYZ cannot be read.
+
+- 'cannot parse annotation types file.'
+  An error occurred while processing the annotation types file.
+
+- 'Cannot read directory.'
+  The directory could not be read.
+
+- 'no official lines wst file found'
+  No file with official lines was found.
+
+- 'cannot read fixations wst file directory'
+  The directory with the fixations cannot be read.
+
+- 'cannot read extra longitudinal wst file directory'
+  The directory with the additional longitudinal sections cannot be read.
+
+- 'cannot read gauges from 'XYZ''
+  The gauge validities cannot be read.
+
+- 'HYK file 'XYZ' seems to be a duplicate.'
+  A HYK file with a different name but identical content has
+  already been found.
+
+- 'PRF file 'XYZ' seems to be a duplicate.'
+  A PRF file with a different name but identical content has
+  already been found.
+
+- 'Skip invalid SedimentYield: time interval or unit null!'
+  A sediment yield is invalid and was skipped.
+
+- 'skip flow velocity model: No discharge zone specified.'
+  The flow velocity model was skipped because no discharge zone was
+  specified.
+
+- 'skip invalid waterlevel - no unit set!'
+  A waterlevel without a unit was skipped.
+
+- 'Cannot parse time range.'
+  The time format was not recognised.
+
+- 'skip invalid data line #'
+  An invalid data line was skipped.
+
+- 'Error while parsing sq relation row #'
+  A row in the S(Q) relation is invalid.
+
+- 'GLT: no gauge found in line #'
+  A gauge was expected in the GLT file but not found.
+
+- 'GLT: line # has not enough columns.'
+  A line in the gauge validity file does not have enough columns.
+
+- 'Error while parsing flow velocity values.'
+- 'skip invalid data line: #'
+  Invalid data line in a file with a flow velocity measurement.
+
+- 'skip invalid waterlevel line: #'
+- 'Error while parsing value: #'
+- 'Error while parsing station: #'
+  Invalid data line in a file with waterlevel differences.
+
+- 'skip invalid MainValue part: #'
+- 'skip invalid gauge part: #'
+- 'Error while parsing Q value: <Q>'
+- 'skip invalid data line: #'
+- 'Error while parsing flow velocity values.'
+  Invalid data line in a file with flow velocity models.
+
+- 'Error while parsing number from data row: #'
+  TODO INGO
+
+- 'Unknown meta line: #'
+- 'Error while parsing numbers in: #'
+- 'skip invalid data line: #'
+- 'Error while parsing numbers in #'
+  Invalid data line in a file with sediment densities.
+
+- 'STA file is empty'
+- 'STA file has not enough lines'
+- 'STA file is too short'
+  The master data file is empty or has too few lines.
+
+- 'First line in STA file is too short.'
+  The first line of the master data is too short.
+
+- 'STA: second line is too short'
+  The second line is too short.
+
+- 'STA: parsing of the datum of the gauge failed'
+  The gauge datum could not be parsed.
+
+- 'STA: 'XYZ' is not a valid long number.'
+  The gauge number is invalid.
+
+- 'STA: Not enough columns for aeo and datum.'
+  AEO and the gauge datum cannot be determined.
+
+- 'STA: cannot parse aeo or datum.'
+  AEO or the gauge datum are invalid.
+
+- 'STA: value not parseable in line #'
+  The value cannot be interpreted as a number.
+
+- 'PRF: cannot open file <FILE>'
+  The PRF file cannot be opened.
+
+- PRF: file is empty
+- PRF: First line does not look like a PRF data pattern.
+- PRF: premature EOF. Expected integer in line 2
+- PRF: Expected <num> in line 2
+- PRF: invalid integer in line 2
+- PRF: premature EOF. Expected pattern for km extraction
+- PRF: line 4 does not look like a PRF km extraction pattern.
+- PRF: premature EOF. Expected skip row count.
+- PRF: line 5 is not an positive integer.
+- PRF: cannot extract km in line #
+  The PRF format is complex. Further information should be consulted
+  for a more detailed analysis.
+
+- 'cannot access WST file <FILE>'
+  The WST file could not be found.
+
+- 'Found an invalid row in the AT file.'
+  A row in an AT file is not correct.
+
+- 'AT: invalid number <XYZ>'
+  A number was expected but not found.
+
+- 'Try to add Q range without waterlevel!'
+  A Q range without a waterlevel was found.
+
+- 'Error while parsing Q range: #'
+  Invalid Q range.
+
+- 'skip invalid waterlevel line: #'
+  An invalid waterlevel line was skipped.
+
+- 'Error while parsing number values: #'
+  Invalid numeric values.
+
+- 'ANN: not enough columns in line #'
+  Not enough columns in the KM file.
+
+- 'ANN: invalid number in line #'
+  Invalid number.
+
+- 'ANN: cannot parse 'Unterkante' in line #'
+  The 'Unterkante' (bottom edge) in a KM file could not be read.
+
+- 'ANN: cannot parse 'Unterkante' or 'Oberkante' in line #'
+  The 'Unterkante' or 'Oberkante' (bottom or top edge) has a wrong format.
+
+- 'ANN: duplicated annotation 'XYZ' in line #'
+  A duplicate track favourite was found.
+
+- 'ANN: 'XYZ' is not a directory.'
+  The subdirectory could not be opened.
+
+- 'ANN: cannot list directory 'XYZ''
+  The subdirectory could not be scanned.
+
+- 'BHP: Meta line did not match any known type: #'
+  Unknown type.
+
+- 'BHP: Error while parsing timeinterval!'
+  Invalid time interval.
+
+- 'BHP: Error while parsing year!'
+  Invalid year.
+
+- 'BHP: Error while parsing sounding width!'
+  Unknown sounding width.
+
+- 'BHP: Error while parsing range!'
+  Invalid range specification.
+
+- 'MWP: Unknown meta line: #'
+  Invalid meta information.
+
+- 'MWP: skip invalid data line: #'
+  An invalid data line was skipped.
+
+- 'MWP: Error while parsing numbers in #'
+  Wrong number format.
+
+- 'ANNCLASS: rule has no name'
+  A classification rule for track favourites has no name.
+
+- 'ANNCLASS: pattern has no 'pattern' attribute.'
+  A classification pattern for track favourites has no pattern.
+
+- 'ANNCLASS: pattern has unknown type 'XYZ''
+  A classification pattern for track favourites could not be assigned
+  to a type.
+
+- 'ANNCLASS: pattern 'XYZ' is invalid.'
+  A classification pattern for track favourites is invalid.
+
+- 'BSP: Error while parsing data row.'
+  Invalid data row.
+
+- 'SYP: Unknown meta line: #'
+  Invalid meta data line.
+
+- 'SYP: skip invalid data line #'
+  An invalid data line was skipped.
+
+- 'SYP: Error while parsing numbers in #'
+  Invalid number format.
+
+- 'SYP: Unknown time interval string <XYZ>'
+  Wrong date format.
+
+- 'SYP: Error while parsing years <XYZ>'
+  Wrong year format.
+
+- 'SYP: Error while parsing ranges of <XYZ>'
+  Invalid range specification.
+
+- 'SYP: Unknown grain fraction <XYZ>'
+  Unknown grain fraction.
+
+- 'WST: invalid number.'
+  Invalid number.
+
+- 'WST: km <km> (<Zeile>) found more than once. -> ignored.'
+  A kilometre occurs more than once in a WST file.
+
+- 'HYK: zone coordinates swapped in line #'
+  The flow zone coordinates were given in reversed order.
+
+- 'BHS: Skip invalid file 'XYZ''
+  The contents of the file are invalid.
+
+- 'ISQ: Unable to store sq relation value.'
+  An S(Q) relation value could not be stored.
+
+- 'ISQ: Cannot determine sq relation without time interval.'
+  The S(Q) relation has no associated period of validity.
+
+- 'IWD: skip invalid waterlevel difference - no unit set!'
+  The waterlevel difference has no unit.
+
+- 'BHE: Skip file - invalid current elevation model.'
+  Invalid elevation model.
+
+- 'BHE: Skip file - invalid time range.'
+  Invalid time range.
+
+- 'BHE: Skip file - invalid km range.'
+  Invalid kilometre range.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/annotation-types.xml	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<annotation>
+    <types>
+        <type name="Abzweigung"/>
+        <type name="Berechnungsstrecke"/>
+        <type name="Brücke"/>
+        <type name="Bundesland"/>
+        <type name="Deich"/>
+        <type name="Einmündung"/>
+        <type name="Fähre"/>
+        <type name="Gemeinde"/>
+        <type name="Grenze"/>
+        <type name="Hafen"/>
+        <type name="HW-Schutz"/>
+        <type name="Landkreis"/>
+        <type name="Meldestelle"/>
+        <type name="Messstelle"/>
+        <type name="Pegel"/>
+        <type name="Stauwehr"/>
+        <type name="Staatsgrenze"/>
+        <type name="Staat"/>
+        <type name="WSA"/>
+        <type name="Zufluß"/>
+        <type name="Sonstige" default="true"/>
+    </types>
+
+    <patterns>
+        <file pattern="^Brücken$" type="Brücke"/>
+        <file pattern="^Deich.*$" type="Deich"/>
+        <file pattern="^Hafen$" type="Hafen"/>
+        <file pattern="^Pegel-alle$" type="Pegel"/>
+        <file pattern="^Pegel$" type="Pegel"/>
+        <file pattern="^Wehr$" type="Stauwehr"/>
+        <file pattern="^Stauwehr$" type="Stauwehr"/>
+        <file pattern="^Zufluß$" type="Zufluß"/>
+
+        <line pattern="^Abz\.?[:\s].*$" type="Abzweigung"/>
+        <line pattern="^Berechnungsstrecke.*$" type="Berechnungsstrecke"/>
+        <line pattern="^Brücke[:\s].*$" type="Brücke"/>
+        <line pattern="^Bundesland[:\s].*$" type="Bundesland"/>
+        <line pattern="^Einmündung[:\s].*$" type="Einmündung"/>
+        <line pattern="^Fähre[:\s].*$" type="Fähre"/>
+        <line pattern="^Gemeinde[:\s].*$" type="Gemeinde"/>
+        <line pattern="^Grenze[:\s].*$" type="Grenze"/>
+        <line pattern="^Hafen[:\s].*$" type="Hafen"/>
+        <line pattern="^HW-Schutz[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^Landkreis[:\s].*$" type="Landkreis"/>
+        <line pattern="^Meldestelle[:\s].*$" type="Meldestelle"/>
+        <line pattern="^Messstelle[:\s].*$" type="Messstelle"/>
+        <line pattern="^Geschiebemessstelle[:\s].*$" type="Messstelle"/>
+        <line pattern="^Pegel[:\s].*$" type="Pegel"/>
+        <line pattern="^Staatsgrenze[:\s].*$" type="Staatsgrenze"/>
+        <line pattern="^Staat[:\s].*$" type="Staat"/>
+        <line pattern="^Wehr[:\s].*$" type="Stauwehr"/>
+        <line pattern="^WSA[:\s].*$" type="WSA"/>
+        <line pattern="^Zufluß[:\s].*$" type="Zufluß"/>
+    </patterns>
+</annotation>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/Makefile	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,3492 @@
+# Copyright 2004 Chris Monson (shiblon@gmail.com)
+# Latest version available at http://www.bouncingchairs.net/oss
+#
+#    This file is part of ``Chris Monson's Free Software''.
+#
+#    ``Chris Monson's Free Software'' is free software; you can redistribute it
+#    and/or modify it under the terms of the GNU General Public License as
+#    published by the Free Software Foundation, Version 2.
+#
+#    ``Chris Monson's Free Software'' is distributed in the hope that it will
+#    be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
+#    Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License along
+#    with ``Chris Monson's Free Software''; if not, write to the Free Software
+#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+#
+#    It is also available on the web at http://www.gnu.org/copyleft/gpl.html
+#
+#    Note that using this makefile to build your documents does NOT place them
+#    under the GPL unless you, the author, specifically do so.  In other words,
+#    I, Chris Monson, the copyright holder and author of this makefile,
+#    consider it impossible to ``link'' to this makefile in any way covered by
+#    the GPL.
+#
+# TO OBTAIN INSTRUCTIONS FOR USING THIS FILE, RUN:
+#    make help
+#
+fileinfo	:= LaTeX Makefile
+author		:= Chris Monson
+version		:= 2.2.0-rc1
+#
+# Note that the user-global version is imported *after* the source directory,
+# so that you can use stuff like ?= to get proper override behavior.
+.PHONY: Makefile GNUmakefile Makefile.ini $(HOME)/.latex-makefile/Makefile.ini
+-include Makefile.ini
+-include $(HOME)/.latex-makefile/Makefile.ini
+#
+# This can be pdflatex or latex - you can change this by adding the following line to your Makefile.ini:
+# BUILD_STRATEGY := latex
+BUILD_STRATEGY		?= pdflatex
+#
+# Sets LC_ALL=C, by default, so that locale-aware tools, like sort, are
+# immune to changes to the locale in the user environment.
+export LC_ALL		?= C
+#
+#
+# If you specify sources here, all other files with the same suffix
+# will be treated as if they were _include_ files.
+#onlysources.tex	?= main.tex
+#onlysources.tex.sh	?=
+#onlysources.tex.pl	?=
+#onlysources.tex.py	?=
+#onlysources.rst	?=
+#onlysources.fig	?=
+#onlysources.gpi	?=
+#onlysources.dot	?=
+#onlysources.xvg	?=
+#onlysources.svg	?=
+#onlysources.eps.gz	?=
+#onlysources.eps	?=
+#
+# If you list files here, they will be treated as _include_ files
+#includes.tex		?= file1.tex file2.tex
+#includes.tex.sh	?=
+#includes.tex.pl	?=
+#includes.tex.py	?=
+#includes.rst		?=
+#includes.fig		?=
+#includes.gpi		?=
+#includes.dot		?=
+#includes.xvg		?=
+#includes.svg		?=
+#includes.eps.gz	?=
+#includes.eps		?=
+#
+# If you list files or wildcards here, they will *not* be cleaned - default is
+# to allow everything to be cleaned.
+#neverclean		?= *.pdf
+#
+# Alternatively (recommended), you can add those lines to a Makefile.ini file
+# and it will get picked up automatically without your having to edit this
+# Makefile.
+#
+# KNOWN ISSUES:
+#	* The following occurs:
+#		file with: \usepackage{named}\bibliographystyle{named}
+#		Compile
+#		change to: \usepackage{apalike}\bibliographystyle{apalike}
+#		Compile again -- BARF!
+#
+#		The workaround: make clean-nographics; make
+#
+#		Note that we may not be able to fix this.  LaTeX itself barfs
+#		on this, not the makefile.  The very first invocation of LaTeX
+#		(when something like this has happened) reads the existing .aux
+#		file and discovers invalid commands like \citeauthoryear that
+#		are only valid in the package that was just removed.  It then
+#		tries to parse them and explodes.  It's not at all clear to me
+#		how to fix this.  I tried removing the .aux files on the first
+#		run of LaTeX, but that necessarily requires more subsequent
+#		rebuilds on common edits.  There does not appear to be a
+#		graceful solution to this issue.
+#
+# CHANGES:
+# Chris Monson (2010-04-08):
+# 	* Bumped version to 2.2.0-rc1
+# 	* Added back in the rst_style_file stuff that got broken when switching
+# 		rst -> tex to use the script mechanism
+# Chris Monson (2010-03-23):
+#	* Bumped version to 2.2.0-beta8
+#	* Work on issue 76: bad backtick escape for some sed versions, failure
+#		to clear out the hold buffer when outputting MISSING comment.
+#		- Backed out 2>&1 to &> (doesn't work in sh)
+#		- Backed out using . to source variables
+# Chris Monson (2010-03-22):
+# 	* Bumped version to 2.2.0-beta7
+# 	* Issue 72: Fix latex/bibtex invocation order for annotated bib styles
+# 	* Fixed informational output to reflect which LaTeX run we're on
+# 	* Fixed graphic detection to include graphics that are already there in
+# 		.d files
+# 	* Tightened up the .d file output to only make .d depend on graphic
+# 		*source* files.  This means that building foo.d no longer
+# 		builds all of the graphics files on which foo.tex depends.
+# 		Had to use .SECONDEXPANSION trickery to make it work.
+# 	* Changed get-graphics to only accept a stem.
+# 	* Fixed build-once logic for scripted .tex to work better
+# 	* Made get-inputs sed script more maintainable.
+# 	* Moved Makefile.ini import up higher.
+# 	* Changed bare stems to not recursively invoke make
+# 	* Updated diff output to be more silent everywhere
+# 	* Added a MISSING comment to the .d file if stuff isn't found - forces
+# 		removal of .1st.make file, which often forces it to try again.
+# 	* Fixed broken graphics-target function
+# 	* Added sleep to .d file generation when stuff is missing - if it
+# 		builds too fast, make doesn't realize it needs to be reloaded,
+# 		and thus never discovers some deeper dependencies (especially
+# 		evident when graphics are included from scripted include
+# 		files).
+# Chris Monson (2010-03-17):
+# 	* Bumped version to 2.2.0-beta6
+# 	* Fixed bareword builds to actually work (requires static patterns)
+# 	* Fixed colorization to work with new paragraph stuff
+# Chris Monson (2010-03-17):
+# 	* Bumped version to 2.2.0-beta5
+# 	* Fixed graphic detection to be much more focused - splits log file
+# 		into paragraphs before doing pattern matching.
+# 	* Fixed make foo to work properly (recursively calls make foo.pdf)
+# 	* Fixed gpi -> pdf generation to not waste time building .eps *after*
+# 		the pdf already exists.
+# 	* Changed log copies to include MAKE_RESTARTS as part of the name.
+# 	* Fixed missing include file detection (also makes use of the paragraph
+# 		stuff) to detect missing scripted include files.
+# Chris Monson (2010-03-16):
+# 	* Bumped version to 2.2.0-beta4
+# 	* issue 70: .pdf not moved out of the way properly on first
+# 		compilation, resulting in early error detection failure.
+# 	* issue 74: fixed broken error on missing .aux files: the
+# 		implementation was masking real errors.
+# Chris Monson (2010-03-15):
+# 	* Bumped version to 2.2.0-beta3
+# 	* issue 71: Made the tput dependency optional
+# 	* issue 73: Made .tex targets not pull in .d files (building them from
+# 		scripts should not require a .d)
+# 	* issue 74: Output a much saner error when a .aux file is not produced
+# 		(e.g., when you are typing "make" without arguments in a
+# 		directory with included .tex files that are not named with
+# 		._include_.)
+# Chris Monson (2010-03-11):
+# 	* Bumped version to 2.2.0-beta2
+# 	* Fixed clean-graphics to get rid of intermediate .eps files that may
+# 		be hanging around
+# 	* Added an automatic setting to use eps terminals in pdflatex mode for
+# 		gnuplot if it doesn't understand pdf.
+# 	* issue 66: Removed grayscale generation via magic suffix.  Grayscale
+# 		generation is now only available via GRAY=1
+# 	* issue 68: Added explicit handling of LC_ALL for locale-aware tools
+# 		like "sort"
+# Chris Monson (2010-03-10):
+# 	* Bumped version to 2.2.0-beta1
+# 	* Fixed success message to handle output message in different places
+# 	* Added name of produced file to success message
+# Chris Monson (2010-03-10):
+# 	* Bumped version to 2.2.0-alpha3
+# 	* Added meaningful error message for wrong hyperref options
+# 	* Added meaningful error message for incorrect graphics extensions
+# Chris Monson (2010-03-09):
+# 	* Bumped version to 2.2.0-alpha2
+# 	* Updated graphics handling (gnuplot and fig generate pdf natively)
+# 	* Changed xmgrace to output monochrome natively
+# Chris Monson (2010-03-09):
+# 	* Bumped version to 2.2.0-alpha1 - major change!
+# 	* Support pdflatex natively and by default (issue 6 - a long time coming)
+# 	* Add ability to have a single $HOME/.latex-makefile/Makefile.ini for
+# 		all invocations
+# 	* Reworked graphic inclusion detection so that extensions need not be
+# 		specified for either build strategy (e.g.,
+# 		\includegraphics{test1.eps} -> \includegraphics{test1})
+# 	* Changed log format to include filenames and line numbers
+# Chris Monson (2010-02-04):
+# 	* Bumped version to 2.1.43
+# 	* All of the following are for issue 63 (thanks to mojoh81):
+# 	* Added documentation about fixing Makefile.ini default target
+# 	* Added perl and python script targets
+# 	* Fixed run logic to allow included .tex files to be scripted (the
+# 		run-again logic now detects missing .tex files, and the MV
+# 		command has been switched out for a command that only invokes
+# 		MV if the files exist)
+# 	* Changed scripted generation to only run once per make invocation
+# 	* Added dependency on expr
+# Chris Monson (2010-01-19):
+# 	* Bumped version to 2.1.42
+# 	* issue 62: Added .brf extension to cleanable files (backrefs)
+# Chris Monson (2010-01-07):
+# 	* Bumped version to 2.1.41
+# 	* issue 60: bad makeindex runs now error out on subsequent tries
+# Chris Monson (2009-12-01):
+# 	* Bumped version to 2.1.40
+# 	* issue 36: build all indices (for e.g., splitidx usage)
+# 	* issue 59: clean up all generated files (including indices)
+# Chris Monson (2009-11-23):
+# 	* Bumped version to 2.1.39
+# 	* issue 57: change ps2pdf invocations to just use gs directly
+# Chris Monson (2009-11-19):
+# 	* Bumped version to 2.1.38
+# 	* issue 57: Added some limited support for Cygwin (spaces in filenames)
+# Chris Monson (2009-11-15):
+# 	* Bumped version to 2.1.37
+# 	* Removed svninfo, since this is now managed by mercurial
+# 	* Fixed typo in changelist
+# 	* Issue 52: added jpg->eps conversion (thanks to brubakee)
+# 	* Issue 54: fix missing Overfull colorization due to lack of a blank
+# 		line preceding the first error.
+#	* Issue 51: remove head.tmp and body.tmp in make clean invocation
+#	* Issue 56: maintain multiple versions of log files (for debugging)
+# Chris Monson (2009-11-14):
+# 	* Bumped version to 2.1.36
+# 	* Issues 53 and 49: added .brf, .mtc, and .maf to the cleanables
+# Chris Monson (2009-11-05):
+# 	* Bumped version to 2.1.35
+# 	* Added nomenclature support (see issue 48)
+# Chris Monson (2009-10-29):
+# 	* Bumped version to 2.1.34
+# 	* Fixed _out_ creation bug introduced in 2.1.33 (it was always created)
+# 	* Fixed erroneous help output for $HOME in BINARY_TARGET_DIR
+# 	* Changed contact email address - bring on the spam!
+# Chris Monson (2009-10-21):
+# 	* Bumped version to 2.1.33
+# 	* Fixed issue 46, adding support for dot2tex (thanks to fdemesmay)
+# 	* Made all_files.* settable in Makefile.ini (using ?= instead of :=)
+# 	* Fixed issue 47, thanks to fdemesmay: add binary copy directory, copy
+# 		dvi, pdf, and ps if it exists
+# Chris Monson (2009-09-25):
+# 	* Bumped version to 2.1.32
+# 	* Fixed so that a changed lol file will cause a rebuild
+# 	* Added .lol files to the cleanable list
+# Chris Monson (2009-09-08):
+# 	* Bumped version to 2.1.31
+# 	* Closed issue 43: evince doesn't notice pdf change w/out touch
+# Chris Monson (2009-08-28):
+# 	* Bumped version to 2.1.30
+# 	* Closed issue 39: Capture multi-line log warnings/errors to output
+# Chris Monson (2009-08-26):
+# 	* Bumped version to 2.1.29
+# 	* Closed issue 42: add svg support using inkscape
+# Chris Monson (2009-08-17):
+# 	* Bumped version to 2.1.28
+# 	* Patch from paul.biggar for issue 38: package warnings are overlooked
+# Chris Monson (2009-08-07):
+# 	* Bumped version to 2.1.27
+# 	* Included patch for issue 37 - removes pdf/ps files before copying,
+# 		allowing some broken viewers to see changes properly.
+# Chris Monson (2009-05-15):
+# 	* Bumped version to 2.1.26
+# 	* Included patch for issue 9 from favonia - detects .fig changes for
+# 		pstex files during regular compilation, so long as the pstex
+# 		has been built at least once with make all-pstex.
+# Chris Monson (2009-03-27):
+# 	* Bumped version to 2.1.25
+# 	* Cleaned up a bunch of variable setting stuff - more stuff is now
+# 		settable from Makefile.ini
+# 	* Cleaned up documentation for various features, especially settable
+# 		variables.
+# 	* issue 28: support for png -> eps conversion (it even looks good!)
+# 	* issue 29: support for "neverclean" files in Makefile.ini
+# 	* issue 30: make ps2pdf14 the default - fall back when not there
+# Chris Monson (2009-03-09):
+# 	* Bumped version to 2.1.24
+# 	* issue 27: xmgrace support (thanks to rolandschulzhd)
+# Chris Monson (2008-10-23):
+# 	* Bumped version to 2.1.23
+# 	* issue 23: fixed _check_programs to not use bash string subs
+# Chris Monson (2008-09-02):
+# 	* Bumped version to 2.1.22
+# 	* Applied patch from Holger <yllohy@googlemail.com> to add include
+# 		sources and some documentation updates.
+# 	* Updated backup_patterns to be a bit more aggressive (also thanks to
+# 		Holger)
+# Chris Monson (2008-08-30):
+# 	* Bumped version to 2.1.21
+# 	* Added ability to specify onlysources.* variables to indicate the only
+# 		files that should *not* be considered includes.  Thanks to Holger
+# 		<yllohy@googlemail.com> for this patch.
+# 	* Added an automatic include of Makefile.ini if it exists.  Allows
+# 		settings to be made outside of this makefile.
+# Chris Monson (2008-05-21):
+# 	* Bumped version to 2.1.20
+# 	* Added manual pstex compilation support (run make all-pstex first)
+# 	* Removed all automatic pstex support.  It was totally breaking
+# 		everything and is very hard to incorporate into the makefile
+# 		concept because it requires LaTeX to *fail* before it can
+# 		determine that it needs the files.
+# Chris Monson (2008-04-17):
+# 	* Bumped version to 2.1.19
+# 	* Changed the pstex build hack to be on by default
+# Chris Monson (2008-04-09):
+# 	* Bumped version to 2.1.18
+# 	* issue 16: fixed pstex build problems, seems nondeterministic.  Added
+# 		gratuitous hack for testing: set PSTEX_BUILD_ALL_HACK=1.
+# Chris Monson (2008-04-09):
+# 	* Bumped version to 2.1.17
+# 	* issue 20: fixed accumulation of <pid>*.make files - wildcard was
+#		refusing to work on files that are very recently created.
+# Chris Monson (2008-04-02):
+# 	* Bumped version to 2.1.16
+# 	* issue 19: Removed the use of "type" to fix broken "echo" settings
+# Chris Monson (2008-03-27):
+# 	* Bumped version to 2.1.15
+# 	* issue 18: Favors binary echo over builtin, as binary understands -n
+# 	* issue 16: Fixed handling of missing pstex_t files in the log
+# 	* issue 9: Added .SECONDARY target for .pstex files
+# Chris Monson (2008-03-21):
+# 	* Bumped version to 2.1.14
+# 	* Fixed broken aux file flattening, which caused included bibs to be
+# 		missed.
+# Chris Monson (2008-03-20):
+# 	* Bumped version to 2.1.13
+# 	* Changed error output colorization to show errors for missing files
+# 		that are not graphics files.
+# Chris Monson (2008-03-20):
+# 	* Bumped version to 2.1.12
+# 	* Fixed a regression introduced in r28 that makes bibtex fail when
+# 		there is no index file present
+# Chris Monson (2008-03-03):
+# 	* Bumped version to 2.1.11
+# 	* Fixed issue 11 (handle index files, reported by abachn)
+# 	* Cleaned up some comments and help text
+# Chris Monson (2008-01-24):
+# 	* Bumped version to 2.1.10
+#	* Fixed to work when 'sh' is a POSIX shell like 'dash'
+# Chris Monson (2007-12-12):
+# 	* Bumped version to 2.1.9
+# 	* Fixed documentation and dependency graph for pstex files
+# Chris Monson (2007-12-12):
+# 	* Bumped version to 2.1.8
+# 	* Added basic pstex_t support for fig files (Issue 9 by favonia)
+# 		I still suggest that psfrag be used instead.
+# Chris Monson (2007-10-16):
+# 	* Bumped version to 2.1.7
+# 	* Removed todo item: allow other comment directives for rst conversion
+# 	* Added ability to use global rst style file _rststyle_._include_.tex
+# 	* Added help text to that effect
+# Chris Monson (2007-05-20):
+# 	* Bumped version to 2.1.6
+# 	* Changed default paper size for rst files
+# 	* Added todo item: fix paper size for rst files
+# 	* Added todo item: allow other comment directives for rst conversion
+# Chris Monson (2007-04-02):
+# 	* Bumped version to 2.1.5
+# 	* Addressed Issue 7, incorrect .gpi.d generation in subdirectories
+# Chris Monson (2007-03-28):
+# 	* Bumped version to 2.1.4
+# 	* Fixed syntax error in dot output
+# Chris Monson (2007-03-01):
+# 	* Bumped version to 2.1.3
+# 	* Added reST to the included documentation
+# 	* Fixed graphics and script generation to be settable in the
+# 		environment.
+# Chris Monson (2007-02-23):
+# 	* Bumped version to 2.1.2
+# 	* Added the ability to generate .tex files from .rst files
+# Chris Monson (2006-10-17):
+# 	* Bumped version to 2.1.1
+# 	* Fixed includes from subdirectories (sed-to-sed slash escape problem)
+# Chris Monson (2006-10-05):
+# 	* Bumped version to 2.1.0 (pretty serious new feature added)
+# 	* New feature: bib files can now be anywhere on the BIBINPUTS path
+# 	* New programs: kpsewhich (with tetex) and xargs (BSD)
+# Chris Monson (2006-09-28):
+# 	* Bumped version to 2.0.9
+# 	* Added ability to parse more than one bibliography
+# Chris Monson (2006-06-01):
+# 	* Bumped version to 2.0.8
+# 	* Added .vrb to the list of cleaned files
+# Chris Monson (2006-04-26):
+# 	* Bumped version to 2.0.7
+# 	* Fixed so that clean-nographics does not remove .gpi.d files
+# 	* Removed jpg -> eps hack (not working properly -- just pre-convert)
+# 	* Fixed so that postscript grayscale can be done with BSD sed
+# Chris Monson (2006-04-25):
+# 	* Bumped version to 2.0.6
+# 	* Fixed so that changed toc, lot, lof, or out causes a rebuild
+# Chris Monson (2006-04-17):
+# 	* Bumped version to 2.0.5
+# 	* Added jpg -> eps conversion target
+# Chris Monson (2006-04-12):
+# 	* Bumped version to 2.0.4
+# 	* Fixed BSD sed invocation to not use \| as a branch delimiter
+# 	* Added a comment section on what is and is not allowed in BSD sed
+# 	* Made paper size handling more robust while I was at it
+# 	* Fixed postscript RGB grayscale to use a weighted average
+# 	* Fixed postscript HSB grayscale to convert to RGB first
+# 	* Fixed a problem with rebuilding .bbl files
+# Chris Monson (2006-04-11):
+# 	* Bumped version to 2.0.3
+# 	* Fixed some BSD sed problems: can't use \n in substitutions
+# Chris Monson (2006-04-10):
+# 	* Bumped version to 2.0.2
+# 	* Once again removed ability to create .tex files from scripts
+# 	* \includeonly works again
+# Chris Monson (2006-04-09):
+# 	* Bumped version to 2.0.1
+# 	* Fixed grayscale postscript handling to be more robust
+# 	* Added ability to generate ._gray_. files from eps and eps.gz
+# 	* Added ability to clean ._gray_.eps files created from .eps files
+# Chris Monson (2006-04-07):
+# 	* Bumped version to 2.0.0
+# 	* Removed clunky ability to create included .tex files from scripts
+# 	* Added note in the help about included tex scripting not working
+# 	* Fixed the .eps generation to delete %.gpihead.make when finished
+# 	* Abandoned designs to use shell variables to create sed scripts
+# 	* Abandoned __default__.tex.sh idea: it causes recursion with %: .
+# 	* Removed web page to-do.  All items are now complete.
+# 	* Added better grayscale conversion for dot figures (direct ps fixup).
+# 	* Include files can now be scripted (at the expense of \includeonly).
+# 	* Updated dependency graph to contain better node names.
+# Chris Monson (2006-04-06):
+# 	* Bumped version to 2.0b3
+# 	* Top level includes now fail if there is no rule to build them
+# 	* A helpful message is printed when they do fail
+# 	* Grayscale has been changed to be ._gray_, other phonies use _ now, too
+# 	* Grayscale handling has been completed
+# 	* Changed _include_stems target to _includes target.
+# 	* Fixed _includes target to be useful by itself.
+# 	* Removed the ability to specify clean and build targets at once
+# 	* Verified that epsfig works fine with current code
+# 	* Fixed included scripts so that they are added to the dep files
+# 	* Fixed so that graphics includes don't happen if they aren't for gpi
+# 	* Fixed dot output to allow grayscale.
+# Chris Monson (2006-04-05):
+#	* Bumped version to 2.0b2
+#	* Removed automatic -gray output.  It needs fixing in a bad way.
+#	* Revamped dependency creation completely.
+#	* Fixed conditional inclusion to actually work (test.nobuild.d, test.d).
+#	* Fixed clean target to remove log targets
+#	* Added the 'monochrome' word for gray gpi output
+#	* Added a _check_gpi_files target that checks for common problems
+#	* Changed the _version target into the version target (no _)
+#	* Added better handling of grayscale files.  Use the .gray.pdf target.
+#	* Fixed testing for rebuilds
+# Chris Monson (2006-04-04):
+#	* Bumped version to 2.0b1
+#	* Changed colorization of output
+#	* Made .auxbbl and .auxtex .make files secondary targets
+#	* Shortened and simplified the final latex invocation loop
+#	* Added version-specific output ($$i vs. $$$$i) in latex loop
+#	* Added a build message for the first .dvi run (Building .dvi (0))
+#	* Removed some build messages that most people don't care about.
+#	* Simplified procedure for user-set colors -- simple text specification
+#	* Fixed diff output to...not output.
+#	* Fixed rerun bug -- detect not only when preceded with LaTeX Warning
+#	* Sped up gpi plotting
+#	* Added error handling and colorized output for gpi failure
+#	* Documented color changing stuff.
+#	* Now sort the flattened aux file to avoid false recompilation needs
+#	* Added clean-nographics target
+#	* Don't remove self.dvi file if self.aux is missing in the log
+#	* Clarified some code.  Did some very minor adjusting.
+# Chris Monson (2006-04-03):
+#	* Bumped version to 2.0a7
+#	* Added .dvi and .ps files as secondary files.
+#	* Fixed handling of multiple run detection when includeonly is in use.
+#	* Added code to flatten .aux files.
+#	* Added more files as .SECONDARY prerequisites to avoid recompilation.
+#	* Fixed the inputs generation to be much simpler and to use pipes.
+#	* Added the dependency graph directly into the makefile.
+#	* Changed flatten-aux to remove \@writefile \relax \newlabel, etc.
+#	* Undid pipe changes with sed usage (BSD sed doesn't know -f-).
+#	* Added a _check_programs target that tells you what your system has.
+#	* Fixed an error in colorization that made unnecessary errors appear
+#	* Added view targets.
+#	* Updated help text.
+#	* Augmented cookies so that .aux can trigger .bbl and .dvi rebuilds
+#	* Added more informative error handling for dvips and ps2pdf
+# Chris Monson (2006-04-02):
+#	* Bumped version to 2.0a6
+#	* Added indirection to .bbl dependencies to avoid rebuilding .bbl files
+#	* Streamlined the diff invocation to eliminate an existence test
+#	* Removed special shell quote escape variables
+#	* Moved includes to a more prominent location
+#	* Fixed .inputs.make to not contain .aux files
+#	* Fixed embedding to use a file instead of always grepping.
+#	* Added *.make.temp to the list of cleanable files
+#	* Fixed Ruby.  It should now be supported properly.
+#	* Now differentiate between all, default, and buildable files.
+#	* Fixed to bail out on serious errors.
+#	* Revised the handling of includable files.  Still working on it.
+# Chris Monson (2006-03-31):
+#	* Bumped version to 2.0a5
+#	* Fixed a bug with LaTeX error detection (there can be spaces)
+#	* Added .bbl support, simplifying everything and making it more correct
+#	* Refactored some tests that muddy the code
+#	* Did a little cleanup of some shell loops that can safely be make loops
+#	* Added support for graphviz .dot files
+#	* Made _all_programs output easier to read
+#	* Added the ruby support that has long been advertised
+#	* Font embedding was screwed up for PostScript -- now implicit
+#	* Changed the generation of -gray.gpi files to a single command
+#	* Changed any make-generated file that is not included from .d to .make
+# Chris Monson (2006-03-30):
+#	* Bumped version to 2.0a4
+#	* Fixed a bug with very long graphics file names
+#	* Added a todo entry for epsfig support
+#	* Fixed a paper size bug: sometimes more than one entry appears
+#	* Fixed DVI build echoing to display the number instead of process ID
+#	* DVI files are now removed on first invocation if ANY file is missing
+#	* Added a simple grayscale approach: if a file ends with -gray.gpi, it
+#		is created from the corresponding .gpi file with a special
+#		comment ##GRAY in its header, which causes coloring to be
+#		turned off.
+#	* Fixed a bug in the handling of .tex.sh files.  For some reason I had
+#		neglected to define file stems for scripted output.
+#	* Removed a trailing ; from the %.graphics dependencies
+#	* Added dvips embedding (I think it works, anyway)
+# Chris Monson (2006-03-29):
+#	* Bumped version to 2.0a3
+#	* Fixed error in make 3.79 with MAKEFILE_LIST usage
+#	* Added the presumed filename to the _version output
+#	* Added a vim macro for converting sed scripts to make commands
+#	* Added gpi dependency support (plotting external files and loading gpi)
+#	* Allow .gpi files to be ignored if called .include.gpi or .nobuild.gpi
+#	* Fixed sed invocations where \+ was used.  BSD sed uses \{1,\}.
+# Chris Monson (2006-03-28):
+#	* Bumped version to 2.0a2
+#	* Added SHELL_DEBUG and VERBOSE options
+#	* Changed the default shell back to /bin/sh (unset, in other words)
+#	* Moved .PHONY declarations closer to their targets
+#	* Moved help text into its own define block to obtain better formatting
+#	* Removed need for double-entry when adding a new program invocation
+#	* Moved .SECONDARY declaration closer to its relevant occurrence
+#	* Commented things more heavily
+#	* Added help text about setting terminal and output in gnuplot
+#	* Created more fine-grained clean targets
+#	* Added a %.graphics target that generates all of %'s graphics
+#	* Killed backward-compatible graphics generation (e.g., eps.gpi=gpi.eps)
+#	* For now, we're just GPL 2, not 3.  Maybe it will change later
+#	* Made the version and svninfo into variables
+# Chris Monson (2006-03-27):
+#	* Bumped version to 2.0a1
+#	* Huge, sweeping changes -- automatic dependencies
+
+# IMPORTANT!
+#
+# When adding to the following list, do not introduce any blank lines.  The
+# list is extracted for documentation using sed and is terminated by a blank
+# line.
+#
+# EXTERNAL PROGRAMS:
+# = ESSENTIAL PROGRAMS =
+# == Basic Shell Utilities ==
+CAT		?= cat
+CP		?= cp -f
+DIFF		?= diff
+ECHO		?= echo
+EGREP		?= egrep
+ENV		?= env
+EXPR		?= expr
+MV		?= mv -f
+SED		?= sed
+SORT		?= sort
+TOUCH		?= touch
+UNIQ		?= uniq
+WHICH		?= which
+XARGS		?= xargs
+SLEEP		?= sleep
+# == LaTeX (tetex-provided) ==
+BIBTEX		?= bibtex
+DVIPS		?= dvips
+LATEX		?= latex
+PDFLATEX	?= pdflatex
+EPSTOPDF	?= epstopdf
+MAKEINDEX	?= makeindex
+KPSEWHICH	?= kpsewhich
+GS		?= gs
+# = OPTIONAL PROGRAMS =
+# == Makefile Color Output ==
+TPUT		?= tput
+# == TeX Generation ==
+PERL		?= perl
+PYTHON		?= python
+RST2LATEX	?= rst2latex.py
+# == EPS Generation ==
+CONVERT		?= convert	# ImageMagick
+DOT		?= dot		# GraphViz
+DOT2TEX		?= dot2tex	# dot2tex - add options (not -o) as needed
+FIG2DEV		?= fig2dev	# XFig
+GNUPLOT		?= gnuplot	# GNUplot
+INKSCAPE	?= inkscape	# Inkscape (svg support)
+XMGRACE		?= xmgrace	# XMgrace
+PNGTOPNM	?= pngtopnm	# From NetPBM - step 1 for png -> eps
+PPMTOPGM	?= ppmtopgm	# From NetPBM - (gray) step 2 for png -> eps
+PNMTOPS		?= pnmtops	# From NetPBM - step 3 for png -> eps
+GUNZIP		?= gunzip	# GZipped EPS
+# == Beamer Enlarged Output ==
+PSNUP		?= psnup
+# == Viewing Stuff ==
+VIEW_POSTSCRIPT	?= gv
+VIEW_PDF	?= xpdf
+VIEW_GRAPHICS	?= display
+
+# Command options for embedding fonts and postscript->pdf conversion
+PS_EMBED_OPTIONS	?= -dPDFSETTINGS=/printer -dEmbedAllFonts=true -dSubsetFonts=true -dMaxSubsetPct=100
+PS_COMPATIBILITY	?= 1.4
+
+# Defaults for GPI
+DEFAULT_GPI_EPS_FONTSIZE	?= 22
+DEFAULT_GPI_PDF_FONTSIZE	?= 12
+
+# Style file for ReST
+RST_STYLE_FILE			?= $(wildcard _rststyle_._include_.tex)
+
+# This ensures that even when echo is a shell builtin, we still use the binary
+# (the builtin doesn't always understand -n)
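+# The test below runs "$(ECHO) -n"; a builtin that does not understand -n
+# echoes the literal string "-n", which $(findstring) then detects, and we
+# fall back to the echo binary located with 'which'.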
+FIXED_ECHO	:= $(if $(findstring -n,$(shell $(ECHO) -n)),$(shell which echo),$(ECHO))
+ECHO		:= $(if $(FIXED_ECHO),$(FIXED_ECHO),$(ECHO))
+
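+# Detect what gnuplot can produce natively: if "set terminal pdf" is reported
+# as "unknown or ambiguous", fall back to eps output; if gnuplot is not
+# installed at all, the extension is "none".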
+define determine-gnuplot-output-extension
+$(if $(shell $(WHICH) $(GNUPLOT)),
+     $(if $(findstring unknown or ambiguous, $(shell $(GNUPLOT) -e "set terminal pdf" 2>&1)),
+	  eps, pdf),
+     none)
+endef
+
+GNUPLOT_OUTPUT_EXTENSION	?= $(strip $(call determine-gnuplot-output-extension))
+
+# Directory into which we place "binaries" if it exists.
+# Note that this can be changed on the commandline or in Makefile.ini:
+#
+# Command line:
+#   make BINARY_TARGET_DIR=$HOME/pdfs myfile.pdf
+#
+# Also, you can specify a relative directory (relative to the Makefile):
+#   make BINARY_TARGET_DIR=pdfs myfile.pdf
+#
+# Or, you can use Makefile.ini:
+#
+#   BINARY_TARGET_DIR := $(HOME)/bin_out
+#
+BINARY_TARGET_DIR	?= _out_
+
+RESTARTS		:= $(if $(MAKE_RESTARTS),$(MAKE_RESTARTS),0)
+# SH NOTES
+#
+# On some systems, /bin/sh, which is the default shell, is not linked to
+# /bin/bash.  While bash is supposed to be sh-compatible when invoked as sh, it
+# just isn't.  This section details some of the things you have to stay away
+# from to remain sh-compatible.
+#
+#	* File pattern expansion does not work for {}
+#	* [ "$x" = "$y" ] has to be [ x"$x" = x"$y" ]
+#	* &> for stderr redirection doesn't work, use 2>&1 instead
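+#
+# A minimal sh-safe sketch (illustrative only, not used by the rules below):
+#
+#	bash-only:  [ "$a" == "$b" ] && somecommand &> out.log
+#	sh-safe:    [ x"$a" = x"$b" ] && somecommand > out.log 2>&1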
+#
+# BSD SED NOTES
+#
+# BSD SED is not very nice compared to GNU sed, but it is the most
+# commonly-invoked sed on Macs (being based on BSD), so we have to cater to
+# it or require people to install GNU sed.  It seems like the GNU
+# requirement isn't too bad since this makefile is really a GNU makefile,
+# but apparently GNU sed is much less common than GNU make in general, so
+# I'm supporting it here.
+#
+# Sad experience has taught me the following about BSD sed:
+#
+# 	* \+ is not understood to mean \{1,\}
+# 	* \| is meaningless (does not branch)
+# 	* \n cannot be used as a substitution character
+# 	* ? does not mean \{0,1\}, but is literal
+# 	* a\ works, but only reliably for a single line if subsequent lines
+# 		have forward slashes in them (as is the case in postscript)
+#
+# For more info (on the Mac) you can consult
+#
+# man -M /usr/share/man re_format
+#
+# And look for the word "Obsolete" near the bottom.
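+#
+# For example (sketch only), a GNU-ism and its BSD-safe rewrite:
+#
+#	GNU sed:  s/[[:digit:]]\+//g        s/foo\|bar//
+#	BSD sed:  s/[[:digit:]]\{1,\}//g    s/foo// and s/bar// as two expressions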
+
+#
+# EXTERNAL PROGRAM DOCUMENTATION SCRIPT
+#
+
+# $(call output-all-programs,[<output file>])
+define output-all-programs
+	[ -f '$(this_file)' ] && \
+	$(SED) \
+		-e '/^[[:space:]]*#[[:space:]]*EXTERNAL PROGRAMS:/,/^$$/!d' \
+		-e '/EXTERNAL PROGRAMS/d' \
+		-e '/^$$/d' \
+		-e '/^[[:space:]]*#/i\ '\
+		-e 's/^[[:space:]]*#[[:space:]][^=]*//' \
+		$(this_file) $(if $1,> '$1',) || \
+	$(ECHO) "Cannot determine the name of this makefile."
+endef
+
+# If they misspell gray, it should still work.
+GRAY	?= $(call get-default,$(GREY),)
+
+#
+# Utility Functions and Definitions
+#
+
+# While not exactly a make function, this vim macro is useful.  It takes a
+# verbatim sed script and converts each line to something suitable in a command
+# context.  Just paste the script's contents into the editor, yank this into a
+# register (starting at '0') and run the macro once for each line of the
+# original script:
+#
+# 0i	-e :s/\$/$$/eg
+# :s/'/'"'"'/eg
+# ^Ela'A' \:noh
+# j
+
+# don't call this directly - it is here to avoid calling wildcard more than
+# once in remove-files.
+remove-files-helper	= $(if $1,$(RM) $1,$(sh_true))
+
+# $(call remove-files,file1 file2)
+remove-files		= $(call remove-files-helper,$(wildcard $1))
+
+# Removes all cleanable files in the given list
+# $(call clean-files,file1 file2 file3 ...)
+# Works exactly like remove-files, but filters out files in $(neverclean)
+clean-files		= \
+	$(call remove-files-helper,$(call cleanable-files,$(wildcard $1)))
+
+# Outputs all generated files to STDOUT, along with some others that are
+# created by these (e.g., .idx files end up producing .ilg and .ind files).
+# Discovered by reading *.fls OUTPUT lines and producing corresponding .ind
+# filenames as needed.
+#
+# $(call get-generated-names,<source recorder file (*.fls)>)
+define get-generated-names
+[ -f '$1' ] && \
+$(SED) \
+	-e '/^OUTPUT /{' \
+	-e '  s///' \
+	-e '  p' \
+	-e '  s/\.idx/\.ind/p' \
+	-e '  s/\.ind/\.ilg/p' \
+	-e '}' \
+	-e 'd' \
+	'$1' \
+| $(SORT) | $(UNIQ)
+endef
+
+# This removes files without checking whether they are there or not.  This
+# sometimes has to be used when the file is created by a series of shell
+# commands, but there ends up being a race condition: make doesn't know about
+# the file generation as quickly as the system does, so $(wildcard ...) doesn't
+# work right.  Blech.
+# $(call remove-temporary-files,filenames)
+remove-temporary-files	= $(if $1,$(RM) $1,:)
+
+# Create an identifier from a file name
+# $(call cleanse-filename,filename)
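+# e.g., $(call cleanse-filename,figs/plot.gpi) yields figs__plot_gpi (illustrative name)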
+cleanse-filename	= $(subst .,_,$(subst /,__,$1))
+
+# Escape dots
+# $(call escape-dots,str)
+escape-dots		= $(subst .,\\.,$1)
+
+# Test that a file exists
+# $(call test-exists,file)
+test-exists		= [ -e '$1' ]
+
+# $(call move-if-exists,source,destination)
+move-if-exists		= $(call test-exists,$1) && $(MV) '$1' '$2'
+
+# Copy file1 to file2 only if file2 doesn't exist or they are different
+# $(call copy-if-different,sfile,dfile)
+copy-if-different	= $(call test-different,$1,$2) && $(CP) '$1' '$2'
+copy-if-exists		= $(call test-exists,$1) && $(CP) '$1' '$2'
+move-if-different	= $(call test-different,$1,$2) && $(MV) '$1' '$2'
+replace-if-different-and-remove	= \
+	$(call test-different,$1,$2) \
+	&& $(MV) '$1' '$2' \
+	|| $(call remove-files,'$1')
+
+# Note that $(DIFF) returns success when the files are the SAME....
+# $(call test-different,sfile,dfile)
+test-different		= ! $(DIFF) -q '$1' '$2' >/dev/null 2>&1
+test-exists-and-different	= \
+	$(call test-exists,$2) && $(call test-different,$1,$2)
+
+# Return value 1, or value 2 if value 1 is empty
+# $(call get-default,<possibly empty arg>,<default value if empty>)
+get-default	= $(if $1,$1,$2)
+
+# Copy a file and log what's going on
+# $(call copy-with-logging,<source>,<target>)
+define copy-with-logging
+if [ -d '$2/' ]; then \
+	if $(CP) '$1' '$2/'; then \
+		$(ECHO) "$(C_INFO)Copied '$1' to '$2/'$(C_RESET)"; \
+	else \
+		$(ECHO) "$(C_ERROR)Failed to copy '$1' to '$2/'$(C_RESET)"; \
+	fi; \
+fi
+endef
+
+# Gives a reassuring message about the failure to find include files
+# $(call include-message,<list of include files>)
+define include-message
+$(strip \
+$(if $(filter-out $(wildcard $1),$1),\
+	$(shell $(ECHO) \
+	"$(C_INFO)NOTE: You may ignore warnings about the"\
+	"following files:" >&2;\
+	$(ECHO) >&2; \
+	$(foreach s,$(filter-out $(wildcard $1),$1),$(ECHO) '     $s' >&2;)\
+	$(ECHO) "$(C_RESET)" >&2)
+))
+endef
+# Characters that are hard to specify in certain places
+space		:= $(empty) $(empty)
+colon		:= \:
+comma		:= ,
+
+# Useful shell definitions
+sh_true		:= :
+sh_false	:= ! :
+
+# Clear out the standard interfering make suffixes
+.SUFFIXES:
+
+# Turn off forceful rm (RM is usually mapped to rm -f)
+ifdef SAFE_RM
+RM	:= rm
+endif
+
+# Turn command echoing back on with VERBOSE=1
+ifndef VERBOSE
+QUIET	:= @
+endif
+
+# Turn on shell debugging with SHELL_DEBUG=1
+# (EVERYTHING is echoed, even $(shell ...) invocations)
+ifdef SHELL_DEBUG
+SHELL	+= -x
+endif
+
+# Get the name of this makefile (always right in 3.80, often right in 3.79)
+# This is only really used for documentation, so it isn't too serious.
+ifdef MAKEFILE_LIST
+this_file	:= $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST))
+else
+this_file	:= $(wildcard GNUmakefile makefile Makefile)
+endif
+
+# Terminal color definitions
+
+REAL_TPUT 	:= $(if $(NO_COLOR),,$(shell $(WHICH) $(TPUT)))
+
+# $(call get-term-code,codeinfo)
+# e.g.,
+# $(call get-term-code,setaf 0)
+get-term-code = $(if $(REAL_TPUT),$(shell $(REAL_TPUT) $1),)
+
+black	:= $(call get-term-code,setaf 0)
+red	:= $(call get-term-code,setaf 1)
+green	:= $(call get-term-code,setaf 2)
+yellow	:= $(call get-term-code,setaf 3)
+blue	:= $(call get-term-code,setaf 4)
+magenta	:= $(call get-term-code,setaf 5)
+cyan	:= $(call get-term-code,setaf 6)
+white	:= $(call get-term-code,setaf 7)
+bold	:= $(call get-term-code,bold)
+uline	:= $(call get-term-code,smul)
+reset	:= $(call get-term-code,sgr0)
+
+#
+# User-settable definitions
+#
+LATEX_COLOR_WARNING	?= magenta
+LATEX_COLOR_ERROR	?= red
+LATEX_COLOR_INFO	?= green
+LATEX_COLOR_UNDERFULL	?= magenta
+LATEX_COLOR_OVERFULL	?= red bold
+LATEX_COLOR_PAGES	?= bold
+LATEX_COLOR_BUILD	?= cyan
+LATEX_COLOR_GRAPHIC	?= yellow
+LATEX_COLOR_DEP		?= green
+LATEX_COLOR_SUCCESS	?= green bold
+LATEX_COLOR_FAILURE	?= red bold
+
+# Gets the real color from a simple textual definition like those above
+# $(call get-color,ALL_CAPS_COLOR_NAME)
+# e.g., $(call get-color,WARNING)
+get-color	= $(subst $(space),,$(foreach c,$(LATEX_COLOR_$1),$($c)))
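+# With the defaults above, e.g., $(call get-color,OVERFULL) concatenates the
+# 'red' and 'bold' terminal codes.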
+
+#
+# STANDARD COLORS
+#
+C_WARNING	:= $(call get-color,WARNING)
+C_ERROR		:= $(call get-color,ERROR)
+C_INFO		:= $(call get-color,INFO)
+C_UNDERFULL	:= $(call get-color,UNDERFULL)
+C_OVERFULL	:= $(call get-color,OVERFULL)
+C_PAGES		:= $(call get-color,PAGES)
+C_BUILD		:= $(call get-color,BUILD)
+C_GRAPHIC	:= $(call get-color,GRAPHIC)
+C_DEP		:= $(call get-color,DEP)
+C_SUCCESS	:= $(call get-color,SUCCESS)
+C_FAILURE	:= $(call get-color,FAILURE)
+C_RESET		:= $(reset)
+
+#
+# PRE-BUILD TESTS
+#
+
+# Check that clean targets are not combined with other targets (weird things
+# happen, and it's not easy to fix them)
+hascleangoals	:= $(if $(sort $(filter clean clean-%,$(MAKECMDGOALS))),1)
+hasbuildgoals	:= $(if $(sort $(filter-out clean clean-%,$(MAKECMDGOALS))),1)
+ifneq "$(hasbuildgoals)" ""
+ifneq "$(hascleangoals)" ""
+$(error $(C_ERROR)Clean and build targets specified together$(C_RESET))
+endif
+endif
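+# e.g., 'make clean all' or 'make clean-graphics all-pdf' stops here instead
+# of mixing cleaning and building in one invocation.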
+
+#
+# VARIABLE DECLARATIONS
+#
+
+# Names of sed scripts that morph gnuplot files -- only the first found is used
+GNUPLOT_SED	:= global-gpi.sed gnuplot.sed
+GNUPLOT_GLOBAL	:= global._include_.gpi gnuplot.global
+
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+default_graphic_extension	?= eps
+latex_build_program		?= $(LATEX)
+build_target_extension		?= dvi
+hyperref_driver_pattern		?= hdvips
+hyperref_driver_error		?= Using dvips: specify ps2pdf in the hyperref options.
+else
+default_graphic_extension	?= pdf
+latex_build_program		?= $(PDFLATEX)
+build_target_extension		?= pdf
+hyperref_driver_pattern		?= hpdf.*
+hyperref_driver_error		?= Using pdflatex: specify pdftex in the hyperref options (or leave it blank).
+endif
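+#
+# For example (a sketch; doc.pdf is a hypothetical target and BUILD_STRATEGY
+# is normally set earlier or in Makefile.ini):
+#   make BUILD_STRATEGY=pdflatex doc.pdf   # native pdflatex tool chain
+#   make BUILD_STRATEGY=latex doc.pdf      # latex -> dvips/gs tool chain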
+
+# Files of interest
+all_files.tex		?= $(wildcard *.tex)
+all_files.tex.sh	?= $(wildcard *.tex.sh)
+all_files.tex.pl	?= $(wildcard *.tex.pl)
+all_files.tex.py	?= $(wildcard *.tex.py)
+all_files.rst		?= $(wildcard *.rst)
+all_files.fig		?= $(wildcard *.fig)
+all_files.gpi		?= $(wildcard *.gpi)
+all_files.dot		?= $(wildcard *.dot)
+all_files.xvg		?= $(wildcard *.xvg)
+all_files.svg		?= $(wildcard *.svg)
+all_files.png		?= $(wildcard *.png)
+all_files.jpg		?= $(wildcard *.jpg)
+all_files.eps.gz	?= $(wildcard *.eps.gz)
+all_files.eps		?= $(wildcard *.eps)
+
+# Utility function for obtaining all files not specified in $(neverclean)
+# $(call cleanable-files,file1 file2 file3 ...)
+# Returns the list of files that is not in $(wildcard $(neverclean))
+cleanable-files = $(filter-out $(wildcard $(neverclean)), $1)
+
+# Utility function for getting all .$1 files that are to be ignored
+#  * files listed in $(includes.$1)
+#  * files not listed in $(onlysources.$1) if it is defined
+ignore_files = \
+  $(includes.$1) \
+  $(if $(onlysources.$1),$(filter-out $(onlysources.$1), $(all_files.$1)))
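+# e.g., with onlysources.tex := main.tex and includes.tex := preamble.tex set
+# in Makefile.ini (hypothetical names), $(call ignore_files,tex) lists
+# preamble.tex plus every other .tex file except main.tex.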
+
+# Patterns to never be allowed as source targets
+ignore_patterns	:= %._include_
+
+# Patterns allowed as source targets but not included in 'all' builds
+nodefault_patterns := %._nobuild_ $(ignore_patterns)
+
+# Utility function for getting targets suitable building
+# $(call filter-buildable,suffix)
+filter-buildable	= \
+	$(filter-out $(call ignore_files,$1) \
+		$(addsuffix .$1,$(ignore_patterns)),$(all_files.$1))
+
+# Utility function for getting targets suitable for 'all' builds
+# $(call filter-default,suffix)
+filter-default		= \
+	$(filter-out $(call ignore_files,$1) \
+		$(addsuffix .$1,$(nodefault_patterns)),$(all_files.$1))
+
+# Top level sources that can be built even when they are not by default
+files.tex	:= $(call filter-buildable,tex)
+files.tex.sh	:= $(call filter-buildable,tex.sh)
+files.tex.pl	:= $(call filter-buildable,tex.pl)
+files.tex.py	:= $(call filter-buildable,tex.py)
+files.rst	:= $(call filter-buildable,rst)
+files.gpi	:= $(call filter-buildable,gpi)
+files.dot	:= $(call filter-buildable,dot)
+files.fig	:= $(call filter-buildable,fig)
+files.xvg	:= $(call filter-buildable,xvg)
+files.svg	:= $(call filter-buildable,svg)
+files.png	:= $(call filter-buildable,png)
+files.jpg	:= $(call filter-buildable,jpg)
+files.eps.gz	:= $(call filter-buildable,eps.gz)
+
+# Make all pstex targets secondary.  The pstex_t target requires the pstex
+# target, and nothing else really depends on it, so it often gets deleted.
+# This avoids that by allowing *all* fig files to be pstex targets, which is
+# perfectly valid and causes no problems even if they're going to become eps
+# files in the end.
+.SECONDARY:	$(patsubst %.fig,%.pstex,$(files.fig))
+
+# Top level sources that are built by default targets
+default_files.tex	:= $(call filter-default,tex)
+default_files.tex.sh	:= $(call filter-default,tex.sh)
+default_files.tex.pl	:= $(call filter-default,tex.pl)
+default_files.tex.py	:= $(call filter-default,tex.py)
+default_files.rst	:= $(call filter-default,rst)
+default_files.gpi	:= $(call filter-default,gpi)
+default_files.dot	:= $(call filter-default,dot)
+default_files.fig	:= $(call filter-default,fig)
+default_files.xvg	:= $(call filter-default,xvg)
+default_files.svg	:= $(call filter-default,svg)
+default_files.png	:= $(call filter-default,png)
+default_files.jpg	:= $(call filter-default,jpg)
+default_files.eps.gz	:= $(call filter-default,eps.gz)
+
+# Utility function for creating larger lists of files
+# $(call concat-files,suffixes,[prefix])
+concat-files	= $(foreach s,$1,$($(if $2,$2_,)files.$s))
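+# e.g., $(call concat-files,tex.sh tex.pl,all) expands to the combined
+# contents of $(all_files.tex.sh) and $(all_files.tex.pl)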
+
+# Useful file groupings
+all_files_source	:= $(call concat-files,tex,all)
+all_files_scripts	:= $(call concat-files,tex.sh tex.pl tex.py rst,all)
+
+.PHONY: $(all_files_scripts)
+
+default_files_source	:= $(call concat-files,tex,default)
+default_files_scripts	:= $(call concat-files,tex.sh tex.pl tex.py rst,default)
+
+files_source	:= $(call concat-files,tex)
+files_scripts	:= $(call concat-files,tex.sh tex.pl tex.py rst)
+
+# Utility function for obtaining stems
+# $(call get-stems,suffix,[prefix])
+get-stems	= $(sort $($(if $2,$2_,)files.$1:%.$1=%))
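+# e.g., if all_files.gpi is 'plot1.gpi plot2.gpi' (hypothetical), then
+# $(call get-stems,gpi,all) is 'plot1 plot2'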
+
+# List of all stems (including ._include_ and ._nobuild_ file stems)
+all_stems.tex		:= $(call get-stems,tex,all)
+all_stems.tex.sh	:= $(call get-stems,tex.sh,all)
+all_stems.tex.pl	:= $(call get-stems,tex.pl,all)
+all_stems.tex.py	:= $(call get-stems,tex.py,all)
+all_stems.rst		:= $(call get-stems,rst,all)
+all_stems.fig		:= $(call get-stems,fig,all)
+all_stems.gpi		:= $(call get-stems,gpi,all)
+all_stems.dot		:= $(call get-stems,dot,all)
+all_stems.xvg		:= $(call get-stems,xvg,all)
+all_stems.svg		:= $(call get-stems,svg,all)
+all_stems.png		:= $(call get-stems,png,all)
+all_stems.jpg		:= $(call get-stems,jpg,all)
+all_stems.eps.gz	:= $(call get-stems,eps.gz,all)
+all_stems.eps		:= $(call get-stems,eps,all)
+
+# List of all default stems (all default PDF targets):
+default_stems.tex		:= $(call get-stems,tex,default)
+default_stems.tex.sh		:= $(call get-stems,tex.sh,default)
+default_stems.tex.pl		:= $(call get-stems,tex.pl,default)
+default_stems.tex.py		:= $(call get-stems,tex.py,default)
+default_stems.rst		:= $(call get-stems,rst,default)
+default_stems.fig		:= $(call get-stems,fig,default)
+default_stems.gpi		:= $(call get-stems,gpi,default)
+default_stems.dot		:= $(call get-stems,dot,default)
+default_stems.xvg		:= $(call get-stems,xvg,default)
+default_stems.svg		:= $(call get-stems,svg,default)
+default_stems.png		:= $(call get-stems,png,default)
+default_stems.jpg		:= $(call get-stems,jpg,default)
+default_stems.eps.gz		:= $(call get-stems,eps.gz,default)
+
+# List of all stems (all possible bare PDF targets created here):
+stems.tex		:= $(call get-stems,tex)
+stems.tex.sh		:= $(call get-stems,tex.sh)
+stems.tex.pl		:= $(call get-stems,tex.pl)
+stems.tex.py		:= $(call get-stems,tex.py)
+stems.rst		:= $(call get-stems,rst)
+stems.fig		:= $(call get-stems,fig)
+stems.gpi		:= $(call get-stems,gpi)
+stems.dot		:= $(call get-stems,dot)
+stems.xvg		:= $(call get-stems,xvg)
+stems.svg		:= $(call get-stems,svg)
+stems.png		:= $(call get-stems,png)
+stems.jpg		:= $(call get-stems,jpg)
+stems.eps.gz		:= $(call get-stems,eps.gz)
+
+# Utility function for creating larger lists of stems
+# $(call concat-stems,suffixes,[prefix])
+concat-stems	= $(sort $(foreach s,$1,$($(if $2,$2_,)stems.$s)))
+
+# The most likely to be source but not finished product go first
+graphic_source_extensions	:= fig \
+				   gpi \
+				   xvg \
+				   svg \
+				   dot \
+				   eps.gz
+
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+graphic_source_extensions	+= png jpg
+graphic_target_extensions	:= eps ps
+else
+graphic_source_extensions	+= eps
+graphic_target_extensions	:= pdf png jpg mps tif
+endif
+
+all_stems_source	:= $(call concat-stems,tex,all)
+all_stems_script	:= $(call concat-stems,tex.sh tex.pl tex.py rst,all)
+all_stems_graphic	:= $(call concat-stems,$(graphic_source_extensions),all)
+all_stems_ss		:= $(sort $(all_stems_source) $(all_stems_script))
+all_stems_sg		:= $(sort $(all_stems_script))
+all_stems_ssg		:= $(sort $(all_stems_ss))
+
+default_stems_source	:= $(call concat-stems,tex,default)
+default_stems_script	:= $(call concat-stems,tex.sh tex.pl tex.py rst,default)
+default_stems_ss	:= $(sort $(default_stems_source) $(default_stems_script))
+default_stems_sg	:= $(sort $(default_stems_script))
+default_stems_ssg	:= $(sort $(default_stems_ss))
+
+stems_source		:= $(call concat-stems,tex)
+stems_script		:= $(call concat-stems,tex.sh tex.pl tex.py rst)
+stems_graphic		:= $(call concat-stems,$(graphic_source_extensions))
+stems_gg		:= $(sort $(stems_graphic))
+stems_ss		:= $(sort $(stems_source) $(stems_script))
+stems_sg		:= $(sort $(stems_script))
+stems_ssg		:= $(sort $(stems_ss))
+
+# Calculate names that can generate the need for an include file.  We can't
+# really do this with patterns because it's too easy to screw up, so we create
+# an exhaustive list.
+allowed_source_suffixes	:= \
+	pdf \
+	ps \
+	dvi \
+	ind \
+	nls \
+	bbl \
+	aux \
+	aux.make \
+	d \
+	auxbbl.make \
+	_graphics \
+	_show
+allowed_source_patterns		:= $(addprefix %.,$(allowed_source_suffixes))
+
+allowed_graphic_suffixes	:= \
+	pdf \
+	eps \
+	gpihead.make \
+	gpi.d
+allowed_graphic_patterns	:= $(addprefix %.,$(allowed_graphic_suffixes))
+
+# All targets allowed to build documents
+allowed_source_targets	:= \
+	$(foreach suff,$(allowed_source_suffixes),\
+	$(addsuffix .$(suff),$(stems_ssg)))
+
+# All targets allowed to build graphics
+allowed_graphic_targets	:= \
+	$(foreach suff,$(allowed_graphic_suffixes),\
+	$(addsuffix .$(suff),$(stems_gg)))
+
+# All targets that build multiple documents (like 'all')
+allowed_batch_source_targets	:= \
+	all \
+	all-pdf \
+	all-ps \
+	all-dvi \
+	all-bbl \
+	all-ind \
+	all-gls \
+	all-nls \
+	show
+
+# All targets that build multiple graphics (independent of document)
+allowed_batch_graphic_targets	:= \
+	all-graphics \
+	all-pstex \
+	all-dot2tex \
+	show-graphics
+
+# Now we figure out which stuff is available as a make target for THIS RUN.
+real_goals	:= $(call get-default,$(filter-out _includes,$(MAKECMDGOALS)),\
+			all)
+
+specified_source_targets	:= $(strip \
+	$(filter $(allowed_source_targets) $(stems_ssg),$(real_goals)) \
+	)
+
+specified_batch_source_targets	:= $(strip \
+	$(filter $(allowed_batch_source_targets),$(real_goals)) \
+	)
+
+specified_graphic_targets	:= $(strip \
+	$(filter $(allowed_graphic_targets),$(real_goals)) \
+	)
+
+specified_batch_graphic_targets	:= $(strip \
+	$(filter $(allowed_batch_graphic_targets),$(real_goals)) \
+	)
+
+specified_gpi_targets	:= $(patsubst %.gpi,%.$(default_graphic_extension),\
+	$(filter $(patsubst %.$(default_graphic_extension),%.gpi,$(specified_graphic_targets)),\
+		$(all_files.gpi)) \
+	)
+
+# Determine which .d files need including from the information gained above.
+# This is done by first checking whether a batch target exists.  If it does,
+# then all *default* stems are used to create possible includes (nobuild need
+# not apply for batch status).  If no batch targets exist, then the individual
+# targets are considered and appropriate includes are taken from them.
+source_stems_to_include	:= \
+	$(sort\
+	$(if $(specified_batch_source_targets),\
+		$(default_stems_ss),\
+		$(foreach t,$(specified_source_targets),\
+		$(foreach p,$(allowed_source_patterns),\
+			$(patsubst $p,%,$(filter $p $(stems_ssg),$t)) \
+		)) \
+	))
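+# e.g., 'make all' pulls in the .d file of every default source stem, while
+# 'make paper.pdf' (paper being a hypothetical stem) pulls in only paper.d.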
+
+# Determine which .gpi.d files are needed using the above information.  We
+# first check whether a batch target is specified, then check individual
+# graphics that may have been specified.
+graphic_stems_to_include	:= \
+	$(sort\
+	$(if $(specified_batch_graphic_targets),\
+		$(default_stems.gpi),\
+		$(foreach t,$(specified_gpi_targets),\
+		$(foreach p,$(allowed_graphic_patterns),\
+			$(patsubst $p,%,$(filter $p,$t)) \
+		)) \
+	))
+
+# All dependencies for the 'all' targets
+all_pdf_targets		:= $(addsuffix .pdf,$(stems_ssg))
+all_ps_targets		:= $(addsuffix .ps,$(stems_ssg))
+all_dvi_targets		:= $(addsuffix .dvi,$(stems_ssg))
+all_tex_targets		:= $(addsuffix .tex,$(stems_sg))
+all_d_targets		:= $(addsuffix .d,$(stems_ssg))
+all_graphics_targets	:= $(addsuffix .$(default_graphic_extension),$(stems_gg))
+intermediate_graphics_targets	:= $(if $(filter pdf,$(default_graphic_extension)),$(addsuffix .eps,$(stems_gg)),)
+all_pstex_targets	:= $(addsuffix .pstex_t,$(stems.fig))
+all_dot2tex_targets	:= $(addsuffix .dot_t,$(stems.dot))
+
+all_known_graphics	:= $(sort $(all_graphics_targets) $(wildcard *.$(default_graphic_extension)))
+
+default_pdf_targets	:= $(addsuffix .pdf,$(default_stems_ss))
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+default_ps_targets	:= $(addsuffix .ps,$(default_stems_ss))
+default_dvi_targets	:= $(addsuffix .dvi,$(default_stems_ss))
+pre_pdf_extensions	:= dvi ps
+endif
+
+# Extensions generated by LaTeX invocation that can be removed when complete
+rm_ext		:= \
+	log *.log aux $(pre_pdf_extensions) pdf blg bbl out nav snm toc lof lot lol pfg \
+	fls vrb idx ind ilg glg glo gls lox nls nlo nlg brf mtc maf
+backup_patterns	:= *~ *.bak *.backup body.tmp head.tmp
+
+graph_stem	:= _graph
+
+# All LaTeX-generated files that can be safely removed
+
+rm_tex := \
+	$(foreach e,$(rm_ext),$(addsuffix .$e,$(all_stems_source))) \
+	$(foreach e,$(rm_ext) tex,$(addsuffix .$e,$(all_stems_sg))) \
+	$(addsuffix .log,$(all_ps_targets) $(all_pdf_targets)) \
+	$(addsuffix .*.log,$(stems_graphic))
+
+# These are the files that will affect .gpi transformation for all .gpi files.
+#
+# Use only the first one found.  Backward compatible values are at the end.
+# Note that we use foreach, even though wildcard also returns a list, to ensure
+# that the order in the uppercase variables is preserved.  Directory listings
+# provide no such guarantee, so we avoid relying on them.
+gpi_sed		:= $(strip \
+	$(firstword $(foreach f,$(GNUPLOT_SED),$(wildcard $f))))
+gpi_global	:= $(strip \
+	$(firstword $(foreach f,$(GNUPLOT_GLOBAL),$(wildcard $f))))
+
+#
+# Functions used in generating output
+#
+
+# Outputs all source dependencies to stdout.  The first argument is the file to
+# be parsed, the second is a list of files that will show up as dependencies in
+# the new .d file created here.
+#
+# NOTE: BSD sed does not understand \|, so we have to do something more
+# clunky to extract suitable extensions.
+#
+# Also, we do a little bit of funny rewriting up front (TARGETS=) to make sure
+# that we can properly backslash-escape spaces in file names (e.g., on Cygwin
+# for tex distributions that have "Program Files" in their name).
+#
+# $(call get-inputs,<parsed file>,<target files>)
+define get-inputs
+$(SED) \
+-e '/^INPUT/!d' \
+-e 's!^INPUT \(\./\)\{0,1\}!!' \
+-e 's/[[:space:]]/\\ /g' \
+-e 's/\(.*\)\.aux$$/\1.tex/' \
+-e '/\.tex$$/b addtargets' \
+-e '/\.cls$$/b addtargets' \
+-e '/\.sty$$/b addtargets' \
+-e '/\.pstex_t$$/b addtargets' \
+-e '/\.dot_t$$/b addtargets' \
+-e 'd' \
+-e ':addtargets' \
+-e 's/^/$2: /' \
+$1 | $(SORT) | $(UNIQ)
+endef
+
+# $(call get-missing-inputs,<log file>,<target files>)
+define get-missing-inputs
+$(SED) \
+-e '$$ b para' \
+-e '/^$$/b para' \
+-e 'H' \
+-e 'd' \
+-e ':para' \
+-e 'x' \
+-e '/^$$/d' \
+-e 's/^\n*//' \
+-e '/^! LaTeX Error: File /{' \
+-e '  s/^/::DOUBLE_PARAGRAPH::/' \
+-e '  h' \
+-e '  d' \
+-e '}' \
+-e 's/^::DOUBLE_PARAGRAPH:://' \
+-e '/Default extension: /!d' \
+-e 's/[[:space:]]\{1,\}/ /g' \
+-e 's/\n\{1,\}/ /g' \
+-e 's/^.*File `//' \
+-e 's/'"'"' not found\..*//' \
+-e '/\.tex/!s/$$/.tex/' \
+-e 's/[[:space:]]/\\ /g' \
+-e 'h' \
+-e 's/.*/# MISSING input "&" - (presence of comment affects build)/' \
+-e 'p' \
+-e 's/.*//' \
+-e 'x' \
+-e 's/^/$2: /' \
+$1 | $(SORT) | $(UNIQ)
+endef
+
+# Get source file for specified graphics stem.
+#
+# $(call graphics-source,<stem>)
+define graphics-source
+$(strip $(firstword \
+	$(wildcard \
+		$(addprefix $1.,\
+			$(graphic_source_extensions))) \
+	$1 \
+))
+endef
+
+# Get the target file for the specified graphics file/stem
+#
+# $(call graphics-target,<stem>)
+define graphics-target
+$(strip $(if 	$(filter $(addprefix %.,$(graphic_target_extensions)),$1), $1,
+	$(firstword $(patsubst $(addprefix %.,$(graphic_source_extensions) $(graphic_target_extensions)), %, $1).$(default_graphic_extension) $1.$(default_graphic_extension))))
+endef
+
+# Outputs all of the graphical dependencies to stdout.  The first argument is
+# the stem of the source file being built, the second is a list of suffixes
+# that will show up as dependencies in the generated .d file.
+#
+# Note that we try to escape spaces in filenames where possible.  We have to do
+# it with three backslashes so that as the name percolates through the makefile
+# it eventually ends up with the proper escaping when the build rule is found.
+# Ugly, but it appears to work.  Note that graphicx doesn't allow filenames
+# with spaces, so this could in many ways be moot unless you're using something
+# like grffile.
+#
+# For pdflatex, we really need the missing file to be specified without an
+# extension, otherwise compilation barfs on the first missing file.  Truly
+# annoying, but there you have it.
+#
+# It turns out that the graphics errors, although they have lines with empty
+# space, are only made of two paragraphs.  So, we just use some sed magic to
+# get everything into paragraphs, detect when it's a paragraph that interests
+# us, and double it up.  Then we get the filename only if we're missing
+# extensions (a sign that it's graphicx complaining).
+#
+# $(call get-graphics,<target file stem>)
+#.log,$(addprefix $*.,d $(build_target_extension) _graphics)
+define get-graphics
+$(SED) \
+-e '$$ b para' \
+-e '/^$$/b para' \
+-e 'H' \
+-e 'd' \
+-e ':para' \
+-e 'x' \
+-e '/^$$/d' \
+-e 's/^\n*//' \
+-e '/^! LaTeX Error: File `/{' \
+-e '  s/^/::DOUBLE_PARAGRAPH::/' \
+-e '  h' \
+-e '  d' \
+-e '}' \
+-e 's/^::DOUBLE_PARAGRAPH:://' \
+-e '/could not locate the file with any of these extensions:/{' \
+-e '  s/\n\{1,\}/ /g' \
+-e '  s/[[:space:]]\{1,\}/ /g' \
+-e '  s/^.*File `//' \
+-e '  s/'"'"' not found\..*//' \
+-e '  h' \
+-e '  s/.*/# MISSING stem "&" - (presence of comment affects build)/' \
+-e '  p' \
+-e '  g' \
+-e '  b addtargets' \
+-e '}' \
+-e '/.*File: \(.*\) Graphic file (type [^)]*).*/{' \
+-e '  s//\1/' \
+-e '  b addtargets' \
+-e '}' \
+-e 'd' \
+-e ':addtargets' \
+-e 's/[[:space:]]/\\\\\\&/g' \
+-e 'h' \
+-e 's/.*/-include &.gpi.d/' \
+-e 'p' \
+-e 'g' \
+-e 's/.*/$(addprefix $1,.d): $$$$(call graphics-source,&)/' \
+-e 'p' \
+-e 's/.*//' \
+-e 'x' \
+-e 's/.*/$(addprefix $1.,$(build_target_extension) _graphics): $$$$(call graphics-target,&)/' \
+-e 'p' \
+-e 'd' \
+$*.log
+endef
+
+# Checks for build failure due to pstex inclusion, and gives instructions.
+#
+# $(call die-on-pstexs,<parsed file>)
+define die-on-pstexs
+if $(EGREP) -q '^! LaTeX Error: File .*\.pstex.* not found' $1; then \
+	$(ECHO) "$(C_ERROR)Missing pstex_t file(s)$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)Please run$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)  make all-pstex$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)before proceeding.$(C_RESET)"; \
+	exit 1; \
+fi
+endef
+
+# Checks for build failure due to dot2tex, and gives instructions.
+#
+# $(call die-on-dot2tex,<parsed file>)
+define die-on-dot2tex
+if $(EGREP) -q ' LaTeX Error: File .*\.dot_t.* not found' $1; then \
+	$(ECHO) "$(C_ERROR)Missing dot_t file(s)$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)Please run$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)  make all-dot2tex$(C_RESET)"; \
+	$(ECHO) "$(C_ERROR)before proceeding.$(C_RESET)"; \
+	exit 1; \
+fi
+endef
+
+# Checks for the existence of a .aux file, and dies with an error message if it
+# isn't there.  Note that we pass the file stem in, not the full filename,
+# e.g., to check for foo.aux, we call it thus: $(call die-on-no-aux,foo)
+#
+# $(call die-on-no-aux,<aux stem>)
+define die-on-no-aux
+if [ ! -e '$1.aux' ]; then \
+	$(call colorize-latex-errors,$1.log); \
+	exit 1; \
+fi
+endef
+
+# Outputs all index files to stdout.  Arg 1 is the source file stem, arg 2 is
+# the list of targets for the discovered dependency.
+#
+# $(call get-log-index,<log file stem>,<target files>)
+define get-log-index
+$(SED) \
+-e 's/^No file \(.*\.ind\)\.$$/TARGETS=\1/' \
+-e 's/^No file \(.*\.[gn]ls\)\.$$/TARGETS=\1/' \
+-e 's/[[:space:]]/\\&/g' \
+-e '/^TARGETS=/{' \
+-e '  h' \
+-e '  s/^TARGETS=/$2: /p' \
+-e '  g' \
+-e '  s/^TARGETS=\(.*\)/\1: $1.tex/p' \
+-e '}' \
+-e 'd' \
+'$1.log' | $(SORT) | $(UNIQ)
+endef
+
+
+# Outputs all bibliography files to stdout.  Arg 1 is the source stem, arg 2 is
+# a list of targets for each dependency found.
+#
+# The script kills all lines that do not contain bibdata.  Remaining lines have
+# the \bibdata macro and delimiters removed to create a dependency list.  A
+# trailing comma is added, then all adjacent commas are collapsed into a single
+# comma.  Then commas are replaced with the string .bib[space], and the
+# trailing space is killed off.  Finally, all filename spaces are escaped.
+# This produces a list of space-delimited .bib filenames, which is what the
+# make dep file expects to see.
+#
+# Note that we give kpsewhich a bogus argument so that a failure of sed to
+# produce output will not cause an error.
+#
+# $(call get-bibs,<aux file>,<targets>)
+define get-bibs
+$(SED) \
+-e '/^\\bibdata/!d' \
+-e 's/\\bibdata{\([^}]*\)}/\1,/' \
+-e 's/,\{2,\}/,/g' \
+-e 's/[[:space:]]/\\&/g' \
+-e 's/,/.bib /g' \
+-e 's/ \{1,\}$$//' \
+$1 | $(XARGS) $(KPSEWHICH) '#######' | \
+$(SED) \
+-e 's/^/$2: /' | \
+$(SORT) | $(UNIQ)
+endef
+
+# Makes an aux file that only has stuff relevant to the target in it
+# $(call make-auxtarget-file,<flattened-aux>,<new-aux>)
+define make-auxtarget-file
+$(SED) \
+-e '/^\\newlabel/!d' \
+$1 > $2
+endef
+
+# Makes an aux file that only has stuff relevant to the bbl in it
+# $(call make-auxbbl-file,<flattened-aux>,<new-aux>)
+define make-auxbbl-file
+$(SED) \
+-e '/^\\newlabel/d' \
+$1 > $2
+endef
+
+# Makes a .gpi.d file from a .gpi file
+# $(call make-gpi-d,<.gpi>,<.gpi.d>)
+define make-gpi-d
+$(ECHO) '# vim: ft=make' > $2; \
+$(ECHO) 'ifndef INCLUDED_$(call cleanse-filename,$2)' >> $2; \
+$(ECHO) 'INCLUDED_$(call cleanse-filename,$2) := 1' >> $2; \
+$(call get-gpi-deps,$1,$(addprefix $(2:%.gpi.d=%).,$(GNUPLOT_OUTPUT_EXTENSION) gpi.d)) >> $2; \
+$(ECHO) 'endif' >> $2;
+endef
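+#
+# A generated plot.gpi.d (hypothetical stem, assuming pdf gnuplot output)
+# looks roughly like:
+#
+#   # vim: ft=make
+#   ifndef INCLUDED_plot_gpi_d
+#   INCLUDED_plot_gpi_d := 1
+#   plot.pdf plot.gpi.d: data.dat
+#   endif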
+
+# Parse .gpi files for data and loaded dependencies, output to stdout
+#
+# The sed script here tries to be clever about obtaining valid
+# filenames from the gpi file.  It assumes that the plot command starts its own
+# line, which is not too difficult a constraint to satisfy.
+#
+# This command script also generates 'include' directives for every 'load'
+# command in the .gpi file.  The load command must appear on a line by itself
+# and the file it loads must have the suffix .gpi.  If you don't want it to be
+# compiled when running make graphics, then give it a suffix of ._include_.gpi.
+#
+# $(call get-gpi-deps,<gpi file>,<targets>)
+define get-gpi-deps
+$(SED) \
+-e '/^[[:space:]]*s\{0,1\}plot/,/[^\\]$$/{' \
+-e ' H' \
+-e ' /[^\\]$$/{' \
+-e '  s/.*//' \
+-e '  x' \
+-e '  s/\\\{0,1\}\n//g' \
+-e '  s/^[[:space:]]*s\{0,1\}plot[[:space:]]*\(\[[^]]*\][[:space:]]*\)*/,/' \
+-e '  s/[[:space:]]*\(['\''"][^'\''"]*['\''"]\)\{0,1\}[^,]*/\1/g' \
+-e '  s/,['\''"]-\{0,1\}['\''"]//g' \
+-e '  s/[,'\''"]\{1,\}/ /g' \
+-e '  s!.*!$2: &!' \
+-e '  p' \
+-e ' }' \
+-e ' d' \
+-e '}' \
+-e 's/^[[:space:]]*load[[:space:]]*['\''"]\([^'\''"]*\.gpi\)['\''"].*$$/-include \1.d/p' \
+-e 'd' \
+$1
+endef
+
+# Colorizes real, honest-to-goodness LaTeX errors that can't be overcome with
+# recompilation.
+#
+# Note that we only ignore file not found errors for things that we know how to
+# build, like graphics files.
+#
+# $(call colorize-latex-errors,<log file>)
+define colorize-latex-errors
+$(SED) \
+-e '$$ b para' \
+-e '/^$$/b para' \
+-e 'H' \
+-e 'd' \
+-e ':para' \
+-e 'x' \
+-e '/^$$/d' \
+-e 's/^\n*//' \
+-e '/^! LaTeX Error: File /{' \
+-e '  s/^/::DOUBLE_PARAGRAPH::/' \
+-e '  h' \
+-e '  d' \
+-e '}' \
+-e 's/^::DOUBLE_PARAGRAPH:://' \
+-e '/could not locate the file with any of these extensions:/d' \
+-e '/Missing .begin.document/{' \
+-e '  h' \
+-e '  s/.*/Are you trying to build an include file?/' \
+-e '  x' \
+-e '  G' \
+-e '}' \
+-e '/ LaTeX Error: Cannot determine size/d' \
+-e 's/.* LaTeX Error .*/$(C_ERROR)&$(C_RESET)/p' \
+-e 's/Error: pdflatex (file .*/$(C_ERROR)& - try specifying it without an extension$(C_RESET)/p' \
+-e '/.*\*hyperref using.*driver \(.*\)\*.*/{' \
+-e '  s//\1/' \
+-e '  /^$(hyperref_driver_pattern)$$/!{' \
+-e '    s/.*//' \
+-e '    p' \
+-e '    s/.*/$(C_ERROR)--- Using incorrect driver for hyperref! ---$(C_RESET)/' \
+-e '    p' \
+-e '    s/.*/$(C_ERROR)$(hyperref_driver_error)$(C_RESET)/' \
+-e '    p' \
+-e '  }' \
+-e '  d' \
+-e '}' \
+-e '/ LaTeX Error: Unknown graphics extension/{' \
+-e '  s/^/     /' \
+-e '  h' \
+-e '  s/.*/--- Graphics extension error:/' \
+-e '  G' \
+-e '  h' \
+-e '  s/.*/--- If you specified the extension explicitly in your .tex file, try removing it./' \
+-e '  H' \
+-e '  g' \
+-e '  s/.*/$(C_ERROR)&$(C_RESET)/' \
+-e '  p' \
+-e '  s/.*//' \
+-e '  h' \
+-e '  b' \
+-e '}' \
+-e 's/.*\(\n\{0,\}! .*\)/$(C_ERROR)\1$(C_RESET)/p' \
+-e 'd' \
+$1
+endef
+
+# Colorize Makeindex errors
+define colorize-makeindex-errors
+$(SED) \
+-e '/^!! /{' \
+-e '  N' \
+-e '  s/^.*$$/$(C_ERROR)&$(C_RESET)/' \
+-e '  p' \
+-e '}' \
+-e 'd' \
+$1
+endef
+
+# Colorize epstopdf errors
+#
+# $(call colorize-epstopdf-errors,<log file>)
+define colorize-epstopdf-errors
+$(SED) \
+-e '/^Error:/,/^Execution stack:/{' \
+-e '  /^Execution stack:/d' \
+-e '  s/.*/$(C_ERROR)&$(C_RESET)/' \
+-e '  p' \
+-e '}' \
+-e 'd' \
+$1
+endef
+
+# Colorize GNUplot errors
+#
+# $(call colorize-gnuplot-errors,<log file>)
+define colorize-gnuplot-errors
+$(SED) \
+-e '/, line [0-9]*:/!{' \
+-e '  H' \
+-e '  x' \
+-e '  s/.*\n\(.*\n.*\)$$/\1/' \
+-e '  x' \
+-e '}' \
+-e '/, line [0-9]*:/{' \
+-e '  H' \
+-e '  /unknown.*terminal type/{' \
+-e '    s/.*/--- Try changing the GNUPLOT_OUTPUT_EXTENSION variable to 'eps'./' \
+-e '	H' \
+-e '  }' \
+-e '  /gpihead/{' \
+-e '    s/.*/--- This could be a Makefile bug - contact the maintainer./' \
+-e '    H' \
+-e '  }' \
+-e '  g' \
+-e '  s/.*/$(C_ERROR)&$(C_RESET)/' \
+-e '  p' \
+-e '}' \
+-e '/^gnuplot>/,/^$$/{' \
+-e '  s/^gnuplot.*/$(C_ERROR)&/' \
+-e '  s/^$$/$(C_RESET)/' \
+-e '  p' \
+-e '}' \
+-e 'd' \
+$1
+endef
+
+# Colorize GraphViz errors
+#
+# $(call colorize-dot-errors,<log file>)
+define colorize-dot-errors
+$(SED) \
+-e '/^Error:/,/context:/s/.*/$(C_ERROR)&$(C_RESET)/p' \
+-e 's/^Warning:.*/$(C_WARNING)&$(C_RESET)/p' \
+-e 'd' \
+'$1'
+endef
+
+# Get all important .aux files from the top-level .aux file and merges them all
+# into a single file, which it outputs to stdout.
+#
+# $(call flatten-aux,<toplevel aux>,<output file>)
+define flatten-aux
+$(SED) \
+-e '/\\@input{\(.*\)}/{' \
+-e     's//\1/' \
+-e     's![.:]!\\&!g' \
+-e     'h' \
+-e     's!.*!\\:\\\\@input{&}:{!' \
+-e     'p' \
+-e     'x' \
+-e     's/\\././g' \
+-e     's/.*/r &/p' \
+-e     's/.*/d/p' \
+-e     's/.*/}/p' \
+-e     'd' \
+-e '}' \
+-e 'd' \
+'$1' > "$1.$$$$.sed.make"; \
+$(SED) -f "$1.$$$$.sed.make" '$1' > "$1.$$$$.make"; \
+$(SED) \
+-e '/^\\relax/d' \
+-e '/^\\bibcite/d' \
+-e 's/^\(\\newlabel{[^}]\{1,\}}\).*/\1/' \
+"$1.$$$$.make" | $(SORT) > '$2'; \
+$(call remove-temporary-files,$1.$$$$.make $1.$$$$.sed.make)
+endef
+
+# Generate pdf from postscript
+#
+# Note that we don't just call ps2pdf, since there are so many versions of that
+# script on various systems.  Instead, we call the postscript interpreter
+# directly.
+#
+# $(call ps2pdf,infile,outfile,[embed fonts])
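+# (any non-empty third argument turns on font embedding via PS_EMBED_OPTIONS)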
+define ps2pdf
+	$(GS) \
+		-dSAFER -dCompatibilityLevel=$(PS_COMPATIBILITY) \
+		$(if $3,$(PS_EMBED_OPTIONS)) \
+		-q -dNOPAUSE -dBATCH \
+		-sDEVICE=pdfwrite -sstdout=%stderr \
+		'-sOutputFile=$2' \
+		-dSAFER -dCompatibilityLevel=$(PS_COMPATIBILITY) \
+		$(if $3,$(PS_EMBED_OPTIONS)) \
+		-c .setpdfwrite \
+		-f '$1'
+endef
+
+# Colorize LaTeX output.
+# This uses a neat trick from the Sed & Awk Book from O'Reilly:
+# 1) If a line has a single ending paren, delete it to make a blank line (so
+#	that we catch the first error, which is not always preceded by a blank
+#	line).
+# 2) Ensure that the last line of the file gets appended to the hold buffer,
+# 	and blank it out to trigger end-of-paragraph logic below.
+# 3) When encountering a blank line (LaTeX output helpfully breaks output on
+# 	newlines)
+# 	a) swap the hold buffer (containing the paragraph) into the pattern buffer (putting a blank line into the hold buffer),
+# 	b) remove the newline at the beginning (don't ask),
+# 	c) apply any colorizing substitutions necessary to ensure happiness.
+# 	d) get the newline out of the hold buffer and append it
+# 	e) profit! (print)
+# 4) Anything not colorized is deleted, unless in verbose mode.
+color_tex	:= \
+	$(SED) \
+	-e '$${' \
+	-e '  /^$$/!{' \
+	-e '    H' \
+	-e '    s/.*//' \
+	-e '  }' \
+	-e '}' \
+	-e '/^$$/!{' \
+	-e '  H' \
+	-e '  d' \
+	-e '}' \
+	-e '/^$$/{' \
+	-e '  x' \
+	-e '  s/^\n//' \
+	-e '  /Output written on /{' \
+	-e '    s/.*Output written on \([^(]*\) (\([^)]\{1,\}\)).*/Success!  Wrote \2 to \1/' \
+	-e '    s/[[:digit:]]\{1,\}/$(C_PAGES)&$(C_RESET)/g' \
+	-e '    s/Success!/$(C_SUCCESS)&$(C_RESET)/g' \
+	-e '    s/to \(.*\)$$/to $(C_SUCCESS)\1$(C_RESET)/' \
+	-e '    b end' \
+	-e '  }' \
+	-e '  / *LaTeX Error:.*/{' \
+	-e '    s/.*\( *LaTeX Error:.*\)/$(C_ERROR)\1$(C_RESET)/' \
+	-e '    b end' \
+	-e '  }' \
+	-e '  /.*Warning:.*/{' \
+	-e '    s//$(C_WARNING)&$(C_RESET)/' \
+	-e '    b end' \
+	-e '  }' \
+	-e '  /Underfull.*/{' \
+	-e '    s/.*\(Underfull.*\)/$(C_UNDERFULL)\1$(C_RESET)/' \
+	-e '    b end' \
+	-e '  }' \
+	-e '  /Overfull.*/{' \
+	-e '    s/.*\(Overfull.*\)/$(C_OVERFULL)\1$(C_RESET)/' \
+	-e '    b end' \
+	-e '  }' \
+	$(if $(VERBOSE),,-e '  d') \
+	-e '  :end' \
+	-e '  G' \
+	-e '}' \
+
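+# Outside this makefile, the same hold-space idiom looks roughly like the
+# following GNU sed sketch (hypothetical log file; note that, as described in
+# step 2 above, the last paragraph is lost unless the file ends in a blank
+# line):
+#
+#   sed -e '/^$/!{H;d;}' \
+#       -e '/^$/{x;s/^\n//;s/LaTeX Error/<<&>>/;G;}' chapter.log
+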
+# Colorize BibTeX output.
+color_bib	:= \
+	$(SED) \
+	-e 's/^Warning--.*/$(C_WARNING)&$(C_RESET)/' -e 't' \
+	-e '/---/,/^.[^:]/{' \
+	-e '  H' \
+	-e '  /^.[^:]/{' \
+	-e '    x' \
+	-e '    s/\n\(.*\)/$(C_ERROR)\1$(C_RESET)/' \
+	-e '	p' \
+	-e '    s/.*//' \
+	-e '    h' \
+	-e '    d' \
+	-e '  }' \
+	-e '  d' \
+	-e '}' \
+	-e '/(.*error.*)/s//$(C_ERROR)&$(C_RESET)/' \
+	$(if $(VERBOSE),,-e 'd')
+
+
+# Make beamer output big enough to print on a full page.  Landscape doesn't
+# seem to work correctly.
+enlarge_beamer	= $(PSNUP) -l -1 -W128mm -H96mm -pletter
+
+# $(call test-run-again,<source stem>)
+test-run-again	= $(EGREP) -q '^(.*Rerun .*|No file $1\.[^.]+\.)$$' $1.log
+
+# This tests whether the build target commands should be run at all, from
+# viewing the log file.
+# $(call test-log-for-need-to-run,<source stem>)
+define test-log-for-need-to-run
+$(SED) \
+-e '/^No file $(call escape-dots,$1)\.aux\./d' \
+$1.log \
+| $(EGREP) -q '^(.*Rerun .*|No file $1\.[^.]+\.|No file .+\.tex\.|LaTeX Warning: File.*)$$'
+endef
+
+# LaTeX invocations
+#
+# $(call run-latex,<tex file>,[<extra LaTeX args>])
+run-latex	= $(latex_build_program) --interaction=batchmode $(if $2,$2,) $1 > /dev/null
+
+# $(call latex-color-log,<LaTeX stem>)
+latex-color-log	= $(color_tex) $1.log
+
+# $(call run-makeindex,<input>,<output>,<log>,<extra flags>)
+define run-makeindex
+success=1; \
+if ! $(MAKEINDEX) -q $1 -t $3 -o $2 $4 > /dev/null || $(EGREP) -q '^!!' $3; then \
+	$(call colorize-makeindex-errors,$3); \
+	$(RM) -f '$2'; \
+	success=0; \
+fi; \
+[ "$$success" = "1" ] && $(sh_true) || $(sh_false);
+endef
+
+# This runs the given script to generate output, and it uses MAKE_RESTARTS to
+# ensure that it never runs it more than once for a particular root make
+# invocation.
+#
+# $(call run-script,<interpreter>,<input>,<output>)
+define run-script
+[ ! -e '$2.cookie' ] && $(ECHO) "restarts=$(RESTARTS)" > $2.cookie && $(ECHO) "level=$(MAKELEVEL)" >> $2.cookie; \
+restarts=`$(SED) -n -e 's/^restarts=//p' $2.cookie`; \
+level=`$(SED) -n -e 's/^level=//p' $2.cookie`; \
+if $(EXPR) $(MAKELEVEL) '<=' $$level '&' $(RESTARTS) '<=' $$restarts >/dev/null; then \
+	$(call echo-build,$2,$3,$(RESTARTS)-$(MAKELEVEL)); \
+	$1 '$2' '$3'; \
+	$(ECHO) "restarts=$(RESTARTS)" > '$2.cookie'; \
+	$(ECHO) "level=$(MAKELEVEL)" >> '$2.cookie'; \
+fi
+endef
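+#
+# A hedged usage sketch (hypothetical file names), matching the scripted LaTeX
+# rules further below:
+#
+#   $(call run-script,$(PYTHON),tables.tex.py,tables.tex)
+#
+# The tables.tex.py.cookie file then records the restart count and make level,
+# so later restarts of the same make invocation do not run the script again.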
+
+# BibTeX invocations
+#
+# $(call run-bibtex,<tex stem>)
+run-bibtex	= $(BIBTEX) $1 | $(color_bib)
+
+
+# $(call convert-eps-to-pdf,<eps file>,<pdf file>,[gray])
+# Note that we don't use the --filter flag because it has trouble with bounding boxes that way.
+define convert-eps-to-pdf
+$(if $3,$(CAT) '$1' | $(call kill-ps-color) > '$1.cookie',$(CP) '$1' '$1.cookie'); \
+$(EPSTOPDF) '$1.cookie' --outfile='$2' > $1.log; \
+$(call colorize-epstopdf-errors,$1.log);
+endef
+
+# $(call convert-gpi,<gpi file>,<output file>,[gray])
+#
+define convert-gpi
+$(ECHO) 'set terminal $(if $(filter %.pdf,$2),pdf enhanced,postscript enhanced eps)' \
+$(if $(filter %.pdf,$2),fsize ,)$(call get-default,$(strip \
+$(firstword \
+	$(shell \
+		$(SED) \
+			-e 's/^\#\#FONTSIZE=\([[:digit:]]\{1,\}\)/\1/p' \
+			-e 'd' \
+			$1 $(strip $(gpi_global)) \
+	) \
+) \
+),$(if $(filter %.pdf,$2),$(DEFAULT_GPI_PDF_FONTSIZE),$(DEFAULT_GPI_EPS_FONTSIZE))) \
+$(strip $(if $3,monochrome,$(if \
+$(shell $(EGREP) '^\#\#[[:space:]]*GRAY[[:space:]]*$$' $< $(gpi_global)),\
+,color))) > $1head.make; \
+$(ECHO) 'set output "$2"' >> $1head.make; \
+$(if $(gpi_global),$(CAT) $(gpi_global) >> $1head.make;,) \
+fnames='$1head.make $1';\
+$(if $(gpi_sed),\
+	$(SED) -f '$(gpi_sed)' $$fnames > $1.temp.make; \
+	fnames=$1.temp.make;,\
+) \
+success=1; \
+if ! $(GNUPLOT) $$fnames 2>$1.log; then \
+	$(call colorize-gnuplot-errors,$1.log); \
+	success=0; \
+fi; \
+$(if $(gpi_sed),$(call remove-temporary-files,$1.temp.make);,) \
+$(call remove-temporary-files,$1head.make); \
+[ "$$success" = "1" ] && $(sh_true) || $(sh_false);
+endef
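+#
+# For example (a hedged sketch; plot.gpi is a hypothetical file): if plot.gpi
+# or the global gpi file carries the special comment '## FONTSIZE=12' and the
+# requested output is plot.pdf, the generated header (plot.gpihead.make) is
+# roughly:
+#
+#   set terminal pdf enhanced fsize 12 color
+#   set output "plot.pdf"
+#
+# with 'monochrome' instead of 'color' when grayscale output is requested.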
+
+# Creation of .eps files from .png files
+#
+# The intermediate step of PNM (using NetPBM) produces much nicer output than
+# ImageMagick's "convert" binary.  I couldn't get the right combination of
+# flags to make it look nice, anyway.
+#
+# To handle gray scale conversion, we pipe things through ppmtopgm in the
+# middle.
+#
+# $(call convert-png,<png file>,<eps file>,[gray])
+define convert-png
+$(PNGTOPNM) "$1" \
+	$(if $3,| $(PPMTOPGM),) \
+	| $(PNMTOPS) -noturn \
+	> "$2"
+endef
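+#
+# The grayscale case is roughly equivalent to this hedged pipeline sketch
+# (hypothetical file names, assuming the standard NetPBM tool names):
+#
+#   pngtopnm logo.png | ppmtopgm | pnmtops -noturn > logo.eps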
+
+# Creation of .eps files from .jpg files
+#
+# Thanks to brubakee for this solution.
+#
+# Uses Postscript level 2 to avoid file size bloat.
+# $(call convert-jpg,<jpg file>,<eps file>,[gray])
+define convert-jpg
+$(CONVERT) $(if $3,-type Grayscale,) '$1' eps2:'$2'
+endef
+
+# Creation of .eps files from .fig files
+# $(call convert-fig,<fig file>,<output file>,[gray])
+convert-fig	= $(FIG2DEV) -L $(if $(filter %.pdf,$2),pdf,eps) $(if $3,-N,) $1 $2
+
+# Creation of .pstex files from .fig files
+# $(call convert-fig-pstex,<fig file>,<pstex file>)
+convert-fig-pstex	= $(FIG2DEV) -L pstex $1 $2 > /dev/null 2>&1
+
+# Creation of .pstex_t files from .fig files
+# $(call convert-fig-pstex-t,<fig file>,<pstex file>,<pstex_t file>)
+convert-fig-pstex-t	= $(FIG2DEV) -L pstex_t -p $3 $1 $2 > /dev/null 2>&1
+
+# Creation of .dot_t files from .dot files
+# $(call convert-dot-tex,<dot file>,<dot_t file>)
+convert-dot-tex		= $(DOT2TEX) '$1' > '$2'
+
+# Converts svg files into .eps files
+#
+# $(call convert-svg,<svg file>,<eps file>,[gray])
+convert-svg	= $(INKSCAPE) --export-eps='$2' '$1'
+
+# Converts xvg files into .eps files
+#
+# $(call convert-xvg,<xvg file>,<eps file>,[gray])
+convert-xvg	= $(XMGRACE) '$1' -printfile - -hardcopy -hdevice $(if $3,-mono,) EPS > '$2'
+
+# Converts .eps.gz files into .eps files
+#
+# $(call convert-epsgz,<eps.gz file>,<eps file>,[gray])
+convert-epsgz	= $(GUNZIP) -c '$1' $(if $3,| $(call kill-ps-color)) > '$2'
+
+# Converts .eps files into .eps files (usually a no-op, but can make grayscale)
+#
+# $(call convert-eps,<in file>,<out file>,[gray])
+convert-eps	= $(if $3,$(call kill-ps-color) $1 > $2)
+
+# The name of the file containing special postscript commands for grayscale
+gray_eps_file	:= gray.eps.make
+
+# Changes sethsbcolor and setrgbcolor calls in postscript to always produce
+# grayscale.  In general, this is accomplished by writing new versions of those
+# functions into the user dictionary space, which is looked up before the
+# global or system dictionaries (userdict is one of the permanent dictionaries
+# in postscript and is not read-only like systemdict).
+#
+# For setrgbcolor, the weighted average of the triple is computed and the
+# triple is replaced with three copies of that average before the original
+# procedure is called: .299R + .587G + .114B
+#
+# For sethsbcolor, the color is first converted to RGB, then to grayscale by
+# the new setrgbcolor operator as described above.  Why is this done?
+# Because simply using the value component will tend to make pure colors
+# white, a very undesirable thing.  Pure blue should not translate to white,
+# but to some level of gray.  Conversion to RGB does the right thing.  It's
+# messy, but it works.
+#
+# From
+# http://en.wikipedia.org/wiki/HSV_color_space#Transformation_from_HSV_to_RGB,
+# HSB = HSV (Value = Brightness), and the formula used to convert to RGB is
+# as follows:
+#
+# Hi = int(floor(6 * H)) mod 6
+# f = 6 * H - Hi
+# p = V(1-S)
+# q = V(1-fS)
+# t = V(1-(1-f)S)
+# if Hi = 0: R G B <-- V t p
+# if Hi = 1: R G B <-- q V p
+# if Hi = 2: R G B <-- p V t
+# if Hi = 3: R G B <-- p q V
+# if Hi = 4: R G B <-- t p V
+# if Hi = 5: R G B <-- V p q
+#
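+# A worked example (for illustration only): pure blue is H=2/3, S=1, V=1, so
+# Hi = floor(6*2/3) mod 6 = 4, f = 0, p = 0, q = 1, t = 0, and the Hi = 4 row
+# gives R G B <-- t p V = 0 0 1.  The new setrgbcolor then produces
+# .299*0 + .587*0 + .114*1 = 0.114, a dark gray, whereas taking the value
+# component alone would have produced 1, i.e. white.
+#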
+# The messy stack-based implementation is below
+# $(call create-gray-eps-file,filename)
+define create-gray-eps-file
+$(ECHO) -n -e '\
+/OLDRGB /setrgbcolor load def\n\
+/setrgbcolor {\n\
+    .114 mul exch\n\
+    .587 mul add exch\n\
+    .299 mul add\n\
+    dup dup\n\
+    OLDRGB\n\
+} bind def\n\
+/OLDHSB /sethsbcolor load def\n\
+/sethsbcolor {\n\
+    2 index                     % H V S H\n\
+    6 mul floor cvi 6 mod       % Hi V S H\n\
+    3 index                     % H Hi V S H\n\
+    6 mul                       % 6H Hi V S H\n\
+    1 index                     % Hi 6H Hi V S H\n\
+    sub                         % f Hi V S H\n\
+    2 index 1                   % 1 V f Hi V S H\n\
+    4 index                     % S 1 V f Hi V S H\n\
+    sub mul                     % p f Hi V S H\n\
+    3 index 1                   % 1 V p f Hi V S H\n\
+    6 index                     % S 1 V p f Hi V S H\n\
+    4 index                     % f S 1 V p f Hi V S H\n\
+    mul sub mul                 % q p f Hi V S H\n\
+    4 index 1 1                 % 1 1 V q p f Hi V S H\n\
+    5 index                     % f 1 1 V q p f Hi V S H\n\
+    sub                         % (1-f) 1 V q p f Hi V S H\n\
+    8 index                     % S (1-f) 1 V q p f Hi V S H\n\
+    mul sub mul                 % t q p f Hi V S H\n\
+    4 -1 roll pop               % t q p Hi V S H\n\
+    7 -2 roll pop pop           % t q p Hi V\n\
+    5 -2 roll                   % Hi V t q p\n\
+    dup 0 eq\n\
+    {1 index 3 index 6 index}\n\
+    {\n\
+        dup 1 eq\n\
+        {3 index 2 index 6 index}\n\
+        {\n\
+            dup 2 eq\n\
+            {4 index 2 index 4 index}\n\
+            {\n\
+                dup 3 eq\n\
+                {4 index 4 index 3 index}\n\
+                {\n\
+                    dup 4 eq\n\
+                    {2 index 5 index 3 index}\n\
+                    {\n\
+                        dup 5 eq\n\
+                        {1 index 5 index 5 index}\n\
+                        {0 0 0}\n\
+                        ifelse\n\
+                    }\n\
+                    ifelse\n\
+                }\n\
+                ifelse\n\
+            }\n\
+            ifelse\n\
+        }\n\
+        ifelse\n\
+    }\n\
+    ifelse                      % B G R Hi V t q p\n\
+    setrgbcolor\n\
+    5 {pop} repeat\n\
+} bind def\n'\
+> $1
+endef
+
+# This actually inserts the color-killing code into a postscript file
+# $(call kill-ps-color)
+define kill-ps-color
+$(SED) -e '/%%EndComments/r $(gray_eps_file)'
+endef
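+#
+# A hedged usage sketch (hypothetical file names; assumes $(gray_eps_file) has
+# already been generated), as in convert-eps-to-pdf above:
+#
+#   $(CAT) figure.eps | $(call kill-ps-color) > figure.gray.eps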
+
+# Converts graphviz .dot files into .eps files
+# Grayscale is not directly supported by dot, so in that case we pipe the
+# generated postscript through the color-killing sed script (kill-ps-color).
+# $(call convert-dot,<dot file>,<eps file>,<log file>,[gray])
+define convert-dot
+$(DOT) -Tps '$1' 2>'$3' $(if $4,| $(call kill-ps-color)) > $2; \
+$(call colorize-dot-errors,$3)
+endef
+
+# Convert DVI to Postscript
+# $(call make-ps,<dvi file>,<ps file>,<log file>,[<paper size>])
+make-ps		= \
+	$(DVIPS) -o '$2' $(if $(filter-out BEAMER,$4),-t$(firstword $4),) '$1' \
+		$(if $(filter BEAMER,$4),| $(enlarge_beamer)) > $3 2>&1
+
+# Convert Postscript to PDF
+# $(call make-pdf,<ps file>,<pdf file>,<log file>,<embed file>)
+make-pdf	= \
+	$(call ps2pdf,$1,$2,$(filter 1,$(shell $(CAT) '$4'))) > '$3' 2>&1
+
+# Display information about what is being done
+# $(call echo-build,<input file>,<output file>,[<run number>])
+echo-build	= $(ECHO) "$(C_BUILD)= $1 --> $2$(if $3, ($3),) =$(C_RESET)"
+echo-graphic	= $(ECHO) "$(C_GRAPHIC)= $1 --> $2 =$(C_RESET)"
+echo-dep	= $(ECHO) "$(C_DEP)= $1 --> $2 =$(C_RESET)"
+
+# Display a list of something
+# $(call echo-list,<values>)
+echo-list	= for x in $1; do $(ECHO) "$$x"; done
+
+#
+# DEFAULT TARGET
+#
+
+.PHONY: all
+all: $(default_pdf_targets) ;
+
+.PHONY: all-pdf
+all-pdf: $(default_pdf_targets) ;
+
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+.PHONY: all-ps
+all-ps: $(default_ps_targets) ;
+
+.PHONY: all-dvi
+all-dvi: $(default_dvi_targets) ;
+endif
+
+#
+# VIEWING TARGET
+#
+.PHONY: show
+show: all
+	$(QUIET)for x in $(default_pdf_targets); do \
+		[ -e "$$x" ] && $(VIEW_PDF) $$x & \
+	done
+
+#
+# INCLUDES
+#
+source_includes	:= $(addsuffix .d,$(source_stems_to_include))
+graphic_includes := $(addsuffix .gpi.d,$(graphic_stems_to_include))
+
+# Include only the dependencies used
+ifneq "" "$(source_includes)"
+include $(source_includes)$(call include-message,$(source_includes))
+endif
+ifneq "" "$(graphic_includes)"
+include $(graphic_includes)$(call include-message,$(graphic_includes))
+endif
+
+#
+# MAIN TARGETS
+#
+
+# Note that we don't just say %: %.pdf here - this can tend to mess up our
+# includes, which detect what kind of file we are asking for.  For example,
+# asking to build foo.pdf is much different than asking to build foo when
+# foo.gpi exists, because we look through all of the goals for *.pdf that
+# matches *.gpi, then use that to determine which include files we need to
+# build.
+#
+# Thus, we invoke make recursively with better arguments instead, restarting
+# all of the appropriate machinery.
+.PHONY: $(default_stems_ss)
+$(default_stems_ss): %: %.pdf ;
+
+# This builds and displays the wanted file.
+.PHONY: $(addsuffix ._show,$(stems_ssg))
+%._show: %.pdf
+	$(QUIET)$(VIEW_PDF) $< &
+
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+.SECONDARY: $(all_pdf_targets)
+%.pdf: %.ps %.embed.make
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call make-pdf,$<,$@.temp,$@.log,$*.embed.make); \
+	if [ x"$$?" = x"0" ]; then \
+	    $(if $(VERBOSE),$(CAT) $@.log,:); \
+	    $(RM) -f '$@'; \
+	    $(MV) '$@.temp' '$@'; \
+	    $(TOUCH) '$@'; \
+	    $(call copy-with-logging,$@,$(BINARY_TARGET_DIR)); \
+	else \
+	    $(CAT) $@.log; \
+	    $(call remove-temporary-files,'$@.temp'); \
+	    $(sh_false); \
+	fi
+
+.SECONDARY: $(all_ps_targets)
+%.ps: %.dvi %.paper.make
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call make-ps,$<,$@.temp,$@.log,\
+			$(firstword $(shell $(CAT) $*.paper.make))); \
+	if [ x"$$?" = x"0" ]; then \
+	    $(if $(VERBOSE),$(CAT) $@.log,:); \
+	    $(RM) -f '$@'; \
+	    $(MV) '$@.temp' '$@'; \
+	    $(TOUCH) '$@'; \
+	    $(call copy-with-logging,$@,$(BINARY_TARGET_DIR)); \
+	else \
+	    $(CAT) $@.log; \
+	    $(call remove-temporary-files,'$@.temp'); \
+	    $(sh_false); \
+	fi
+endif
+
+# Build the final target (dvi or pdf) file.  This is a very tricky rule because
+# of the way that latex runs multiple times, needs graphics after the first run
+# (or maybe already has them), and relies on bibliographies or indices that may
+# not exist.
+#
+#	Check the log for fatal errors.  If they exist, colorize and bail.
+#
+#	Create the .auxtarget.cookie file.  (Needed for next time if not present)
+#
+#	If any of the following are true, we must rebuild at least one time:
+#
+#	* the .bbl was recently rebuilt
+#
+#		check a cookie, then delete it
+#
+#	* any of several output files was created or changed:
+#
+#		check $*.run.cookie, then delete it
+#
+#	* the .aux file changed in a way that necessitates attention
+#
+#		Note that if the .auxtarget.make file doesn't exist, this means
+#		that we are doing a clean build, so it doesn't figure into the
+#		test for running again.
+#
+#		compare against .auxtarget.make
+#
+#		move if different, remove if not
+#
+#	* the .log file has errors or warnings requiring at least one more run
+#
+#	We use a loop over a single item to simplify the process of breaking
+#	out when we find one of the conditions to be true.
+#
+#	If we do NOT need to run latex here, then we move the $@.1st.make file
+#	over to $@ because the target file has already been built by the first
+#	dependency run and is valid.
+#
+#	If we do, we delete that cookie file and do the normal multiple-runs
+#	routine.
+#
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+.SECONDARY: $(all_dvi_targets)
+endif
+%.$(build_target_extension): %.bbl %.aux %.$(build_target_extension).1st.make
+	$(QUIET)\
+	fatal=`$(call colorize-latex-errors,$*.log)`; \
+	if [ x"$$fatal" != x"" ]; then \
+		$(ECHO) "$$fatal"; \
+		exit 1; \
+	fi; \
+	$(call make-auxtarget-file,$*.aux.make,$*.auxtarget.cookie); \
+	run=0; \
+	for i in 1; do \
+		if $(call test-exists,$*.bbl.cookie); then \
+			run=1; \
+			break; \
+		fi; \
+		if $(call test-exists,$*.run.cookie); then \
+			run=1; \
+		    	break; \
+		fi; \
+		if $(call \
+		test-exists-and-different,$*.auxtarget.cookie,$*.auxtarget.make);\
+		then \
+			run=1; \
+			break; \
+		fi; \
+		if $(call test-log-for-need-to-run,$*); then \
+			run=1; \
+			break; \
+		fi; \
+	done; \
+	$(call remove-temporary-files,$*.bbl.cookie $*.run.cookie); \
+	$(MV) $*.auxtarget.cookie $*.auxtarget.make; \
+	if [ x"$$run" = x"1" ]; then \
+		$(call remove-files,$@.1st.make); \
+		for i in 2 3 4 5; do \
+			$(if $(findstring 3.79,$(MAKE_VERSION)),\
+				$(call echo-build,$*.tex,$@,$(RESTARTS)-$$$$i),\
+				$(call echo-build,$*.tex,$@,$(RESTARTS)-$$i)\
+			); \
+			$(call run-latex,$*); \
+			$(CP) '$*.log' '$*.'$(RESTARTS)-$$i'.log'; \
+			$(call test-run-again,$*) || break; \
+		done; \
+	else \
+		$(MV) '$@.1st.make' '$@'; \
+	fi; \
+	$(call copy-with-logging,$@,$(BINARY_TARGET_DIR)); \
+	$(call latex-color-log,$*)
+
+# Build the .bbl file.  When dependencies are included, this will (or will
+# not!) depend on something.bib, which we detect, acting accordingly.  The
+# dependency creation also produces the %.auxbbl.make file.  BibTeX is a bit
+# finicky about what you call the actual files, but we can rest assured that if
+# a .auxbbl.make file exists, then the .aux file does, as well.  The
+# .auxbbl.make file is a cookie indicating whether the .bbl needs to be
+# rewritten.  It only changes if the .aux file changes in ways relevant to .bbl
+# creation.
+#
+# Note that we do NOT touch the .bbl file if there is no need to
+# create/recreate it.  We would like to leave existing files alone if they
+# don't need to be changed, thus possibly avoiding a rebuild trigger.
+%.bbl: %.auxbbl.make
+	$(QUIET)\
+	$(if $(filter %.bib,$^),\
+		$(call echo-build,$(filter %.bib,$?) $*.aux,$@); \
+		$(call run-bibtex,$*); \
+		$(TOUCH) $@.cookie; \
+	) \
+	if $(EGREP) -q 'bibstyle.(apacann|chcagoa|[^}]*annot)' '$*.aux'; then \
+		$(call echo-build,** annotated extra latex **,output ignored,$(RESTARTS)-1); \
+		$(call run-latex,$*); \
+		$(CP) '$*.log' '$*.$(RESTARTS)-annotated.log'; \
+		$(if $(filter %.bib,$^),\
+			$(call echo-build,** annotated extra bibtex ** $(filter %.bib,$?) $*.aux,$@); \
+			$(call run-bibtex,$*); \
+			$(TOUCH) $@.cookie; \
+		) \
+		$(call echo-build,** annotated extra latex **,output ignored,$(RESTARTS)-2); \
+		$(call run-latex,$*); \
+	fi
+
+# Create the index file - note that we do *not* depend on %.tex here, since
+# that unnecessarily restricts the kinds of indices that we can build to those
+# with exactly the same stem as the source file.  Things like splitidx create
+# idx files with other names.
+#
+# Therefore, we add the .tex dependency in the sourcestem.d file in the call to
+# get index file dependencies from the logs.
+%.ind:	%.idx
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call run-makeindex,$<,$@,$*.ilg)
+
+# Create the glossary file
+%.gls:	%.glo %.tex
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call run-makeindex,$<,$@,$*.glg,-s nomencl.ist)
+
+# Create the nomenclature file
+%.nls:	%.nlo %.tex
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call run-makeindex,$<,$@,$*.nlg,-s nomencl.ist)
+
+# SCRIPTED LaTeX TARGETS
+#
+# Keep the generated .tex files around for debugging if needed.
+.SECONDARY: $(all_tex_targets)
+
+%.tex::	%.tex.sh
+	$(QUIET)$(call run-script,$(SHELL),$<,$@)
+
+%.tex::	%.tex.py
+	$(QUIET)$(call run-script,$(PYTHON),$<,$@)
+
+%.tex::	%.tex.pl
+	$(QUIET)$(call run-script,$(PERL),$<,$@)
+
+%.tex::	%.rst $(RST_STYLE_FILE)
+	$(QUIET)\
+	$(call run-script,$(RST2LATEX)\
+		--documentoptions=letterpaper\
+		$(if $(RST_STYLE_FILE),--stylesheet=$(RST_STYLE_FILE),),$<,$@)
+
+#
+# GRAPHICS TARGETS
+#
+.PHONY: all-graphics
+all-graphics:	$(all_graphics_targets);
+
+ifneq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+.PHONY: all-pstex
+all-pstex:	$(all_pstex_targets);
+endif
+
+.PHONY: all-dot2tex
+all-dot2tex:	$(all_dot2tex_targets);
+
+.PHONY: show-graphics
+show-graphics: all-graphics
+	$(VIEW_GRAPHICS) $(all_known_graphics)
+
+$(gray_eps_file):
+	$(QUIET)$(call echo-build,$^,$@)
+	$(QUIET)$(call create-gray-eps-file,$@)
+
+ifeq "$(strip $(BUILD_STRATEGY))" "pdflatex"
+%.pdf: %.eps $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-eps-to-pdf,$<,$@,$(GRAY))
+
+ifeq "$(strip $(GNUPLOT_OUTPUT_EXTENSION))" "pdf"
+%.pdf:	%.gpi %.gpi.d $(gpi_sed)
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-gpi,$<,$@,$(GRAY))
+endif
+
+%.pdf:	%.fig
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-fig,$<,$@,$(GRAY))
+
+endif
+
+%.eps:	%.gpi %.gpi.d $(gpi_sed)
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-gpi,$<,$@,$(GRAY))
+
+%.eps: %.fig
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-fig,$<,$@,$(GRAY))
+
+%.eps: %.dot $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-dot,$<,$@,$<.log,$(GRAY))
+
+%.eps: %.xvg $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-xvg,$<,$@,$(GRAY))
+
+%.eps: %.svg $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-svg,$<,$@,$(GRAY))
+
+%.eps: %.jpg $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-jpg,$<,$@,$(GRAY))
+
+%.eps: %.png $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-png,$<,$@,$(GRAY))
+
+%.eps: %.eps.gz $(if $(GRAY),$(gray_eps_file))
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-epsgz,$<,$@,$(GRAY))
+
+%.pstex: %.fig
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-fig-pstex,$<,$@,$(GRAY))
+
+%.pstex_t: %.fig %.pstex
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-fig-pstex-t,$<,$@,$*.pstex,$(GRAY))
+
+%.dot_t: %.dot
+	$(QUIET)$(call echo-graphic,$^,$@)
+	$(QUIET)$(call convert-dot-tex,$<,$@)
+
+#
+# DEPENDENCY-RELATED TARGETS.
+#
+
+# Generate all of the information needed to get dependencies
+# As a side effect, this creates a .dvi or .pdf file (depending on the build
+# strategy).  We need to be sure to remove it if there are errors.  Errors can
+# take several forms and all of them are found within the log file:
+#	* There was a LaTeX error
+#	* A needed file was not found
+#	* Cross references need adjustment
+#
+# Behavior:
+#	This rule is responsible for generating the following:
+#	%.aux
+#	%.d
+#	%.aux.make
+#	%.(pdf|dvi).1st.make (the .pdf or .dvi output file, moved)
+#
+#	Steps:
+#
+#	Run latex
+#	Move .pdf or .dvi somewhere else (make no judgements about success)
+#	Flatten the .aux file into another file
+#	Add source dependencies
+#	Add graphic dependencies
+#	Add bib dependencies
+#
+#	Create cookies for various suffixes that may represent files that
+#	need to be read by LaTeX in order for it to function properly.
+#
+#	Note that if some of the dependencies are discovered because they turn
+#	up missing in the log file, we really need the .d file to be reloaded.
+#	Adding a sleep command helps with this.  Otherwise make is extremely
+#	nondeterministic, sometimes working, sometimes not.
+#
+#	Usually we can force this by simply removing the generated pdf file and
+#	not creating a .1st.make file.
+#
+%.$(build_target_extension).1st.make %.d %.aux %.aux.make %.fls: %.tex
+	$(QUIET)$(call echo-build,$<,$*.d $*.$(build_target_extension).1st.make,$(RESTARTS)-1)
+	$(QUIET)\
+	$(call run-latex,$<,--recorder) || $(sh_true); \
+	$(CP) '$*.log' '$*.$(RESTARTS)-1.log'; \
+	$(call die-on-dot2tex,$*.log); \
+	$(call die-on-no-aux,$*); \
+	$(call flatten-aux,$*.aux,$*.aux.make); \
+	$(ECHO) "# vim: ft=make" > $*.d; \
+	$(ECHO) ".PHONY: $*._graphics" >> $*.d; \
+	$(call get-inputs,$*.fls,$(addprefix $*.,aux aux.make d $(build_target_extension))) >> $*.d; \
+	$(call get-missing-inputs,$*.log,$(addprefix $*.,aux aux.make d $(build_target_extension))) >> $*.d; \
+	$(ECHO) ".SECONDEXPANSION:" >> $*.d; \
+	$(call get-graphics,$*) >> $*.d; \
+	$(call get-log-index,$*,$(addprefix $*.,d aux aux.make)) >> $*.d; \
+	$(call get-bibs,$*.aux.make,$(addprefix $*.,bbl aux aux.make)) >> $*.d; \
+	$(EGREP) -q "# MISSING" $*.d && $(SLEEP) 1 && $(RM) $*.pdf; \
+	$(call move-if-exists,$*.$(build_target_extension),$*.$(build_target_extension).1st.make); \
+	for s in toc out lot lof lol nav; do \
+		if [ -e "$*.$$s" ]; then \
+			if ! $(DIFF) -q $*.$$s $*.$$s.make >/dev/null 2>&1; then \
+				$(TOUCH) $*.run.cookie; \
+			fi; \
+			$(CP) $*.$$s $*.$$s.make; \
+		fi; \
+	done
+
+# This is a cookie that is updated if the flattened aux file has changed in a
+# way that affects the bibliography generation.
+.SECONDARY: $(addsuffix .auxbbl.make,$(stems_ssg))
+%.auxbbl.make: %.aux.make
+	$(QUIET)\
+	$(call make-auxbbl-file,$<,$@.temp); \
+	$(call replace-if-different-and-remove,$@.temp,$@)
+
+# Build a dependency file for .gpi files.  These often plot data files that
+# also reside in the directory, so if a data file changes, it's nice to know
+# about it.  This also handles loaded .gpi files, whose filename should have
+# _include_. in it.
+%.gpi.d: %.gpi
+	$(QUIET)$(call echo-build,$<,$@)
+	$(QUIET)$(call make-gpi-d,$<,$@)
+
+# Store the paper size for this document -- note that if beamer is used we set
+# it to the special BEAMER paper size.  We only do this, however, if the
+# special comment exists, in which case we enlarge the output with psnup.
+#
+#	The paper size is extracted from a documentclass attribute.
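+#
+#	For example (a hedged sketch): a source declaring
+#
+#	    \documentclass[11pt,a4paper]{article}
+#
+#	yields "a4" in the .paper.make file, which make-ps above turns into a
+#	'-ta4' argument to dvips, while a beamer document carrying the special
+#	'%% BEAMER LARGE' comment gets "BEAMER" instead.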
+%.paper.make: %.tex
+	$(QUIET)$(SED) \
+	-e '/\\documentclass/,/}/{' \
+	-e '  s/%.*//' \
+	-e '  H' \
+	-e '  /}/{' \
+	-e '    s/.*//' \
+	-e '    x' \
+	-e '    /\\documentclass/!d' \
+	-e '    s/[\n[:space:]]*//g' \
+	-e '    s/\([,{[]\)\([[:alnum:]]\{1,\}\)paper\([],}]\)/\1%-\2-%\3/g' \
+	-e '    s/\([,{[]\)\(landscape\)\([],}]\)/\1%-\2-%\3/g' \
+	-e '    s/^[^%]*%-//' \
+	-e '    s/-%[^%]*$$//' \
+	-e '    s/-%[^%]%-/ /g' \
+	-e '    p' \
+	-e '  }' \
+	-e '  d' \
+	-e '}' \
+	-e 'd' \
+	$< > $@; \
+	$(EGREP) -q '^[^%]*\\documentclass[^{]*{beamer}' $< && \
+	(\
+		$(EGREP) -q '^%%[[:space:]]*BEAMER[[:space:]]*LARGE$$' $< && \
+		$(ECHO) "BEAMER" > $@ || \
+		: > $@ \
+	) || $(sh_true)
+
+# Store embedding instructions for this document using a special comment
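+#
+# For example (a hedged sketch): a line consisting only of '%% NO EMBED' in
+# report.tex produces an empty report.embed.make, and make-pdf above then
+# calls ps2pdf without the font-embedding options.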
+%.embed.make: %.tex
+	$(QUIET)$(EGREP) '^%%[[:space:]]*NO[[:space:]]*EMBED[[:space:]]*$$' $< \
+		&& $(ECHO) '' > $@ \
+		|| $(ECHO) '1' > $@;
+
+#
+# HELPFUL PHONY TARGETS
+#
+
+.PHONY: _all_programs
+_all_programs:
+	$(QUIET)$(ECHO) "== All External Programs Used =="
+	$(QUIET)$(call output-all-programs)
+
+.PHONY: _check_programs
+_check_programs:
+	$(QUIET)$(ECHO) "== Checking Makefile Dependencies =="; $(ECHO)
+	$(QUIET) \
+	allprogs=`\
+	 ($(call output-all-programs)) | \
+	 $(SED) \
+	 -e 's/^[[:space:]]*//' \
+	 -e '/^#/d' \
+	 -e 's/[[:space:]]*#.*//' \
+	 -e '/^=/s/[[:space:]]/_/g' \
+	 -e '/^[[:space:]]*$$/d' \
+	 -e 's/^[^=].*=[[:space:]]*\([^[:space:]]\{1,\}\).*$$/\\1/' \
+	 `; \
+	spaces='                             '; \
+	for p in $${allprogs}; do \
+	case $$p in \
+		=*) $(ECHO); $(ECHO) "$$p";; \
+		*) \
+			$(ECHO) -n "$$p:$$spaces" | $(SED) -e 's/^\(.\{0,20\}\).*$$/\1/'; \
+			loc=`$(WHICH) $$p`; \
+			if [ x"$$?" = x"0" ]; then \
+				$(ECHO) "$(C_SUCCESS)Found:$(C_RESET) $$loc"; \
+			else \
+				$(ECHO) "$(C_FAILURE)Not Found$(C_RESET)"; \
+			fi; \
+			;; \
+	esac; \
+	done
+
+.PHONY: _check_gpi_files
+_check_gpi_files:
+	$(QUIET)$(ECHO) "== Checking all .gpi files for common errors =="; \
+	$(ECHO); \
+	for f in $(files.gpi); do \
+	result=`$(EGREP) '^([^#]*set terminal |set output )' $$f`; \
+	$(ECHO) -n "$$f: "; \
+	if [ x"$$result" = x"" ]; then \
+		$(ECHO) "$(C_SUCCESS)Okay$(C_RESET)"; \
+	else \
+		$(ECHO) "$(C_FAILURE)Warning: Problematic commands:$(C_RESET)";\
+		$(ECHO) "$(C_ERROR)$$result$(C_RESET)"; \
+	fi; \
+	done; \
+	$(ECHO)
+
+.PHONY: _all_stems
+_all_stems:
+	$(QUIET)$(ECHO) "== All Stems =="
+	$(QUIET)$(call echo-list,$(sort $(default_stems_ss)))
+
+.PHONY: _includes
+_includes:
+	$(QUIET)$(ECHO) "== Include Stems =="
+	$(QUIET)$(ECHO) "=== Sources ==="
+	$(QUIET)$(call echo-list,$(sort $(source_includes)))
+	$(QUIET)$(ECHO) "=== Graphics ==="
+	$(QUIET)$(call echo-list,$(sort $(graphic_includes)))
+
+.PHONY: _all_sources
+_all_sources:
+	$(QUIET)$(ECHO) "== All Sources =="
+	$(QUIET)$(call echo-list,$(sort $(all_files.tex)))
+
+.PHONY: _dependency_graph
+_dependency_graph:
+	$(QUIET)$(ECHO) "/* LaTeX Dependency Graph */"
+	$(QUIET)$(call output-dependency-graph)
+
+.PHONY: _show_dependency_graph
+_show_dependency_graph:
+	$(QUIET)$(call output-dependency-graph,$(graph_stem).dot)
+	$(QUIET)$(DOT) -Tps -o $(graph_stem).eps $(graph_stem).dot
+	$(QUIET)$(VIEW_POSTSCRIPT) $(graph_stem).eps
+	$(QUIET)$(call remove-temporary-files,$(graph_stem).*)
+
+.PHONY: _sources
+_sources:
+	$(QUIET)$(ECHO) "== Sources =="
+	$(QUIET)$(call echo-list,$(sort $(files.tex)))
+
+.PHONY: _scripts
+_scripts:
+	$(QUIET)$(ECHO) "== Scripts =="
+	$(QUIET)$(call echo-list,$(sort $(files_scripts)))
+
+.PHONY: _graphic_outputs
+_graphic_outputs:
+	$(QUIET)$(ECHO) "== Graphic Outputs =="
+	$(QUIET)$(call echo-list,$(sort $(all_graphics_targets)))
+
+.PHONY: _env
+_env:
+ifdef .VARIABLES
+	$(QUIET)$(ECHO) "== MAKE VARIABLES =="
+	$(QUIET)$(call echo-list,$(foreach var,$(sort $(.VARIABLES)),'$(var)'))
+endif
+	$(QUIET)$(ECHO) "== ENVIRONMENT =="
+	$(QUIET)$(ENV)
+
+#
+# CLEAN TARGETS
+#
+# clean-generated is somewhat unique - it relies on the .fls file being
+# properly built so that it can determine which of the files was generated, and
+# which was not.  Expect it to silently fail if the .fls file is missing.
+#
+# This is used to, e.g., clean up index files that are generated by LaTeX.
+.PHONY: clean-generated
+clean-generated:
+	$(QUIET)$(call clean-files,$(foreach e,$(addsuffix .fls,$(all_stems_source)),\
+						$(shell $(call get-generated-names,$e))))
+
+.PHONY: clean-deps
+clean-deps:
+	$(QUIET)$(call clean-files,$(all_d_targets) *.make *.make.temp *.cookie)
+
+.PHONY: clean-tex
+clean-tex: clean-deps
+	$(QUIET)$(call clean-files,$(rm_tex))
+
+.PHONY: clean-graphics
+# TODO: This *always* deletes pstex files, even if they were not generated by
+# anything....  In other words, if you create a pstex and pstex_t pair by hand
+# an drop them in here without the generating fig file, they will be deleted
+# and you won't get them back.  It's a hack put in here because I'm not sure we
+# even want to keep pstex functionality, so my motivation is not terribly high
+# for doing it right.
+clean-graphics:
+	$(QUIET)$(call clean-files,$(all_graphics_targets) $(intermediate_graphics_targets) *.gpi.d *.pstex *.pstex_t *.dot_t)
+
+.PHONY: clean-backups
+clean-backups:
+	$(QUIET)$(call clean-files,$(backup_patterns) *.temp)
+
+.PHONY: clean-auxiliary
+clean-auxiliary:
+	$(QUIET)$(call clean-files,$(graph_stem).*)
+
+.PHONY: clean-nographics
+clean-nographics: clean-tex clean-deps clean-backups clean-auxiliary ;
+
+.PHONY: clean
+clean:	clean-generated clean-tex clean-graphics clean-deps clean-backups clean-auxiliary
+
+#
+# HELP TARGETS
+#
+
+.PHONY: help
+help:
+	$(help_text)
+
+.PHONY: version
+version:
+	$(QUIET)\
+	$(ECHO) "$(fileinfo) Version $(version)"; \
+	$(ECHO) "by $(author)"; \
+
+#
+# HELP TEXT
+#
+
+define help_text
+# $(fileinfo) Version $(version)
+#
+# by $(author)
+#
+# Generates a number of possible output files from a LaTeX document and its
+# various dependencies.  Handles .bib files, \include and \input, and .eps
+# graphics.  All dependencies are handled automatically by running LaTeX over
+# the source.
+#
+# USAGE:
+#
+#    make [GRAY=1] [VERBOSE=1] [SHELL_DEBUG=1] <target(s)>
+#
+# STANDARD OPTIONS:
+#    GRAY:
+#        Setting this variable forces all recompiled graphics to be grayscale.
+#        It is useful when creating a document for printing.  The default is
+#        to allow colors.  Note that it only changes graphics that need to be
+#        rebuilt!  It is usually a good idea to do a 'make clean' first.
+#
+#    VERBOSE:
+#        This turns off all @ prefixes for commands invoked by make.  Thus,
+#        you get to see all of the gory details of what is going on.
+#
+#    SHELL_DEBUG:
+#        This enables the -x option for sh, meaning that everything it does is
+#        echoed to stderr.  This is particularly useful for debugging
+#        what is going on in $$(shell ...) invocations.  One of my favorite
+#        debugging tricks is to do this:
+#
+#        make -d SHELL_DEBUG=1 VERBOSE=1 2>&1 | less
+#
+# STANDARD AUXILIARY FILES:
+#
+#      Makefile.ini
+#
+#          This file can contain variable declarations that override various
+#          aspects of the makefile.  For example, one might specify
+#
+#          neverclean := *.pdf *.ps
+#          onlysources.tex := main.tex
+#          LATEX_COLOR_WARNING := 'bold red uline'
+#
+#          And this would override the neverclean setting to ensure that pdf
+#          and ps files always remain behind, set the makefile to treat all
+#          .tex files that are not "main.tex" as includes (and therefore not
+#          default targets).  It also changes the LaTeX warning output to be
+#          red, bold, and underlined.
+#
+#          There are numerous variables in this file that can be overridden in
+#          this way.  Search for '?=' to find them all.
+#
+#          Also, you can put arbitrary targets into it if, for example, you
+#          want your source built from something else, e.g.:
+#
+#          generated.tex: generating_script.weird_lang depA depB
+#          	./generating_script.weird_lang > $$@
+#
+#          Note that if you are not careful, you can override the default
+#          target (what happens when you type "make" without arguments), so if
+#          you do use Makefile.ini, you probably want to start it with
+#          something like the following line:
+#
+#          default: all
+#
+#          Since the first target in any makefile is automatically the default,
+#          and the makefile already has a sensible "all" target, this will do
+#          what you want.
+#
+#          The Makefile.ini is imported before *anything else* is done, so go
+#          wild with your ideas for changes to this makefile in there.  It
+#          makes it easy to test them before submitting patches.
+#
+# STANDARD ENVIRONMENT VARIABLES:
+#
+#      LATEX_COLOR_WARNING		'$(LATEX_COLOR_WARNING)'
+#      LATEX_COLOR_ERROR		'$(LATEX_COLOR_ERROR)'
+#      LATEX_COLOR_UNDERFULL		'$(LATEX_COLOR_UNDERFULL)'
+#      LATEX_COLOR_OVERFULL		'$(LATEX_COLOR_OVERFULL)'
+#      LATEX_COLOR_PAGES		'$(LATEX_COLOR_PAGES)'
+#      LATEX_COLOR_BUILD		'$(LATEX_COLOR_BUILD)'
+#      LATEX_COLOR_GRAPHIC		'$(LATEX_COLOR_GRAPHIC)'
+#      LATEX_COLOR_DEP			'$(LATEX_COLOR_DEP)'
+#      LATEX_COLOR_SUCCESS		'$(LATEX_COLOR_SUCCESS)'
+#      LATEX_COLOR_FAILURE		'$(LATEX_COLOR_FAILURE)'
+#
+#   These may be redefined in your environment to be any of the following:
+#
+#      black
+#      red
+#      green
+#      yellow
+#      blue
+#      magenta
+#      cyan
+#      white
+#
+#   Bold or underline may be used, as well, either alone or in combination
+#   with colors:
+#
+#      bold
+#      uline
+#
+#   Order is not important.  You may want, for example, to specify:
+#
+#   export LATEX_COLOR_SUCCESS='bold blue uline'
+#
+#   in your .bashrc file.  I don't know why, but you may want to.
+#
+# STANDARD TARGETS:
+#
+#    all:
+#        Make all possible documents in this directory.  The documents are
+#        determined by scanning for .tex and .tex.sh (described in more detail
+#        later) and omitting any file that ends in ._include_.tex or
+#        ._nobuild_.tex.  The output is a set of .pdf files.
+#
+#        If you wish to omit files without naming them with the special
+#        underscore names, set the following near the top of the Makefile,
+#        or (this is recommended) within a Makefile.ini in the same directory:
+#
+#        	includes.tex := file1.tex file2.tex
+#
+#        This will cause the files listed to be considered as include files.
+#
+#        If you have only a few source files, you can set
+#
+#        	onlysources.tex := main.tex
+#
+#        This will cause only the source files listed to be considered in
+#        dependency detection.  All other .tex files will be considered as
+#        include files.  Note that these options work for *any* source type,
+#        so you could do something similar with includes.gpi, for example.
+#        Note that this works for *any valid source* target.  All of the
+#        onlysources.* variables are commented out in the shipping version of
+#        this file, so it does the right thing when they simply don't exist.
+#        The comments are purely documentation.  If you know, for example, that
+#        file.mycoolformat is supported by this Makefile, but don't see the
+#        "onlysources.mycoolformat" declared in the comments, that doesn't mean
+#        you can't use it.  Go ahead and set "onlysources.mycoolformat" and it
+#        should do the right thing.
+#
+#    show:
+#        Builds and displays all documents in this directory.  It uses the
+#        environment-overridable value of VIEW_PDF (currently $(VIEW_PDF)) to
+#        do its work.
+#
+#    all-graphics:
+#        Make all of the graphics in this directory.
+#
+#    all-pstex (only for BUILD_STRATEGY=latex):
+#        Build all fig files into pstex and pstex_t files.  Gray DOES NOT WORK.
+#
+#    all-gray-pstex (only for BUILD_STRATEGY=latex):
+#    	 Build all fig files into grayscale pstex and pstex_t files.
+#
+#    all-dot2tex:
+#    	 Build all dot files into tex files.
+#
+#    show-graphics:
+#        Builds and displays all graphics in this directory.  Uses the
+#        environment-overridable value of VIEW_GRAPHICS (currently
+#        $(VIEW_GRAPHICS)) to do its work.
+#
+#    clean:
+#        Remove ALL generated files, leaving only source intact.
+#        This will *always* skip files mentioned in the "neverclean" variable,
+#        either in this file or specified in Makefile.ini:
+#
+#        	neverclean := *.pdf *.ps
+#
+#       The neverclean variable works on all "clean" targets below, as well.
+#
+#    clean-graphics:
+#        Remove all generated graphics files.
+#
+#    clean-backups:
+#        Remove all backup files: $(backup_patterns)
+#        (XFig and other editors have a nasty habit of leaving them around)
+#        Also removes Makefile-generated .temp files
+#
+#    clean-tex:
+#        Remove all files generated from LaTeX invocations except dependency
+#        information.  Leaves graphics alone.
+#
+#    clean-deps:
+#        Removes all auto-generated dependency information.
+#
+#    clean-auxiliary:
+#        Removes extra files created by various targets (like the dependency
+#        graph output).
+#
+#    clean-nographics:
+#        Cleans everything *except* the graphics files.
+#
+#    help:
+#        This help text.
+#
+#    version:
+#        Version information about this LaTeX makefile.
+#
+# DEBUG TARGETS:
+#
+#    _all_programs:
+#        A list of the programs used by this makefile.
+#
+#    _check_programs:
+#        Checks your system for the needed software and reports what it finds.
+#
+#    _check_gpi_files:
+#        Checks the .gpi files in the current directory for common errors, such
+#        as specification of the terminal or output file inside of the gpi file
+#        itself.
+#
+#    _dependency_graph:
+#        Outputs a .dot file to stdout that represents a graph of LaTeX
+#        dependencies.  To see it, use the _show_dependency_graph target or
+#        direct the output to a file, run dot on it, and view the output, e.g.:
+#
+#        make _dependency_graph > graph.dot
+#        dot -T ps -o graph.eps graph.dot
+#        gv graph.eps
+#
+#    _show_dependency_graph:
+#        Makes viewing the graph simple: extracts, builds and displays the
+#        dependency graph given in the _dependency_graph target using the value
+#        of the environment-overridable VIEW_POSTSCRIPT variable (currently set
+#        to $(VIEW_POSTSCRIPT)).  The postscript viewer is used because it
+#        makes it easier to zoom in on the graph, a critical ability for
+#        something so dense and mysterious.
+#
+#    _all_sources:
+#        List all .tex files in this directory.
+#
+#    _sources:
+#        Print out a list of all compilable sources in this directory.  This is
+#        useful for determining what make thinks it will be using as the
+#        primary source for 'make all'.
+#
+#    _scripts:
+#        Print out a list of scripts that make knows can be used to generate
+#        .tex files (described later).
+#
+#    _all_stems:
+#        Print a list of stems.  These represent bare targets that can be
+#        executed.  Listing <stem> as a bare target will produce <stem>.pdf.
+#
+#    _includes:
+#        A list of .d files that would be included in this run if _includes
+#        weren't specified.  This target may be used alone or in conjunction
+#        with other targets.
+#
+#    _graphic_outputs:
+#        A list of all generated .eps files
+#
+#    _env:
+#        A list of environment variables and their values.  If supported by
+#        your version of make, also a list of variables known to make.
+#
+# FILE TARGETS:
+#
+#    %, %.pdf:
+#        Build a PDF file from the corresponding %.tex file.
+#
+#        If BUILD_STRATEGY=pdflatex, then this builds the pdf directly.
+#        Otherwise, it uses this old-school but effective approach:
+#
+#            latex -> dvips -> ps2pdf
+#
+#        The BUILD_STRATEGY can be overridden in Makefile.ini in the same
+#        directory.  The default is pdflatex.
+#
+#        Reasons for using latex -> dvips include the "psfrag" package, and the
+#        generation of postscript instead of PDF.  Arguments for using pdflatex
+#        include "new and shiny" and "better supported."  I can't argue with
+#        either of those, and supporting them both didn't turn out to be that
+#        difficult, so there you have it.  Choices.
+#
+#    %._show:
+#        A phony target that builds the pdf file and then displays it using the
+#        environment-overridable value of VIEW_PDF ($(VIEW_PDF)).
+#
+#    %._graphics:
+#        A phony target that generates all graphics on which %.pdf (or %.dvi)
+#        depends.
+#
+#    %.ps (only for BUILD_STRATEGY=latex):
+#        Build a Postscript file from the corresponding %.tex file.
+#        This is done using dvips.  Paper size is automatically
+#        extracted from the declaration
+#
+#        \documentclass[<something>paper]
+#
+#        or it is the system default.
+#
+#        If using beamer (an excellent presentation class), the paper
+#        size is ignored.  More on this later.
+#
+#    %.dvi (only for BUILD_STRATEGY=latex):
+#        Build the DVI file from the corresponding %.tex file.
+#
+#    %.ind:
+#        Build the index for this %.tex file.
+#
+#    %.gls:
+#        Build the nomenclature glossary for this %.tex file.
+#
+#    %.nls:
+#        Build the (newer) nomenclature file for this %.tex file.
+#
+#    %.eps:
+#        Build an eps file from one of the following file types:
+#
+#       .dot    : graphviz
+#       .gpi    : gnuplot
+#       .fig    : xfig
+#       .xvg    : xmgrace
+#       .svg    : scalable vector graphics (goes through inkscape)
+#       .png    : png (goes through NetPBM)
+#       .jpg    : jpeg (goes through ImageMagick)
+#       .eps.gz : gzipped eps
+#
+#       The behavior of this makefile with each type is described in
+#       its own section below.
+#
+#    %.pstex{,_t} (only for BUILD_STRATEGY=latex):
+#       Build a .pstex_t file from a .fig file.
+#
+# FEATURES:
+#
+#    Optional Binary Directory:
+#        If you create the _out_ directory in the same place as the makefile,
+#        it will automatically be used as a dumping ground for .pdf (or .dvi,
+#        .ps, and .pdf) output files.
+#
+#        Alternatively, you can set the BINARY_TARGET_DIR variable, either as a
+#        make argument or in Makefile.ini, to point to your directory of
+#        choice.  Note that no pathname wildcard expansion is done in the
+#        makefile, so make sure that the path is complete before going in
+#        there.  E.g., if you want to specify something in your home directory,
+#        use $$HOME/ instead of ~/ so that the shell expands it before it gets
+#        to the makefile.
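+#
+#        For example, a hedged sketch of such an invocation:
+#
+#            make BINARY_TARGET_DIR=$$HOME/latex-output slides.pdf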
+#
+#    External Program Dependencies:
+#        Every external program used by the makefile is represented by an
+#        ALLCAPS variable at the top of this file.  This should allow you to
+#        make judgments about whether your system supports the use of this
+#        makefile.  The list is available in the ALL_PROGRAMS variable and,
+#        provided that you are using GNU make 3.80 or later (or you haven't
+#        renamed this file to something weird like "mylatexmakefile" and like
+#        invoking it with make -f) can be viewed using
+#
+#        make _all_programs
+#
+#        Additionally, the availability of these programs can be checked
+#        automatically for you by running
+#
+#        make _check_programs
+#
+#        The programs are categorized according to how important they are and
+#        what function they perform to help you decide which ones you really
+#        need.
+#
+#    Colorized Output:
+#        The output of commands is colorized to highlight things that are often
+#        important to developers.  This includes {underfull,overfull}
+#        {h,v}boxes, general LaTeX Errors, each stage of document building, and
+#        the number of pages in the final document.  The colors are obtained
+#        using 'tput', so colorization should work pretty well on any terminal.
+#
+#        The colors can be customized very simply by setting any of the
+#        LATEX_COLOR_<CONTEXT> variables in your environment (see above).
+#
+#    Predecessors to TeX Files:
+#        Given a target <target>, if no <target>.tex file exists but a
+#        corresponding script or predecessor file exists, then appropriate
+#        action will be taken to generate the tex file.
+#
+#        Currently supported script or predecessor languages are:
+#
+#        sh:     %.tex.sh
+#        perl:   %.tex.pl
+#        python: %.tex.py
+#
+#           Calls the script using the appropriate interpreter, assuming that
+#           its output is a .tex file.
+#
+#           The script is called thus:
+#
+#              <interpreter> <script file name> <target tex file>
+#
+#           and therefore sees exactly one parameter: the name of the .tex
+#           file that it is to create.
+#
+#           Why does this feature exist?  I ran into this while working on
+#           my paper dissertation.  I wrote a huge bash script that used a
+#           lot of sed to bring together existing papers in LaTeX.  It
+#           would have been nice had I had something like this to make my
+#           life easier, since as it stands I have to run the script and
+#           then build the document with make.  This feature provides hooks
+#           for complicated stuff that you may want to do, but that I have
+#           not considered.  It should work fine with included dependencies,
+#           too.
+#
+#           Scripts are run every time make is invoked.  Some trickery is
+#           employed to make sure that multiple restarts of make don't cause
+#           them to be run again.
+#
+#        reST: %.rst
+#
+#           Runs the reST to LaTeX converter to generate a .tex file.
+#           If it finds a file named _rststyle_._include_.tex, it uses it as
+#           the "stylesheet" option to rst2latex.
+#
+#           Note that this does not track sub-dependencies in rst files.  It
+#           assumes that the top-level rst file will change if you want a
+#           rebuild.
+#
+#    Dependencies:
+#
+#        In general, dependencies are extracted directly from LaTeX output on
+#        your document.  This includes
+#
+#        *    Bibliography information
+#        *    \include or \input files (honoring \includeonly, too)
+#        *    Graphics files inserted by the graphicx package
+#
+#        Where possible, all of these are built correctly and automatically.
+#        In the case of graphics files, these are generated from the following
+#        file types:
+#
+#        GraphViz:      .dot
+#        GNUPlot:       .gpi
+#        XFig:          .fig
+#        XMgrace:       .xvg
+#        SVG:           .svg
+#        PNG:           .png
+#        JPEG:          .jpg
+#        GZipped EPS:   .eps.gz
+#
+#        If the file exists as a .eps already, it is merely used (and will not
+#        be deleted by 'clean'!).
+#
+#        LaTeX and BibTeX are invoked correctly and the "Rerun to get
+#        cross-references right" warning is heeded a reasonable number of
+#        times.  In my experience this is enough for even the most troublesome
+#        documents, but it can be easily changed (if LaTeX has to be run after
+#        BibTeX more than three times, it is likely that something is moving
+#        back and forth between pages, and no amount of LaTeXing will fix
+#        that).
+#
+#        \includeonly is honored by this system, so files that are not
+#        specified there will not trigger a rebuild when changed.
+#
+#    Beamer:
+#        A special TeX source comment is recognized by this makefile (only when
+#        BUILD_STRATEGY=latex, since this invokes psnup):
+#
+#        %%[[:space:]]*BEAMER[[:space:]]*LARGE
+#
+#        The presence of this comment forces the output of dvips through psnup
+#        to enlarge beamer slides to take up an entire letter-sized page.  This
+#        is particularly useful when printing transparencies or paper versions
+#        of the slides.  For some reason landscape orientation doesn't appear
+#        to work, though.
+#
+#        If you want to put multiple slides on a page, use this option and then
+#        print using mpage, a2ps, or psnup to consolidate slides.  My personal
+#        favorite is a2ps, but your mileage may vary.
+#
+#        When beamer is the document class, dvips does NOT receive a paper size
+#        command line attribute, since beamer does special things with sizes.
+#
+#    GNUPlot Graphics:
+#        When creating a .gpi file, DO NOT INCLUDE the "set terminal" or "set
+#        output" commands!  The makefile will include terminal information for
+#        you.  Besides being unnecessary and potentially harmful, including the
+#        terminal definition in the .gpi file makes it harder for you, the one
+#        writing the document, to preview your graphics, e.g., with
+#
+#           gnuplot -persist myfile.gpi
+#
+#        so don't specify a terminal or an output file in your .gpi files.
+#
+#        When building a gpi file into an eps file, there are several features
+#        available to the document designer:
+#
+#        Global Header:
+#            The makefile searches for the files in the variable GNUPLOT_GLOBAL
+#            in order:
+#
+#            ($(GNUPLOT_GLOBAL))
+#
+#            Only the first found is used.  All .gpi files in the directory are
+#            treated as though the contents of GNUPLOT_GLOBAL were directly
+#            included at the top of the file.
+#
+#            NOTE: This includes special comments! (see below)
+#
+#        Font Size:
+#            A special comment in a .gpi file (or a globally included file) of
+#            the form
+#
+#            ## FONTSIZE=<number>
+#
+#            will change the font size of the GPI output.  If font size is
+#            specified in both the global file and the GPI file, the
+#            specification in the individual GPI file is used.
+#
+#        Grayscale Output:
+#            GNUplot files also support a special comment to force them to be
+#            output in grayscale *no matter what*:
+#
+#            ## GRAY
+#
+#            This is not generally advisable, since you can always create a
+#            grayscale document using the forms mentioned above.  But, if your
+#            plot simply must be grayscale even in a document that allows
+#            colors, this is how you do it.
+#
+#    XFig Graphics:
+#            No special handling is done with XFig, except when a global
+#            grayscale method is used, e.g.
+#
+#                make GRAY=1 document
+#
+#            In these cases the .eps file is created using the -N switch to
+#            fig2dev to turn off color output.  (Only works with eps, not pstex
+#            output)
+#
+#    GraphViz Graphics:
+#            The 'dot' program is used to transform a .dot file into a .eps
+#            file.  When grayscale output is requested (e.g. GRAY=1), the
+#            generated postscript is piped through the same color-killing
+#            code used for the other EPS graphics.
+#
+#            If you want, you can use the dot2tex program to convert dot files
+#            to tex graphics.  The default is to just call dot2tex with no
+#            arguments, but you can change the DOT2TEX definition to include
+#            options as needed (in your Makefile.ini).
+#
+#            Note that, as with pstex, the makefile cannot use latex's own
+#            output to discover all missing dot_t (output) files, since anytime
+#            TeX includes TeX, it has to bail when it can't find the include
+#            file.  It can therefore only stop on the first missing file it
+#            discovers, and we can't get a large list of them out easily.
+#
+#            So, the makefile errors out if it's missing an included dot_t
+#            file, then prompts the user to run this command manually:
+#
+#                make all-dot2tex
+#
+#    GZipped EPS Graphics:
+#
+#        A .eps.gz file is sometimes a nice thing to have.  EPS files can get
+#        very large, especially when created from bitmaps (don't do this if you
+#        don't have to).  This makefile will unzip them (not in place) to
+#        create the appropriate EPS file.
+#
+endef
+
+#
+# DEPENDENCY CHART:
+#
+#digraph "g" {
+#    rankdir=TB
+#    size="9,9"
+#    edge [fontsize=12 weight=10]
+#    node [shape=box fontsize=14 style=rounded]
+#
+#    eps [
+#        shape=Mrecord
+#        label="{{<gpi> GNUplot|<epsgz> GZip|<dot> Dot|<fig> XFig}|<eps> eps}"
+#        ]
+#    pstex [label="%.pstex"]
+#    pstex_t [label="%.pstex_t"]
+#    tex_outputs [shape=point]
+#    extra_tex_files [shape=point]
+#    gpi_data [label="<data>"]
+#    gpi_includes [label="_include_.gpi"]
+#    aux [label="%.aux"]
+#    fls [label="%.fls"]
+#    idx [label="%.idx"]
+#    glo [label="%.glo"]
+#    ind [label="%.ind"]
+#    log [label="%.log"]
+#    tex_sh [label="%.tex.sh"]
+#    rst [label="%.rst"]
+#    tex [
+#        shape=record
+#        label="<tex> %.tex|<include> _include_.tex"
+#        ]
+#    include_aux [label="_include_.aux"]
+#    file_bib [label=".bib"]
+#    bbl [label="%.bbl"]
+#    dvi [label="%.dvi"]
+#    ps [label="%.ps"]
+#    pdf [label="%.pdf"]
+#    fig [label=".fig"]
+#    dot [label=".dot"]
+#    gpi [label=".gpi"]
+#    eps_gz [label=".eps.gz"]
+#
+#    gpi_files [shape=point]
+#
+#    rst -> tex:tex [label="reST"]
+#    tex_sh -> tex:tex [label="sh"]
+#    tex_pl -> tex:tex [label="perl"]
+#    tex_py -> tex:tex [label="python"]
+#    tex -> tex_outputs [label="latex"]
+#    tex_outputs -> dvi
+#    tex_outputs -> aux
+#    tex_outputs -> log
+#    tex_outputs -> fls
+#    tex_outputs -> idx
+#    tex_outputs -> include_aux
+#    aux -> bbl [label="bibtex"]
+#    file_bib -> bbl [label="bibtex"]
+#    idx -> ind [label="makeindex"]
+#    glo -> gls [label="makeindex"]
+#    nlo -> nls [label="makeindex"]
+#    gls -> extra_tex_files
+#    nls -> extra_tex_files
+#    ind -> extra_tex_files
+#    bbl -> extra_tex_files
+#    eps -> extra_tex_files
+#    extra_tex_files -> dvi [label="latex"]
+#    gpi_files -> eps:gpi [label="gnuplot"]
+#    gpi -> gpi_files
+#    gpi_data -> gpi_files
+#    gpi_includes -> gpi_files
+#    eps_gz -> eps:epsgz [label="gunzip"]
+#    fig -> eps:fig [label="fig2dev"]
+#    fig -> pstex [label="fig2dev"]
+#    fig -> pstex_t [label="fig2dev"]
+#    pstex -> pstex_t [label="fig2dev"]
+#    dot -> eps:dot [label="dot"]
+#    dvi -> ps [label="dvips"]
+#    include_aux -> bbl [label="bibtex"]
+#    ps -> pdf [label="ps2pdf"]
+#
+#    edge [ color=blue label="" style=dotted weight=1 fontcolor=blue]
+#    fls -> tex:include [label="INPUT: *.tex"]
+#    fls -> file_bib [label="INPUT: *.aux"]
+#    aux -> file_bib [label="\\bibdata{...}"]
+#    include_aux -> file_bib [label="\\bibdata{...}"]
+#    log -> gpi [label="Graphic file"]
+#    log -> fig [label="Graphic file"]
+#    log -> eps_gz [label="Graphic file"]
+#    log -> dot [label="Graphic file"]
+#    log -> idx [label="No file *.ind"]
+#    log -> glo [label="No file *.gls"]
+#    log -> nlo [label="No file *.nls"]
+#    gpi -> gpi_data [label="plot '...'"]
+#    gpi -> gpi_includes [label="load '...'"]
+#    tex:tex -> ps [label="paper"]
+#    tex:tex -> pdf [label="embedding"]
+#}
+
+#
+# DEPENDENCY CHART SCRIPT
+#
+# $(call output-dependency-graph,[<output file>])
+define output-dependency-graph
+	if [ -f '$(this_file)' ]; then \
+	$(SED) \
+		-e '/^[[:space:]]*#[[:space:]]*DEPENDENCY CHART:/,/^$$/!d' \
+		-e '/DEPENDENCY CHART/d' \
+		-e '/^$$/d' \
+		-e 's/^[[:space:]]*#//' \
+		$(this_file) $(if $1,> '$1',); \
+	else \
+		$(ECHO) "Cannot determine the name of this makefile."; \
+	fi
+endef
+# vim: noet sts=0 sw=8 ts=8
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/README	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,4 @@
+Zum Erzeugen der Dokumentation folgenden Befehl auf
+der Kommandozeile absetzen:
+
+  make importer-manual.pdf
Binary file flys-backend/doc/documentation/de/figures/bfg_logo.png has changed
Binary file flys-backend/doc/documentation/de/figures/intevation-logo.pdf has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/importer-geodaesie.tex	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,356 @@
+\section{Geodatenimport}
+
+Der Geodaten-Importer ist ein in Python geschriebenes Kommandozeilen-Tool zum
+Import von Shapefiles in eine Datenbank. Zum Lesen der Shapefiles und zum
+Schreiben der Geodaten in die Datenbank wird GDAL verwendet. Der Import in eine
+Oracle-Datenbank erfordert, dass GDAL und die GDAL-Python-Bindings mit
+Oracle-Unterstützung installiert sind. Weitere Details hierzu befinden sich in
+den Kapiteln \ref{Systemanforderungen} und \ref{Installationsanleitung}.
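+
+Ob die installierte GDAL-Version die Oracle-Schnittstelle (OCI) tatsächlich
+unterstützt, lässt sich vorab prüfen. Der folgende Aufruf ist nur als Beispiel
+zu verstehen; er setzt lediglich voraus, dass \textit{ogrinfo} aus dem
+GDAL-Paket installiert ist:
+
+\begin{lstlisting}
+    ogrinfo --formats | grep -i oci
+\end{lstlisting}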
+
+Der Importer kann mit einem Shellskript von der Kommandozeile gestartet werden
+(siehe Kapitel \ref{Starten des Geodaten Importers}). Nach dem Start wird anhand der
+Konfiguration festgestellt, welche Klassen von Shapefiles aus dem Dateisystem
+importiert werden sollen. Für jede Klasse gibt es einen eigenen
+Parser, der die jeweiligen Attribute eines Shapefiles liest und in die entsprechende
+Relation der Datenbank schreibt. Die Parser sind speziell auf das
+Dateisystem der BfG ausgerichtet. So wird z.B. erwartet, dass die Shapefiles der
+Gewässerachse im Ordner $Geodaesie/Flussachse+km$ liegen. Weitere Informationen zu
+den einzelnen Parsern sind dem nächsten Kapitel \ref{Beschreibung der Parser} zu
+entnehmen. Der Erfolg oder Misserfolg eines Shape-Imports wird je nach
+Konfiguration im Logfile vermerkt. Folgende Einträge können dem Logfile
+entnommen werden:
+
+\textbf{INFO: Inserted 4 features}
+\\Gibt die Anzahl der erfolgreich importierten Features an.\\
+
+\textbf{INFO: Failed to create 2 features}
+\\Gibt die Anzahl der Features an, die nicht importiert werden konnten.\\
+
+\textbf{INFO: Found 3 unsupported features}
+\\Gibt die Anzahl der Features an, die aufgrund ihres Datentyps nicht importiert
+werden konnten. Z.B: es werden Linien erwartet, im Shapefile sind jedoch
+Polygone enthalten.\\
+
+\textbf{ERROR: No source SRS given! No transformation possible!}
+\\Das Shapefile enthält keine Information, in welcher Projektion die Geometrien
+vorliegen. Es findet keine Transformation in die Zielprojektion statt. Bitte
+beachten Sie, dass FLYS diese Geometrien später ggf. nicht korrekt darstellen
+kann.
+
+\textbf{ERROR: Unable to insert feature: DETAIL}
+\\Beim Lesen der Attribute eines Features ist ein Fehler aufgetreten.
+Das Feature konnte nicht in die Datenbank geschrieben werden.\\
+
+\textbf{ERROR: Exception while committing transaction}
+\\Beim Abschluss des Schreib-Vorgangs in die Datenbank ist ein unerwarteter
+Fehler aufgetreten. Die Features des Shapes sind nicht importiert worden.\\
+
+\textbf{ERROR 1: ORA-01017: invalid username/password; logon denied}
+\\Es konnte keine Verbindung zur Oracle Datenbank hergestellt werden. Prüfen Sie
+die Verbindungseinstellungen.
+
+Damit die Geodaten eines Shapes später eindeutig in der Datenbank identifiziert
+werden können, wird für jede Geometrie der Pfad des Shapes im Dateisystem in
+einer Spalte der Datenbank gespeichert. Anwendungen, die auf der Datenbank
+aufbauen, können die Geodaten eines Shapefiles später anhand dieses Merkmals
+gruppieren und anzeigen.
+
+
+\subsection{Beschreibung der Parser}
+\label{Beschreibung der Parser}
+
+Wie im letzten Kapitel beschrieben, sind die Parser speziell auf das Dateisystem
+der BfG ausgerichtet. Im Folgenden werden zu jedem Parser folgende Informationen
+angegeben:
+
+\textbf{Pfad}
+\\Der Pfad, in dem die Shapefiles im Dateisystem abgelegt sein müssen, ausgehend
+vom Gewässerverzeichnis.
+
+\textbf{Geometrie}
+\\Der Geometrietyp, der für diese Klasse von Shapefiles erwartet wird.
+
+\textbf{Attribute}
+\\Eine Liste der Attribute, die vom Parser aus dem Shape gelesen werden.
+
+
+\subsubsection{Achsen}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Flussachse+km \\
+Geometrie   &   LINESTRING \\
+Attribute   &   name, kind \\
+\end{tabular}
+
+
+\subsubsection{Hydrologische Grenzen}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/Hydr.Grenzen/Linien \\
+Geometrie   &   LINESTRING, POLYGON \\
+Attribute   &   name, kind \\
+\end{tabular}
+
+\subsubsection{Bauwerke}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Bauwerke \\
+Geometrie   &   LINESTRING \\
+Attribute   &   name, Name, KWNAAM \\
+\end{tabular}
+
+
+\subsubsection{Einzugsgebiete}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/Einzugsgebiet \\
+Geometrie   &   POLYGON, MULTIPOLYGON \\
+Attribute   &   name, Name, AREA, area \\
+\end{tabular}
+
+
+\subsubsection{Querprofilspuren}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Querprofile \\
+Geometrie   &   LINESTRING \\
+Attribute   &   KILOMETER, KM, STATION, ELEVATION \\
+\end{tabular}
+
+
+\subsubsection{Festpunkte}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Festpunkte \\
+Geometrie   &   POINT \\
+Attribute   &   name, KM, ELBE\_KM, X, Y, HPGP \\
+\end{tabular}
+
+
+\subsubsection{Talaue}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/Hydr.Grenzen \\
+Geometrie   &   POLYGON, MULTIPOLYGON \\
+Attribute   &   name \\
+\end{tabular}
+
+
+\subsubsection{Pegelstationen}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/Streckendaten \\
+Geometrie   &   POINT \\
+Attribute   &   Name, name, MPNAAM \\
+\end{tabular}
+
+
+\subsubsection{Hochwasserschutzanlagen}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/HW-Schutzanlagen \\
+Geometrie   &   LINESTRING \\
+Attribute   &   TYP, Bauart, Name, name \\
+\end{tabular}
+
+
+\subsubsection{Kilometrierung}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Flussachse+km \\
+Geometrie   &   POINT \\
+Attribute   &   name, km, KM \\
+\end{tabular}
+
+
+\subsubsection{Linien}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Geodaesie/Linien \\
+Geometrie   &   LINESTRING, MULTILINESTRING \\
+Attribute   &   name, TYP, Z \\
+
+Anmerkung   & Wenn kein Attribut 'TYP' definiert ist, wird standardmäßig der Wert \\
+            & 'DAMM' angenommen. Fehlt ein Attribut 'Z', wird '9999' als Höhe \\
+            & angenommen. \\
+\end{tabular}
+
+
+\subsubsection{Überschwemmungsfläche}
+\hspace{1cm}
+\begin{tabular}[t]{ll}
+Pfad        &   Hydrologie/UeSG/Berechnung \\
+Geometrie   &   POLYGON, MULTIPOLYGON \\
+Attribut    &   name, diff, count, area, perimeter \\
+\end{tabular}
+
+
+\subsection{Systemanforderungen}
+\label{Systemanforderungen}
+\begin{itemize}
+  \item Oracle Datenbank inkl. Schema für FLYS
+  \item GDAL Binding für Python mit Oracle Support
+  \item ogr2ogr
+  \item Python $>=$ 2.6
+\end{itemize}
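+
+Ob diese Anforderungen auf dem Zielsystem erfüllt sind, lässt sich
+beispielsweise mit den folgenden Aufrufen prüfen (als Skizze zu verstehen; die
+letzte Zeile prüft lediglich, ob die GDAL-Python-Bindings importierbar sind):
+
+\begin{lstlisting}
+    python --version
+    ogr2ogr --version
+    python -c 'from osgeo import ogr'
+\end{lstlisting}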
+
+
+\subsection{Installationsanleitung}
+\label{Installationsanleitung}
+\begin{itemize}
+
+ \item Python\\
+ Zum Starten des Importers ist es notwendig, Python zu installieren. Dies können
+ Sie mit folgendem Befehl auf der Kommandozeile erledigen:
+
+ \begin{lstlisting}
+    zypper in python
+ \end{lstlisting}
+
+ \item Oracle Instantclient\\
+ Der Oracle Instantclient 11.2 wird benötigt, damit der Importer mittels Python
+ und GDAL in die bestehende Oracle-Datenbank schreiben kann. Dazu ist es
+ erforderlich, die folgenden Archive von Oracle herunterzuladen. Zu finden sind
+ diese Pakete unter\\
+ \href{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}{http://www.oracle.com/technetwork/topics/linuxx86-64soft-092277.html}
+
+ \begin{itemize}
+    \item instantclient-basic-linux-x86-64-11.2.0.2.0.zip
+    \item instantclient-sdk-linux-x86-64-11.2.0.2.0.zip
+    \item instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip
+ \end{itemize}
+
+ Anschließend führen Sie folgende Befehle auf der Kommandozeile aus:
+
+ \begin{lstlisting}
+
+    mkdir /opt
+
+    unzip ~/instantclient-basic-linux-x86-64-11.2.0.2.0.zip -d /opt
+    unzip ~/instantclient-sdk-linux-x86-64-11.2.0.2.0.zip -d /opt
+    unzip ~/instantclient-sqlplus-linux-x86-64-11.2.0.2.0.zip -d /opt
+
+    mkdir /opt/instantclient_11_2/lib
+    cd /opt/instantclient_11_2/lib
+    ln -s ../libclntsh.so.11.1 .
+    ln -s ../libclntsh.so.11.1 libclntsh.so
+    ln -s ../libnnz11.so .
+    ln -s ../libocci.so.11.1 .
+    ln -s ../libocci.so.11.1 libocci.so
+    ln -s ../libociei.so .
+    ln -s ../libocijdbc11.so .
+    ln -s ../libsqlplusic.so .
+    ln -s ../libsqlplus.so .
+
+    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal1180-1.8.0-intevation1.x86_64.rpm 
+    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/libgdal180-devel-1.8.0-intevation1.x86_64.rpm
+    rpm -i --nodeps ~/flys-importer/rpm/RPMS/x86_64/gdal180-1.8.0-intevation1.x86_64.rpm
+
+ \end{lstlisting}
+
+ Sollten keine Fehler aufgetreten sein, haben Sie den \textit{Oracle
+ Instantclient 11.2} erfolgreich entpackt und im Dateisystem unter
+ \textit{/opt/instantclient\_11\_2} abgelegt. Mit den Befehlen \textit{rpm -i -{}-nodeps}
+ haben Sie anschließend die notwendigen Bindings installiert, damit der Importer
+ die Geodaten in die Oracle-Datenbank schreiben kann.
+
+\end{itemize}
+
+
+\subsection{Konfiguration}
+\label{Konfiguration}
+Der Geodaten-Importer kann über die Datei \textit{contrib/run\_geo.sh}
+konfiguriert werden. Öffnen Sie die Datei mit einem Texteditor Ihrer Wahl.
+In den Zeilen 4-9 werden Optionen definiert, die zwingend angepasst
+werden müssen:
+
+\textbf{RIVER\_PATH}
+\\Der Pfad zum Gewässer im Dateisystem.
+
+\textbf{RIVER\_ID}
+\\Die Datenbank ID des zu importierenden Gewässers.
+
+\textbf{TARGET\_SRS}
+\\Das EPSG-Referenzsystem, in das die Geodaten beim Import projiziert werden
+sollen.
+
+\textbf{HOST}
+\\Der Host der Datenbank.
+
+\textbf{USER}
+\\Der Nutzer, der zum Verbinden zur Datenbank verwendet wird.
+
+\textbf{PASS}
+\\Das Passwort für USER zum Verbinden zur Datenbank.
+
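+Ein mögliches Beispiel für die Belegung dieser Optionen (die konkreten Werte
+sind hier frei gewählt und müssen an die eigene Umgebung angepasst werden):
+
+\begin{lstlisting}
+    RIVER_PATH="/opt/flys/testdaten/Gewaesser/Saar"
+    RIVER_ID=1
+    TARGET_SRS=31467
+    HOST="localhost"
+    USER="flys"
+    PASS="flys"
+\end{lstlisting}
+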
+In den Zeilen 12-23 werden weitere Optionen definiert, die bei Bedarf angepasst
+werden können. Falls nicht anders angegeben, können die Optionen mit den Werten
+`0` und `1` belegt werden.
+
+\textbf{VERBOSE}
+\\Dieser Wert gibt die Granularität der Log-Ausgaben während des
+Imports an. Je höher der Wert, desto mehr Informationen werden
+in das Logfile geschrieben. Aktuell sind die Werte `0`, `1` und
+`2` definiert. Wird der Wert `0` gesetzt, werden nur Fehler und
+Warnungen in das Logfile geschrieben. Bei `1` werden neben
+Fehlern und Warnungen auch Infos in das Logfile geschrieben. Bei
+`2` werden sämtliche Ausgaben des Programms geschrieben. Dieser
+Modus ist hauptsächlich für die Entwicklung gedacht.
+
+\textbf{SKIP\_AXIS}
+\\Bei gesetztem Wert `1` werden keine Flussachsen importiert.
+
+\textbf{SKIP\_KMS}
+\\Bei gesetztem Wert `1` werden keine Kilometrierungen importiert.
+
+\textbf{SKIP\_CROSSSECTIONS}
+\\Bei gesetztem Wert `1` werden keine Querprofilspuren importiert.
+
+\textbf{SKIP\_LINES}
+\\Bei gesetztem Wert `1` werden keine Linien importiert.
+
+\textbf{SKIP\_FIXPOINTS}
+\\Bei gesetztem Wert `1` werden keine Festpunkte importiert.
+
+\textbf{SKIP\_BUILDINGS}
+\\Bei gesetztem Wert `1` werden keine Bauwerke importiert.
+
+\textbf{SKIP\_FLOODPLAINS}
+\\Bei gesetztem Wert `1` werden keine Talauen importiert.
+
+\textbf{SKIP\_HYDR\_BOUNDARIES}
+\\Bei gesetztem Wert `1` werden keine hydrologischen Grenzen importiert.
+
+\textbf{SKIP\_HWS}
+\\Bei gesetztem Wert `1` werden keine Hochwasserschutzanlagen importiert.
+
+\textbf{SKIP\_GAUGE\_LOCATION}
+\\Bei gesetztem Wert `1` werden keine Pegelorte importiert.
+
+\textbf{SKIP\_CATCHMENTS}
+\\Bei gesetztem Wert `1` werden keine Einzugsgebiete importiert.
+
+\textbf{SKIP\_UESG}
+\\Bei gesetztem Wert `1` werden keine Überschwemmungsflächen importiert.
+
+
+\subsection{Starten des Geodaten Importers}
+\label{Starten des Geodaten Importers}
+Der Geodaten-Importer wird mittels eines Shellskripts von einer Konsole
+gestartet. Dazu führen Sie folgenden Befehl aus:\\
+
+\begin{lstlisting}
+    sh contrib/run_geo.sh > geo-import.log
+\end{lstlisting}
+
+Der Importer wird nun gestartet. Sämtliche Log-Ausgaben werden in die Datei
+\textit{geo-import.log} geschrieben.
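+
+Der Fortschritt des Imports lässt sich bei Bedarf parallel in einer zweiten
+Konsole verfolgen, beispielsweise mit dem Standardwerkzeug \textit{tail}:
+
+\begin{lstlisting}
+    tail -f geo-import.log
+\end{lstlisting}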
+
+\textbf{Hinweis}
+\\Bitte beachten Sie, dass der Geodaten-Importer aufgrund der eingesetzten
+Technologien derzeit nicht in der Lage ist, lesend auf die Oracle-Datenbank
+zuzugreifen. Entsprechend kann beim Import nicht festgestellt werden, ob sich
+Shapefiles bereits in der Datenbank befinden oder nicht. Ein erneuter
+Importvorgang der Geodaten würde also dazu führen, dass Geometrien doppelt in
+der Datenbank abgelegt werden.
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/importer-hydr-morph.tex	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,749 @@
+\section{Fachdatenimport}
+
+Der Fachdatenimporter dient dazu, hydrologische und morphologische Gewässerdaten
+aus dem Dateisystem in die FLYS3-Datenbank zu importieren. Das Werkzeug
+orientiert sich hierbei an der Dateihierarchie, so wie sie auch von Desktop-FLYS
+ausgelesen wird. Der Importvorgang ist in zwei Phasen unterteilt:
+
+\begin{itemize}
+    \item Lesen aller Daten eines Gewässers aus dem Dateisystem.
+    \item Schreiben der erfolgreich eingelesenen Daten in die Datenbank.
+\end{itemize}
+
+Sollte beim Lese- oder Schreib-Vorgang eines Gewässers ein Fehler auftreten, so
+werden sämtliche Daten des Gewässers verworfen. Beide Phasen zusammen bilden
+somit eine Transaktion.
+
+\textbf{Hinweis}
+\\Der Import geht wie auch Desktop-FLYS davon aus, dass die Dateien Latin-1
+kodiert vorliegen! Stellen Sie also sicher, dass das von Ihnen verwendete
+Encoding korrekt ist. Andernfalls ist es möglich, dass es während des Imports zu
+unerwarteten Problemen kommt.
+
+Der Importer ist ein in Java geschriebenes Werkzeug und kann von der Konsole aus
+gestartet werden. Sämtliche Konfigurationen können über sogenannte
+\textit{System-Properties} übergeben werden. Eine \textit{System-Property} wird
+dabei mittels \textit{-Dkey=value} beim Start übergeben. Im folgenden Beispiel
+würde der Importer mit einer Konfiguration \textit{flys.backend.importer.dry.run},
+die auf den Wert \textit{true} gesetzt ist, gestartet.
+
+\begin{lstlisting}
+    java -Dflys.backend.importer.dry.run=true de.intevation.flys.importer.Importer
+\end{lstlisting}
+
+Auf gleiche Weise können dem Importer sämtliche Optionen zur Konfiguration
+beim Start mitgegeben werden. Im Folgenden werden die möglichen System-Properties und
+ihre Auswirkung auf den Import genauer beschrieben. In den Kapiteln
+\ref{configuration} und \ref{start-hydr} wird zur Vereinfachung jedoch ein
+Shellskript verwendet, das eine Standardkonfiguration vorgibt und den Importer
+mit allen erforderlichen Konfigurationen startet.
+
+
+\subsection{Importierte Daten}
+In diesem Kapitel werden die Datentypen aufgelistet und erläutert, wie sie vom
+Importer eingelesen werden.
+
+\subsubsection{Streckenfavoriten (*.km-Dateien)}
+Der Import der Streckenfavoriten kann mit \textbf{-Dflys.backend.importer.skip.annotations=true}
+unterdrückt werden.
+
+Zur Klassifikation von Streckenfavoriten muss mittels\\
+\textbf{-Dflys.backend.importer.annotation.types=DATEI} der Pfad zu einer
+XML-Datei angegeben werden. In dieser Datei werden die Typen und Regeln
+festgelegt, anhand derer die Klassifikation während des Import-Vorgangs
+vorgenommen wird. Details hierzu befinden sich im Kapitel \ref{annotation-types}.
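+
+Ein beispielhafter Aufruf mit Angabe einer solchen Datei könnte wie folgt
+aussehen (der Dateiname \textit{conf/annotation-types.xml} ist hier nur
+exemplarisch gewählt):
+
+\begin{lstlisting}
+    java -Dflys.backend.importer.annotation.types=conf/annotation-types.xml \
+         de.intevation.flys.importer.Importer
+\end{lstlisting}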
+
+\subsubsection{Pegel, Stammdaten (*.glt, *.sta-Dateien)}
+Der Import von Pegel- und Stammdaten kann mit \textbf{-Dflys.backend.importer.skip.gauges=true}
+unterdrückt werden. Die .glt-Datei, die neben der .wst-Datei liegt, wird zuerst
+ausgelesen. Es werden nur *.sta-Dateien von Pegeln geladen, die in der .glt-Datei
+vermerkt sind.
+
+Mittels \textbf{-Dflys.backend.sta.parse.gauge.numbers=true} wird versucht, die
+offiziellen Pegelnummern aus den Stammdaten zu extrahieren.
+\textbf{Dies ist mit Vorsicht zu behandeln, denn die meisten STA-Dateien
+enthalten invalide Pegelnummern.}
+
+Die System-Property \textbf{flys.backend.main.value.types} kann einen String
+mit gültigen Typen von Stammdaten enthalten. Vorbelegt ist \textit{QWTD}. In der
+Praxis ist \textit{QWD} eine sinnvolle Belegung.
+
+\subsubsection{Basis-Wasserstände (gewaesser.wst-Dateien)}
+Der Import von Wasserständen kann mit \textbf{-Dflys.backend.importer.skip.wst=true} unterdrückt werden.
+
+\subsubsection{Zusätzliche Längsschnitte (*.zus, *.wst-Dateien)}
+Der Import von zusätzlichen Längsschnitten kann mit \textbf{-Dflys.backend.importer.skip.extra.wsts=true}
+unterdrückt werden. Es werden die *.zus- und *.wst-Dateien aus dem Verzeichnis
+\textit{../Zus.Längsschnitte} relativ zur gewaesser.wst-Datei betrachtet.
+
+\subsubsection{Fixierungen (*.wst-Dateien)}
+Der Import von Fixierungen kann mit \textbf{-Dflys.backend.importer.skip.fixations=true}
+unterdrückt werden. Es werden die *.wst-Dateien aus dem Verzeichnis
+\textit{../Fixierungen} relativ zur gewaesser.wst-Datei betrachtet.
+
+\subsubsection{Amtliche Linien (*.wst-Dateien)}
+Der Import von amtlichen Linien kann mit \textbf{-Dflys.backend.importer.skip.official.lines=true}
+unterdrückt werden. Es werden die \textit{Amtl\_Linien.wst}-Dateien aus dem
+Verzeichnis \textit{../Basisdaten} und \textit{../Fixierungen} relativ zur
+\textit{gewaesser.wst}-Datei betrachtet.
+
+\subsubsection{Profilspuren (*.prf-Dateien)}
+Der Import von Profilspuren kann mit \textbf{-Dflys.backend.importer.skip.prfs=true}
+unterdrückt werden. Es werden rekursiv alle *.prf-Dateien aus \textit{../../..}
+relativ zur gewaesser.wst-Datei betrachtet. Vor dem Import werden mit Hilfe
+eines Längen- und eines MD5-Summen-Vergleichs inhaltliche Duplikate
+ausgeschlossen.
+
+\subsubsection{Hydraulische Kennzahlen (*.hyk)}
+Der Import von hydraulischen Kennzahlen kann mit \textbf{-Dflys.backend.importer.skip.hyks=true} unterdrückt
+werden. Es werden rekursiv alle *.hyk-Dateien aus \textit{../../..} relativ zur
+gewaesser.wst-Datei betrachtet. Vor dem Import werden mit Hilfe eines Längen- und
+eines MD5-Summen-Vergleichs inhaltliche Duplikate ausgeschlossen.
+
+\subsubsection{Hochwassermarken (*.zus, *.wst)}
+Der Import von Hochwassermarken kann mit \textbf{-Dflys.backend.importer.skip.flood.water=true}
+unterdrückt werden. Es werden die *.zus- und *.wst-Dateien aus dem Verzeichnis
+\textit{../HW-Marken} relativ zur gewaesser.wst-Datei betrachtet.
+
+\subsubsection{Hochwasserschutzanlagen (*.zus)}
+Der Import von Hochwasserschutzanlagen kann mit \textbf{-Dflys.backend.importer.skip.flood.protection=true}
+unterdrückt werden. Es werden die *.zus- und *.wst-Dateien aus dem Verzeichnis
+\textit{../HW-Schutzanlagen} relativ zur \textit{gewaesser.wst}-Datei betrachtet.
+
+\subsubsection{Sohlhöhen (Peilungen)}
+Der Import von Sohlhöhen-Peilungen kann mit \textbf{-Dflys.backend.importer.skip.bed.height.single=true}
+unterdrückt werden. Es werden die Dateien aus dem Verzeichnis
+\textit{Morphologie/Sohlhoehen/Einzeljahre} geladen.
+
+\subsubsection{Sohlhöhen (Epochen)}
+Der Import der Sohlhöhen-Epochen kann mit \textbf{-Dflys.backend.importer.skip.bed.height.epoch=true}
+unterdrückt werden. Es werden die Dateien aus dem Verzeichnis
+\textit{Morphologie/Sohlhoehen/Epochen} geladen.
+
+\subsubsection{Sedimentdichte}
+Der Import der Sedimentdichte kann mit
+\textbf{-Dflys.backend.importer.skip.sediment.density=true}
+unterdrückt werden. Es werden alle Dateien aus dem Verzeichnis
+\textit{Morphologie/Sedimentdichte} geladen.
+
+\subsubsection{Morphologische Breite}
+Der Import der morphologischen Breite kann mit
+\textbf{-Dflys.backend.importer.skip.morphological.width=true}
+unterdrückt werden. Es werden alle Dateien aus dem Verzeichnis
+\textit{Morphologie/morphologische\_Breite} geladen.
+
+\subsubsection{Fließgeschwindigkeit}
+Der Import der Fließgeschwindigkeit kann mit
+\textbf{-Dflys.backend.importer.skip.flow.velocity=true}
+unterdrückt werden. Es werden alle Dateien aus den Verzeichnissen\\
+\textit{Morphologie/Geschwindigkeit\_Schubspannung/Modellrechnungen} und\\
+\textit{Morphologie/Geschwindigkeit\_Schubspannung/v-Messungen} geladen.
+
+\subsubsection{Sedimentfracht}
+Der Import der Sedimentfracht kann mit
+\textbf{-Dflys.backend.importer.skip.sediment.yield=true}
+unterdrückt werden. Es werden alle Dateien aus dem Verzeichnis
+\textit{Morphologie/Fracht} geladen. Dabei werden die Dateien aus den
+Unterverzeichnissen \textit{Einzeljahre} und \textit{Epochen} entsprechend als
+\textit{Einzeljahre} und \textit{Epochen} vermerkt.
+
+\subsubsection{Wasserspiegellagen für MINFO}
+Der Import der MINFO spezifischen Wasserspiegellagen kann mit\\
+\textbf{-Dflys.backend.importer.skip.waterlevels=true}
+unterdrückt werden. Es werden alle Dateien aus dem Verzeichnis
+\textit{Morphologie/Fixierungsanalyse/Wasserspiegellagen} geladen.
+
+\subsubsection{Wasserspiegeldifferenzen für MINFO}
+Der Import der Wasserspiegellagendifferenzen kann mit\\
+\textbf{-Dflys.backend.importer.skip.waterlevel.differences=true}
+unterdrückt werden. Es werden alle Dateien aus dem Verzeichnis
+\textit{Morphologie/Fixierungsanalyse/Wasserspiegeldifferenzen} geladen.
+
+\subsubsection{Transport-Abfluss-Beziehung}
+Der Import der Daten für die Transport-Abfluss-Beziehung kann mit\\
+\textbf{-Dflys.backend.importer.skip.sq.relation=true} unterdrückt
+werden. Es werden alle Dateien unter
+\textit{Feststofftransport-Abfluss-Beziehung} geladen.
+
+
+\subsection{Klassifikation von Streckenfavoriten}
+\label{annotation-types}
+Streckenfavoriten werden aus KM-Dateien importiert. Um die einzelnen Einträge
+einer Kategorie (Brücke, Pegel, etc.) zuzuordnen, kann eine XML-Datei angegeben werden,
+in der Regeln für diese Klassifikation definiert werden. Schematisch gliedert
+sich diese Datei in die zwei Bereiche 'types' und 'patterns':
+
+\begin{lstlisting}
+    <annotation>
+        <types>
+            <type>...</type>
+            <type>...</type>
+            ...
+        </types>
+        <patterns>
+            <pattern>...</pattern>
+            <pattern>...</pattern>
+            ...
+        </patterns>
+    </annotation>
+\end{lstlisting}
+
+In der Sektion \textit{types} werden die Kategorien vereinbart, in die klassifiziert
+werden soll. Dies geschieht mit entsprechenden Zeilen in der XML-Datei. Es folgt
+ein Auszug aus einer solchen Datei:
+
+\begin{lstlisting}
+  <type name="Pegel"/>
+  <type name="Brücke"/>
+  ...
+  <type name="Sonstige" default="true"/>
+\end{lstlisting}
+
+Das Attribut 'default' darf maximal einmal vergeben werden und besagt, dass diese
+Kategorie gewählt werden soll, wenn keine andere Kategorie zugeordnet werden kann.
+
+In der Sektion 'patterns' werden dann die Regeln definiert, die einzelne Einträge
+den zuvor definierten Kategorien zuordnen. Hierfür können zwei Arten von
+Definitionen angegeben werden:
+
+\begin{lstlisting}
+  <file pattern="^Brücken$" type="Brücke"/>
+\end{lstlisting}
+
+oder
+
+\begin{lstlisting}
+  <line pattern="^Brücke[:\s].*$" type="Brücke"/>
+\end{lstlisting}
+
+Die erste Variante bestimmt die Kategorie, die pro KM-Datei gelten soll.
+\textit{pattern} ist hierbei ein regulärer Ausdruck, der auf den Dateinamen
+angewandt wird. Passt der Name der Datei auf den regulären Ausdruck, wird
+\textit{type} als Vorgabe angenommen. Treffen mehrere \textit{file}-Regeln zu,
+wird der erste Treffer angewandt. Findet keine der \textit{file}-Regeln Anwendung, wird
+die Kategorie ausgewählt, die in der \textit{types}-Section das Attribut
+\textit{default} gesetzt hat.
+
+Die zweite Regel-Variante \textit{line} wird innerhalb einer KM-Datei auf den
+Bezeichner jedes einzelnen Streckenfavoriten-Eintrags angewandt. Als Muster dient auch hier
+ein regulärer Ausdruck, der über das Attribut \textit{pattern} definiert wird.
+Die Kategorie wird im Trefferfall über das Attribut \textit{type} bestimmt.
+Treffen mehrere Regeln zu, wird die Kategorie gewählt, die zum ersten Treffer
+gehört. Trifft keine Regel zu, wird der Eintrag der Kategorie zugeteilt, die für
+die beinhaltende Datei als Vorgabe gilt.
+
+
+\subsection{Konfiguration}
+\label{configuration}
+Zum Starten des Importers ist es notwendig, in der Datei
+\textit{contrib/run\_hydr\_morph.sh} die Variablen am Anfang der Datei
+anzupassen. Im Folgenden werden notwendige und optionale Einstellungen
+beschrieben, die beim Starten des Importers berücksichtigt werden. Folgende
+Einstellungen sind zwingend an die bestehende Umgebung anzupassen:
+
+\textbf{INFO\_GEW}
+\\Diese Option muss auf eine valide *.gew-Datei verweisen (bekannt aus
+Desktop-FLYS). Wichtig für den Importer sind in dieser Datei die Zeilen, die mit
+\textit{WSTDatei:} beginnen. In ihnen wird der Pfad zu der zentralen WST-Datei
+des jeweiligen Gewässers angegeben. Alle anderen importierten Dateien werden in
+ihrer Lage im Dateisystem relativ zur Lage dieser Datei betrachtet.
+
+\textbf{BACKEND\_USER}
+\\Der Nutzername, der zum Verbinden zur Datenbank verwendet werden soll.
+
+\textbf{BACKEND\_PASS}
+\\Das Passwort, welches in Kombination mit \textbf{BACKEND\_USER} zum Verbinden
+zur Datenbank verwendet werden soll.
+
+\textbf{BACKEND\_HOST}
+\\Der Datenbank-Host. In der Regel sollte hier \textit{localhost} eingetragen
+werden, da es empfohlen wird, den Importer auf demselben Host zu starten, auf
+dem auch die Datenbank läuft.
+
+\textbf{BACKEND\_PORT}
+\\Der Port, auf dem die Datenbank zu erreichen ist. Bei einer Oracle XE Instanz
+z.B.: \textit{1521}, sofern nicht anders konfiguriert.
+
+\textbf{BACKEND\_NAME}
+\\Der Name der Datenbankinstanz. Beispielsweise \textit{XE} bei einer Oracle XE
+Instanz.
+
+\textbf{BACKEND\_DB\_PREFIX}
+\\Der Präfix zum Aufbau einer Datenbankverbindung. Für Oracle z.B.: \textit{jdbc:oracle:thin:@}.
+
+\textbf{BACKEND\_DB\_DRIVER}
+\\Der Name des JDBC-Treibers, der es erlaubt, das Protokoll der Datenbank zu
+sprechen. Im Falle einer Oracle XE wäre dies z.B.: \textit{oracle.jdbc.OracleDriver}.
+
+\textbf{BACKEND\_DB\_DIALECT}
+\\Der Hibernate-Dialekt, den die Datenbank versteht. Im Falle einer Oracle-XE
+wäre dies z.B.: \textit{org.hibernate.dialect.OracleDialect}.
+
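+Zur Veranschaulichung ein mögliches Beispiel für die Belegung dieser Variablen
+bei einer lokalen Oracle-XE-Instanz (Pfad, Nutzername und Passwort sind frei
+gewählt und müssen an die eigene Umgebung angepasst werden):
+
+\begin{lstlisting}
+    INFO_GEW="/opt/flys/testdaten/Gewaesser/Saar/saar.gew"
+    BACKEND_USER="flys"
+    BACKEND_PASS="flys"
+    BACKEND_HOST="localhost"
+    BACKEND_PORT="1521"
+    BACKEND_NAME="XE"
+    BACKEND_DB_PREFIX="jdbc:oracle:thin:@"
+    BACKEND_DB_DRIVER="oracle.jdbc.OracleDriver"
+    BACKEND_DB_DIALECT="org.hibernate.dialect.OracleDialect"
+\end{lstlisting}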
+
+Weitere Details zum Verbinden zu einer Oracle-Datenbank finden Sie unter\\
+\href{http://www.orafaq.com/wiki/JDBC}{http://www.orafaq.com/wiki/JDBC}. Alle weiteren Einstellungen sind
+optional anpassbar:
+
+\textbf{LOG4J\_CONFIG}
+\\Der Fachdatenimport verwendet die externe Bibliothek \textit{Apache Log4J} zum Loggen
+von Informationen. Dazu ist es notwendig, eine entsprechende Konfiguration beim
+Start anzugeben. \textit{LOG4J\_CONFIG} verweist in diesem Fall auf eine externe
+Datei zur Konfiguration von Log4J. Im Standardfall wird die Datei
+\textit{conf/log4j.properties} verwendet, welche eine sinnvolle Standardkonfiguration
+enthält. Sollten Sie diese Konfiguration verwenden, wird beim Import eine
+Log-Datei namens \textit{import.log} erstellt, die maximal 100 MB groß werden
+kann. Sollte die Log-Datei größer als 100 MB anwachsen, wird die aktuelle Datei
+nach \textit{import.log.1} umbenannt und eine neue Datei \textit{import.log}
+wird begonnen. Maximal werden 10 Log-Dateien gespeichert. Für weitere Details
+zu Log4J siehe die Online-Dokumentation unter
+\href{http://logging.apache.org/log4j/1.2/}{http://logging.apache.org/log4j/1.2/}
+
+
+\textbf{IMPORTER\_MAINVALUE\_TYPES}
+\\Diese Einstellung erlaubt die Angabe eines Textes, der aus den gültigen Typen
+für Hauptwerte zusammengesetzt ist. \textit{QWTD} ist standardmäßig gesetzt.
+
+\textbf{IMPORTER\_ANNOTATION\_TYPES}
+\\Diese Einstellung verweist auf eine Datei (relativ zum Ort der \textit{run.sh}
+im Dateisystem), die die möglichen Typen von Streckenfavoriten und deren Regeln
+definiert. Siehe hierzu auch Kapitel \ref{annotation-types}.
+
+
+Die im Folgenden beschriebenen Einstellungen können jeweils die Werte
+\textit{true} oder \textit{false} annehmen und sind optional anzupassen.
+
+\textbf{IMPORTER\_DRY\_RUN}
+\\Falls \textit{true} gesetzt wird, wird der Import nur simuliert. Es werden
+keine Daten in die Datenbank geschrieben. Dies kann z.B. zum Ermitteln
+potentieller Dateninkonsistenzen sinnvoll sein.
+
+\textbf{IMPORTER\_SKIP\_GAUGES}
+\\Wenn \textit{true} gesetzt ist, werden keine Pegel- und Stammdaten bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_ANNOTATIONS}
+\\Wenn \textit{true} gesetzt ist, werden keine Streckenfavoriten bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_WST}
+\\Wenn \textit{true} gesetzt ist, werden keine WST-Dateien bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_PRFS}
+\\Wenn \textit{true} gesetzt ist, werden keine Querprofilspuren bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_HYKS}
+\\Wenn \textit{true} gesetzt ist, werden keine HYK-Dateien bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_EXTRA\_WST}
+\\Wenn \textit{true} gesetzt ist, werden keine zusätzlichen Längsschnitte
+bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_FIXATIONS}
+\\Wenn \textit{true} gesetzt ist, werden keine Fixierungen bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_OFFICIAL\_LINES}
+\\Wenn \textit{true} gesetzt ist, werden keine offiziellen Linien bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_FLOOD\_WATER}
+\\Wenn \textit{true} gesetzt ist, werden keine Hochwassermarken bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_FLOOD\_PROTECTION}
+\\Wenn \textit{true} gesetzt ist, werden keine Hochwasserschutzanlagen
+bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_BED\_HEIGHT\_SINGLE}
+\\Wenn \textit{true} gesetzt ist, werden keine mittleren Sohlhöhen (Peilungen) bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_BED\_HEIGHT\_EPOCH}
+\\Wenn \textit{true} gesetzt ist, werden keine mittleren Sohlhöhen (Epochen)
+bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_SEDIMENT\_DENSITY}
+\\Wenn \textit{true} gesetzt ist, werden keine Dateien zur Sedimentdichte
+bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_MORPHOLOGICAL\_WIDTH}
+\\Wenn \textit{true} gesetzt ist, wird keine morphologische Breite bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_FLOW\_VELOCITY}
+\\Wenn \textit{true} gesetzt ist, werden keine Fließgeschwindigkeiten bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_SEDIMENT\_YIELD}
+\\Wenn \textit{true} gesetzt ist, werden keine Sedimentfrachten bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_WATERLEVELS}
+\\Wenn \textit{true} gesetzt ist, werden keine Wasserspiegellagen für MINFO bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_WATERLEVEL\_DIFFERENCES}
+\\Wenn \textit{true} gesetzt ist, werden keine Wasserspiegellagendifferenzen für
+MINFO bearbeitet.
+
+\textbf{IMPORTER\_SKIP\_SQ\_RELATION}
+\\Wenn \textit{true} gesetzt ist, werden keine Daten für die Berechnungsart
+SQ-Beziehung bearbeitet.
+
+
+
+\subsection{Fehler und Warnungen}
+
+\subsubsection{Fehler}
+
+\textbf{error while parsing gew}
+\\Die GEW-Datei ist fehlerhaft oder konnte nicht geöffnet werden.
+
+\textbf{File 'XYZ' is broken!}
+\\Die Datei XYZ ist inkonsistent und führt zu Fehlern.
+
+\textbf{Error while parsing file for morph. width.}
+\\Beim Lesen der morphologischen Breite trat ein Fehler auf.
+
+\textbf{Error while storing flow velocity model.}
+\\Beim Schreiben eines Fließgeschwindigkeitsmodells trat ein Fehler auf.
+
+\textbf{Error while storing flow velocity measurement.}
+\\Beim Schreiben einer Fließgeschwindigkeitsmessung trat ein Fehler auf.
+
+\textbf{Error while storing sediment yield.}
+\\Beim Schreiben einer Sedimentablagerung trat ein Fehler auf.
+
+\textbf{Error while storing waterlevel diff.}
+\\Beim Schreiben einer Wasserspiegeldifferenz trat ein Fehler auf.
+
+\textbf{Error while storing sq relation.}
+\\Beim Schreiben einer S(Q) Beziehung trat ein Fehler auf.
+
+\textbf{Error reading PRF file.}
+\\Beim Lesen einer PRF-Datei trat ein Fehler auf.
+
+\textbf{Error closing PRF file.}
+\\Beim Schließen einer PRF-Datei trat ein Fehler auf.
+
+\textbf{HYK 1: not enough elements in line \#}
+\\Eine Zeile in einer HYK-Datei hat nicht genügend Elemente.
+
+\textbf{HYK 2: not enough elements in line \#}
+\\Eine Zeile in einer HYK-Datei hat nicht genügend Elemente.
+
+\textbf{HYK 5: not enough elements in line \#}
+\\Eine Zeile in einer HYK-Datei hat nicht genügend Elemente.
+
+\textbf{HYK 6: not enough elements in line \#}
+\\Eine Zeile in einer HYK-Datei hat nicht genügend Elemente.
+
+\textbf{HYK: parsing num zones, bottom or top height failed in line \#}
+\\Die Anzahl der Zonen oder die Daten über die Zonen sind nicht korrekt.
+
+\textbf{HYK: HYK: number of flow zones mismatches in line \#}
+\\Die Anzahl der Zonen oder die Daten über die Zonen sind nicht korrekt.
+
+\textbf{HYK: cannot parse number in line \#}
+\\Eine Zahl wurde erwartet.
+
+\textbf{HYK: Error reading file.}
+\\Beim Lesen einer HYK-Datei trat ein Fehler auf.
+
+\textbf{HYK: Error closing file.}
+\\Beim Schließen einer HYK-Datei trat ein Fehler auf.
+
+\subsubsection{Warnungen}
+\textbf{annotation type file 'XYZ' is not readable.}
+\\Die Datei XYZ kann nicht gelesen werden.
+
+\textbf{cannot parse annotation types file.}
+\\Während der Verarbeitung der Annotationsdatei ist ein Fehler aufgetreten.
+
+\textbf{Cannot read directory.}
+\\Verzeichnis konnte nicht gelesen werden.
+
+\textbf{no official lines wst file found}
+\\Keine Datei mit amtlichen Linien gefunden.
+
+\textbf{cannot read fixations wst file directory}
+\\Das Verzeichnis mit den Fixierungen kann nicht gelesen werden.
+
+\textbf{cannot read extra longitudinal wst file directory}
+\\Das Verzeichnis mit den zusätzlichen Längsschnitten kann nicht gelesen werden.
+
+\textbf{cannot read gauges from 'XYZ'}
+\\Die Pegelgültigkeiten können nicht gelesen werden.
+
+\textbf{HYK file 'XYZ' seems to be a duplicate.}
+\\Die HYK-Datei wurde unter anderem Namen, aber mit gleichem Inhalt bereits
+gefunden.
+
+\textbf{PRF file 'XYZ' seems to be a duplicate.}
+\\Die PRF-Datei wurde unter anderem Namen, aber mit gleichem Inhalt bereits
+gefunden.
+
+\textbf{Skip invalid SedimentYield: time interval or unit null!}
+\\Eine Sedimentablagerung ist ungültig und wurde ausgelassen.
+
+\textbf{skip flow velocity model: No discharge zone specified.}
+\\Da kein Abflussbereich angegeben wurde, wurde das Fließgeschwindigkeitsmodell ausgelassen.
+
+\textbf{skip invalid waterlevel - no unit set!}
+\\Ein einheitenloser Wasserstand wurde ausgelassen.
+
+\textbf{Cannot parse time range.}
+\\Das Zeitformat wurde nicht erkannt.
+
+\textbf{skip invalid data line \#}
+\\Ungültige Datenzeile wurde ausgelassen.
+
+\textbf{Error while parsing sq relation row \#}
+\\Eine Zeile in der S(Q)-Beziehung ist ungültig.
+
+\textbf{GLT: no gauge found in line \#}
+\\In der GLT-Datei wurde ein Pegel erwartet, aber nicht gefunden.
+
+\textbf{GLT: line \# has not enough columns.}
+\\Eine Zeile in der Pegelgültigkeitsdatei hat nicht genug Spalten.
+
+\textbf{Error while parsing flow velocity values.}
+\\Invalide Datenzeile in einer Datei mit einer Fließgeschwindigkeitsmessung.
+
+\textbf{skip invalid data line: \#}
+\\Invalide Datenzeile in einer Datei mit einer Fließgeschwindigkeitsmessung.
+
+\textbf{skip invalid waterlevel line: \#}
+\\Invalide Datenzeile in einer Datei mit Wasserstandsdifferenzen.
+
+\textbf{Error while parsing value: \#}
+\\Invalide Datenzeile in einer Datei mit Wasserstandsdifferenzen.
+
+\textbf{Error while parsing station: \#}
+\\Invalide Datenzeile in einer Datei mit Wasserstandsdifferenzen.
+
+\textbf{skip invalid MainValue part: \#}
+\\Invalide Datenzeile in einer Datei mit Fließgeschwindigkeitsmodellen.
+
+\textbf{skip invalid gauge part: \#}
+\\Invalide Datenzeile in einer Datei mit Fließgeschwindigkeitsmodellen.
+
+\textbf{Error while parsing Q value: $<Q>$}
+\\Invalide Datenzeile in einer Datei mit Fließgeschwindigkeitsmodellen.
+
+\textbf{skip invalid data line: \#}
+\\Invalide Datenzeile in einer Datei mit Fließgeschwindigkeitsmodellen.
+
+\textbf{Error while parsing flow velocity values.}
+\\Invalide Datenzeile in einer Datei mit Fließgeschwindigkeitsmodellen.
+
+\textbf{Error while parsing number from data row: \#}
+\\In der eingelesenen Zeile konnte keine Zahl gefunden werden.
+
+\textbf{Unknown meta line: \#}
+\\Invalide Datenzeile in einer Datei mit Sedimentdichten.
+
+\textbf{Error while parsing numbers in: \#}
+\\Invalide Datenzeile in einer Datei mit Sedimentdichten.
+
+\textbf{skip invalid data line: \#}
+\\Invalide Datenzeile in einer Datei mit Sedimentdichten.
+
+\textbf{Error while parsing numbers in \#}
+\\Invalide Datenzeile in einer Datei mit Sedimentdichten.
+
+\textbf{STA file is empty}
+\\Stammdatendatei ist leer oder hat zu wenige Zeilen.
+
+\textbf{STA file has not enough lines}
+\\Stammdatendatei ist leer oder hat zu wenige Zeilen.
+
+\textbf{STA file is too short}
+\\Stammdatendatei ist leer oder hat zu wenige Zeilen.
+
+\textbf{First line in STA file is too short.}
+\\Die erste Zeile der Stammdaten ist zu kurz.
+
+\textbf{STA: second line is too short}
+\\Die zweite Zeile ist zu kurz.
+
+\textbf{STA: parsing of the datum of the gauge failed}
+\\Der Pegelnullpunkt (Datum) konnte nicht aus der Stammdatendatei gelesen werden.
+
+\textbf{STA: 'XYZ' is not a valid long number.}
+\\Die Pegelnummer ist invalide.
+
+\textbf{STA: Not enough columns for aeo and datum}
+\\AEO und Pegelnullpunkt können nicht ermittelt werden.
+
+\textbf{STA: cannot parse aeo or datum.}
+\\AEO oder Pegelnullpunkt sind invalide.
+
+\textbf{STA: value not parseable in line \#}
+\\Wert ist nicht als Zahl zu interpretieren.
+
+\textbf{PRF: cannot open file $<FILE>$}
+\\Die PRF-Datei kann nicht geöffnet werden.
+
+\textbf{PRF: file is empty}\\
+\textbf{PRF: First line does not look like a PRF data pattern.}\\
+\textbf{PRF: premature EOF. Expected integer in line 2}\\
+\textbf{PRF: Expected $<num>$ in line 2}\\
+\textbf{PRF: invalid integer in line 2}\\
+\textbf{PRF: premature EOF. Expected pattern for km extraction}\\
+\textbf{PRF: line 4 does not look like a PRF km extraction pattern.}\\
+\textbf{PRF: premature EOF. Expected skip row count.}\\
+\textbf{PRF: line 5 is not an positive integer.}\\
+\textbf{PRF: cannot extract km in line \#}
+\\Das PRF-Format ist komplex. Bei oben genannten Fehlern sollten weitere
+Informationen zur genaueren Analyse herangezogen werden.
+
+\textbf{cannot access WST file $FILE$}
+\\Die WST-Datei konnte nicht gefunden werden.
+
+\textbf{Found an invalid row in the AT file.}
+\\Eine Zeile in einer AT-Datei ist nicht korrekt.
+
+\textbf{AT: invalid number $XYZ$}
+\\Eine Zahl wurde erwartet aber nicht gefunden.
+
+\textbf{Try to add Q range without waterlevel!}
+\\Q-Bereich ohne Wasserstand gefunden.
+
+\textbf{Error while parsing Q range: \#}
+\\Invalider Q-Bereich.
+
+\textbf{skip invalid waterlevel line: \#}
+\\Ungültige Wasserstandslinie.
+
+\textbf{Error while parsing number values: \#}
+\\Ungültige Zahlenwerte.
+
+\textbf{ANN: not enough columns in line \#}
+\\Nicht genug Zeichenspalten in der KM-Datei.
+
+\textbf{ANN: invalid number in line \#}
+\\Ungültige Zahl.
+
+\textbf{ANN: cannot parse 'Unterkante' in line \#}
+\\Die Unterkante in einer KM-Datei konnte nicht gelesen werden.
+
+\textbf{ANN: cannot parse 'Unterkante' or 'Oberkante' in line \#}
+\\Unter- oder Oberkante liegen in einem falschen Format vor.
+
+\textbf{ANN: duplicated annotation 'XYZ' in line \#}
+\\Ein Duplikat eines Streckenfavoriten wurde gefunden.
+
+\textbf{ANN: 'XYZ' is not a directory.}
+\\Unterverzeichnis konnte nicht geöffnet werden.
+
+\textbf{ANN: cannot list directory 'XYZ'}
+\\Unterverzeichnis konnte nicht durchsucht werden.
+
+\textbf{BHP: Meta line did not match any known type: \#}
+\\Unbekannter Typ.
+
+\textbf{BHP: Error while parsing timeinterval!}
+\\Ungültiges Zeitintervall.
+
+\textbf{BHP: Error while parsing year!}
+\\Ungültige Jahresangabe.
+
+\textbf{BHP: Error while parsing sounding width!}
+\\Unbekannte Peilungsbreite.
+
+\textbf{BHP: Error while parsing range!}
+\\Bereichsangabe fehlerhaft.
+
+\textbf{MWP: Unknown meta line: \#}
+\\Meta-Informationen ungültig.
+
+\textbf{MWP: skip invalid data line: \#}
+\\Ungültige Datenzeile wurde übersprungen.
+
+\textbf{MWP: Error while parsing numbers in \#}
+\\Falsches Zahlenformat.
+
+\textbf{ANNCLASS: rule has no name}
+\\Klassifizierungsregel für Streckenfavoriten hat keinen Namen.
+
+\textbf{ANNCLASS: pattern has no 'pattern' attribute.}
+\\Klassifizierungsmuster für Streckenfavoriten hat kein Muster.
+
+\textbf{ANNCLASS: pattern has unknown type 'XYZ'}
+\\Klassifizierungsmuster für Streckenfavoriten konnte keinem Typ zugeordnet werden.
+
+\textbf{ANNCLASS: pattern 'XYZ' is invalid.}
+\\Klassifizierungsmuster für Streckenfavoriten ist ungültig.
+
+\textbf{BSP: Error while parsing data row.}
+\\Ungültige Datenzeile.
+
+\textbf{SYP: Unknown meta line: \#}
+\\Ungültige Metadatenzeile.
+
+\textbf{SYP: skip invalid data line \#}
+\\Ungültige Datenzeile wurde übersprungen.
+
+\textbf{SYP: Error while parsing numbers in \#}
+\\Ungültige Zahlenformatierung.
+
+\textbf{SYP: Unknown time interval string 'XYZ'}
+\\Falsches Datumsformat.
+
+\textbf{SYP: Error while parsing years 'XYZ'}
+\\Falsches Jahreszahlformat.
+
+\textbf{SYP: Error while parsing ranges of 'XYZ'}
+\\Bereichsangaben fehlerhaft.
+
+\textbf{SYP: Unknown grain fraction 'XYZ'}
+\\Unbekannte Kornfraktion.
+
+\textbf{WST: invalid number.}
+\\Ungültige Zahl.
+
+\textbf{WST: km $km$ ($<Zeile>$) found more than once. $->$ ignored.}
+\\Ein Kilometer ist doppelt in einer WST-Datei enthalten.
+
+\textbf{HYK: zone coordinates swapped in line \#}
+\\Fließzonenkoordinaten wurden in umgekehrter Reihenfolge angegeben.
+
+\textbf{BHS: Skip invalid file 'XYZ'}
+\\Die Inhalte der Datei sind ungültig.
+
+\textbf{ISQ: Unable to store sq relation value.}
+\\S(Q) Beziehung konnte nicht gespeichert werden.
+
+\textbf{ISQ: Cannot determine sq relation without time interval.}
+\\Einer S(Q)-Beziehung ist keine zeitliche Gültigkeit zugeordnet.
+
+\textbf{IWD: skip invalid waterlevel difference - no unit set!}
+\\Wasserstandsdifferenz hat keine Einheit.
+
+\textbf{BHE: Skip file - invalid current elevation model.}
+\\Höhenmodell ungültig.
+
+\textbf{BHE: Skip file - invalid time range.}
+\\Zeitbereich ungültig.
+
+\textbf{BHE: Skip file - invalid km range.}
+\\Kilometerbereich ungültig.
+
+
+\subsection{Hinweise zum Betrieb}
+Aufgrund des hohen Speicherverbrauchs des Importers wird empfohlen, der JVM
+mindestens 8 GiB Hauptspeicher zuzuordnen. Dies kann beim Starten des
+Java-Prozesses mittels des Parameters '-Xmx8192m' erreicht werden. Das
+Shellskript zum Starten des Importers setzt diesen Wert standardmäßig.
+Besonders speicherintensiv ist der Import der HYKs und der PRFs.
+Hier ist es unter Umständen empfehlenswert, diese in zwei oder drei
+Schritten zu importieren: zuerst die sonstigen hydrologischen Daten importieren,
+anschließend einen Import-Vorgang ausschließlich für HYKs starten und danach
+einen Import-Vorgang für PRFs starten. Siehe Kapitel \ref{configuration} für
+weitere Informationen zum Aktivieren/Deaktivieren einzelner Dateitypen beim
+Import.
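+
+Ein möglicher zweiter Durchlauf, der ausschließlich die HYK-Dateien importiert,
+könnte in \textit{contrib/run\_hydr\_morph.sh} beispielsweise so konfiguriert
+werden (Ausschnitt; alle übrigen, hier nicht aufgeführten SKIP-Variablen sind
+analog auf \textit{true} zu setzen):
+
+\begin{lstlisting}
+    IMPORTER_SKIP_GAUGES=true
+    IMPORTER_SKIP_ANNOTATIONS=true
+    IMPORTER_SKIP_WST=true
+    IMPORTER_SKIP_PRFS=true
+    IMPORTER_SKIP_HYKS=false
+\end{lstlisting}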
+
+
+\subsection{Starten des Fachdaten Importers}
+\label{start-hydr}
+Der Fachdaten-Importer wird mit Hilfe eines Shellskripts von einer Konsole
+gestartet. Dazu führen Sie folgenden Befehl aus:\\
+
+\begin{lstlisting}
+    contrib/run_hydr_morph.sh
+\end{lstlisting}
+
+Nachdem der Prompt der Konsole zurückkehrt, ist der Import abgeschlossen oder es
+ist ein Fehler aufgetreten. Weitere Informationen entnehmen Sie der Log-Datei.
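+
+Bei Verwendung der mitgelieferten Log4J-Standardkonfiguration kann der Verlauf
+des Imports parallel in einer zweiten Konsole verfolgt werden, z.B. mit
+folgendem Aufruf (Annahme: die Log-Datei liegt im aktuellen Arbeitsverzeichnis):
+
+\begin{lstlisting}
+    tail -f import.log
+\end{lstlisting}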
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/importer-manual.tex	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,204 @@
+\documentclass[12pt]{scrartcl}
+
+%----------------------------------------------
+% Load packages
+
+\usepackage{a4}
+\usepackage{times}
+\usepackage[latin1]{inputenc}
+\usepackage{fancyhdr}
+%\usepackage{german}
+%\usepackage[marvosym]{eurofont}
+%\usepackage[all, light]{draftcopy}
+%\usepackage{supertabular}
+%\usepackage{colortbl}
+%\usepackage{epsf}
+\usepackage{graphicx}
+\usepackage{lastpage}
+%\usepackage{proposal}
+\usepackage{listings}
+\usepackage[hyperindex=true, bookmarks=true, breaklinks=true,
+colorlinks=true, linkcolor=red,bookmarksopen]{hyperref}
+
+%----------------------------------------------
+% Document DATE and VERSION
+% set these values when releasing a new version
+
+\newcommand{\documentdate}{30. August 2012}
+\newcommand{\documentversion}{1.0}
+\newcommand{\documentrevision}{rev5303}
+\newcommand{\documentID}{importer-manual.tex}
+%----------------------------------------------
+
+%----------------------------------------------
+% Document TITLE
+\newcommand{\documenttitle}{FLYS: Datenimport von Fach- und Geodaten}
+
+\newcommand{\todo}{\textcolor{red}{ TODO }}
+
+
+%----------------------------------------------
+% Some parameters for layouting
+
+\paperwidth=21cm
+\hoffset=-0.54cm
+\textwidth=16cm
+
+\paperheight=29.7cm
+\voffset=-1.5cm
+\topmargin=0cm
+\headheight=1cm
+\textheight=24cm
+
+\setcounter{secnumdepth}{4}
+\setcounter{tocdepth}{4}
+
+%----------------------------------------------
+
+\begin{document}
+
+\lstset{ %
+language=sh,
+basicstyle=\ttfamily,       % the font style that is used for the code
+numbers=left,                   % where to put the line-numbers
+numberstyle=\footnotesize,      % the size of the fonts that are used for the line-numbers
+numbersep=5pt,                  % how far the line-numbers are from the code
+% backgroundcolor=\color{white},  % choose the background color. You must add \usepackage{color}
+showspaces=false,               % show spaces adding particular underscores
+showstringspaces=false,         % underline spaces within strings
+showtabs=false,                 % show tabs within strings adding particular underscores
+frame=single,                   % adds a frame around the code
+tabsize=2,                      % sets default tabsize to 2 spaces
+captionpos=b,                   % sets the caption-position to bottom
+breaklines=true,                % sets automatic line breaking
+breakatwhitespace=false,        % sets if automatic breaks should only happen at whitespace
+title=\lstname,                 % show the filename of files included with \lstinputlisting; also try caption instead of title
+escapeinside={\%*}{*)}          % if you want to add a comment within your code
+% morekeywords={*,...}            % if you want to add more keywords to the set
+}
+
+%-----------------------------------
+% HEADER/FOOTER DEFINITION
+
+% for some pages latex switches back to pagestyle plain :-(
+\fancypagestyle{plain}{%
+	\fancyhf{} % clear all header and footer fields
+	\fancyhead[LO,RE]{\footnotesize \documenttitle\\ \leftmark}
+	\fancyfoot[RO,LE]{\footnotesize Intevation GmbH} % Author
+	\fancyfoot[CO,CE]{\footnotesize \thepage/\pageref{LastPage}}
+	\fancyfoot[LO,RE]{\footnotesize \documentdate
+	\\\documentID}
+	\renewcommand{\footrulewidth}{0.4pt}
+}
+
+% and now define pagestyle fancy
+\fancyhead{} % clear all fields
+\fancyhead[LO]{\footnotesize \documenttitle\\ \leftmark}
+
+\fancyfoot{}% clear all fields
+\fancyfoot[RO]{\footnotesize Intevation GmbH} % Author
+\fancyfoot[CO]{\footnotesize \thepage/\pageref{LastPage}}
+\fancyfoot[LO]{\footnotesize \documentdate
+\\\documentID}
+
+\renewcommand{\footrulewidth}{0.4pt}
+
+%
+% END Header/Footer Definition
+%-----------------------------------
+
+%----------------------------------------------
+% MACRO DEFINITION
+%
+%   \Fig{figure}{lof text}{caption} :
+%			places 'figure' and
+%                       writes 'caption' at the bottom with leading
+%                       'Abbildung figno:'. 'lof text' is added to the list of
+%                       figures.
+%                       Example:
+%                       \Fig{\epsfxsize30mm \epsffile{x.eps}}{the x}{the x}
+%
+%   \FigNoEntry{}{} :
+%			same as above, no entry in figures list
+%
+%   \FigCaption{} :
+%			line with figure caption, setting figure
+%                       counter and figures list
+%
+%   \Tab{table}{lot text}{caption} :
+%			places 'table' and writes caption on top of the table
+%			with leading 'Tabelle tabno:'. 'lot text' is added to
+%			the list of tables.
+%****************************************************************************
+%       Figure makro for graphics continously enumerated.
+%
+
+\newcounter{FigCnt}
+\newcounter{TabCnt}
+
+\newcommand{\Fig}[3]%
+{
+        \refstepcounter{FigCnt}
+        \addcontentsline{lof}{figure}%
+                {\protect\numberline{\arabic{FigCnt}}{#2}}
+        \mbox{#1}
+
+\nopagebreak
+        {Abbildung \arabic{FigCnt}: #3}
+
+}
+
+\newcommand{\FigNoEntry}[2]%
+{
+        \refstepcounter{FigCnt}
+        \mbox{#1}
+
+\nopagebreak
+        {Abbildung \arabic{FigCnt}: #2}
+
+}
+
+\newcommand{\FigCaption}[1]%
+{
+        \refstepcounter{FigCnt}
+        \addcontentsline{lof}{figure}%
+                {\protect\numberline{\arabic{FigCnt}}{#1}}
+
+        %{Figure \thesection.\arabic{FigCnt}: #1}
+}
+
+\newcommand{\Tab}[3]%
+{
+        \refstepcounter{TabCnt}
+        \addcontentsline{lot}{figure}%
+                {\protect\numberline{\arabic{TabCnt}}{#2}}
+        {Tabelle \arabic{TabCnt}: #3}
+\nopagebreak
+        #1
+
+}
+
+\hyphenation{Intevation}
+% end macro definition
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\newcounter{schritt}
+\renewcommand{\theschritt}{\Roman{schritt}}
+%\makeatletter\renewcommand{\p@schritt}{Abschnitt~\thesubsubsection~}\makeatother
+
+%-----------------------------------
+% DOCUMENT SETTINGS
+\pagestyle{fancy}
+\setlength{\parindent}{0cm}
+\setlength{\parskip}{5pt plus 2pt minus 1pt}
+
+% Start actual content here
+\include{title}
+\newpage
+\tableofcontents
+\include{overview}
+\include{importer-hydr-morph}
+\include{importer-geodaesie}
+
+\end{document}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/overview.tex	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,104 @@
+\section{Übersicht}
+
+Diese Dokumentation beschreibt die von Intevation entwickelten Werkzeuge zum
+Importieren der hydrologischen, morphologischen und geodätischen Daten der BfG.
+Die im Folgenden beschriebenen Werkzeuge zum Importieren der fachlichen und
+geodätischen Daten sind speziell auf das Verzeichnissystem der BfG ausgerichtet.
+Dabei wird angenommen, dass sich das Verzeichnis eines Gewässers auf oberster
+Ebene in drei Unterverzeichnisse aufgliedert:
+
+\begin{itemize}
+    \item Geodaesie
+    \item Hydrologie
+    \item Morphologie
+\end{itemize}
+
+Des Weiteren beziehen sich die Befehle, die auf der Kommandozeile abgesetzt
+werden, auf einen SUSE Linux Enterprise Server Version 11. Bitte beachten Sie
+auch, dass einige der Befehle \textit{root}-Rechte benötigen.
+
+\subsection{Vorbereitungen}
+
+\subsubsection{Entpacken des Datenimporters}
+
+Damit die Software performant und korrekt ausgeführt werden kann, ist es
+erforderlich, dass sie auf demselben System installiert und ausgeführt wird,
+auf dem auch die Datenbank installiert ist. Sollten Sie das Paket nicht auf dem
+Zielsystem selbst heruntergeladen haben, sind ggf. weitere Werkzeuge notwendig.
+Falls Sie von einem Windows-System auf das Zielsystem zugreifen
+wollen, können Sie beispielsweise folgende Werkzeuge verwenden:
+
+\begin{itemize}
+\item WinSCP \\
+WinSCP ist ein Open-Source-Werkzeug zum Transferieren von Dateien zwischen zwei
+Systemen. Um das heruntergeladene Paket auf das Zielsystem zu transferieren,
+können Sie WinSCP benutzen. Für weitere Informationen und den Gebrauch von
+WinSCP lesen Sie bitte unter folgender Adresse nach:
+\href{http://winscp.net/}{http://winscp.net/}.
+
+\item Putty \\
+Putty ist ein Open-Source-Werkzeug, mit dem Sie sich von einem Windows-System
+per SSH auf das Zielsystem verbinden können. Anschließend können Sie über die
+Kommandozeile auf dem Zielsystem die Befehle, die in diesem Dokument beschrieben
+sind, ausführen. Für weitere Informationen zu Putty und dessen Gebrauch lesen
+Sie bitte unter folgender Adresse nach: \href{http://www.putty.org/}
+{http://www.putty.org/}.
+\end{itemize}
+
+Bitte beachten Sie, dass diese Werkzeuge nicht zur Installation und zum Betrieb
+der Software selbst notwendig sind!
+
+
+\subsubsection{Vorbereiten der Datenbank}
+
+Nachdem Sie das Paket nun in das Heimatverzeichnis des Nutzers auf das
+Zielsystem kopiert haben, entpacken Sie es mit folgenden Befehlen:
+
+\begin{lstlisting}
+    cd ~
+    tar xvfz flys-importer.tar.gz
+    cd flys-importer
+\end{lstlisting}
+
+Bevor die Importer verwendet werden können, ist es notwendig, dass eine leere
+Oracle-Datenbank vorhanden ist. Anschließend müssen folgende SQL-Skripte in
+diese Datenbank eingespielt werden:
+
+\begin{enumerate}
+\item oracle.sql \\
+In diesem SQL-Skript befindet sich das Schema zum Speichern der hydrologischen
+Daten.
+
+\item oracle-minfo.sql \\
+In diesem SQL-Skript befindet sich das Schema zum Speichern der morphologischen
+Daten.
+
+\item oracle-spatial.sql \\
+In diesem SQL-Skript befindet sich das Schema zum Speichern der geodätischen
+Daten.
+
+\item oracle-spatial\_idx.sql \\
+Mittels dieses SQL-Skripts werden die Indizes zum geodätischen Datenbankschema\\
+hinzugefügt.
+
+\item import-dems.sql \\
+In diesem Skript sind Befehle zum Einfügen der digitalen Geländemodelle
+enthalten. Die Dateipfade in diesem Skript sind so anzupassen, dass sie auf die
+entsprechenden Geländemodelle im Dateisystem verweisen. Es ist notwendig, die
+Pfade absolut anzugeben (siehe das Beispiel nach dieser Liste).
+
+\end{enumerate}
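+
+Die Anpassung der Pfade in \textit{import-dems.sql} kann von Hand in einem
+Texteditor erfolgen oder, wie in folgendem Beispiel skizziert, mit
+\textit{sed} (der Zielpfad ist hier nur ein Platzhalter):
+
+\begin{lstlisting}
+    sed -i 's#/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser#/absoluter/pfad/zu/Gewaesser#g' schema/import-dems.sql
+\end{lstlisting}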
+
+Zum Einspielen dieser Schemata setzen Sie folgende Befehle auf der Kommandozeile
+ab. Beachten Sie, dass \textit{sqlplus} im Pfad liegen muss und der Linux-Nutzer
+dieses Kommando ausführen können muss. Außerdem sind \textit{benutzername} und
+\textit{passwort} entsprechend Ihrem Datenbank-Zugang anzupassen.
+
+\begin{lstlisting}
+    sqlplus benutzername/passwort @schema/oracle.sql
+    sqlplus benutzername/passwort @schema/oracle-minfo.sql
+    sqlplus benutzername/passwort @schema/oracle-spatial.sql
+    sqlplus benutzername/passwort @schema/oracle-spatial_idx.sql
+    sqlplus benutzername/passwort @schema/import-dems.sql
+\end{lstlisting}
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/documentation/de/title.tex	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,52 @@
+%-----------------------------------
+% TITLE PAGE
+
+\begin{figure}[ht]
+  \begin{minipage}[b]{0.5\linewidth}
+    \centering
+     \includegraphics[scale=0.75]{figures/bfg_logo} \\
+     {\tt http://www.bafg.de}\\[4.0cm]
+  \end{minipage}
+  \begin{minipage}[b]{0.5\linewidth}
+    \centering
+    \includegraphics[width=0.75\textwidth]{figures/intevation-logo}
+     {\tt http://intevation.de/geospatial}\\[2.0cm]
+  \end{minipage}
+\end{figure}
+
+ \vspace{4cm}
+ 
+ {
+ 	\sffamily\large
+ 	Dokumentation Datenimport FLYS, BfG
+ 
+ 	\vspace{1cm}
+ 	{
+ 		\bfseries\huge
+ 		Installation, Konfiguration, Betrieb
+ 	}
+ 
+ 	\vspace{1cm}
+ 	Version \documentversion
+
+    Datum: \documentdate
+ 
+ 	Revision: \documentrevision
+ }
+ 
+ \vspace{4cm}
+ 
+ \thispagestyle{empty}
+ 
+ \vfill
+ 
+ \begin{flushleft}
+ {\bf Authors}:\\
+ Ingo Weinzierl $<$ingo.weinzierl@intevation.de$>$\\
+ Sascha Teichmann $<$sascha.teichmann@intevation.de$>$\\
+ {\bf Intevation GmbH},\\
+ Neuer Graben 17, 49074 Osnabrück, Germany\\
+ Tel: +49 541 33 50 83 - 0 \\
+ \url{http://www.intevation.net/geospatial}
+ 
+ \end{flushleft}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/import-dems.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,165 @@
+-- SAAR
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    0,
+    7.9,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0000-0079_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    8.0,
+    20.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0080-0204_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    20.5,
+    31.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0205-0314_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    31.5,
+    54.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0315-0541_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    54.2,
+    65.5,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0542-0655_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    65.6,
+    82.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0656-0828_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    82.9,
+    93.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0829-0931_erweitert.txt'
+);
+
+
+-- ELBE
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    0.0,
+    101.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_00000_10110.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    99.2,
+    203.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_09920_20300.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    202.0,
+    299.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_20200_29980.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    298.1,
+    401.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_29810_40100.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    400.0,
+    500.9,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_40000_50090.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    500.1,
+    583.3,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_50010_58330.grd'
+);
+
+
+-- MOSEL
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    0.0,
+    5.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0000-0580.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    5.8,
+    15.3,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0058-0153.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    15.3,
+    41.6,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0153-0416.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    41.4,
+    101.2,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012O.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    41.4,
+    101.21,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012W.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    101.2,
+    148.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1012-1488.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    148.8,
+    166.6,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1488-1666.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    166.6,
+    196.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1666-1960.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    196.0,
+    204.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1960-2044.XYZ'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    204.4,
+    218.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/2044-2184.XYZ'
+);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop-minfo.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,102 @@
+SET AUTOCOMMIT ON;
+
+ALTER TABLE elevation_model DROP CONSTRAINT fk_unit;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_bed_single_river_id;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_type;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_location_system;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_cur_elevation_model;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_old_elevation_model;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_range;
+ALTER TABLE bed_height_single_values DROP CONSTRAINT fk_bed_single_values_parent;
+ALTER TABLE bed_height_epoch_values DROP CONSTRAINT fk_bed_epoch_values_parent;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_cur_elevation_model;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_old_elevation_model;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_range;
+ALTER TABLE depths DROP CONSTRAINT fk_depths_unit_id;
+ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_depth_id;
+ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_unit_id;
+ALTER TABLE sediment_density_values DROP CONSTRAINT fk_sdv_sediment_density_id;
+ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_river_id;
+ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_unit_id;
+ALTER TABLE morphologic_width_values DROP CONSTRAINT fk_mwv_morphologic_width_id;
+ALTER TABLE flow_velocity_model_values DROP CONSTRAINT fk_fvv_flow_velocity_model_id;
+ALTER TABLE flow_velocity_model DROP CONSTRAINT fk_fvm_river_id;
+ALTER TABLE flow_velocity_model DROP CONSTRAINT fk_fvm_discharge_zone_id;
+ALTER TABLE discharge_zone DROP CONSTRAINT fk_dz_river_id;
+ALTER TABLE flow_velocity_measurements DROP CONSTRAINT fk_fvm_rivers_id;
+ALTER TABLE flow_velocity_measure_values DROP CONSTRAINT fk_fvmv_measurements_id;
+ALTER TABLE grain_fraction DROP CONSTRAINT fk_gf_unit_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_grain_fraction_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_unit_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_time_interval_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_river_id;
+ALTER TABLE sediment_yield_values DROP CONSTRAINT fk_syv_sediment_yield_id;
+ALTER TABLE waterlevel DROP CONSTRAINT fk_w_river_id;
+ALTER TABLE waterlevel DROP CONSTRAINT fk_w_unit_id;
+ALTER TABLE waterlevel_q_range DROP CONSTRAINT fk_wqr_waterlevel_id;
+ALTER TABLE waterlevel_values DROP CONSTRAINT fk_wv_waterlevel_q_range_id;
+ALTER TABLE waterlevel_difference DROP CONSTRAINT fk_wd_river_id;
+ALTER TABLE waterlevel_difference DROP CONSTRAINT fk_wd_unit_id;
+ALTER TABLE waterlevel_difference_column DROP CONSTRAINT fk_wdc_difference_id;
+ALTER TABLE waterlevel_difference_values DROP CONSTRAINT fk_wdv_column_id;
+ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_tinterval_id;
+ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_river_id;
+ALTER TABLE sq_relation_value DROP CONSTRAINT fk_sqr_id;
+
+DROP TABLE bed_height_type;
+DROP TABLE location_system;
+DROP TABLE elevation_model;
+DROP TABLE bed_height_single;
+DROP TABLE bed_height_single_values;
+DROP TABLE bed_height_epoch_values;
+DROP TABLE bed_height_epoch;
+DROP TABLE depths;
+DROP TABLE sediment_density;
+DROP TABLE sediment_density_values;
+DROP TABLE morphologic_width;
+DROP TABLE morphologic_width_values;
+DROP TABLE discharge_zone;
+DROP TABLE flow_velocity_model;
+DROP TABLE flow_velocity_model_values;
+DROP TABLE flow_velocity_measurements;
+DROP TABLE flow_velocity_measure_values;
+DROP TABLE grain_fraction;
+DROP TABLE sediment_yield;
+DROP TABLE sediment_yield_values;
+DROP TABLE waterlevel;
+DROP TABLE waterlevel_q_range;
+DROP TABLE waterlevel_values;
+DROP TABLE waterlevel_difference;
+DROP TABLE waterlevel_difference_column;
+DROP TABLE waterlevel_difference_values;
+DROP TABLE sq_relation_value;
+DROP TABLE sq_relation;
+
+DROP SEQUENCE BED_HEIGHT_TYPE_SEQ;
+DROP SEQUENCE LOCATION_SYSTEM_SEQ;
+DROP SEQUENCE ELEVATION_MODEL_SEQ;
+DROP SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+DROP SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+DROP SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+DROP SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+DROP SEQUENCE DEPTHS_ID_SEQ;
+DROP SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+DROP SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+DROP SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+DROP SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+DROP SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+DROP SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+DROP SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+DROP SEQUENCE FV_MEASURE_ID_SEQ;
+DROP SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+DROP SEQUENCE GRAIN_FRACTION_ID_SEQ;
+DROP SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+DROP SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+DROP SEQUENCE SQ_RELATION_ID_SEQ;
+DROP SEQUENCE SQ_RELATION_VALUES_ID_SEQ;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop-spatial.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,68 @@
+DROP TRIGGER river_axes_trigger;
+DROP TABLE river_axes;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'RIVER_AXES';
+DROP SEQUENCE RIVER_AXES_ID_SEQ;
+
+DROP TRIGGER river_axes_km_trigger;
+DROP TABLE river_axes_km;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'RIVER_AXES_KM';
+DROP SEQUENCE RIVER_AXES_KM_ID_SEQ;
+
+DROP TRIGGER cross_section_tracks_trigger;
+DROP TABLE cross_section_tracks;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CROSS_SECTION_TRACKS';
+DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+
+DROP TRIGGER lines_trigger;
+DROP TABLE lines;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'LINES';
+DROP SEQUENCE LINES_ID_SEQ;
+
+DROP TRIGGER buildings_trigger;
+DROP TABLE buildings;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'BUILDINGS';
+DROP SEQUENCE BUILDINGS_ID_SEQ;
+
+DROP TRIGGER fixpoints_trigger;
+DROP TABLE fixpoints;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FIXPOINTS';
+DROP SEQUENCE FIXPOINTS_ID_SEQ;
+
+DROP TRIGGER floodplain_trigger;
+DROP TABLE floodplain;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FLOODPLAIN';
+DROP SEQUENCE FLOODPLAIN_ID_SEQ;
+
+DROP TRIGGER dem_trigger;
+DROP TABLE dem;
+DROP SEQUENCE DEM_ID_SEQ;
+
+DROP TRIGGER catchment_trigger;
+DROP TABLE catchment;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CATCHMENT';
+DROP SEQUENCE CATCHMENT_ID_SEQ;
+
+DROP TRIGGER hws_trigger;
+DROP TABLE hws;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS';
+DROP SEQUENCE HWS_ID_SEQ;
+
+DROP TRIGGER floodmaps_trigger;
+DROP TABLE floodmaps;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FLOODMAPS';
+DROP SEQUENCE FLOODMAPS_ID_SEQ;
+
+DROP TRIGGER hydr_boundaries_trigger;
+DROP TABLE hydr_boundaries;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HYDR_BOUNDARIES';
+DROP SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
+
+DROP TRIGGER hydr_boundaries_poly_trigger;
+DROP TABLE hydr_boundaries_poly;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HYDR_BOUNDARIES_POLY';
+DROP SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
+
+DROP TRIGGER gauge_location_trigger;
+DROP TABLE gauge_location;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'GAUGE_LOCATION';
+DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,90 @@
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsRanges;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsEdges;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsPositions;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsAttributes;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsTypes;
+ALTER TABLE cross_section_lines DROP CONSTRAINT cQPSLinesCrossSections;
+ALTER TABLE cross_section_points DROP CONSTRAINT cQPSPointsCrossSectionLines;
+ALTER TABLE cross_sections DROP CONSTRAINT cCrossSectionsRivers;
+ALTER TABLE cross_sections DROP CONSTRAINT cCrossSectionsTimeIntervals;
+ALTER TABLE discharge_tables DROP CONSTRAINT cDischargeTablesTime_intervals;
+ALTER TABLE discharge_tables DROP CONSTRAINT cDischargeTablesGauges;
+ALTER TABLE gauges DROP CONSTRAINT cGaugesRivers;
+ALTER TABLE gauges DROP CONSTRAINT cGaugesRanges;
+ALTER TABLE hyk_entries DROP CONSTRAINT cHykEntriesHyks;
+ALTER TABLE hyk_flow_zones DROP CONSTRAINT cHykFlowZonesHykFormations;
+ALTER TABLE hyk_flow_zones DROP CONSTRAINT cHykFlowZonesHykFlowZoneTypes;
+ALTER TABLE hyks DROP CONSTRAINT cHyksRivers;
+ALTER TABLE hyk_formations DROP CONSTRAINT cHykFormationsHykEntries;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesTimeIntervals;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesGauges;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesNamedMainValues;
+ALTER TABLE named_main_values DROP CONSTRAINT cNamedMainValuesMainValueTypes;
+ALTER TABLE ranges DROP CONSTRAINT cRangesRivers;
+ALTER TABLE rivers DROP CONSTRAINT cRiversUnits;
+ALTER TABLE wst_column_q_ranges DROP CONSTRAINT cWstColumnQRangesWstColums;
+ALTER TABLE wst_column_q_ranges DROP CONSTRAINT cWstColumnQRangesWstQRanges;
+ALTER TABLE wst_column_values DROP CONSTRAINT cWstColumnValuesWstColumns;
+ALTER TABLE wst_columns DROP CONSTRAINT cWstColumnsTime_intervals;
+ALTER TABLE wst_columns DROP CONSTRAINT cWstColumnsWsts;
+ALTER TABLE wst_q_ranges DROP CONSTRAINT cWstQRangesRanges;
+ALTER TABLE wsts DROP CONSTRAINT cWstsRivers;
+DROP TABLE annotation_types;
+DROP TABLE annotations;
+DROP TABLE attributes;
+DROP TABLE cross_section_lines;
+DROP TABLE cross_section_points;
+DROP TABLE cross_sections;
+DROP TABLE discharge_table_values;
+DROP TABLE discharge_tables;
+DROP TABLE edges;
+DROP TABLE gauges;
+DROP TABLE hyk_entries;
+DROP TABLE hyk_flow_zone_types;
+DROP TABLE hyk_flow_zones;
+DROP TABLE hyk_formations;
+DROP TABLE hyks;
+DROP TABLE main_value_types;
+DROP TABLE main_values;
+DROP TABLE named_main_values;
+DROP TABLE positions;
+DROP TABLE ranges;
+DROP TABLE rivers;
+DROP TABLE time_intervals;
+DROP TABLE units;
+DROP TABLE wst_column_q_ranges;
+DROP TABLE wst_column_values;
+DROP TABLE wst_columns;
+DROP TABLE wst_q_ranges;
+DROP TABLE wsts;
+DROP SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+DROP SEQUENCE ANNOTATIONS_ID_SEQ;
+DROP SEQUENCE ATTRIBUTES_ID_SEQ;
+DROP SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+DROP SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+DROP SEQUENCE CROSS_SECTIONS_ID_SEQ;
+DROP SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+DROP SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+DROP SEQUENCE EDGES_ID_SEQ;
+DROP SEQUENCE GAUGES_ID_SEQ;
+DROP SEQUENCE HYK_ENTRIES_ID_SEQ;
+DROP SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+DROP SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+DROP SEQUENCE HYK_FORMATIONS_ID_SEQ;
+DROP SEQUENCE HYKS_ID_SEQ;
+DROP SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+DROP SEQUENCE MAIN_VALUES_ID_SEQ;
+DROP SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+DROP SEQUENCE POSITIONS_ID_SEQ;
+DROP SEQUENCE RANGES_ID_SEQ;
+DROP SEQUENCE RIVERS_ID_SEQ;
+DROP SEQUENCE TIME_INTERVALS_ID_SEQ;
+DROP SEQUENCE UNITS_ID_SEQ;
+DROP SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+DROP SEQUENCE WST_COLUMNS_ID_SEQ;
+DROP SEQUENCE WST_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WSTS_ID_SEQ;
+DROP VIEW wst_value_table;
+DROP VIEW wst_w_values ;
+DROP VIEW wst_q_values;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-minfo.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,381 @@
+SET AUTOCOMMIT ON;
+
+CREATE SEQUENCE LOCATION_SYSTEM_SEQ;
+
+CREATE TABLE location_system (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+CREATE SEQUENCE ELEVATION_MODEL_SEQ;
+
+CREATE TABLE elevation_model (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
+
+CREATE TABLE bed_height_type (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(16)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+
+CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+
+CREATE TABLE bed_height_single (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    year                    NUMBER(38,0) NOT NULL,
+    sounding_width          NUMBER(38,0) NOT NULL,
+    type_id                 NUMBER(38,0) NOT NULL,
+    location_system_id      NUMBER(38,0) NOT NULL,
+    cur_elevation_model_id  NUMBER(38,0) NOT NULL,
+    old_elevation_model_id  NUMBER(38,0),
+    range_id                NUMBER(38,0) NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id),
+    CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id),
+    CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+
+CREATE TABLE bed_height_epoch (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    time_interval_id        NUMBER(38,0) NOT NULL,
+    -- sounding_width          NUMBER(38,0) NOT NULL,
+    -- type_id                 NUMBER(38,0) NOT NULL,
+    cur_elevation_model_id  NUMBER(38,0) NOT NULL,
+    old_elevation_model_id  NUMBER(38,0),
+    range_id                NUMBER(38,0) NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id),
+    CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_single_values (
+    id                      NUMBER(38,0) NOT NULL,
+    bed_height_single_id    NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,2) NOT NULL,
+    height                  NUMBER(38,2),
+    uncertainty             NUMBER(38,2),
+    data_gap                NUMBER(38,2) NOT NULL,
+    sounding_width          NUMBER(38,2) NOT NULL,
+    width                   NUMBER(38,2) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id)
+);
+
+
+CREATE SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_epoch_values (
+    id                      NUMBER(38,0) NOT NULL,
+    bed_height_epoch_id     NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,2) NOT NULL,
+    height                  NUMBER(38,2),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id)
+);
+
+
+CREATE SEQUENCE DEPTHS_ID_SEQ;
+
+CREATE TABLE depths (
+    id      NUMBER(38,0) NOT NULL,
+    lower   NUMBER(38,2) NOT NULL,
+    upper   NUMBER(38,2) NOT NULL,
+    unit_id NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+
+CREATE TABLE sediment_density (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    depth_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
+    CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_density_values (
+    id                  NUMBER(38,0) NOT NULL,
+    sediment_density_id NUMBER(38,0) NOT NULL,
+    station             NUMBER(38,2) NOT NULL,
+    density             NUMBER(38,2) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id)
+);
+
+
+CREATE SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+
+CREATE TABLE morphologic_width (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+
+CREATE TABLE morphologic_width_values (
+    id                      NUMBER(38,0) NOT NULL,
+    morphologic_width_id    NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    width                   NUMBER(38,3) NOT NULL,
+    description             VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id)
+);
+
+
+CREATE SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+
+CREATE TABLE discharge_zone (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    gauge_name              VARCHAR(64)  NOT NULL, -- this is not very proper, but there are gauges with no db instance
+    value                   NUMBER(38,3) NOT NULL,
+    lower_discharge         VARCHAR(16)  NOT NULL,
+    upper_discharge         VARCHAR(16),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+
+CREATE TABLE flow_velocity_model (
+    id                  NUMBER(38,0) NOT NULL,
+    river_id            NUMBER(38,0) NOT NULL,
+    discharge_zone_id   NUMBER(38,0) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_model_values (
+    id                      NUMBER(38,0) NOT NULL,
+    flow_velocity_model_id  NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    q                       NUMBER(38,3) NOT NULL,
+    total_channel           NUMBER(38,3) NOT NULL,
+    main_channel            NUMBER(38,3) NOT NULL,
+    shear_stress            NUMBER(38,3) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id)
+);
+
+
+
+CREATE SEQUENCE FV_MEASURE_ID_SEQ;
+
+CREATE TABLE flow_velocity_measurements (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_measure_values (
+    id              NUMBER(38,0) NOT NULL,
+    measurements_id NUMBER(38,0) NOT NULL,
+    station         NUMBER(38,3) NOT NULL,
+    datetime        TIMESTAMP,
+    w               NUMBER(38,3) NOT NULL,
+    q               NUMBER(38,3) NOT NULL,
+    v               NUMBER(38,3) NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id)
+);
+
+
+CREATE SEQUENCE GRAIN_FRACTION_ID_SEQ;
+
+CREATE TABLE grain_fraction (
+    id      NUMBER(38,0)   NOT NULL,
+    name    VARCHAR(64)    NOT NULL,
+    lower   NUMBER(38,3),
+    upper   NUMBER(38,3),
+    unit_id NUMBER(38,0),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_gf_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+
+CREATE TABLE sediment_yield (
+    id                  NUMBER(38,0) NOT NULL,
+    river_id            NUMBER(38,0) NOT NULL,
+    grain_fraction_id   NUMBER(38,0),
+    unit_id             NUMBER(38,0) NOT NULL,
+    time_interval_id    NUMBER(38,0) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
+    CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id),
+    CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_yield_values (
+    id                  NUMBER(38,0) NOT NULL,
+    sediment_yield_id   NUMBER(38,0) NOT NULL,
+    station             NUMBER(38,3) NOT NULL,
+    value               NUMBER(38,3) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_ID_SEQ;
+
+CREATE TABLE waterlevel (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+
+CREATE TABLE waterlevel_q_range (
+    id              NUMBER(38,0) NOT NULL,
+    waterlevel_id   NUMBER(38,0) NOT NULL,
+    q               NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_values (
+    id                      NUMBER(38,0) NOT NULL,
+    waterlevel_q_range_id   NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    w                       NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+
+CREATE TABLE waterlevel_difference (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
+    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_column (
+    id              NUMBER(38,0) NOT NULL,
+    difference_id   NUMBER(38,0) NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_values (
+    id          NUMBER(38,0) NOT NULL,
+    column_id   NUMBER(38,0) NOT NULL,
+    station     NUMBER(38,3) NOT NULL,
+    value       NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+);
+
+
+CREATE SEQUENCE SQ_RELATION_ID_SEQ;
+
+CREATE TABLE sq_relation (
+    id               NUMBER(38,0) NOT NULL,
+    river_id         NUMBER(38,0) NOT NULL,
+    time_interval_id NUMBER(38,0) NOT NULL,
+    description      VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sqr_tinterval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SQ_RELATION_VALUES_ID_SEQ;
+
+CREATE TABLE sq_relation_value (
+    id             NUMBER(38,0) NOT NULL,
+    sq_relation_id NUMBER(38,0) NOT NULL,
+    parameter      VARCHAR(16)  NOT NULL,
+    fraction       VARCHAR(32)  NOT NULL,
+    function       VARCHAR(32)  NOT NULL,
+    km             NUMBER(38,3) NOT NULL,
+    a              NUMBER(38,3) NOT NULL,
+    b              NUMBER(38,3) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id)
+);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-spatial.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,289 @@
+-- Geodaesie/Flussachse+km/achse
+CREATE SEQUENCE RIVER_AXES_ID_SEQ;
+CREATE TABLE river_axes(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    kind     NUMBER(38) DEFAULT 0 NOT NULL,
+    name     VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('river_axes', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER river_axes_trigger BEFORE INSERT ON river_axes FOR each ROW
+    BEGIN
+        SELECT RIVER_AXES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+-- Geodaesie/Flussachse+km/km.shp
+CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ;
+CREATE TABLE river_axes_km(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    km NUMBER(6,3),
+    name     VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('river_axes_km', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER river_axes_km_trigger BEFORE INSERT ON river_axes_km FOR each ROW
+    BEGIN
+        SELECT river_axes_km_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
+
+
+--Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+CREATE TABLE cross_section_tracks (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    km       NUMBER(38,12) NOT NULL,
+    z        NUMBER(38,12) DEFAULT 0 NOT NULL,
+    name     VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('cross_section_tracks', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER cross_section_tracks_trigger BEFORE INSERT ON cross_section_tracks FOR each ROW
+    BEGIN
+        SELECT CROSS_SECTION_TRACKS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+-- TODO: Test me. Fix importer script. Fix the oracle-spatial_idx.sql script.
+-- Geodaesie/Linien/rohre-und-speeren
+CREATE SEQUENCE LINES_ID_SEQ;
+CREATE TABLE lines (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    kind     VARCHAR2(16) NOT NULL,
+    z        NUMBER(38,12) DEFAULT 0,
+    name     VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER lines_trigger BEFORE INSERT ON lines FOR each ROW
+    BEGIN
+        SELECT LINES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+-- NOTE: Should lines be 3D?
+-- TODO: Test index.
+--CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+-- 'kind':
+-- 0: ROHR1
+-- 1: DAMM
+
+
+-- Geodaesie/Bauwerke/Wehre.shp
+CREATE SEQUENCE BUILDINGS_ID_SEQ;
+CREATE TABLE buildings(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR2(255),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('buildings', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER buildings_trigger BEFORE INSERT ON buildings FOR each ROW
+    BEGIN
+        SELECT BUILDINGS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+-- Geodaesie/Festpunkte/Festpunkte.shp
+CREATE SEQUENCE FIXPOINTS_ID_SEQ;
+CREATE TABLE fixpoints (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    x NUMBER(38,11),
+    y NUMBER(38,11),
+    km NUMBER(38,11) NOT NULL,
+    HPGP VARCHAR2(255),
+    name VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('fixpoints', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER fixpoints_trigger BEFORE INSERT ON fixpoints FOR each ROW
+    BEGIN
+        SELECT FIXPOINTS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+
+
+-- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
+CREATE TABLE floodplain(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name     VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('floodplain', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER floodplain_trigger BEFORE INSERT ON floodplain FOR each ROW
+    BEGIN
+        SELECT FLOODPLAIN_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
+
+
+-- TODO: Test me. Fix importer script.
+-- NOTE: It's not a spatial schema!
+-- Geodaesie/Hoehenmodelle/*
+CREATE SEQUENCE DEM_ID_SEQ;
+CREATE TABLE dem (
+    ID NUMBER PRIMARY KEY NOT NULL,
+    river_id NUMBER(38),
+    -- XXX Should we use the ranges table instead?
+    lower    NUMBER(19,5),
+    upper    NUMBER(19,5),
+    path     VARCHAR(256),
+    UNIQUE (river_id, lower, upper)
+);
+CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW
+    BEGIN
+        SELECT DEM_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+
+
+-- Hydrologie/Einzugsgebiete/EZG.shp
+CREATE SEQUENCE CATCHMENT_ID_SEQ;
+CREATE TABLE catchment(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    area NUMBER(19,5),
+    name VARCHAR2(255),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('CATCHMENT', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+
+CREATE TRIGGER catchment_trigger BEFORE INSERT ON catchment FOR each ROW
+    BEGIN
+        SELECT CATCHMENT_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=polygon');
+
+--Hydrologie/HW-Schutzanlagen/hws.shp
+CREATE SEQUENCE HWS_ID_SEQ;
+CREATE TABLE hws(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    hws_facility VARCHAR2(255),
+    type VARCHAR2(255),
+    name VARCHAR(64),
+    path     VARCHAR(256),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hws_trigger BEFORE INSERT ON hws FOR each ROW
+    BEGIN
+        SELECT HWS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+--Hydrologie/UeSG
+CREATE SEQUENCE FLOODMAPS_ID_SEQ;
+CREATE TABLE floodmaps (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    diff NUMBER(19,5),
+    count NUMBER(38),
+    area NUMBER(19,5),
+    perimeter NUMBER(19,5),
+    path     VARCHAR(256),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('floodmaps', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER floodmaps_trigger BEFORE INSERT ON floodmaps FOR each ROW
+    BEGIN
+        SELECT FLOODMAPS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+
+
+--Hydrologie/Hydr.Grenzen/Linien
+CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
+CREATE TABLE hydr_boundaries (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    path     VARCHAR(256),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hydr_boundaries', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hydr_boundaries_trigger BEFORE INSERT ON hydr_boundaries FOR each ROW
+    BEGIN
+        SELECT HYDR_BOUNDARIES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
+CREATE TABLE hydr_boundaries_poly (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    path     VARCHAR(256),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hydr_boundaries_poly', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hydr_boundaries_poly_trigger BEFORE INSERT ON hydr_boundaries_poly FOR each ROW
+    BEGIN
+        SELECT HYDR_BOUNDARIES_POLY_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+
+
+-- Hydrologie/Streckendaten/
+CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ;
+CREATE TABLE gauge_location (
+    OGR_FID     NUMBER(38),
+    GEOM        MDSYS.SDO_GEOMETRY,
+    river_id    NUMBER(38),
+    name        VARCHAR(64),
+    path     VARCHAR(256),
+    id          NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('gauge_location', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER gauge_location_trigger BEFORE INSERT ON gauge_location FOR EACH ROW
+    BEGIN
+        SELECT GAUGE_LOCATION_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-spatial_idx.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,9 @@
+CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=multipolygon');
+CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
+CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
+CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,474 @@
+-- ANNOTATION_TYPES
+CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+
+CREATE TABLE annotation_types (
+    id              NUMBER(38,0) NOT NULL, 
+    name            VARCHAR2(255),
+    PRIMARY KEY     (id)
+);
+
+
+-- ANNOTATIONS
+CREATE SEQUENCE ANNOTATIONS_ID_SEQ;
+
+CREATE TABLE annotations (
+    id              NUMBER(38,0) NOT NULL, 
+    attribute_id    NUMBER(38,0),
+    edge_id         NUMBER(38,0),
+    position_id     NUMBER(38,0),
+    range_id        NUMBER(38,0),
+    type_id         NUMBER(38,0),
+    PRIMARY KEY     (id)
+);
+
+
+-- ATTRIBUTES 
+CREATE SEQUENCE ATTRIBUTES_ID_SEQ;
+
+CREATE TABLE attributes (
+    id              NUMBER(38,0) NOT NULL, 
+    value           VARCHAR2(255), 
+    primary key     (id)
+);
+
+
+-- CROSS_SECTION_LINES
+CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+CREATE TABLE cross_section_lines (
+    id                  NUMBER(38,0) NOT NULL,
+    km                  NUMBER(38,2),
+    cross_section_id    NUMBER(38,0), 
+    PRIMARY KEY         (id)
+);
+
+
+-- CROSS_SECTION_POINTS
+CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+
+CREATE TABLE cross_section_points (
+    id                      NUMBER(38,0) NOT NULL,
+    col_pos                 NUMBER(38,0),
+    x                       NUMBER(38,2),
+    y                       NUMBER(38,2),
+    cross_section_line_id   NUMBER(38,0),
+    PRIMARY KEY             (id)
+);
+
+
+-- CROSS_SECTIONS
+CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+
+CREATE TABLE cross_sections (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    river_id            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+-- Indices for faster access of the points
+CREATE INDEX cross_section_lines_km_idx
+    ON cross_section_lines(km);
+CREATE INDEX cross_section_points_line_idx
+    ON cross_section_points(cross_section_line_id);
+
+-- DISCHARGE_TABLE_VALUES
+CREATE SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+
+CREATE TABLE discharge_table_values (
+    id                  NUMBER(38,0) NOT NULL,
+    q                   NUMBER(38,2),
+    w                   NUMBER(38,2),
+    table_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- DISCHARGE_TABLES
+CREATE SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+
+CREATE TABLE discharge_tables (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    kind                NUMBER(38,0),
+    gauge_id            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- EDGES
+CREATE SEQUENCE EDGES_ID_SEQ;
+
+CREATE TABLE edges (
+    id                  NUMBER(38,0) NOT NULL,
+    bottom              NUMBER(38,2),
+    top                 NUMBER(38,2),
+    PRIMARY KEY         (id)
+);
+
+
+-- GAUGES
+CREATE SEQUENCE GAUGES_ID_SEQ;
+
+CREATE TABLE gauges (
+    id                  NUMBER(38,0) NOT NULL,
+    aeo                 NUMBER(38,2),
+    datum               NUMBER(38,2), 
+    name                VARCHAR2(255),
+    station             NUMBER(38,2),
+    official_number     NUMBER(38,0),
+    range_id            NUMBER(38,0),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_ENTRIES
+CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+
+CREATE TABLE hyk_entries (
+    id                  NUMBER(38,0) NOT NULL,
+    km                  NUMBER(38,2),
+    measure             TIMESTAMP,
+    hyk_id              NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_FLOW_ZONE_TYPES
+CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zone_types (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_FLOW_ZONES
+CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zones (
+    id                  NUMBER(38,0) NOT NULL,
+    a                   NUMBER(38,2),
+    b                   NUMBER(38,2),
+    formation_id        NUMBER(38,0),
+    type_id             NUMBER(38,0),
+    primary key         (id)
+);
+
+
+-- HYK_FORMATIONS
+CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+
+CREATE TABLE hyk_formations (
+    id                  NUMBER(38,0) NOT NULL,
+    bottom              NUMBER(38,2),
+    distance_hf         NUMBER(38,2),
+    distance_vl         NUMBER(38,2),
+    distance_vr         NUMBER(38,2),
+    formation_num       NUMBER(38,0),
+    top                 NUMBER(38,2),
+    hyk_entry_id        NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYKS
+CREATE SEQUENCE HYKS_ID_SEQ;
+
+CREATE TABLE hyks (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    river_id            NUMBER(38,0),
+    primary key         (id)
+);
+
+
+-- MAIN_VALUE_TYPES
+CREATE SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+
+CREATE TABLE main_value_types (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- MAIN_VALUES
+CREATE SEQUENCE MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE main_values (
+    id                  NUMBER(38,0) NOT NULL,
+    value               NUMBER(38,2),
+    gauge_id            NUMBER(38,0),
+    named_value_id      NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- NAMED_MAIN_VALUES
+CREATE SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE named_main_values (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    type_id             NUMBER(38,0),
+    PRIMARY KEY (id)
+);
+
+
+-- POSITIONS
+CREATE SEQUENCE POSITIONS_ID_SEQ;
+
+CREATE TABLE positions (
+    id                  NUMBER(38,0) NOT NULL,
+    value               VARCHAR2(255 char),
+    PRIMARY KEY         (id)
+);
+
+
+--- RANGES
+CREATE SEQUENCE RANGES_ID_SEQ;
+
+CREATE TABLE ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    a                   NUMBER(38,10),
+    b                   NUMBER(38,10),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- RIVERS
+CREATE SEQUENCE RIVERS_ID_SEQ;
+
+CREATE TABLE rivers (
+    id                  NUMBER(38,0) NOT NULL,
+    km_up               NUMBER(38,0),
+    name                VARCHAR2(255),
+    wst_unit_id         NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- TIME_INTERVALS
+CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
+
+CREATE TABLE time_intervals (
+    id                  NUMBER(38,0) NOT NULL, 
+    start_time          TIMESTAMP,
+    stop_time           TIMESTAMP,
+    PRIMARY KEY         (id)
+);
+
+
+--- UNITS
+CREATE SEQUENCE UNITS_ID_SEQ;
+
+CREATE TABLE units (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMN_Q_RANGES
+CREATE SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_column_q_ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    wst_column_id       NUMBER(38,0),
+    wst_q_range_id      NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMN_VALUES
+CREATE SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+
+CREATE TABLE wst_column_values (
+    id                  NUMBER(38,0) NOT NULL,
+    position            NUMBER(38,5),
+    w                   NUMBER(38,5),
+    wst_column_id       NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMNS
+CREATE SEQUENCE WST_COLUMNS_ID_SEQ;
+
+CREATE TABLE wst_columns (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    name                VARCHAR2(255),
+    position            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    wst_id              NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_Q_RANGES
+CREATE SEQUENCE WST_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_q_ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    q                   NUMBER(38,5),
+    range_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WSTS
+CREATE SEQUENCE WSTS_ID_SEQ;
+
+CREATE TABLE wsts (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    kind                NUMBER(38,0),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- ADD CONSTRAINTs
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsEdges FOREIGN KEY (edge_id) REFERENCES edges;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsPositions FOREIGN KEY (position_id) REFERENCES positions;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsTypes FOREIGN KEY (type_id) REFERENCES annotation_types;
+ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections;
+ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines;
+ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables foreign key (table_id) REFERENCES discharge_tables;
+ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks;
+ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations;
+ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFlowZoneTypes FOREIGN KEY (type_id) REFERENCES hyk_flow_zone_types;
+ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values;
+ALTER TABLE named_main_values ADD CONSTRAINT cNamedMainValuesMainValueTypes FOREIGN KEY (type_id) REFERENCES main_value_types;
+ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE rivers ADD CONSTRAINT cRiversUnits FOREIGN KEY (wst_unit_id) REFERENCES units;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges;
+ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
+ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts;
+ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers;
+
+-- VIEWS
+
+CREATE VIEW wst_value_table AS
+    SELECT wcv.position AS position,
+           w,
+           (SELECT q
+            FROM   wst_column_q_ranges wcqr
+                   JOIN wst_q_ranges wqr
+                     ON wcqr.wst_q_range_id = wqr.id
+                   JOIN ranges r
+                     ON r.id = wqr.range_id
+            WHERE  wcqr.wst_column_id = wc.id
+                   AND wcv.position BETWEEN r.a AND r.b) AS q,
+           wc.position                                   AS column_pos,
+           w.id                                          AS wst_id
+    FROM   wst_column_values wcv
+           JOIN wst_columns wc
+             ON wcv.wst_column_id = wc.id
+           JOIN wsts w
+             ON wc.wst_id = w.id
+    ORDER  BY wcv.position ASC,
+          wc.position DESC;
+
+-- view to select the w values of a WST
+CREATE VIEW wst_w_values  AS
+    SELECT wcv.position   AS km, 
+           wcv.w          AS w,  
+           wc.position    AS column_pos, 
+           w.id           AS wst_id
+        FROM wst_column_values wcv
+        JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+        JOIN wsts w         ON wc.wst_id = w.id
+    ORDER BY wcv.position, wc.position;
+
+-- view to select the q values of a WST
+CREATE VIEW wst_q_values AS
+    SELECT wc.position AS column_pos,
+           wqr.q       AS q, 
+           r.a         AS a, 
+           r.b         AS b,
+           wc.wst_id   AS wst_id
+    FROM wst_column_q_ranges wcqr
+    JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+    JOIN ranges r         ON wqr.range_id        = r.id
+    JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+    ORDER BY wc.position, wcqr.wst_column_id, r.a;
+
+-- Views to make the 'Amtliche Linien' (official lines) easier to access.
+
+CREATE VIEW official_lines
+AS
+  SELECT w.river_id AS river_id,
+         w.id       AS wst_id,
+         wc.id      AS wst_column_id,
+         wc.name    AS name,
+         wc.position AS wst_column_pos
+  FROM   wsts w
+         JOIN wst_columns wc
+           ON wc.wst_id = w.id
+  WHERE  w.kind = 3;
+
+CREATE VIEW q_main_values
+AS
+  SELECT riv.id AS river_id,
+         g.id   AS gauge_id,
+         g.name AS gauge_name,
+         r.a    AS a,
+         r.b    AS b,
+         REGEXP_REPLACE(
+            nmv.name, '[[:space:]]*\(.*\)[[:space:]]*', '') AS name,
+         CAST(mv.value AS NUMERIC(38, 5)) AS value
+  FROM   main_values mv
+         JOIN named_main_values nmv
+           ON mv.named_value_id = nmv.id
+         JOIN main_value_types mvt
+           ON nmv.type_id = mvt.id
+         JOIN gauges g
+           ON mv.gauge_id = g.id
+         JOIN ranges r
+           ON g.range_id = r.id
+         JOIN rivers riv
+           ON g.river_id = riv.id
+  WHERE  mvt.name = 'Q'
+  ORDER  BY g.id, CAST(mv.value AS NUMERIC(38,5));
+
+CREATE VIEW official_q_values
+AS
+  SELECT ol.river_id AS river_id,
+         wst_id,
+         wst_column_id,
+         gauge_id,
+         gauge_name,
+         a,
+         b,
+         ol.name,
+         value,
+         wst_column_pos
+  FROM   official_lines ol
+         JOIN q_main_values qmv
+           ON ol.river_id = qmv.river_id
+              AND ol.name = qmv.name;
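
For illustration only (not part of the committed file; the river_id below is a placeholder), the views defined above can be queried directly once the schema has been loaded, e.g. to list the official Q main values per gauge:

    -- illustrative query, assumes the schema above has been loaded
    SELECT gauge_name, name, value, wst_column_pos
    FROM   official_q_values
    WHERE  river_id = 1           -- placeholder id
    ORDER  BY gauge_name, value;
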
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle_create_user.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,12 @@
+-- CREATE TABLESPACE for user
+CREATE TABLESPACE "test" DATAFILE '/u01/app/oracle/oradata/XE/test.dbf' SIZE 500M AUTOEXTEND ON NEXT 100M MAXSIZE 1G LOGGING ONLINE PERMANENT BLOCKSIZE 8192 EXTENT MANAGEMENT LOCAL AUTOALLOCATE DEFAULT NOCOMPRESS SEGMENT SPACE MANAGEMENT AUTO;
+-- CREATE USER
+CREATE USER test IDENTIFIED BY test;
+-- USER SQL
+ALTER USER test DEFAULT TABLESPACE "test" TEMPORARY TABLESPACE "TEMP" ACCOUNT UNLOCK;
+-- QUOTA for user on TABLESPACE
+ALTER USER test QUOTA UNLIMITED ON "test";
+GRANT ALL ON "MDSYS"."ALL_SDO_GEOM_METADATA" TO test;
+GRANT CREATE SESSION TO test;
+GRANT CREATE VIEW TO test;
+GRANT CONNECT, RESOURCE TO test;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql-minfo.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,382 @@
+BEGIN;
+
+CREATE SEQUENCE LOCATION_SYSTEM_SEQ;
+
+CREATE TABLE location_system (
+    id          int NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+CREATE SEQUENCE ELEVATION_MODEL_SEQ;
+
+CREATE TABLE elevation_model (
+    id          int NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    unit_id     int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
+
+CREATE TABLE bed_height_type (
+    id          int NOT NULL,
+    name        VARCHAR(16)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+
+CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+
+CREATE TABLE bed_height_single (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    year                    int NOT NULL,
+    sounding_width          int NOT NULL,
+    type_id                 int NOT NULL,
+    location_system_id      int NOT NULL,
+    cur_elevation_model_id  int NOT NULL,
+    old_elevation_model_id  int,
+    range_id                int NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id),
+    CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id),
+    CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+
+CREATE TABLE bed_height_epoch (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    time_interval_id        int NOT NULL,
+    -- sounding_with           int NOT NULL,
+    -- type_id                 int NOT NULL,
+    cur_elevation_model_id  int NOT NULL,
+    old_elevation_model_id  int,
+    range_id                int NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id),
+    CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_single_values (
+    id                      int NOT NULL,
+    bed_height_single_id    int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    height                  NUMERIC,
+    uncertainty             NUMERIC,
+    data_gap                NUMERIC NOT NULL,
+    sounding_width          NUMERIC NOT NULL,
+    width                   NUMERIC NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id)
+);
+
+
+CREATE SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_epoch_values (
+    id                      int NOT NULL,
+    bed_height_epoch_id     int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    height                  NUMERIC,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id)
+);
+
+
+CREATE SEQUENCE DEPTHS_ID_SEQ;
+
+CREATE TABLE depths (
+    id      int NOT NULL,
+    lower   NUMERIC NOT NULL,
+    upper   NUMERIC NOT NULL,
+    unit_id int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+
+CREATE TABLE sediment_density (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    depth_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
+    CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_density_values (
+    id                  int NOT NULL,
+    sediment_density_id int NOT NULL,
+    station             NUMERIC NOT NULL,
+    density             NUMERIC NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id)
+);
+
+
+CREATE SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+
+CREATE TABLE morphologic_width (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+
+CREATE TABLE morphologic_width_values (
+    id                      int NOT NULL,
+    morphologic_width_id    int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    width                   NUMERIC NOT NULL,
+    description             VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id)
+);
+
+
+CREATE SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+
+CREATE TABLE discharge_zone (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    gauge_name              VARCHAR(64)  NOT NULL, -- not ideal, but some gauges have no database record to reference
+    value                   NUMERIC NOT NULL,
+    lower_discharge         VARCHAR(16)  NOT NULL,
+    upper_discharge         VARCHAR(16),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+
+CREATE TABLE flow_velocity_model (
+    id                  int NOT NULL,
+    river_id            int NOT NULL,
+    discharge_zone_id   int NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_model_values (
+    id                      int NOT NULL,
+    flow_velocity_model_id  int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    q                       NUMERIC NOT NULL,
+    total_channel           NUMERIC NOT NULL,
+    main_channel            NUMERIC NOT NULL,
+    shear_stress            NUMERIC NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id)
+);
+
+
+
+CREATE SEQUENCE FV_MEASURE_ID_SEQ;
+
+CREATE TABLE flow_velocity_measurements (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_measure_values (
+    id              int NOT NULL,
+    measurements_id int NOT NULL,
+    station         NUMERIC NOT NULL,
+    datetime        TIMESTAMP,
+    w               NUMERIC NOT NULL,
+    q               NUMERIC NOT NULL,
+    v               NUMERIC NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id)
+);
+
+
+CREATE SEQUENCE GRAIN_FRACTION_ID_SEQ;
+
+CREATE TABLE grain_fraction (
+    id      int   NOT NULL,
+    name    VARCHAR(64)    NOT NULL,
+    lower   NUMERIC,
+    upper   NUMERIC,
+    unit_id int,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_gf_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+
+CREATE TABLE sediment_yield (
+    id                  int NOT NULL,
+    river_id            int NOT NULL,
+    grain_fraction_id   int,
+    unit_id             int NOT NULL,
+    time_interval_id    int NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
+    CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id),
+    CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_yield_values (
+    id                  int NOT NULL,
+    sediment_yield_id   int NOT NULL,
+    station             NUMERIC NOT NULL,
+    value               NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_ID_SEQ;
+
+CREATE TABLE waterlevel (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+
+CREATE TABLE waterlevel_q_range (
+    id              int NOT NULL,
+    waterlevel_id   int NOT NULL,
+    q               NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_values (
+    id                      int NOT NULL,
+    waterlevel_q_range_id   int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    w                       NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+
+CREATE TABLE waterlevel_difference (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
+    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_column (
+    id              int NOT NULL,
+    difference_id   int NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_values (
+    id          int NOT NULL,
+    column_id   int NOT NULL,
+    station     NUMERIC NOT NULL,
+    value       NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+);
+
+
+CREATE SEQUENCE SQ_RELATION_ID_SEQ;
+
+CREATE TABLE sq_relation (
+    id               int NOT NULL,
+    river_id         int NOT NULL,
+    time_interval_id int NOT NULL,
+    description      VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sqr_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sqr_tinterval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SQ_RELATION_VALUES_ID_SEQ;
+
+CREATE TABLE sq_relation_value (
+    id             int NOT NULL,
+    sq_relation_id int NOT NULL,
+    parameter      VARCHAR(16)  NOT NULL,
+    fraction       VARCHAR(32)  NOT NULL,
+    function       VARCHAR(32)  NOT NULL,
+    km             NUMERIC NOT NULL,
+    a              NUMERIC NOT NULL,
+    b              NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id)
+);
+COMMIT;
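
As a sketch of how the MINFO tables above relate (illustrative only, not part of the committed file; the river_id is a placeholder), a single bed-height series and its per-station values could be read like this:

    -- illustrative query, assumes the MINFO schema above has been loaded
    SELECT bhs.year, bhs.description, v.station, v.height, v.sounding_width
    FROM   bed_height_single bhs
    JOIN   bed_height_single_values v ON v.bed_height_single_id = bhs.id
    WHERE  bhs.river_id = 1       -- placeholder id
    ORDER  BY v.station;
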
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql-spatial.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,161 @@
+BEGIN;
+
+-- Geodaesie/Flussachse+km/achse
+CREATE SEQUENCE RIVER_AXES_ID_SEQ;
+CREATE TABLE river_axes (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    kind     int             NOT NULL DEFAULT 0
+);
+SELECT AddGeometryColumn('river_axes', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE river_axes ALTER COLUMN id SET DEFAULT NEXTVAL('RIVER_AXES_ID_SEQ');
+
+
+-- TODO: TestMe.
+-- Geodaesie/Flussachse+km/km.shp
+CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ;
+CREATE TABLE river_axes_km (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    km       NUMERIC NOT NULL
+);
+SELECT AddGeometryColumn('river_axes_km', 'geom', 31466, 'POINT', 2);
+ALTER TABLE river_axes_km ALTER COLUMN id SET DEFAULT NEXTVAL('RIVER_AXES_KM_ID_SEQ');
+
+
+--Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+CREATE TABLE cross_section_tracks (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    km       NUMERIC NOT NULL,
+    z        NUMERIC NOT NULL DEFAULT 0
+);
+SELECT AddGeometryColumn('cross_section_tracks', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE cross_section_tracks ALTER COLUMN id SET DEFAULT NEXTVAL('CROSS_SECTION_TRACKS_ID_SEQ');
+
+
+-- Geodaesie/Linien/rohre-und-spreen
+CREATE SEQUENCE LINES_ID_SEQ;
+CREATE TABLE lines (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    kind     VARCHAR(16) NOT NULL,
+    z        NUMERIC DEFAULT 0
+);
+SELECT AddGeometryColumn('lines', 'geom', 31466, 'LINESTRING', 4);
+ALTER TABLE lines ALTER COLUMN id SET DEFAULT NEXTVAL('LINES_ID_SEQ');
+-- 'kind':
+-- 0: ROHR1 (pipe)
+-- 1: DAMM (dam)
+
+
+-- Geodaesie/Bauwerke/Wehre.shp
+CREATE SEQUENCE BUILDINGS_ID_SEQ;
+CREATE TABLE buildings (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    name     VARCHAR(256)
+);
+SELECT AddGeometryColumn('buildings', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE buildings ALTER COLUMN id SET DEFAULT NEXTVAL('BUILDINGS_ID_SEQ');
+
+
+-- Geodaesie/Festpunkte/Festpunkte.shp
+CREATE SEQUENCE FIXPOINTS_ID_SEQ;
+CREATE TABLE fixpoints (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    x        int,
+    y        int,
+    km       NUMERIC NOT NULL,
+    HPGP     VARCHAR(2)
+);
+SELECT AddGeometryColumn('fixpoints', 'geom', 31466, 'POINT', 2);
+ALTER TABLE fixpoints ALTER COLUMN id SET DEFAULT NEXTVAL('FIXPOINTS_ID_SEQ');
+
+
+-- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
+CREATE TABLE floodplain (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id)
+);
+SELECT AddGeometryColumn('floodplain', 'geom', 31466, 'POLYGON', 2);
+ALTER TABLE floodplain ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODPLAIN_ID_SEQ');
+
+
+-- Geodaesie/Hoehenmodelle/*
+CREATE SEQUENCE DEM_ID_SEQ;
+CREATE TABLE dem (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    -- XXX Should we use the ranges table instead?
+    lower    NUMERIC,
+    upper    NUMERIC,
+    path     VARCHAR(256),
+    UNIQUE (river_id, lower, upper)
+);
+ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ');
+
+
+-- Hydrologie/Einzugsgebiete/EZG.shp
+CREATE SEQUENCE CATCHMENT_ID_SEQ;
+CREATE TABLE catchment (
+    id int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    area NUMERIC,
+    name VARCHAR(256)
+);
+SELECT AddGeometryColumn('catchment','geom',31466,'POLYGON',2);
+ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ');
+
+
+--Hydrologie/HW-Schutzanlagen/hws.shp
+CREATE SEQUENCE HWS_ID_SEQ;
+CREATE TABLE hws (
+    id int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    hws_facility VARCHAR(256),
+    type VARCHAR(256)
+);
+SELECT AddGeometryColumn('hws','geom',31466,'LINESTRING',2);
+ALTER TABLE hws ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_ID_SEQ');
+
+
+--
+--Hydrologie/UeSG
+--
+-- 'kind' can be one of:
+-- 200 = Messung (measurement)
+-- 111 = Berechnung->Aktuell->BfG     (calculation, current, by the BfG)
+-- 112 = Berechnung->Aktuell->Land    (calculation, current, by the federal state)
+-- 121 = Berechnung->Potenziell->BfG  (calculation, potential, by the BfG)
+-- 122 = Berechnung->Potenziell->Land (calculation, potential, by the federal state)
+--
+CREATE SEQUENCE FLOODMAPS_SEQ;
+CREATE FUNCTION floodmaps_id_func() RETURNS trigger AS $floodmaps_id_func$
+    BEGIN
+        NEW.id := nextval('floodmaps_seq');
+        RETURN NEW;
+    END;
+$floodmaps_id_func$ LANGUAGE plpgsql;
+
+CREATE TABLE floodmaps (
+    id         int PRIMARY KEY NOT NULL,
+    river_id   int REFERENCES rivers(id),
+    name       varchar(64) NOT NULL,
+    kind       int NOT NULL,
+    diff       real,
+    count      int,
+    area       real,
+    perimeter  real
+);
+SELECT AddGeometryColumn('floodmaps', 'geom', 31466, 'MULTIPOLYGON', 2);
+ALTER TABLE floodmaps DROP CONSTRAINT enforce_geotype_geom;
+ALTER TABLE floodmaps ADD CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POLYGON'::text OR geometrytype(geom) = 'MULTIPOLYGON'::text);
+ALTER TABLE floodmaps ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODMAPS_SEQ');
+
+CREATE TRIGGER floodmaps_id_trigger BEFORE INSERT OR UPDATE ON floodmaps
+    FOR EACH ROW EXECUTE PROCEDURE floodmaps_id_func();
+END;
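
Illustrative only (not part of the committed file): floodmaps_id_trigger assigns ids from FLOODMAPS_SEQ, so an INSERT can omit the id column. The geometry, SRID 31466 and kind 200 (Messung) below are placeholders; ST_GeomFromText is assumed to come from the same PostGIS installation that provides AddGeometryColumn.

    -- illustrative insert; id is filled in by floodmaps_id_trigger
    INSERT INTO floodmaps (river_id, name, kind, geom)
    VALUES (1, 'example flood map', 200,
            ST_GeomFromText('MULTIPOLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))', 31466));
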
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql.sql	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,415 @@
+BEGIN;
+
+CREATE SEQUENCE UNITS_ID_SEQ;
+
+CREATE TABLE units (
+    id   int PRIMARY KEY NOT NULL,
+    name VARCHAR(32)     NOT NULL UNIQUE
+);
+
+-- Gewaesser (rivers)
+CREATE SEQUENCE RIVERS_ID_SEQ;
+
+CREATE TABLE rivers (
+    id          int PRIMARY KEY NOT NULL,
+    name        VARCHAR(256)    NOT NULL UNIQUE,
+    km_up       BOOLEAN         NOT NULL DEFAULT true,
+    wst_unit_id int             NOT NULL REFERENCES units(id)
+);
+
+-- Bruecke, Haefen, etc. (bridges, harbours, ...)
+CREATE SEQUENCE ATTRIBUTES_ID_SEQ;
+
+CREATE TABLE attributes (
+    id    int PRIMARY KEY NOT NULL,
+    value VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- from/to segments (km ranges) of a river
+CREATE SEQUENCE RANGES_ID_SEQ;
+
+CREATE TABLE ranges (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int             NOT NULL REFERENCES rivers(id),
+    a        NUMERIC         NOT NULL,
+    b        NUMERIC,
+    UNIQUE (river_id, a, b)
+);
+
+-- Lage (side): 'links' (left), 'rechts' (right), etc.
+CREATE SEQUENCE POSITIONS_ID_SEQ;
+
+CREATE TABLE positions (
+    id    int PRIMARY KEY NOT NULL,
+    value VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Kante (edge): 'oben' (top), 'unten' (bottom)
+CREATE SEQUENCE EDGES_ID_SEQ;
+
+CREATE TABLE edges (
+    id     int PRIMARY KEY NOT NULL,
+    top    NUMERIC,
+    bottom NUMERIC
+);
+
+-- Types of annotations (e.g. Hafen = harbour, Bruecke = bridge, Zufluss = tributary, ...)
+CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+
+CREATE TABLE annotation_types (
+    id    int PRIMARY KEY NOT NULL,
+    name  VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Some object (e.g. a Hafen/harbour) at a segment of a river,
+-- plus its position.
+CREATE SEQUENCE ANNOTATIONS_ID_SEQ;
+
+CREATE TABLE annotations (
+    id           int PRIMARY KEY NOT NULL,
+    range_id     int             NOT NULL REFERENCES ranges(id),
+    attribute_id int             NOT NULL REFERENCES attributes(id),
+    position_id  int REFERENCES positions(id),
+    edge_id      int REFERENCES edges(id),
+    type_id      int REFERENCES annotation_types(id)
+);
+
+-- Pegel (gauges)
+CREATE SEQUENCE GAUGES_ID_SEQ;
+
+CREATE TABLE gauges (
+    id              int PRIMARY KEY NOT NULL,
+    name            VARCHAR(256)    NOT NULL,
+    river_id        int             NOT NULL REFERENCES rivers(id),
+    station         NUMERIC         NOT NULL UNIQUE,
+    aeo             NUMERIC         NOT NULL,
+    official_number int8                     UNIQUE,
+
+    -- Pegelnullpunkt (gauge datum)
+    datum    NUMERIC NOT NULL,
+    -- Streckengueltigkeit (valid km range)
+    range_id int REFERENCES ranges (id),
+
+    UNIQUE (name, river_id),
+    UNIQUE (river_id, station)
+);
+
+-- Type of a Hauptwert (main value): 'W', 'Q', 'D', etc.
+CREATE SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+
+CREATE TABLE main_value_types (
+    id   int PRIMARY KEY NOT NULL,
+    name VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Named type of a Hauptwert (e.g. HQ100)
+CREATE SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE named_main_values (
+    id      int PRIMARY KEY NOT NULL,
+    name    VARCHAR(256)    NOT NULL UNIQUE,
+    type_id int NOT NULL REFERENCES main_value_types(id),
+    UNIQUE (name, type_id)
+);
+
+-- Table for time intervals
+CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
+
+CREATE TABLE time_intervals (
+    id         int PRIMARY KEY NOT NULL,
+    start_time TIMESTAMP       NOT NULL,
+    stop_time  TIMESTAMP,
+    CHECK (start_time <= stop_time)
+);
+
+-- Stammdaten (master data)
+CREATE SEQUENCE MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE main_values (
+    id             int PRIMARY KEY NOT NULL,
+    gauge_id       int NOT NULL REFERENCES gauges(id),
+    named_value_id int NOT NULL REFERENCES named_main_values(id),
+    value          NUMERIC NOT NULL,
+
+    time_interval_id int REFERENCES time_intervals(id),
+
+    -- TODO: better checks
+    UNIQUE (gauge_id, named_value_id, time_interval_id)
+);
+
+-- Abflusstafeln (discharge tables)
+CREATE SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+
+CREATE TABLE discharge_tables (
+    id               int PRIMARY KEY NOT NULL,
+    gauge_id         int NOT NULL REFERENCES gauges(id),
+    description      VARCHAR(256) NOT NULL,
+    kind             int NOT NULL DEFAULT 0,
+    time_interval_id int REFERENCES time_intervals(id)
+
+    -- TODO: better checks
+    -- UNIQUE (gauge_id, kind, time_interval_id)
+);
+
+-- Values of the Abflusstafeln (discharge tables)
+CREATE SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+
+CREATE TABLE discharge_table_values (
+    id       int PRIMARY KEY NOT NULL,
+    table_id int NOT NULL REFERENCES discharge_tables(id),
+    q        NUMERIC NOT NULL,
+    w        NUMERIC NOT NULL,
+
+    UNIQUE (table_id, q, w)
+);
+
+-- WST files
+CREATE SEQUENCE WSTS_ID_SEQ;
+
+CREATE TABLE wsts (
+    id          int PRIMARY KEY NOT NULL,
+    river_id    int NOT NULL REFERENCES rivers(id),
+    description VARCHAR(256) NOT NULL,
+    kind        int NOT NULL DEFAULT 0,
+    -- TODO: more meta infos
+    UNIQUE (river_id, description)
+);
+
+-- columns of WST files
+CREATE SEQUENCE WST_COLUMNS_ID_SEQ;
+
+CREATE TABLE wst_columns (
+    id          int PRIMARY KEY NOT NULL,
+    wst_id      int NOT NULL REFERENCES wsts(id),
+    name        VARCHAR(256) NOT NULL,
+    description VARCHAR(256),
+    position    int NOT NULL DEFAULT 0,
+
+    time_interval_id int REFERENCES time_intervals(id),
+
+    UNIQUE (wst_id, name),
+    UNIQUE (wst_id, position)
+);
+
+-- w values in a WST file column
+CREATE SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+
+CREATE TABLE wst_column_values (
+    id            int PRIMARY KEY NOT NULL,
+    wst_column_id int NOT NULL REFERENCES wst_columns(id),
+    position      NUMERIC NOT NULL,
+    w             NUMERIC NOT NULL,
+
+    UNIQUE (position, wst_column_id),
+    UNIQUE (position, wst_column_id, w)
+);
+
+-- bind q values to range
+CREATE SEQUENCE WST_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_q_ranges (
+    id       int PRIMARY KEY NOT NULL,
+    range_id int NOT NULL REFERENCES ranges(id),
+    q        NUMERIC NOT NULL
+);
+
+-- bind q ranges to wst columns
+CREATE SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_column_q_ranges (
+    id             int PRIMARY KEY NOT NULL,
+    wst_column_id  int NOT NULL REFERENCES wst_columns(id),
+    wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id),
+
+    UNIQUE (wst_column_id, wst_q_range_id)
+);
+
+CREATE VIEW wst_value_table AS
+    SELECT wcv.position AS position,
+           w,
+           (SELECT q
+            FROM   wst_column_q_ranges wcqr
+                   JOIN wst_q_ranges wqr
+                     ON wcqr.wst_q_range_id = wqr.id
+                   JOIN ranges r
+                     ON r.id = wqr.range_id
+            WHERE  wcqr.wst_column_id = wc.id
+                   AND wcv.position BETWEEN r.a AND r.b) AS q,
+           wc.position                                   AS column_pos,
+           w.id                                          AS wst_id
+    FROM   wst_column_values wcv
+           JOIN wst_columns wc
+             ON wcv.wst_column_id = wc.id
+           JOIN wsts w
+             ON wc.wst_id = w.id
+    ORDER  BY wcv.position ASC,
+          wc.position DESC;
+
+-- view to select the w values of a WST
+CREATE VIEW wst_w_values AS
+    SELECT wcv."position" AS km, 
+           wcv.w          AS w,  
+           wc."position"  AS column_pos, 
+           w.id           AS wst_id
+        FROM wst_column_values wcv
+        JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+        JOIN wsts w         ON wc.wst_id = w.id
+    ORDER BY wcv."position", wc."position";
+
+-- view to select the q values of a WST
+CREATE VIEW wst_q_values AS
+    SELECT wc.position AS column_pos,
+           wqr.q       AS q, 
+           r.a         AS a, 
+           r.b         AS b,
+           wc.wst_id   AS wst_id
+    FROM wst_column_q_ranges wcqr
+    JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+    JOIN ranges r         ON wqr.range_id        = r.id
+    JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+    ORDER BY wc.position, wcqr.wst_column_id, r.a;
+
+-- data for the cross-sections
+
+CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+
+CREATE TABLE cross_sections (
+    id               int PRIMARY KEY NOT NULL,
+    river_id         int             NOT NULL REFERENCES rivers(id),
+    time_interval_id int                      REFERENCES time_intervals(id),
+    description      VARCHAR(256)
+);
+
+CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+CREATE TABLE cross_section_lines (
+    id               int PRIMARY KEY NOT NULL,
+    km               NUMERIC         NOT NULL,
+    cross_section_id int             NOT NULL REFERENCES cross_sections(id),
+    UNIQUE (km, cross_section_id)
+);
+
+CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+
+CREATE TABLE cross_section_points (
+    id                    int PRIMARY KEY NOT NULL,
+    cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id),
+    col_pos               int             NOT NULL,
+    x                     NUMERIC         NOT NULL,
+    y                     NUMERIC         NOT NULL,
+    UNIQUE (cross_section_line_id, col_pos)
+);
+
+-- Indices for faster access to the points
+CREATE INDEX cross_section_lines_km_idx
+    ON cross_section_lines(km);
+CREATE INDEX cross_section_points_line_idx
+    ON cross_section_points(cross_section_line_id);
+
+-- Hydraulische Kenngroessen (hydraulic characteristics)
+
+CREATE SEQUENCE HYKS_ID_SEQ;
+
+CREATE TABLE hyks (
+    id          int PRIMARY KEY NOT NULL,
+    river_id    int             NOT NULL REFERENCES rivers(id),
+    description VARCHAR(256)    NOT NULL
+);
+
+CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+
+CREATE TABLE hyk_entries (
+    id          int PRIMARY KEY NOT NULL,
+    hyk_id      int             NOT NULL REFERENCES hyks(id),
+    km          NUMERIC         NOT NULL,
+    measure     TIMESTAMP,
+    UNIQUE (hyk_id, km)
+);
+
+CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+
+CREATE TABLE hyk_formations (
+    id            int PRIMARY KEY NOT NULL,
+    formation_num int             NOT NULL DEFAULT 0,
+    hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id),
+    top           NUMERIC         NOT NULL,
+    bottom        NUMERIC         NOT NULL,
+    distance_vl   NUMERIC         NOT NULL,
+    distance_hf   NUMERIC         NOT NULL,
+    distance_vr   NUMERIC         NOT NULL,
+    UNIQUE (hyk_entry_id, formation_num)
+);
+
+CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zone_types (
+    id          int PRIMARY KEY NOT NULL,
+    name        VARCHAR(50)     NOT NULL UNIQUE,
+    description VARCHAR(256)
+);
+
+CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zones (
+    id           int PRIMARY KEY NOT NULL,
+    formation_id int             NOT NULL REFERENCES hyk_formations(id),
+    type_id      int             NOT NULL REFERENCES hyk_flow_zone_types(id),
+    a            NUMERIC         NOT NULL,
+    b            NUMERIC         NOT NULL,
+    CHECK (a <= b)
+);
+
+CREATE VIEW official_lines
+AS
+  SELECT w.river_id AS river_id,
+         w.id       AS wst_id,
+         wc.id      AS wst_column_id,
+         wc.name    AS name,
+         wc.position AS wst_column_pos
+  FROM   wsts w
+         JOIN wst_columns wc
+           ON wc.wst_id = w.id
+  WHERE  w.kind = 3;
+
+CREATE VIEW q_main_values
+AS
+  SELECT riv.id AS river_id,
+         g.id   AS gauge_id,
+         g.name AS gauge_name,
+         r.a    AS a,
+         r.b    AS b,
+         REGEXP_REPLACE(
+            nmv.name, E'[[:space:]]*\\(.*\\)[[:space:]]*', '') AS name,
+         CAST(mv.value AS NUMERIC(38, 2)) AS value
+  FROM   main_values mv
+         JOIN named_main_values nmv
+           ON mv.named_value_id = nmv.id
+         JOIN main_value_types mvt
+           ON nmv.type_id = mvt.id
+         JOIN gauges g
+           ON mv.gauge_id = g.id
+         JOIN ranges r
+           ON g.range_id = r.id
+         JOIN rivers riv
+           ON g.river_id = riv.id
+  WHERE  mvt.name = 'Q'
+  ORDER  BY g.id, CAST(mv.value AS NUMERIC(38,2));
+
+CREATE VIEW official_q_values
+AS
+  SELECT ol.river_id AS river_id,
+         wst_id,
+         wst_column_id,
+         gauge_id,
+         gauge_name,
+         a,
+         b,
+         ol.name,
+         value,
+         wst_column_pos
+  FROM   official_lines ol
+         JOIN q_main_values qmv
+           ON ol.river_id = qmv.river_id
+              AND ol.name = qmv.name;
+
+COMMIT;
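
For illustration (not part of the committed file; the wst_id below is a placeholder), wst_value_table arranges the W values of one WST by km and column together with the Q interpolated from the column's q ranges:

    -- illustrative query, assumes the schema above has been loaded
    SELECT "position" AS km, column_pos, w, q
    FROM   wst_value_table
    WHERE  wst_id = 1             -- placeholder id
    ORDER  BY "position", column_pos;
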
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/pom.xml	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,113 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>de.intevation.flys</groupId>
+  <artifactId>flys-backend</artifactId>
+  <version>1.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <name>flys-backend</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>hibernate3-maven-plugin</artifactId>
+        <version>2.2</version>
+        <!--
+        <configuration>
+            <componentProperties>
+                <propertyfile>src/main/config/hbm.properties</propertyfile>
+            </componentProperties>
+        </configuration>
+        -->
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>de.intevation.artifacts.common</groupId>
+      <artifactId>artifacts-common</artifactId>
+      <version>1.0-SNAPSHOT</version>
+    </dependency>    
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-core</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-entitymanager</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.14</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-dbcp</groupId>
+      <artifactId>commons-dbcp</artifactId>
+      <version>1.4</version>
+    </dependency>
+    <dependency>
+      <groupId>postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>8.4-702.jdbc4</version>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-postgis</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-oracle</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.postgis</groupId>
+        <artifactId>postgis-jdbc</artifactId>
+        <version>1.3.3</version>
+    </dependency>
+  </dependencies>
+
+  <repositories>
+    <repository>
+      <id>repository.jboss.org/nexus</id>
+      <name>JBoss Repository - Nexus</name>
+      <url>http://repository.jboss.org/nexus/content/groups/public/</url>
+    </repository>
+    <repository>
+        <id>OSGEO GeoTools repo</id>
+        <url>http://download.osgeo.org/webdav/geotools</url>
+    </repository>
+    <repository>
+        <id>Hibernate Spatial repo</id>
+        <url>http://www.hibernatespatial.org/repository</url>
+    </repository>
+  </repositories>
+</project>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/App.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,30 @@
+package de.intevation.flys;
+
+import de.intevation.flys.backend.SessionFactoryProvider;
+import de.intevation.flys.backend.FLYSCredentials;
+
+import org.hibernate.cfg.Configuration;
+
+import org.hibernate.dialect.resolver.DialectFactory;
+
+public class App
+{
+    public static void dumpSchema(Configuration cfg) {
+        System.out.println("BEGIN;");
+
+        String [] setupScript = cfg.generateSchemaCreationScript(
+            DialectFactory.constructDialect(
+                FLYSCredentials.getDefault().getDialect()));
+
+        for (String line: setupScript) {
+            System.out.println(line + ";");
+        }
+
+        System.out.println("COMMIT;");
+    }
+
+    public static void main(String [] args) {
+        dumpSchema(SessionFactoryProvider.createConfiguration());
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/Credentials.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,79 @@
+package de.intevation.flys.backend;
+
+public abstract class Credentials
+{
+    protected String   user;
+    protected String   password;
+    protected String   dialect;
+    protected String   driver;
+    protected String   url;
+    protected Class [] classes;
+
+    public Credentials() {
+    }
+
+    public Credentials(
+        String   user,
+        String   password,
+        String   dialect,
+        String   driver,
+        String   url,
+        Class [] classes
+    ) {
+        this.user     = user;
+        this.password = password;
+        this.dialect  = dialect;
+        this.driver   = driver;
+        this.url      = url;
+        this.classes  = classes;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public void setUser(String user) {
+        this.user = user;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public String getDialect() {
+        return dialect;
+    }
+
+    public void setDialect(String dialect) {
+        this.dialect = dialect;
+    }
+
+    public String getDriver() {
+        return driver;
+    }
+
+    public void setDriver(String driver) {
+        this.driver = driver;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public Class [] getClasses() {
+        return classes;
+    }
+
+    public void setClasses(Class [] classes) {
+        this.classes = classes;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,231 @@
+package de.intevation.flys.backend;
+
+import de.intevation.artifacts.common.utils.Config;
+
+import de.intevation.flys.model.Annotation;
+import de.intevation.flys.model.AnnotationType;
+import de.intevation.flys.model.Attribute;
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.BedHeightEpochValue;
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightSingleValue;
+import de.intevation.flys.model.BedHeightType;
+import de.intevation.flys.model.Building;
+import de.intevation.flys.model.Catchment;
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.CrossSectionLine;
+import de.intevation.flys.model.CrossSectionPoint;
+import de.intevation.flys.model.CrossSectionTrack;
+import de.intevation.flys.model.DGM;
+import de.intevation.flys.model.Depth;
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.DischargeTableValue;
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.Edge;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Fixpoint;
+import de.intevation.flys.model.Floodmaps;
+import de.intevation.flys.model.Floodplain;
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.FlowVelocityMeasurementValue;
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.FlowVelocityModelValue;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.GaugeLocation;
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.HYK;
+import de.intevation.flys.model.HYKEntry;
+import de.intevation.flys.model.HYKFlowZone;
+import de.intevation.flys.model.HYKFlowZoneType;
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.Hws;
+import de.intevation.flys.model.HydrBoundary;
+import de.intevation.flys.model.HydrBoundaryPoly;
+import de.intevation.flys.model.Line;
+import de.intevation.flys.model.LocationSystem;
+import de.intevation.flys.model.MainValue;
+import de.intevation.flys.model.MainValueType;
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.MorphologicalWidthValue;
+import de.intevation.flys.model.NamedMainValue;
+import de.intevation.flys.model.Position;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.RiverAxis;
+import de.intevation.flys.model.RiverAxisKm;
+import de.intevation.flys.model.SQRelation;
+import de.intevation.flys.model.SQRelationValue;
+import de.intevation.flys.model.SedimentDensity;
+import de.intevation.flys.model.SedimentDensityValue;
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.SedimentYieldValue;
+import de.intevation.flys.model.TimeInterval;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.Waterlevel;
+import de.intevation.flys.model.WaterlevelDifference;
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+import de.intevation.flys.model.WaterlevelDifferenceValue;
+import de.intevation.flys.model.WaterlevelQRange;
+import de.intevation.flys.model.WaterlevelValue;
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.WstColumnQRange;
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.WstQRange;
+
+public class FLYSCredentials
+extends      Credentials
+{
+    public static final String XPATH_USER =
+        "/artifact-database/backend-database/user/text()";
+
+    public static final String XPATH_PASSWORD =
+        "/artifact-database/backend-database/password/text()";
+
+    public static final String XPATH_DIALECT =
+        "/artifact-database/backend-database/dialect/text()";
+
+    public static final String XPATH_DRIVER =
+        "/artifact-database/backend-database/driver/text()";
+
+    public static final String XPATH_URL =
+        "/artifact-database/backend-database/url/text()";
+
+    public static final String DEFAULT_USER =
+        System.getProperty("flys.backend.user", "flys");
+
+    public static final String DEFAULT_PASSWORD =
+        System.getProperty("flys.backend.password", "flys");
+
+    public static final String DEFAULT_DIALECT =
+        System.getProperty(
+            "flys.backend.dialect",
+            "org.hibernate.dialect.PostgreSQLDialect");
+
+    public static final String DEFAULT_DRIVER =
+        System.getProperty(
+            "flys.backend.driver",
+            "org.postgresql.Driver");
+
+    public static final String DEFAULT_URL =
+        System.getProperty(
+            "flys.backend.url",
+            "jdbc:postgresql://localhost:5432/flys");
+
+    public static final Class [] CLASSES = {
+        Annotation.class,
+        AnnotationType.class,
+        Attribute.class,
+        BedHeightEpoch.class,
+        BedHeightEpochValue.class,
+        BedHeightSingle.class,
+        BedHeightSingleValue.class,
+        BedHeightType.class,
+        Building.class,
+        Catchment.class,
+        CrossSection.class,
+        CrossSectionLine.class,
+        CrossSectionPoint.class,
+        CrossSectionTrack.class,
+        Depth.class,
+        DGM.class,
+        DischargeTable.class,
+        DischargeTableValue.class,
+        DischargeZone.class,
+        Edge.class,
+        ElevationModel.class,
+        Fixpoint.class,
+        Floodplain.class,
+        Floodmaps.class,
+        FlowVelocityMeasurement.class,
+        FlowVelocityMeasurementValue.class,
+        FlowVelocityModel.class,
+        FlowVelocityModelValue.class,
+        Gauge.class,
+        GaugeLocation.class,
+        GrainFraction.class,
+        Hws.class,
+        HydrBoundary.class,
+        HydrBoundaryPoly.class,
+        HYK.class,
+        HYKEntry.class,
+        HYKFormation.class,
+        HYKFlowZoneType.class,
+        HYKFlowZone.class,
+        Line.class,
+        LocationSystem.class,
+        MainValueType.class,
+        MorphologicalWidth.class,
+        MorphologicalWidthValue.class,
+        NamedMainValue.class,
+        MainValue.class,
+        Position.class,
+        Range.class,
+        River.class,
+        RiverAxis.class,
+        RiverAxisKm.class,
+        SedimentDensity.class,
+        SedimentDensityValue.class,
+        SedimentYield.class,
+        SedimentYieldValue.class,
+        SQRelation.class,
+        SQRelationValue.class,
+        TimeInterval.class,
+        Unit.class,
+        Waterlevel.class,
+        WaterlevelDifference.class,
+        WaterlevelDifferenceColumn.class,
+        WaterlevelDifferenceValue.class,
+        WaterlevelQRange.class,
+        WaterlevelValue.class,
+        WstColumn.class,
+        WstColumnQRange.class,
+        WstColumnValue.class,
+        Wst.class,
+        WstQRange.class
+    };
+
+    public FLYSCredentials() {
+    }
+
+    public FLYSCredentials(
+        String user,
+        String password,
+        String dialect,
+        String driver,
+        String url
+    ) {
+        super(user, password, dialect, driver, url, CLASSES);
+    }
+
+    private static Credentials instance;
+
+    public static synchronized Credentials getInstance() {
+        if (instance == null) {
+            String user =
+                Config.getStringXPath(XPATH_USER, DEFAULT_USER);
+            String password =
+                Config.getStringXPath(XPATH_PASSWORD, DEFAULT_PASSWORD);
+            String dialect =
+                Config.getStringXPath(XPATH_DIALECT, DEFAULT_DIALECT);
+            String driver =
+                Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
+            String url =
+                Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+
+            instance = new FLYSCredentials(
+                user, password, dialect, driver, url);
+        }
+        return instance;
+    }
+
+    public static Credentials getDefault() {
+        return new FLYSCredentials(
+            DEFAULT_USER,
+            DEFAULT_PASSWORD,
+            DEFAULT_DIALECT,
+            DEFAULT_DRIVER,
+            DEFAULT_URL);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SedDBCredentials.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,195 @@
+package de.intevation.flys.backend;
+
+import de.intevation.artifacts.common.utils.Config;
+
+import de.intevation.seddb.model.Bezugspegel;
+import de.intevation.seddb.model.Bezugspegelgew;
+import de.intevation.seddb.model.BezugspegelgewId;
+import de.intevation.seddb.model.Bild;
+import de.intevation.seddb.model.Gewaesser;
+import de.intevation.seddb.model.Gfaenger;
+import de.intevation.seddb.model.Glotlinks;
+import de.intevation.seddb.model.GlotlinksId;
+import de.intevation.seddb.model.Glotrechte;
+import de.intevation.seddb.model.Gprobe;
+import de.intevation.seddb.model.GprobeId;
+import de.intevation.seddb.model.Gsiebsatz;
+import de.intevation.seddb.model.Gsiebung;
+import de.intevation.seddb.model.Gsiebungsieb;
+import de.intevation.seddb.model.GsiebungsiebId;
+import de.intevation.seddb.model.Hpeilpunkt;
+import de.intevation.seddb.model.HpeilpunktId;
+import de.intevation.seddb.model.Hpeilung;
+import de.intevation.seddb.model.Messung;
+import de.intevation.seddb.model.Messunglotcount;
+import de.intevation.seddb.model.MessunglotcountId;
+import de.intevation.seddb.model.Messungsta;
+import de.intevation.seddb.model.MessungstaId;
+import de.intevation.seddb.model.Messunguferbezug;
+import de.intevation.seddb.model.MessunguferbezugId;
+import de.intevation.seddb.model.Mpeilpunkt;
+import de.intevation.seddb.model.MpeilpunktId;
+import de.intevation.seddb.model.Mpeilung;
+import de.intevation.seddb.model.Probebild;
+import de.intevation.seddb.model.Siebanalyse;
+import de.intevation.seddb.model.Slotlinks;
+import de.intevation.seddb.model.SlotlinksId;
+import de.intevation.seddb.model.Slotrechte;
+import de.intevation.seddb.model.Sohlprobe;
+import de.intevation.seddb.model.Sohltest;
+import de.intevation.seddb.model.Sprobe;
+import de.intevation.seddb.model.SprobeId;
+import de.intevation.seddb.model.Ssiebung;
+import de.intevation.seddb.model.Ssiebungsieb;
+import de.intevation.seddb.model.SsiebungsiebId;
+import de.intevation.seddb.model.Station;
+import de.intevation.seddb.model.Stationgew;
+import de.intevation.seddb.model.StationgewId;
+import de.intevation.seddb.model.TmpGloChanged;
+import de.intevation.seddb.model.TmpMesAchanged;
+import de.intevation.seddb.model.TmpMesGchanged;
+import de.intevation.seddb.model.TmpMesQchanged;
+import de.intevation.seddb.model.TmpMesSchanged;
+import de.intevation.seddb.model.Zzarchiv;
+import de.intevation.seddb.model.Zzprobenahmeart;
+import de.intevation.seddb.model.Zzsondierungart;
+import de.intevation.seddb.model.Zzthema;
+
+public class SedDBCredentials
+extends      Credentials
+{
+    public static final String XPATH_USER =
+        "/artifact-database/seddb-database/user/text()";
+
+    public static final String XPATH_PASSWORD =
+        "/artifact-database/seddb-database/password/text()";
+
+    public static final String XPATH_DIALECT =
+        "/artifact-database/seddb-database/dialect/text()";
+
+    public static final String XPATH_DRIVER =
+        "/artifact-database/seddb-database/driver/text()";
+
+    public static final String XPATH_URL =
+        "/artifact-database/seddb-database/url/text()";
+
+    public static final String DEFAULT_USER =
+        System.getProperty("flys.seddb.user", "seddb");
+
+    public static final String DEFAULT_PASSWORD =
+        System.getProperty("flys.seddb.password", "seddb");
+
+    public static final String DEFAULT_DIALECT =
+        System.getProperty(
+            "flys.seddb.dialect",
+            "org.hibernate.dialect.PostgreSQLDialect");
+
+    public static final String DEFAULT_DRIVER =
+        System.getProperty(
+            "flys.seddb.driver",
+            "org.postgresql.Driver");
+
+    public static final String DEFAULT_URL =
+        System.getProperty(
+            "flys.seddb.url",
+            "jdbc:postgresql://localhost:5432/seddb");
+
+    public static final Class [] CLASSES = {
+        BezugspegelgewId.class,
+        Bezugspegelgew.class,
+        Bezugspegel.class,
+        Bild.class,
+        Gewaesser.class,
+        Gfaenger.class,
+        GlotlinksId.class,
+        Glotlinks.class,
+        Glotrechte.class,
+        GprobeId.class,
+        Gprobe.class,
+        Gsiebsatz.class,
+        Gsiebung.class,
+        GsiebungsiebId.class,
+        Gsiebungsieb.class,
+        HpeilpunktId.class,
+        Hpeilpunkt.class,
+        Hpeilung.class,
+        Messung.class,
+        MessunglotcountId.class,
+        Messunglotcount.class,
+        MessungstaId.class,
+        Messungsta.class,
+        MessunguferbezugId.class,
+        Messunguferbezug.class,
+        MpeilpunktId.class,
+        Mpeilpunkt.class,
+        Mpeilung.class,
+        Probebild.class,
+        Siebanalyse.class,
+        SlotlinksId.class,
+        Slotlinks.class,
+        Slotrechte.class,
+        Sohlprobe.class,
+        Sohltest.class,
+        SprobeId.class,
+        Sprobe.class,
+        Ssiebung.class,
+        SsiebungsiebId.class,
+        Ssiebungsieb.class,
+        StationgewId.class,
+        Stationgew.class,
+        Station.class,
+        TmpGloChanged.class,
+        TmpMesAchanged.class,
+        TmpMesGchanged.class,
+        TmpMesQchanged.class,
+        TmpMesSchanged.class,
+        Zzarchiv.class,
+        Zzprobenahmeart.class,
+        Zzsondierungart.class,
+        Zzthema.class
+    };
+
+    private static Credentials instance;
+
+    public SedDBCredentials() {
+    }
+
+    public SedDBCredentials(
+        String user,
+        String password,
+        String dialect,
+        String driver,
+        String url
+    ) {
+        super(user, password, dialect, driver, url, CLASSES);
+    }
+
+    public static synchronized Credentials getInstance() {
+        if (instance == null) {
+            String user =
+                Config.getStringXPath(XPATH_USER, DEFAULT_USER);
+            String password =
+                Config.getStringXPath(XPATH_PASSWORD, DEFAULT_PASSWORD);
+            String dialect =
+                Config.getStringXPath(XPATH_DIALECT, DEFAULT_DIALECT);
+            String driver =
+                Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
+            String url =
+                Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+
+            instance = new SedDBCredentials(
+                user, password, dialect, driver, url);
+        }
+        return instance;
+    }
+
+    public static Credentials getDefault() {
+        return new SedDBCredentials(
+            DEFAULT_USER,
+            DEFAULT_PASSWORD,
+            DEFAULT_DIALECT,
+            DEFAULT_DRIVER,
+            DEFAULT_URL);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SedDBSessionHolder.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,43 @@
+package de.intevation.flys.backend;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+
+public class SedDBSessionHolder
+{
+    private static Logger log =
+        Logger.getLogger(SedDBSessionHolder.class);
+
+    public static final ThreadLocal<Session> HOLDER =
+        new ThreadLocal<Session>() {
+            @Override
+            protected Session initialValue() {
+                return create();
+            }
+        };
+
+    private SedDBSessionHolder() {
+    }
+
+    public synchronized static Session create() {
+        log.debug("create");
+        SessionFactory sessionFactory =
+            SessionFactoryProvider.getSedDBSessionFactory();
+        return sessionFactory.openSession();
+    }
+
+    public static Session acquire() {
+        log.debug("acquire");
+        Session session = create();
+        HOLDER.set(session);
+        return session;
+    }
+
+    public static void release() {
+        log.debug("release");
+        HOLDER.remove();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,170 @@
+package de.intevation.flys.backend;
+
+import java.lang.management.ManagementFactory;
+
+import java.util.Properties;
+
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.SessionFactory;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.Environment;
+
+import org.hibernate.impl.SessionFactoryImpl;
+
+import org.hibernate.jmx.StatisticsService;
+
+public final class SessionFactoryProvider
+{
+    private static Logger log = Logger.getLogger(SessionFactoryProvider.class);
+
+    public static final boolean ENABLE_JMX =
+        Boolean.getBoolean("flys.backend.enablejmx");
+
+    private static SessionFactory flysSessionFactory;
+    private static SessionFactory sedDBSessionFactory;
+
+    private SessionFactoryProvider() {
+    }
+
+    public static synchronized SessionFactory getSessionFactory() {
+        if (flysSessionFactory == null) {
+            flysSessionFactory =
+                createSessionFactory(FLYSCredentials.getInstance());
+        }
+        return flysSessionFactory;
+    }
+
+    public static SessionFactory createSessionFactory() {
+        return createSessionFactory(FLYSCredentials.getDefault());
+    }
+
+    public static synchronized SessionFactory getSedDBSessionFactory() {
+        if (sedDBSessionFactory == null) {
+            sedDBSessionFactory =
+                createSessionFactory(SedDBCredentials.getInstance());
+        }
+        return sedDBSessionFactory;
+    }
+
+    public static SessionFactory createSedDBSessionFactory() {
+        return createSessionFactory(SedDBCredentials.getDefault());
+    }
+
+    public static SessionFactory createSessionFactory(
+        Credentials credentials
+    ) {
+        Configuration cfg = createConfiguration(credentials);
+
+        SessionFactory factory = cfg.buildSessionFactory();
+
+        if (ENABLE_JMX) {
+            registerAsMBean(factory);
+        }
+        else {
+            log.info("No JMX support for hibernate.");
+        }
+
+        return factory;
+    }
+
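+    /**
+     * Registers Hibernate's StatisticsService for the given factory with
+     * the platform MBean server under "Hibernate:application=Statistics".
+     * Registration failures are only logged as warnings.
+     */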
+    public static void registerAsMBean(SessionFactory factory) {
+
+        StatisticsService statsMBean = new StatisticsService();
+        statsMBean.setSessionFactory(factory);
+        statsMBean.setStatisticsEnabled(true);
+
+        try {
+            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+            mbs.registerMBean(
+                statsMBean,
+                new ObjectName("Hibernate:application=Statistics"));
+
+            log.info("Enabled JMX support for hibernate.");
+        }
+        catch (MalformedObjectNameException mone) {
+            log.warn(mone, mone);
+        }
+        catch (InstanceAlreadyExistsException iaee) {
+            log.warn(iaee, iaee);
+        }
+        catch (MBeanRegistrationException mbre) {
+            log.warn(mbre, mbre);
+        }
+        catch (NotCompliantMBeanException ncmbe) {
+            log.warn(ncmbe, ncmbe);
+        }
+    }
+
+    public static Configuration createConfiguration() {
+        return createConfiguration(FLYSCredentials.getInstance());
+    }
+
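+    /**
+     * Builds a Hibernate Configuration from the given credentials: all
+     * annotated model classes are registered, and dialect, user, password,
+     * driver and URL are merged into the configuration. Connections come
+     * from DBCPConnectionProvider instead of Hibernate's built-in pool.
+     */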
+    public static Configuration createConfiguration(
+        Credentials credentials
+    ) {
+        Configuration cfg = new Configuration();
+
+        for (Class<?> clazz: credentials.getClasses()) {
+            cfg.addAnnotatedClass(clazz);
+        }
+
+        if (log.isDebugEnabled()) {
+            log.debug("user: "    + credentials.getUser());
+            log.debug("dialect: " + credentials.getDialect());
+            log.debug("driver: "  + credentials.getDriver());
+            log.debug("url: "     + credentials.getUrl());
+        }
+
+        Properties props = new Properties();
+
+        // We rely on our own connection pool
+        props.setProperty(
+            "hibernate.connection.provider_class",
+            "de.intevation.flys.utils.DBCPConnectionProvider");
+
+        props.setProperty(Environment.DIALECT, credentials.getDialect());
+        props.setProperty(Environment.USER,    credentials.getUser());
+        props.setProperty(Environment.PASS,    credentials.getPassword());
+        props.setProperty(Environment.DRIVER,  credentials.getDriver());
+        props.setProperty(Environment.URL,     credentials.getUrl());
+
+        cfg.mergeProperties(props);
+
+        return cfg;
+    }
+
+
+    public static String getProperty(SessionFactoryImpl factory, String key) {
+        Properties props = factory.getProperties();
+        return props.getProperty(key);
+    }
+
+    public static String getUser(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.USER);
+    }
+
+
+    public static String getPass(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.PASS);
+    }
+
+
+    public static String getURL(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.URL);
+    }
+
+
+    public static String getDriver(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.DRIVER);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SessionHolder.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,44 @@
+package de.intevation.flys.backend;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+
+
+public class SessionHolder
+{
+    private static Logger log =
+        Logger.getLogger(SessionHolder.class);
+
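+    // One Hibernate session per thread, opened lazily against the
+    // FLYS session factory on first access.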
+    public static final ThreadLocal<Session> HOLDER =
+        new ThreadLocal<Session>() {
+            @Override
+            protected Session initialValue() {
+                return create();
+            }
+        };
+
+    private SessionHolder() {
+    }
+
+    public static synchronized Session create() {
+        log.debug("create");
+        SessionFactory sessionFactory =
+            SessionFactoryProvider.getSessionFactory();
+        return sessionFactory.openSession();
+    }
+
+    public static Session acquire() {
+        log.debug("acquire");
+        Session session = create();
+        HOLDER.set(session);
+        return session;
+    }
+
+    public static void release() {
+        log.debug("release");
+        HOLDER.remove();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,165 @@
+package de.intevation.flys.backend;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import de.intevation.flys.model.Building;
+import de.intevation.flys.model.CrossSectionTrack;
+import de.intevation.flys.model.Fixpoint;
+import de.intevation.flys.model.Line;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.RiverAxis;
+
+
+public class SpatialInfo {
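+/**
+ * Small standalone tool that logs spatial information (river axis, cross
+ * section tracks, lines, buildings and fixpoints) for a single river.
+ * The river is selected via the system property
+ * "flys.backend.spatial.river" and defaults to "Saar".
+ */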
+
+    private static Logger logger = Logger.getLogger(SpatialInfo.class);
+
+    protected static final String RIVERNAME = System.getProperty(
+        "flys.backend.spatial.river", "Saar");
+
+    protected Session session;
+
+
+    public static void main(String[] args) {
+        logger.info("Start SpatialInfo application.");
+
+        SpatialInfo spatial = null;
+
+        try {
+            spatial = new SpatialInfo();
+
+            River river = spatial.getRiver(RIVERNAME);
+            if (river == null) {
+                logger.warn("Could not find river '" + RIVERNAME + "'!");
+                return;
+            }
+
+            logger.info("Spatial information of River '" + RIVERNAME + "'");
+            spatial.doRiverAxisInfo(river);
+            spatial.doCrossSectionTracksInfo(river);
+            spatial.doLinesInfo(river);
+            spatial.doBuildingsInfo(river);
+            spatial.doFixpointsInfo(river);
+        }
+        finally {
+            if (spatial != null) {
+                spatial.close();
+            }
+        }
+
+        logger.info("Finish SpatialInfo application.");
+    }
+
+
+    public SpatialInfo() {
+        session = SessionFactoryProvider
+            .createSessionFactory()
+            .openSession();
+    }
+
+
+    public void close() {
+        session.close();
+    }
+
+
+    protected River getRiver(String rivername) {
+        Query query = session.createQuery(
+            "from River where name =:name");
+        query.setParameter("name", rivername);
+
+        List<River> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No river '" + rivername + "' found!");
+            return null;
+        }
+
+        return list.get(0);
+    }
+
+
+    protected void doRiverAxisInfo(River river) {
+        List<RiverAxis> axis = RiverAxis.getRiverAxis(river.getName());
+        if (axis != null && axis.size() > 0) {
+            logger.debug("TODO: Compute length and boundary.");
+        }
+        else {
+            logger.warn("River has no RiverAxis.");
+        }
+    }
+
+
+    protected void doCrossSectionTracksInfo(River river) {
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river =:river");
+        query.setParameter("river", river);
+
+        List<CrossSectionTrack> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No CrossSectionTracks for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " CrossSectionTracks.");
+        }
+    }
+
+
+    protected void doLinesInfo(River river) {
+        Query query = session.createQuery(
+            "from Line where river =:river");
+        query.setParameter("river", river);
+
+        List<Line> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Lines for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Lines.");
+        }
+    }
+
+
+    protected void doBuildingsInfo(River river) {
+        Query query = session.createQuery(
+            "from Building where river =:river");
+        query.setParameter("river", river);
+
+        List<Building> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Buildings for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Buildings.");
+        }
+    }
+
+
+    protected void doFixpointsInfo(River river) {
+        Query query = session.createQuery(
+            "from Fixpoint where river =:river");
+        query.setParameter("river", river);
+
+        List<Fixpoint> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Fixpoints for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Fixpoints.");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Config.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,175 @@
+package de.intevation.flys.importer;
+
+public class Config
+{
+    public static final String SKIP_DEFAULT =
+        "flys.backend.importer.skip.default";
+
+    public static final String DRY_RUN =
+        "flys.backend.importer.dry.run";
+
+    public static final String INFO_GEW_FILE =
+        "flys.backend.importer.infogew.file";
+
+    public static final String ANNOTATION_TYPES =
+        "flys.backend.importer.annotation.types";
+
+    public static final String SKIP_GAUGES =
+        "flys.backend.importer.skip.gauges";
+
+    public static final String SKIP_ANNOTATIONS =
+        "flys.backend.importer.skip.annotations";
+
+    public static final String SKIP_PRFS =
+        "flys.backend.importer.skip.prfs";
+
+    public static final String SKIP_HYKS =
+        "flys.backend.importer.skip.hyks";
+
+    public static final String SKIP_WST =
+        "flys.backend.importer.skip.wst";
+
+    public static final String SKIP_EXTRA_WSTS =
+        "flys.backend.importer.skip.extra.wsts";
+
+    public static final String SKIP_FIXATIONS =
+        "flys.backend.importer.skip.fixations";
+
+    public static final String SKIP_OFFICIAL_LINES =
+        "flys.backend.importer.skip.official.lines";
+
+    public static final String SKIP_FLOOD_WATER =
+        "flys.backend.importer.skip.flood.water";
+
+    public static final String SKIP_FLOOD_PROTECTION =
+        "flys.backend.importer.skip.flood.protection";
+
+    public static final String SKIP_BED_HEIGHT_SINGLE =
+        "flys.backend.importer.skip.bed.height.single";
+
+    public static final String SKIP_BED_HEIGHT_EPOCH =
+        "flys.backend.importer.skip.bed.height.epoch";
+
+    public static final String SKIP_SEDIMENT_DENSITY =
+        "flys.backend.importer.skip.sediment.density";
+
+    public static final String SKIP_MORPHOLOGICAL_WIDTH =
+        "flys.backend.importer.skip.morphological.width";
+
+    public static final String SKIP_FLOW_VELOCITY =
+        "flys.backend.importer.skip.flow.velocity";
+
+    public static final String SKIP_SEDIMENT_YIELD =
+        "flys.backend.importer.skip.sediment.yield";
+
+    public static final String SKIP_WATERLEVELS =
+        "flys.backend.importer.skip.waterlevels";
+
+    public static final String SKIP_WATERLEVEL_DIFFERENCES =
+        "flys.backend.importer.skip.waterlevel.differences";
+
+    public static final String SKIP_SQ_RELATION =
+        "flys.backend.importer.skip.sq.relation";
+
+
+    public static final Config INSTANCE = new Config();
+
+    private Config () {
+    }
+
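+    /**
+     * Reads a boolean system property. If the property is not set at all,
+     * the value of SKIP_DEFAULT ("flys.backend.importer.skip.default") is
+     * used instead, so the default for all skip flags can be flipped with
+     * a single property.
+     */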
+    public static final boolean getFlag(String key) {
+        String flag = System.getProperty(key);
+        return flag != null
+            ? Boolean.valueOf(flag)
+            : Boolean.getBoolean(SKIP_DEFAULT);
+    }
+
+    public boolean dryRun() {
+        return getFlag(DRY_RUN);
+    }
+
+    public String getInfoGewFile() {
+        return System.getProperty(INFO_GEW_FILE);
+    }
+
+    public String getAnnotationTypes() {
+        return System.getProperty(ANNOTATION_TYPES);
+    }
+
+    public boolean skipGauges() {
+        return getFlag(SKIP_GAUGES);
+    }
+
+    public boolean skipAnnotations() {
+        return getFlag(SKIP_ANNOTATIONS);
+    }
+
+    public boolean skipPRFs() {
+        return getFlag(SKIP_PRFS);
+    }
+
+    public boolean skipHYKs() {
+        return getFlag(SKIP_HYKS);
+    }
+
+    public boolean skipWst() {
+        return getFlag(SKIP_WST);
+    }
+
+    public boolean skipExtraWsts() {
+        return getFlag(SKIP_EXTRA_WSTS);
+    }
+
+    public boolean skipFixations() {
+        return getFlag(SKIP_FIXATIONS);
+    }
+
+    public boolean skipOfficialLines() {
+        return getFlag(SKIP_OFFICIAL_LINES);
+    }
+
+    public boolean skipFloodWater() {
+        return getFlag(SKIP_FLOOD_WATER);
+    }
+
+    public boolean skipFloodProtection() {
+        return getFlag(SKIP_FLOOD_PROTECTION);
+    }
+
+    public boolean skipBedHeightSingle() {
+        return getFlag(SKIP_BED_HEIGHT_SINGLE);
+    }
+
+    public boolean skipBedHeightEpoch() {
+        return getFlag(SKIP_BED_HEIGHT_EPOCH);
+    }
+
+    public boolean skipSedimentDensity() {
+        return getFlag(SKIP_SEDIMENT_DENSITY);
+    }
+
+    public boolean skipMorphologicalWidth() {
+        return getFlag(SKIP_MORPHOLOGICAL_WIDTH);
+    }
+
+    public boolean skipFlowVelocity() {
+        return getFlag(SKIP_FLOW_VELOCITY);
+    }
+
+    public boolean skipSedimentYield() {
+        return getFlag(SKIP_SEDIMENT_YIELD);
+    }
+
+    public boolean skipWaterlevels() {
+        return getFlag(SKIP_WATERLEVELS);
+    }
+
+    public boolean skipWaterlevelDifferences() {
+        return getFlag(SKIP_WATERLEVEL_DIFFERENCES);
+    }
+
+    public boolean skipSQRelation() {
+        return getFlag(SKIP_SQ_RELATION);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAnnotation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Annotation;
+import de.intevation.flys.model.AnnotationType;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.Position;
+import de.intevation.flys.model.Attribute;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Edge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAnnotation
+implements   Comparable<ImportAnnotation>
+{
+    protected ImportAttribute      attribute;
+    protected ImportPosition       position;
+    protected ImportRange          range;
+    protected ImportEdge           edge;
+    protected ImportAnnotationType type;
+
+    protected Annotation      peer;
+
+    public ImportAnnotation() {
+    }
+
+    public ImportAnnotation(
+        ImportAttribute      attribute,
+        ImportPosition       position,
+        ImportRange          range,
+        ImportEdge           edge,
+        ImportAnnotationType type
+    ) {
+        this.attribute = attribute;
+        this.position  = position;
+        this.range     = range;
+        this.edge      = edge;
+        this.type      = type;
+    }
+
+    public int compareTo(ImportAnnotation other) {
+        int d = attribute.compareTo(other.attribute);
+        if (d != 0) {
+            return d;
+        }
+
+        if ((d = position.compareTo(other.position)) != 0) {
+            return d;
+        }
+
+        if ((d = range.compareTo(other.range)) != 0) {
+            return d;
+        }
+
+        if (edge == null && other.edge != null) return -1;
+        if (edge != null && other.edge == null) return +1;
+
+        // Both edges null: fall through to the type comparison.
+        if (edge != null && other.edge != null
+            && (d = edge.compareTo(other.edge)) != 0) {
+            return d;
+        }
+
+        if (type == null && other.type != null) return -1;
+        if (type != null && other.type == null) return +1;
+        if (type == null && other.type == null) return 0;
+
+        return type.compareTo(other.type);
+    }
+
+    public ImportAttribute getAttribute() {
+        return attribute;
+    }
+
+    public void setAttribute(ImportAttribute attribute) {
+        this.attribute = attribute;
+    }
+
+    public ImportPosition getPosition() {
+        return position;
+    }
+
+    public void setPosition(ImportPosition position) {
+        this.position = position;
+    }
+
+    public ImportRange getRange() {
+        return range;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public ImportEdge getEdge() {
+        return edge;
+    }
+
+    public void setEdge(ImportEdge edge) {
+        this.edge = edge;
+    }
+
+    public ImportAnnotationType getType() {
+        return type;
+    }
+
+    public void setType(ImportAnnotationType type) {
+        this.type = type;
+    }
+
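+    /**
+     * Looks up the matching Annotation in the database or creates and
+     * saves a new one. The result is cached in 'peer', so the database
+     * is only hit once per instance.
+     */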
+    public Annotation getPeer(River river) {
+        if (peer == null) {
+            Range          r = range.getPeer(river);
+            Attribute      a = attribute.getPeer();
+            Position       p = position.getPeer();
+            Edge           e = edge != null ? edge.getPeer() : null;
+            AnnotationType t = type != null ? type.getPeer() : null;
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Annotation where "    +
+                "range=:range and "         +
+                "attribute=:attribute and " +
+                "position=:position and "   +
+                "edge=:edge and "           +
+                "type=:type");
+            query.setParameter("range",     r);
+            query.setParameter("attribute", a);
+            query.setParameter("position",  p);
+            query.setParameter("edge",      e);
+            query.setParameter("type",      t);
+            List<Annotation> annotations = query.list();
+            if (annotations.isEmpty()) {
+                peer = new Annotation(r, a, p, e, t);
+                session.save(peer);
+            }
+            else {
+                peer = annotations.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAnnotationType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.AnnotationType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAnnotationType
+implements   Comparable<ImportAnnotationType>
+{
+    protected String         name;
+    protected AnnotationType peer;
+
+    public ImportAnnotationType() {
+    }
+
+    public ImportAnnotationType(String name) {
+        this.name = name;
+    }
+
+    public int compareTo(ImportAnnotationType other) {
+        return name.compareTo(other.name);
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    public AnnotationType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from AnnotationType where name=:name");
+            query.setParameter("name", name);
+            List<AnnotationType> types = query.list();
+            if (types.isEmpty()) {
+                peer = new AnnotationType(name);
+                session.save(peer);
+            }
+            else {
+                peer = types.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAttribute.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Attribute;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAttribute
+implements   Comparable<ImportAttribute>
+{
+    protected String value;
+
+    protected Attribute peer;
+
+    public ImportAttribute() {
+    }
+
+    public ImportAttribute(String value) {
+        this.value = value;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public int compareTo(ImportAttribute other) {
+        return value.compareTo(other.value);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) return true;
+        if (!(other instanceof ImportAttribute)) return false;
+        return value.equals(((ImportAttribute)other).value);
+    }
+
+    @Override
+    public int hashCode() {
+        return value.hashCode();
+    }
+
+    public Attribute getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Attribute where value=:value");
+            query.setString("value", value);
+            List<Attribute> attributes = query.list();
+            if (attributes.isEmpty()) {
+                peer = new Attribute(value);
+                session.save(peer);
+            }
+            else {
+                peer = attributes.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeight.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,44 @@
+package de.intevation.flys.importer;
+
+
+import java.sql.SQLException;
+
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+
+
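+/**
+ * Common interface of imported bed heights (single soundings and epochs).
+ * Implementations collect their values and store themselves together with
+ * all dependencies for a given river.
+ */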
+public interface ImportBedHeight {
+
+    String getDescription();
+
+    void addValue(ImportBedHeightValue value);
+
+    void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException;
+
+    Object getPeer(River river);
+
+    int getValueCount();
+
+    void setYear(int year);
+
+    void setTimeInterval(ImportTimeInterval timeInterval);
+
+    void setSoundingWidth(int soundingWidth);
+
+    void setDescription(String description);
+
+    void setEvaluationBy(String evaluationBy);
+
+    void setRange(ImportRange range);
+
+    void setType(ImportBedHeightType type);
+
+    void setLocationSystem(ImportLocationSystem locationSystem);
+
+    void setCurElevationModel(ImportElevationModel model);
+
+    void setOldElevationModel(ImportElevationModel model);
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,198 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.TimeInterval;
+
+
+public class ImportBedHeightEpoch implements ImportBedHeight
+{
+    private static Logger log = Logger.getLogger(ImportBedHeightEpoch.class);
+
+    protected String evaluationBy;
+    protected String description;
+
+    protected ImportTimeInterval   timeInterval;
+    protected ImportRange          range;
+    protected ImportElevationModel curElevationModel;
+    protected ImportElevationModel oldElevationModel;
+
+    protected List<ImportBedHeightEpochValue> values;
+
+    protected BedHeightEpoch peer;
+
+
+    public ImportBedHeightEpoch(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportBedHeightEpochValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+
+    public int getValueCount() {
+        return values.size();
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setCurElevationModel(ImportElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    public void setOldElevationModel(ImportElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    public void setYear(int year) {
+        // do nothing
+    }
+
+    public void setSoundingWidth(int soundingWidth) {
+        // do nothing
+    }
+
+    public void setLocationSystem(ImportLocationSystem locationSystem) {
+        // do nothing
+    }
+
+    public void setType(ImportBedHeightType type) {
+        // do nothing
+    }
+
+    @Override
+    public void addValue(ImportBedHeightValue value) {
+        values.add((ImportBedHeightEpochValue) value);
+    }
+
+
+    @Override
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("Store dependencies for epoch: '" + getDescription() + "'");
+
+        if (curElevationModel != null) {
+            curElevationModel.storeDependencies();
+        }
+
+        if (oldElevationModel != null) {
+            oldElevationModel.storeDependencies();
+        }
+
+        BedHeightEpoch peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this file is skipped.
+            return;
+        }
+
+        log.debug("store values now...");
+
+        for (ImportBedHeightEpochValue value: values) {
+            value.storeDependencies(peer);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+
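+    /**
+     * Looks up or creates the BedHeightEpoch peer for this import.
+     * Returns null (after logging a warning) if the current elevation
+     * model, the time range or the km range is missing; such files are
+     * skipped.
+     */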
+    @Override
+    public BedHeightEpoch getPeer(River river) {
+        if (peer == null) {
+            ElevationModel theCurModel = curElevationModel != null
+                ? curElevationModel.getPeer()
+                : null;
+
+            if (theCurModel == null) {
+                log.warn("BHE: Skip file - invalid current elevation model.");
+                return null;
+            }
+
+            TimeInterval theTime = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            if (theTime == null) {
+                log.warn("BHE: Skip file - invalid time range.");
+                return null;
+            }
+
+            Range theRange = range != null ? range.getPeer(river) : null;
+
+            if (theRange == null) {
+                log.warn("BHE: Skip file - invalid km range.");
+                return null;
+            }
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightEpoch where " +
+                "   river=:river and " +
+                "   timeInterval=:timeInterval and " +
+                "   curElevationModel=:curElevationModel and " +
+                "   range=:range and " +
+                "   evaluationBy=:evaluationBy and " +
+                "   description=:description");
+
+            query.setParameter("river", river);
+            query.setParameter("timeInterval", theTime);
+            query.setParameter("curElevationModel", theCurModel);
+            query.setParameter("range", theRange);
+            query.setParameter("evaluationBy", evaluationBy);
+            query.setParameter("description", description);
+
+            List<BedHeightEpoch> bedHeights = query.list();
+
+            if (bedHeights.isEmpty()) {
+                log.info("Create new BedHeightEpoch DB instance.");
+
+                peer = new BedHeightEpoch(
+                    river,
+                    theTime,
+                    theRange,
+                    theCurModel,
+                    oldElevationModel != null ? oldElevationModel.getPeer() : null,
+                    evaluationBy,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = bedHeights.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.BedHeightEpochValue;
+
+
+public class ImportBedHeightEpochValue implements ImportBedHeightValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightEpochValue.class);
+
+
+    private BigDecimal station;
+    private BigDecimal height;
+
+    private BedHeightEpochValue peer;
+
+
+    public ImportBedHeightEpochValue() {
+    }
+
+
+    public ImportBedHeightEpochValue(BigDecimal station, BigDecimal height) {
+        this.station = station;
+        this.height  = height;
+    }
+
+
+    public void storeDependencies(BedHeightEpoch bedHeight) {
+        getPeer(bedHeight);
+    }
+
+
+    public BedHeightEpochValue getPeer(BedHeightEpoch bedHeight) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightEpochValue where " +
+                "   bedHeight=:bedHeight and " +
+                "   station=:station and " +
+                "   height=:height");
+
+            query.setParameter("bedHeight", bedHeight);
+            query.setParameter("station", station);
+            query.setParameter("height", height);
+
+            List<BedHeightEpochValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new BedHeightEpochValue(
+                    bedHeight,
+                    station,
+                    height
+                );
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,189 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightType;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+
+public class ImportBedHeightSingle implements ImportBedHeight
+{
+    private static Logger log = Logger.getLogger(ImportBedHeightSingle.class);
+
+    protected int year;
+    protected int soundingWidth;
+
+    protected String evaluationBy;
+    protected String description;
+
+    protected ImportRange          range;
+    protected ImportBedHeightType  type;
+    protected ImportLocationSystem locationSystem;
+    protected ImportElevationModel curElevationModel;
+    protected ImportElevationModel oldElevationModel;
+
+    protected List<ImportBedHeightSingleValue> values;
+
+    protected BedHeightSingle peer;
+
+
+    public ImportBedHeightSingle(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportBedHeightSingleValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+    public int getValueCount() {
+        return values.size();
+    }
+
+
+    public void setYear(int year) {
+        this.year = year;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        // do nothing
+    }
+
+    public void setSoundingWidth(int soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setType(ImportBedHeightType type) {
+        this.type = type;
+    }
+
+    public void setLocationSystem(ImportLocationSystem locationSystem) {
+        this.locationSystem = locationSystem;
+    }
+
+    public void setCurElevationModel(ImportElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    public void setOldElevationModel(ImportElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @Override
+    public void addValue(ImportBedHeightValue value) {
+        values.add((ImportBedHeightSingleValue) value);
+    }
+
+    @Override
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("Store dependencies for single: '" + getDescription() + "'");
+
+        if (type != null) {
+            type.storeDependencies();
+        }
+
+        if (locationSystem != null) {
+            locationSystem.storeDependencies();
+        }
+
+        if (curElevationModel != null) {
+            curElevationModel.storeDependencies();
+        }
+
+        if (oldElevationModel != null) {
+            oldElevationModel.storeDependencies();
+        }
+
+        BedHeightSingle peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this file is skipped.
+            return;
+        }
+
+        for (ImportBedHeightSingleValue value: values) {
+            value.storeDependencies(peer);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+    @Override
+    public BedHeightSingle getPeer(River river) {
+        if (peer == null) {
+            BedHeightType  theType     = type != null ? type.getPeer() : null;
+            ElevationModel theCurModel = curElevationModel != null
+                ? curElevationModel.getPeer()
+                : null;
+            Range          theRange    = range != null ? range.getPeer(river) : null;
+
+            if (theType == null || theCurModel == null || theRange == null) {
+                log.warn("BHS: Skip invalid file '" + description + "'");
+                return null;
+            }
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightSingle where " +
+                "river=:river and year=:year and soundingWidth=:soundingWidth " +
+                "and type=:type and locationSystem=:locationSystem and " +
+                "curElevationModel=:curElevationModel and range=:range");
+
+            query.setParameter("river", river);
+            query.setParameter("year", year);
+            query.setParameter("soundingWidth", soundingWidth);
+            query.setParameter("type", theType);
+            query.setParameter("locationSystem", locationSystem.getPeer());
+            query.setParameter("curElevationModel", theCurModel);
+            query.setParameter("range", range.getPeer(river));
+
+            List<BedHeightSingle> bedHeights = query.list();
+            if (bedHeights.isEmpty()) {
+                log.info("Create new BedHeightSingle DB instance.");
+
+                peer = new BedHeightSingle(
+                    river,
+                    year,
+                    soundingWidth,
+                    theType,
+                    locationSystem.getPeer(),
+                    theCurModel,
+                    oldElevationModel != null ? oldElevationModel.getPeer() : null,
+                    theRange,
+                    evaluationBy,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = bedHeights.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightSingleValue;
+
+
+public class ImportBedHeightSingleValue implements ImportBedHeightValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightSingleValue.class);
+
+
+    protected ImportBedHeightSingle bedHeight;
+
+    protected BigDecimal station;
+    protected BigDecimal height;
+    protected BigDecimal uncertainty;
+    protected BigDecimal dataGap;
+    protected BigDecimal soundingWidth;
+    protected BigDecimal width;
+
+    protected BedHeightSingleValue peer;
+
+
+    public ImportBedHeightSingleValue(
+        ImportBedHeightSingle bedHeight,
+        BigDecimal station,
+        BigDecimal height,
+        BigDecimal uncertainty,
+        BigDecimal dataGap,
+        BigDecimal soundingWidth,
+        BigDecimal width
+    ) {
+        this.bedHeight     = bedHeight;
+        this.station       = station;
+        this.height        = height;
+        this.uncertainty   = uncertainty;
+        this.dataGap       = dataGap;
+        this.soundingWidth = soundingWidth;
+        this.width         = width;
+    }
+
+
+    public void storeDependencies(BedHeightSingle bedHeight) {
+        getPeer(bedHeight);
+    }
+
+
+    public BedHeightSingleValue getPeer(BedHeightSingle bedHeight) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightSingleValue where " +
+                "   bedHeight=:bedHeight and " +
+                "   station=:station and " +
+                "   height=:height and " +
+                "   uncertainty=:uncertainty and " +
+                "   dataGap=:dataGap and " +
+                "   soundingWidth=:soundingWidth and " +
+                "   width=:width");
+
+            query.setParameter("bedHeight", bedHeight);
+            query.setParameter("station", station);
+            query.setParameter("height", height);
+            query.setParameter("uncertainty", uncertainty);
+            query.setParameter("dataGap", dataGap);
+            query.setParameter("soundingWidth", soundingWidth);
+            query.setParameter("width", width);
+
+            List<BedHeightSingleValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new BedHeightSingleValue(
+                    bedHeight,
+                    station,
+                    height,
+                    uncertainty,
+                    dataGap,
+                    soundingWidth,
+                    width
+                );
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightType;
+
+
+public class ImportBedHeightType {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightType.class);
+
+    protected String name;
+    protected String description;
+
+    protected BedHeightType peer;
+
+
+    public ImportBedHeightType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+
+    public void storeDependencies() {
+        getPeer();
+    }
+
+
+    public BedHeightType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightType where " +
+                "name=:name and description=:description");
+
+            query.setParameter("name", name);
+            query.setParameter("description", description);
+
+            List<BedHeightType> types = query.list();
+
+            if (types.isEmpty()) {
+                peer = new BedHeightType(name, description);
+                session.save(peer);
+            }
+            else {
+                peer = types.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,7 @@
+package de.intevation.flys.importer;
+
+
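+/** Marker interface for values of single and epoch bed heights. */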
+public interface ImportBedHeightValue {
+
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSection.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+public class ImportCrossSection
+{
+    private static Logger log = Logger.getLogger(ImportCrossSection.class);
+
+    protected ImportRiver                  river;
+    protected String                       description;
+    protected ImportTimeInterval           timeInterval;
+    protected List<ImportCrossSectionLine> lines;
+
+    protected CrossSection peer;
+
+    public ImportCrossSection() {
+    }
+
+    public ImportCrossSection(
+        ImportRiver                  river,
+        String                       description,
+        ImportTimeInterval           timeInterval,
+        List<ImportCrossSectionLine> lines
+    ) {
+        this.river        = river;
+        this.description  = description;
+        this.timeInterval = timeInterval;
+        this.lines        = lines;
+        wireWithLines();
+    }
+
+    public void wireWithLines() {
+        for (ImportCrossSectionLine line: lines) {
+            line.setCrossSection(this);
+        }
+    }
+
+    public ImportRiver getRiver() {
+        return river;
+    }
+
+    public void setRiver(ImportRiver river) {
+        this.river = river;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    public void storeDependencies() {
+
+        log.info("store cross section '" + description + "'");
+
+        getPeer();
+
+        int i = 1, N = lines.size();
+
+        for (ImportCrossSectionLine line: lines) {
+            line.storeDependencies();
+            log.info("  stored " + i + " lines. remaining: " + (N-i));
+            ++i;
+        }
+    }
+
+    public CrossSection getPeer() {
+
+        if (peer == null) {
+            River r = river.getPeer();
+            TimeInterval t = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            Session session =
+                ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from CrossSection where " +
+                "river=:r and "            +
+                "timeInterval=:t and "     +
+                "description=:d");
+
+            query.setParameter("r", r);
+            query.setParameter("t", t);
+            query.setParameter("d", description);
+
+            List<CrossSection> crossSections = query.list();
+            if (crossSections.isEmpty()) {
+                peer = new CrossSection(r, t, description);
+                session.save(peer);
+            }
+            else {
+                peer = crossSections.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,118 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.CrossSectionPoint;
+import de.intevation.flys.model.CrossSectionLine;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.Comparator;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class ImportCrossSectionLine
+{
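+    /** Orders cross section points by their column position. */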
+    public static final Comparator<CrossSectionPoint> INDEX_CMP =
+        new Comparator<CrossSectionPoint>() {
+            public int compare(CrossSectionPoint a, CrossSectionPoint b) {
+                return a.getColPos().compareTo(b.getColPos());
+            }
+        };
+
+    protected Double             km;
+    protected ImportCrossSection crossSection;
+    protected List<XY>           points;
+
+    protected CrossSectionLine peer;
+
+    public ImportCrossSectionLine() {
+    }
+
+    public ImportCrossSectionLine(Double km, List<XY> points) {
+        this.km     = km;
+        this.points = points;
+    }
+
+    public ImportCrossSection getCrossSection() {
+        return crossSection;
+    }
+
+    public void setCrossSection(ImportCrossSection crossSection) {
+        this.crossSection = crossSection;
+    }
+
+    public Double getKm() {
+        return km;
+    }
+
+    public void setKm(Double km) {
+        this.km = km;
+    }
+
+    public void storeDependencies() {
+        storePoints();
+    }
+
+    protected void storePoints() {
+        CrossSectionLine csl = getPeer();
+
+        Map<CrossSectionPoint, CrossSectionPoint> map =
+            new TreeMap<CrossSectionPoint, CrossSectionPoint>(INDEX_CMP);
+
+        // build index for faster collision lookup
+        List<CrossSectionPoint> ps = csl.getPoints();
+        if (ps != null) {
+            for (CrossSectionPoint point: ps) {
+                map.put(point, point);
+            }
+        }
+
+        Session session =
+            ImporterSession.getInstance().getDatabaseSession();
+
+        CrossSectionPoint key = new CrossSectionPoint();
+
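+        // 'key' is reused for every lookup; INDEX_CMP compares points by
+        // column position only, so only key.colPos needs to be set.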
+        for (XY xy: points) {
+            key.setColPos(xy.getIndex());
+            CrossSectionPoint csp = map.get(key);
+            if (csp == null) { // create new
+                csp = new CrossSectionPoint(
+                    csl, key.getColPos(),
+                    Double.valueOf(xy.getX()),
+                    Double.valueOf(xy.getY()));
+            }
+            else { // update old
+                csp.setX(Double.valueOf(xy.getX()));
+                csp.setY(Double.valueOf(xy.getY()));
+            }
+            session.save(csp);
+        }
+    }
+
+    public CrossSectionLine getPeer() {
+        if (peer == null) {
+            CrossSection cs = crossSection.getPeer();
+
+            Session session =
+                ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from CrossSectionLine where crossSection=:cs and km=:km");
+            query.setParameter("cs", cs);
+            query.setParameter("km", km);
+
+            List<CrossSectionLine> lines = query.list();
+            if (lines.isEmpty()) {
+                peer = new CrossSectionLine(cs, km);
+                session.save(peer);
+            }
+            else {
+                peer = lines.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDepth.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Depth;
+
+
+public class ImportDepth {
+
+    private static Logger log = Logger.getLogger(ImportDepth.class);
+
+
+    protected Depth peer;
+
+    protected BigDecimal lower;
+    protected BigDecimal upper;
+
+    protected ImportUnit unit;
+
+
+    public ImportDepth(BigDecimal lower, BigDecimal upper, ImportUnit unit) {
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+
+    public void storeDependencies() {
+        log.info("store dependencies");
+
+        getPeer();
+    }
+
+
+    public Depth getPeer() {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from Depth where " +
+                "   lower=:lower and " +
+                "   upper=:upper and " +
+                "   unit=:unit");
+
+            query.setParameter("lower", lower);
+            query.setParameter("upper", upper);
+            query.setParameter("unit", unit.getPeer());
+
+            List<Depth> depths = query.list();
+
+            if (depths.isEmpty()) {
+                log.debug("Create new Depth DB instance.");
+
+                peer = new Depth(lower, upper, unit.getPeer());
+
+                session.save(peer);
+            }
+            else {
+                peer = depths.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeTable.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.TimeInterval;
+
+import org.apache.log4j.Logger;
+
+public class ImportDischargeTable
+{
+    private static Logger log = Logger.getLogger(ImportDischargeTable.class);
+
+    protected DischargeTable peer;
+
+    protected String         description;
+
+    protected Integer        kind;
+
+    protected List<ImportDischargeTableValue> dischargeTableValues;
+
+    protected ImportTimeInterval timeInterval;
+
+    public ImportDischargeTable() {
+        this(0, null);
+    }
+
+    public ImportDischargeTable(int kind, String description) {
+        this.kind            = kind;
+        this.description     = description;
+        dischargeTableValues = new ArrayList<ImportDischargeTableValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addDischargeTableValue(ImportDischargeTableValue value) {
+        dischargeTableValues.add(value);
+    }
+
+
+    public void setDischargeTableValues(List<ImportDischargeTableValue> values){
+        this.dischargeTableValues = values;
+    }
+
+
+    public List<ImportDischargeTableValue> getDischargeTableValues() {
+        return dischargeTableValues;
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
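+    /**
+     * Finds the discharge table for the given gauge, kind, description
+     * and time interval, or creates and saves a new one.
+     */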
+    public DischargeTable getPeer(Gauge gauge) {
+        if (peer == null) {
+            TimeInterval ti = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from DischargeTable where " +
+                "gauge.id=:gauge and kind=:kind and " +
+                "description=:description and timeInterval=:interval");
+            query.setParameter("gauge",       gauge.getId());
+            query.setParameter("description", description);
+            query.setParameter("kind",        kind);
+            query.setParameter("interval",    ti);
+
+            List<DischargeTable> dischargeTables = query.list();
+            if (dischargeTables.isEmpty()) {
+                peer = new DischargeTable(gauge, description, kind, ti);
+                session.save(peer);
+            }
+            else {
+                peer = dischargeTables.get(0);
+            }
+        }
+
+        return peer;
+    }
+
+
+    public void storeDependencies(Gauge gauge) {
+        log.info("store discharge table '" + description + "'");
+        storeDischargeTableValues(gauge);
+    }
+
+
+    public void storeDischargeTableValues(Gauge gauge) {
+        DischargeTable dischargeTable = getPeer(gauge);
+
+        for (ImportDischargeTableValue value: dischargeTableValues) {
+            value.getPeer(dischargeTable);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
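A minimal usage sketch for the class above, assuming an open ImporterSession and an already persisted Gauge instance `gauge`; all literal values are invented:

    ImportDischargeTable table = new ImportDischargeTable(0, "at file XYZ");
    table.addDischargeTableValue(new ImportDischargeTableValue(
        new BigDecimal("10.0"),     // Q
        new BigDecimal("123.4")));  // W
    // storeDependencies() resolves the DischargeTable peer via getPeer(gauge)
    // (find or create) and then persists every value through ImporterSession.
    table.storeDependencies(gauge);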
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,34 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.DischargeTableValue;
+
+
+public class ImportDischargeTableValue
+{
+    private BigDecimal q;
+    private BigDecimal w;
+
+    private DischargeTableValue peer;
+
+    public ImportDischargeTableValue() {
+    }
+
+
+    public ImportDischargeTableValue(BigDecimal q, BigDecimal w) {
+        this.q = q;
+        this.w = w;
+    }
+
+
+    public DischargeTableValue getPeer(DischargeTable dischargeTable) {
+        if (peer == null) {
+            peer = ImporterSession.getInstance()
+                .getDischargeTableValue(dischargeTable, q, w);
+        }
+
+        return peer;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeZone.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,90 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.River;
+
+
+public class ImportDischargeZone {
+
+    private static final Logger log =
+        Logger.getLogger(ImportDischargeZone.class);
+
+
+    private String gaugeName;
+
+    private BigDecimal value;
+
+    private String lowerDischarge;
+    private String upperDischarge;
+
+    private DischargeZone peer;
+
+
+    public ImportDischargeZone(
+        String     gaugeName,
+        BigDecimal value,
+        String     lowerDischarge,
+        String     upperDischarge
+    ) {
+        this.gaugeName      = gaugeName;
+        this.value          = value;
+        this.lowerDischarge = lowerDischarge;
+        this.upperDischarge = upperDischarge;
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        getPeer(river);
+    }
+
+
+    public DischargeZone getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from DischargeZone where " +
+                "   river=:river and " +
+                "   gaugeName=:gaugeName and " +
+                "   value=:value"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("gaugeName", gaugeName);
+            query.setParameter("value", value);
+
+            List<DischargeZone> zone = query.list();
+
+            if (zone.isEmpty()) {
+                peer = new DischargeZone(
+                    river,
+                    gaugeName,
+                    value,
+                    lowerDischarge,
+                    upperDischarge);
+
+                session.save(peer);
+            }
+            else {
+                peer = zone.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportEdge.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Edge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+public class ImportEdge
+implements   Comparable<ImportEdge>
+{
+    protected BigDecimal top;
+    protected BigDecimal bottom;
+
+    protected Edge peer;
+
+    public ImportEdge() {
+    }
+
+    public ImportEdge(BigDecimal top, BigDecimal bottom) {
+        this.top    = top;
+        this.bottom = bottom;
+    }
+
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
+    private static final int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b != null) return -1;
+        if (a != null && b == null) return +1;
+        if (a == null && b == null) return  0;
+        return a.compareTo(b);
+    }
+
+    public int compareTo(ImportEdge other) {
+        int cmp = compare(top, other.top);
+        return cmp != 0 ? cmp : compare(bottom, other.bottom);
+    }
+
+    public Edge getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Edge where top=:top and bottom=:bottom");
+            query.setParameter("top", top);
+            query.setParameter("bottom", bottom);
+            List<Edge> edges = query.list();
+            if (edges.isEmpty()) {
+                peer = new Edge(top, bottom);
+                session.save(peer);
+            }
+            else {
+                peer = edges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportElevationModel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.ElevationModel;
+
+
+public class ImportElevationModel {
+
+    private static final Logger log =
+        Logger.getLogger(ImportElevationModel.class);
+
+    protected String name;
+
+    protected ImportUnit unit;
+
+    protected ElevationModel peer;
+
+
+    public ImportElevationModel(String name, ImportUnit unit) {
+        this.name = name;
+        this.unit = unit;
+    }
+
+
+    public void storeDependencies() {
+        getPeer();
+    }
+
+    public ElevationModel getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from ElevationModel where " +
+                "name=:name and unit=:unit");
+            query.setParameter("name", name);
+            query.setParameter("unit", unit.getPeer());
+            List<ElevationModel> models = query.list();
+
+            if (models.isEmpty()) {
+                log.info("Create new ElevationModel DB instance.");
+
+                peer = new ElevationModel(name, unit.getPeer());
+                session.save(peer);
+            }
+            else {
+                peer = models.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,92 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.River;
+
+
+public class ImportFlowVelocityMeasurement {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityMeasurement.class);
+
+
+    private String description;
+
+    private List<ImportFlowVelocityMeasurementValue> values;
+
+    private FlowVelocityMeasurement peer;
+
+
+    public ImportFlowVelocityMeasurement() {
+        this(null);
+    }
+
+
+    public ImportFlowVelocityMeasurement(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportFlowVelocityMeasurementValue>();
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addValue(ImportFlowVelocityMeasurementValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        FlowVelocityMeasurement measurement = getPeer(river);
+
+        for (ImportFlowVelocityMeasurementValue value: values) {
+            value.storeDependencies(measurement);
+        }
+    }
+
+
+    public FlowVelocityMeasurement getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityMeasurement where " +
+                "   river=:river and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("description", description);
+
+            List<FlowVelocityMeasurement> measurement = query.list();
+
+            if (measurement.isEmpty()) {
+                peer = new FlowVelocityMeasurement(river, description);
+
+                session.save(peer);
+            }
+            else {
+                peer = measurement.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,100 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.FlowVelocityMeasurementValue;
+
+
+public class ImportFlowVelocityMeasurementValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityMeasurementValue.class);
+
+
+    private Date datetime;
+
+    private String description;
+
+    private BigDecimal station;
+    private BigDecimal w;
+    private BigDecimal q;
+    private BigDecimal v;
+
+    private FlowVelocityMeasurementValue peer;
+
+
+    public ImportFlowVelocityMeasurementValue(
+        Date       datetime,
+        BigDecimal station,
+        BigDecimal w,
+        BigDecimal q,
+        BigDecimal v,
+        String     description
+    ) {
+        this.datetime    = datetime;
+        this.station     = station;
+        this.w           = w;
+        this.q           = q;
+        this.v           = v;
+        this.description = description;
+    }
+
+
+
+    public void storeDependencies(FlowVelocityMeasurement measurement)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        getPeer(measurement);
+    }
+
+
+    public FlowVelocityMeasurementValue getPeer(FlowVelocityMeasurement m) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityMeasurementValue where " +
+                "   measurement=:measurement and " +
+                "   station=:station and " +
+                "   datetime=:datetime"
+            );
+
+            query.setParameter("measurement", m);
+            query.setParameter("station", station);
+            query.setParameter("datetime", datetime);
+
+            List<FlowVelocityMeasurementValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new FlowVelocityMeasurementValue(
+                    m,
+                    datetime,
+                    station,
+                    w,
+                    q,
+                    v,
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,118 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.River;
+
+
+public class ImportFlowVelocityModel {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityModel.class);
+
+
+    private String description;
+
+    private ImportDischargeZone dischargeZone;
+
+    private List<ImportFlowVelocityModelValue> values;
+
+    private FlowVelocityModel peer;
+
+
+    public ImportFlowVelocityModel() {
+        values = new ArrayList<ImportFlowVelocityModelValue>();
+    }
+
+
+    public ImportFlowVelocityModel(
+        ImportDischargeZone dischargeZone,
+        String              description
+    ) {
+        this();
+
+        this.dischargeZone = dischargeZone;
+        this.description   = description;
+    }
+
+
+    public void setDischargeZone(ImportDischargeZone dischargeZone) {
+        this.dischargeZone = dischargeZone;
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addValue(ImportFlowVelocityModelValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        if (dischargeZone == null) {
+            log.warn("skip flow velocity model: No discharge zone specified.");
+            return;
+        }
+
+        dischargeZone.storeDependencies(river);
+
+        FlowVelocityModel model = getPeer(river);
+
+        int i = 0;
+
+        for (ImportFlowVelocityModelValue value: values) {
+            value.storeDependencies(model);
+            i++;
+        }
+
+        log.info("stored " + i + " flow velocity model values.");
+    }
+
+
+    public FlowVelocityModel getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            DischargeZone zone = dischargeZone.getPeer(river);
+
+            Query query = session.createQuery(
+                "from FlowVelocityModel where " +
+                "   river=:river and " +
+                "   dischargeZone=:dischargeZone"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("dischargeZone", zone);
+
+            List<FlowVelocityModel> model = query.list();
+
+            if (model.isEmpty()) {
+                peer = new FlowVelocityModel(river, zone);
+                session.save(peer);
+            }
+            else {
+                peer = model.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
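A minimal sketch of how a flow velocity model and its values might be assembled and persisted, assuming a persisted River `river`, invented gauge and discharge names, and exception handling left out:

    ImportDischargeZone zone = new ImportDischargeZone(
        "KOELN", new BigDecimal("2000"), "MQ", "HQ5");
    ImportFlowVelocityModel model =
        new ImportFlowVelocityModel(zone, "example flow velocity model");
    model.addValue(new ImportFlowVelocityModelValue(
        new BigDecimal("663.5"),   // station
        new BigDecimal("2000"),    // Q
        new BigDecimal("1.42"),    // total channel
        new BigDecimal("1.60"),    // main channel
        new BigDecimal("3.1")));   // shear stress
    model.storeDependencies(river); // stores zone, model and all values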
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,77 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.FlowVelocityModelValue;
+
+
+public class ImportFlowVelocityModelValue {
+
+    private BigDecimal station;
+    private BigDecimal q;
+    private BigDecimal totalChannel;
+    private BigDecimal mainChannel;
+    private BigDecimal shearStress;
+
+    private FlowVelocityModelValue peer;
+
+
+    public ImportFlowVelocityModelValue(
+        BigDecimal station,
+        BigDecimal q,
+        BigDecimal totalChannel,
+        BigDecimal mainChannel,
+        BigDecimal shearStress
+    ) {
+        this.station      = station;
+        this.q            = q;
+        this.totalChannel = totalChannel;
+        this.mainChannel  = mainChannel;
+        this.shearStress  = shearStress;
+    }
+
+
+    public void storeDependencies(FlowVelocityModel model)
+    throws SQLException, ConstraintViolationException
+    {
+        getPeer(model);
+    }
+
+
+    public FlowVelocityModelValue getPeer(FlowVelocityModel model) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityModelValue where " +
+                "   flowVelocity=:model and " +
+                "   station=:station"
+            );
+
+            query.setParameter("model", model);
+            query.setParameter("station", station);
+
+            List<FlowVelocityModelValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new FlowVelocityModelValue(
+                    model, station, q, totalChannel, mainChannel, shearStress);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportGauge.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,268 @@
+package de.intevation.flys.importer;
+
+import java.io.File;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Gauge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.io.IOException;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.parsers.AtFileParser;
+import de.intevation.flys.importer.parsers.StaFileParser;
+
+public class ImportGauge
+{
+    private static Logger log = Logger.getLogger(ImportGauge.class);
+
+    public static final String HISTORICAL_DISCHARGE_TABLES =
+        "Histor.Abflusstafeln";
+
+    protected ImportRange range;
+
+    protected File        staFile;
+    protected File        atFile;
+
+    protected String      name;
+    protected BigDecimal  aeo;
+    protected BigDecimal  datum;
+    protected BigDecimal  station;
+    protected Long        officialNumber;
+
+    protected Gauge  peer;
+
+    protected ImportDischargeTable dischargeTable;
+
+    protected List<ImportMainValueType>  mainValueTypes;
+    protected List<ImportNamedMainValue> namedMainValues;
+    protected List<ImportMainValue>      mainValues;
+    protected List<ImportDischargeTable> historicalDischargeTables;
+
+    public ImportGauge() {
+        historicalDischargeTables = new ArrayList<ImportDischargeTable>();
+    }
+
+    public ImportGauge(ImportRange range, File staFile, File atFile) {
+        this();
+        this.range   = range;
+        this.staFile = staFile;
+        this.atFile  = atFile;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setStaFile(File staFile) {
+        this.staFile = staFile;
+    }
+
+    public File getStaFile() {
+        return staFile;
+    }
+
+    public void setAtFile(File atFile) {
+        this.atFile = atFile;
+    }
+
+    public File getAtFile() {
+        return atFile;
+    }
+
+    public BigDecimal getAeo() {
+        return aeo;
+    }
+
+    public void setAeo(BigDecimal aeo) {
+        this.aeo = aeo;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public BigDecimal getDatum() {
+        return datum;
+    }
+
+    public void setDatum(BigDecimal datum) {
+        this.datum = datum;
+    }
+
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    public Long getOfficialNumber() {
+        return officialNumber;
+    }
+
+    public void setOfficialNumber(Long officialNumber) {
+        this.officialNumber = officialNumber;
+    }
+
+    public ImportDischargeTable getDischargeTable() {
+        return dischargeTable;
+    }
+
+    public void setDischargeTable(ImportDischargeTable dischargeTable) {
+        this.dischargeTable = dischargeTable;
+    }
+
+    public List<ImportMainValueType> getMainValueTypes() {
+        return mainValueTypes;
+    }
+
+    public void setMainValueTypes(List<ImportMainValueType> mainValueTypes) {
+        this.mainValueTypes = mainValueTypes;
+    }
+
+    public List<ImportNamedMainValue> getNamedMainValues() {
+        return namedMainValues;
+    }
+
+    public void setNamedMainValues(List<ImportNamedMainValue> namedMainValues) {
+        this.namedMainValues = namedMainValues;
+    }
+
+    public List<ImportMainValue> getMainValues() {
+        return mainValues;
+    }
+
+    public void setMainValues(List<ImportMainValue> mainValues) {
+        this.mainValues = mainValues;
+    }
+
+    public void parseDependencies() throws IOException {
+        StaFileParser sfp = new StaFileParser();
+        sfp.parse(this);
+
+        AtFileParser afp = new AtFileParser();
+        setDischargeTable(afp.parse(getAtFile()));
+        parseHistoricalDischargeTables();
+    }
+
+    public void parseHistoricalDischargeTables() throws IOException {
+        log.info("parse historical discharge tables");
+
+        File riverDir = atFile.getParentFile().getParentFile();
+
+        File histDischargeDir = FileTools.repair(
+            new File(riverDir, HISTORICAL_DISCHARGE_TABLES));
+
+        if (!histDischargeDir.isDirectory() || !histDischargeDir.canRead()) {
+            log.info("cannot find '" + histDischargeDir + "'");
+            return;
+        }
+
+        histDischargeDir = FileTools.repair(
+            new File(histDischargeDir, getName()));
+
+        if (!histDischargeDir.isDirectory() || !histDischargeDir.canRead()) {
+            log.info("cannot find '" + histDischargeDir + "'");
+            return;
+        }
+
+        File [] files = histDischargeDir.listFiles();
+
+        if (files == null) {
+            log.info("cannot read directory '" + histDischargeDir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!name.endsWith(".at")) {
+                continue;
+            }
+            log.info("found at file '" + file.getName() + "'");
+
+            AtFileParser afp = new AtFileParser();
+            historicalDischargeTables.add(
+                afp.parse(file, HISTORICAL_DISCHARGE_TABLES + "/", 1));
+        }
+    }
+
+    public void storeDependencies(River river) {
+
+        Gauge gauge = getPeer(river);
+
+        log.info("store main value types");
+        for (ImportMainValueType mainValueType: mainValueTypes) {
+            mainValueType.getPeer();
+        }
+
+        log.info("store named main values");
+        for (ImportNamedMainValue namedMainValue: namedMainValues) {
+            namedMainValue.getPeer();
+        }
+
+        log.info("store main values");
+        for (ImportMainValue mainValue: mainValues) {
+            mainValue.getPeer(river);
+        }
+
+        storeDischargeTable(gauge);
+        storeHistoricalDischargeTable(gauge);
+    }
+
+    public void storeDischargeTable(Gauge gauge) {
+        log.info("store discharge table");
+        dischargeTable.getPeer(gauge);
+        dischargeTable.storeDependencies(gauge);
+    }
+
+    public void storeHistoricalDischargeTable(Gauge gauge) {
+        log.info("store historical discharge tables");
+        for (ImportDischargeTable hdt: historicalDischargeTables) {
+            hdt.storeDependencies(gauge);
+        }
+    }
+
+    public Gauge getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Gauge where name=:name " +
+                "and river.id=:river");
+            query.setString("name", name);
+            query.setParameter("river", river.getId());
+            List<Gauge> gauges = query.list();
+            if (gauges.isEmpty()) {
+                peer = new Gauge(
+                    name, river,
+                    station, aeo, datum,
+                    officialNumber,
+                    range.getPeer(river));
+                session.save(peer);
+            }
+            else {
+                peer = gauges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
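A minimal sketch of the gauge import lifecycle, assuming a directory `gaugeDir` with STA/AT files, a persisted River `river`, and invented file and gauge names; IOException handling is omitted:

    ImportGauge gauge = new ImportGauge(
        new ImportRange(new BigDecimal("639.0"), new BigDecimal("703.3")),
        new File(gaugeDir, "KOELN.sta"),
        new File(gaugeDir, "KOELN.at"));
    gauge.parseDependencies();      // STA, AT and historical AT files
    gauge.storeDependencies(river); // main values and discharge tables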
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportGrainFraction.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,89 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.Unit;
+
+
+public class ImportGrainFraction {
+
+    private static final Logger log =
+        Logger.getLogger(ImportGrainFraction.class);
+
+    private String name;
+
+    private Double lower;
+    private Double upper;
+
+    private ImportUnit unit;
+
+    private GrainFraction peer;
+
+
+    public ImportGrainFraction(String name) {
+        this.name = name;
+    }
+
+
+    public ImportGrainFraction(
+        String     name,
+        Double     lower,
+        Double     upper,
+        ImportUnit unit
+    ) {
+        this.name  = name;
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+
+    public void storeDependencies() {
+        log.debug("store dependencies");
+
+        getPeer();
+    }
+
+
+    public GrainFraction getPeer() {
+        log.debug("get peer");
+
+        Unit u = unit != null ? unit.getPeer() : null;
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from GrainFraction where " +
+                "   name=:name and " +
+                "   lower=:lower and " +
+                "   upper=:upper and " +
+                "   unit=:unit"
+            );
+
+            query.setParameter("name", name);
+            query.setParameter("lower", lower);
+            query.setParameter("upper", upper);
+            query.setParameter("unit", u);
+
+            List<GrainFraction> fractions = query.list();
+            if (fractions.isEmpty()) {
+                log.info("create new GrainFraction");
+
+                peer = new GrainFraction(name, lower, upper, u);
+                session.save(peer);
+            }
+            else {
+                peer = fractions.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYK.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,80 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYK;
+import de.intevation.flys.model.River;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+public class ImportHYK
+{
+    private static Logger log = Logger.getLogger(ImportHYK.class);
+
+    protected ImportRiver river;
+    protected String      description;
+
+    protected List<ImportHYKEntry> entries;
+
+    protected HYK peer;
+
+    public ImportHYK() {
+        entries = new ArrayList<ImportHYKEntry>();
+    }
+
+    public ImportHYK(ImportRiver river, String description) {
+        this();
+        this.river       = river;
+        this.description = description;
+    }
+
+    public ImportRiver getRiver() {
+        return river;
+    }
+
+    public void setRiver(ImportRiver river) {
+        this.river = river;
+    }
+
+    public void addEntry(ImportHYKEntry entry) {
+        entries.add(entry);
+        entry.setHYK(this);
+    }
+
+    public void storeDependencies() {
+        log.info("store HYK '" + description + "'");
+        getPeer();
+        for (int i = 0, N = entries.size(); i < N; ++i) {
+            ImportHYKEntry entry = entries.get(i);
+            log.info("  store km " + entry.getKm() +
+                " (" + (i+1) + " of " + N + ")");
+            entry.storeDependencies();
+        }
+    }
+
+    public HYK getPeer() {
+        if (peer == null) {
+            River r = river.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYK where river=:river and description=:description");
+            query.setParameter("river", r);
+            query.setParameter("description", description);
+            List<HYK> hyks = query.list();
+            if (hyks.isEmpty()) {
+                peer = new HYK(r, description);
+                session.save(peer);
+            }
+            else {
+                peer = hyks.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKEntry.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKEntry;
+import de.intevation.flys.model.HYK;
+
+import java.util.Date;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.math.BigDecimal;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportHYKEntry
+{
+    protected ImportHYK  hyk;
+    protected BigDecimal km;
+    protected Date       measure;
+
+    protected List<ImportHYKFormation> formations;
+
+    protected HYKEntry peer;
+
+    public ImportHYKEntry() {
+        formations = new ArrayList<ImportHYKFormation>();
+    }
+
+    public ImportHYKEntry(
+        ImportHYK  hyk,
+        BigDecimal km,
+        Date       measure
+    ) {
+        this();
+        this.hyk     = hyk;
+        this.km      = km;
+        this.measure = measure;
+    }
+
+    public ImportHYK getHYK() {
+        return hyk;
+    }
+
+    public void setHYK(ImportHYK hyk) {
+        this.hyk = hyk;
+    }
+
+    public BigDecimal getKm() {
+        return km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+    public void addFormation(ImportHYKFormation formation) {
+        int numFormation = formations.size();
+        formations.add(formation);
+        formation.setFormationNum(numFormation);
+        formation.setEntry(this);
+    }
+
+    public void storeDependencies() {
+        getPeer();
+        for (ImportHYKFormation formation: formations) {
+            formation.storeDependencies();
+        }
+    }
+
+    public HYKEntry getPeer() {
+        if (peer == null) {
+            HYK h = hyk.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKEntry where HYK=:hyk " +
+                "and km=:km and measure=:measure");
+            query.setParameter("hyk", h);
+            query.setParameter("km", km);
+            query.setParameter("measure", measure);
+            List<HYKEntry> entries = query.list();
+            if (entries.isEmpty()) {
+                peer = new HYKEntry(h, km, measure);
+                session.save(peer);
+            }
+            else {
+                peer = entries.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,76 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.HYKFlowZone;
+import de.intevation.flys.model.HYKFlowZoneType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+public class ImportHYKFlowZone
+{
+    protected ImportHYKFormation    formation;
+    protected ImportHYKFlowZoneType type;
+    protected BigDecimal            a;
+    protected BigDecimal            b;
+
+    protected HYKFlowZone peer;
+
+    public ImportHYKFlowZone() {
+    }
+
+    public ImportHYKFlowZone(
+        ImportHYKFormation    formation,
+        ImportHYKFlowZoneType type,
+        BigDecimal            a,
+        BigDecimal            b
+    ) {
+        this.formation = formation;
+        this.type      = type;
+        this.a         = a;
+        this.b         = b;
+    }
+
+    public ImportHYKFormation getFormation() {
+        return formation;
+    }
+
+    public void setFormation(ImportHYKFormation formation) {
+        this.formation = formation;
+    }
+
+    public void storeDependencies() {
+        getPeer();
+    }
+
+    public HYKFlowZone getPeer() {
+        if (peer == null) {
+            HYKFormation    f = formation.getPeer();
+            HYKFlowZoneType t = type.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFlowZone where formation=:formation " +
+                "and type=:type and a=:a and b=:b");
+            query.setParameter("formation", f);
+            query.setParameter("type", t);
+            query.setParameter("a", a);
+            query.setParameter("b", b);
+            List<HYKFlowZone> zones = query.list();
+            if (zones.isEmpty()) {
+                peer = new HYKFlowZone(f, t, a, b);
+                session.save(peer);
+            }
+            else {
+                peer = zones.get(0);
+            }
+
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFlowZoneType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,41 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFlowZoneType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportHYKFlowZoneType
+{
+    private String          name;
+    private HYKFlowZoneType peer;
+
+    public ImportHYKFlowZoneType() {
+    }
+
+    public ImportHYKFlowZoneType(String name) {
+        this.name = name;
+    }
+
+    public HYKFlowZoneType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFlowZoneType where name=:name");
+            query.setParameter("name", name);
+            List<HYKFlowZoneType> flowZoneTypes = query.list();
+            if (flowZoneTypes.isEmpty()) {
+                peer = new HYKFlowZoneType(name);
+                session.save(peer);
+            }
+            else {
+                peer = flowZoneTypes.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFormation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,150 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.HYKEntry;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.math.BigDecimal;
+
+public class ImportHYKFormation
+{
+    protected int            formationNum;
+    protected ImportHYKEntry entry;
+    protected BigDecimal     top;
+    protected BigDecimal     bottom;
+    protected BigDecimal     distanceVL;
+    protected BigDecimal     distanceHF;
+    protected BigDecimal     distanceVR;
+
+    protected List<ImportHYKFlowZone> zones;
+
+    protected HYKFormation peer;
+
+    public ImportHYKFormation() {
+        zones = new ArrayList<ImportHYKFlowZone>();
+    }
+
+    public ImportHYKFormation(
+        int            formationNum,
+        ImportHYKEntry entry,
+        BigDecimal     top,
+        BigDecimal     bottom,
+        BigDecimal     distanceVL,
+        BigDecimal     distanceHF,
+        BigDecimal     distanceVR
+    ) {
+        this();
+        this.formationNum = formationNum;
+        this.entry        = entry;
+        this.top          = top;
+        this.bottom       = bottom;
+        this.distanceVL   = distanceVL;
+        this.distanceHF   = distanceHF;
+        this.distanceVR   = distanceVR;
+    }
+
+    public void addFlowZone(ImportHYKFlowZone zone) {
+        zones.add(zone);
+        zone.setFormation(this);
+    }
+
+    public int getFormationNum() {
+        return formationNum;
+    }
+
+    public void setFormationNum(int formationNum) {
+        this.formationNum = formationNum;
+    }
+
+    public ImportHYKEntry getEntry() {
+        return entry;
+    }
+
+    public void setEntry(ImportHYKEntry entry) {
+        this.entry = entry;
+    }
+
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
+    public BigDecimal getDistanceVL() {
+        return distanceVL;
+    }
+
+    public void setDistanceVL(BigDecimal distanceVL) {
+        this.distanceVL = distanceVL;
+    }
+
+    public BigDecimal getDistanceHF() {
+        return distanceHF;
+    }
+
+    public void setDistanceHF(BigDecimal distanceHF) {
+        this.distanceHF = distanceHF;
+    }
+
+    public BigDecimal getDistanceVR() {
+        return distanceVR;
+    }
+
+    public void setDistanceVR(BigDecimal distanceVR) {
+        this.distanceVR = distanceVR;
+    }
+
+    public void storeDependencies() {
+        getPeer();
+        for (ImportHYKFlowZone zone: zones) {
+            zone.storeDependencies();
+        }
+    }
+
+    public HYKFormation getPeer() {
+        if (peer == null) {
+            HYKEntry e = entry.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFormation where formationNum=:formationNum " +
+                "and entry=:entry and top=:top and bottom=:bottom " +
+                "and distanceVL=:distanceVL and distanceHF=:distanceHF " +
+                "and distanceVR=:distanceVR");
+            query.setParameter("formationNum", formationNum);
+            query.setParameter("entry", e);
+            query.setParameter("top", top);
+            query.setParameter("bottom", bottom);
+            query.setParameter("distanceVL", distanceVL);
+            query.setParameter("distanceHF", distanceHF);
+            query.setParameter("distanceVR", distanceVR);
+            List<HYKFormation> formations = query.list();
+            if (formations.isEmpty()) {
+                peer = new HYKFormation(
+                    formationNum, e, top, bottom,
+                    distanceVL, distanceHF, distanceVR);
+                session.save(peer);
+            }
+            else {
+                peer = formations.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
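A minimal sketch of how the HYK import classes fit together, assuming an ImportRiver `importRiver`, a Date `measured`, and an invented flow zone type name; all numbers are made up:

    ImportHYK hyk = new ImportHYK(importRiver, "example.hyk");
    ImportHYKEntry entry =
        new ImportHYKEntry(hyk, new BigDecimal("663.3"), measured);
    hyk.addEntry(entry);

    ImportHYKFormation formation = new ImportHYKFormation(
        0, entry,
        new BigDecimal("663.2"), new BigDecimal("663.4"),  // top, bottom
        new BigDecimal("120.0"),                           // distance VL
        new BigDecimal("240.0"),                           // distance HF
        new BigDecimal("360.0"));                          // distance VR
    entry.addFormation(formation); // (re)assigns the formation number

    formation.addFlowZone(new ImportHYKFlowZone(
        formation, new ImportHYKFlowZoneType("V"),
        new BigDecimal("120.0"), new BigDecimal("180.0")));

    hyk.storeDependencies(); // persists HYK, entries, formations and zones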
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportLocationSystem.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.LocationSystem;
+
+
+public class ImportLocationSystem {
+
+    private static final Logger log =
+        Logger.getLogger(ImportLocationSystem.class);
+
+
+    protected String name;
+    protected String description;
+
+    protected LocationSystem peer;
+
+
+    public ImportLocationSystem(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    public void storeDependencies() {
+        log.info("store LocationSystem '" + name + "'");
+        getPeer();
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+    public LocationSystem getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from LocationSystem where " +
+                "name=:name and description=:description");
+            query.setParameter("name", name);
+            query.setParameter("description", description);
+
+            List<LocationSystem> lss = query.list();
+            if (lss.isEmpty()) {
+                peer = new LocationSystem(name, description);
+                session.save(peer);
+            }
+            else {
+                peer = lss.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMainValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,83 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.MainValue;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.NamedMainValue;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportMainValue
+{
+    protected ImportGauge          gauge;
+    protected ImportNamedMainValue mainValue;
+    protected BigDecimal           value;
+
+    protected MainValue peer;
+
+    public ImportMainValue() {
+    }
+
+    public ImportMainValue(
+        ImportGauge          gauge,
+        ImportNamedMainValue mainValue,
+        BigDecimal           value
+    ) {
+        this.gauge     = gauge;
+        this.mainValue = mainValue;
+        this.value     = value;
+    }
+
+    public ImportGauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(ImportGauge gauge) {
+        this.gauge = gauge;
+    }
+
+    public ImportNamedMainValue getMainValue() {
+        return mainValue;
+    }
+
+    public void setMainValue(ImportNamedMainValue mainValue) {
+        this.mainValue = mainValue;
+    }
+
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
+    public MainValue getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from MainValue where "
+                + "gauge.id=:gauge_id and mainValue.id=:name_id "
+                + "and value=:value");
+            Gauge          g = gauge.getPeer(river);
+            NamedMainValue n = mainValue.getPeer();
+            query.setParameter("gauge_id", g.getId());
+            query.setParameter("name_id",  n.getId());
+            query.setParameter("value",    value);
+            List<MainValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new MainValue(g, n, value, null);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMainValueType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.MainValueType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportMainValueType
+implements   Comparable<ImportMainValueType>
+{
+    protected String name;
+
+    protected MainValueType peer;
+
+    public ImportMainValueType() {
+    }
+
+    public ImportMainValueType(String name) {
+        this.name = name;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public int compareTo(ImportMainValueType other) {
+        return name.compareTo(other.name);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) return true;
+        if (!(other instanceof ImportMainValueType)) return false;
+        return name.equals(((ImportMainValueType)other).name);
+    }
+
+    @Override
+    public int hashCode() {
+        return name.hashCode();
+    }
+
+    public MainValueType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from MainValueType where name=:name");
+            query.setString("name", name);
+            List<MainValueType> values = query.list();
+            if (values.isEmpty()) {
+                peer = new MainValueType(name);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidth.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,91 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.River;
+
+
+public class ImportMorphWidth {
+
+    private static Logger log = Logger.getLogger(ImportMorphWidth.class);
+
+
+    protected MorphologicalWidth peer;
+
+    protected ImportUnit unit;
+
+    protected List<ImportMorphWidthValue> values;
+
+
+    public ImportMorphWidth() {
+        this.values = new ArrayList<ImportMorphWidthValue>();
+    }
+
+
+    public void addValue(ImportMorphWidthValue value) {
+        this.values.add(value);
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        MorphologicalWidth width = getPeer(river);
+
+        log.info("store morphological width values");
+
+        for (ImportMorphWidthValue value: values) {
+            value.storeDependencies(width);
+        }
+    }
+
+
+    public MorphologicalWidth getPeer(River river) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from MorphologicalWidth where " +
+                "   river=:river and " +
+                "   unit=:unit");
+
+            query.setParameter("river", river);
+            query.setParameter("unit", unit.getPeer());
+
+            List<MorphologicalWidth> widths = query.list();
+
+            if (widths.isEmpty()) {
+                log.debug("Create new MorphologicalWidth DB instance.");
+
+                peer = new MorphologicalWidth(river, unit.getPeer());
+
+                session.save(peer);
+            }
+            else {
+                peer = widths.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.MorphologicalWidthValue;
+
+
+public class ImportMorphWidthValue {
+
+    private static Logger log = Logger.getLogger(ImportMorphWidthValue.class);
+
+
+    protected MorphologicalWidthValue peer;
+
+    protected BigDecimal station;
+    protected BigDecimal width;
+
+    protected String description;
+
+
+    public ImportMorphWidthValue(
+        BigDecimal station,
+        BigDecimal width,
+        String     description
+    ) {
+        this.station     = station;
+        this.width       = width;
+        this.description = description;
+    }
+
+
+    public void storeDependencies(MorphologicalWidth parent) {
+        getPeer(parent);
+    }
+
+
+    public MorphologicalWidthValue getPeer(MorphologicalWidth parent) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from MorphologicalWidthValue where " +
+                "   morphologicalWidth=:morphologicalWidth and " +
+                "   station=:station and " +
+                "   width=:width and " +
+                "   description=:description");
+
+            query.setParameter("morphologicalWidth", parent);
+            query.setParameter("station", station);
+            query.setParameter("width", width);
+            query.setParameter("description", description);
+
+            List<MorphologicalWidthValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new MorphologicalWidthValue(
+                    parent,
+                    station,
+                    width,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,66 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import de.intevation.flys.model.NamedMainValue;
+import de.intevation.flys.model.MainValueType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportNamedMainValue
+{
+    protected ImportMainValueType mainValueType;
+    protected String              name;
+
+    protected NamedMainValue      peer;
+
+    public ImportNamedMainValue() {
+    }
+
+    public ImportNamedMainValue(
+        ImportMainValueType mainValueType,
+        String              name
+    ) {
+        this.mainValueType = mainValueType;
+        this.name          = name;
+    }
+
+    public ImportMainValueType getMainValueType() {
+        return mainValueType;
+    }
+
+    public void setMainValueType(ImportMainValueType mainValueType) {
+        this.mainValueType = mainValueType;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public NamedMainValue getPeer() {
+        if (peer == null) {
+            MainValueType type = mainValueType.getPeer();
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from NamedMainValue where " +
+                "name=:name and type.id=:id");
+            query.setString("name", name);
+            query.setParameter("id", type.getId());
+            List<NamedMainValue> named = query.list();
+            if (named.isEmpty()) {
+                peer = new NamedMainValue(name, type);
+                session.save(peer);
+            }
+            else {
+                peer = named.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
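A minimal sketch of the main value chain, assuming an ImportGauge `importGauge` and a persisted River `river`; type, name and value are invented:

    ImportMainValueType  type  = new ImportMainValueType("Q");
    ImportNamedMainValue named = new ImportNamedMainValue(type, "MQ");
    ImportMainValue value =
        new ImportMainValue(importGauge, named, new BigDecimal("2090.0"));
    // getPeer(river) resolves gauge, named value and type and
    // finds or creates the corresponding MainValue row.
    value.getPeer(river);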
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportPosition.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Position;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportPosition
+implements   Comparable<ImportPosition>
+{
+    protected String value;
+
+    protected Position peer;
+
+    public ImportPosition() {
+    }
+
+    public ImportPosition(String value) {
+        this.value = value;
+    }
+
+    public int compareTo(ImportPosition other) {
+        return value.compareTo(other.value);
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public Position getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Position where value=:value");
+            query.setString("value", value);
+            List<Position> positions = query.list();
+            if (positions.isEmpty()) {
+                peer = new Position(value);
+                session.save(peer);
+            }
+            else {
+                peer = positions.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,70 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportRange
+implements   Comparable<ImportRange>
+{
+    private static Logger log = Logger.getLogger(ImportRange.class);
+
+    protected BigDecimal a;
+    protected BigDecimal b;
+
+    protected Range peer;
+
+    public ImportRange() {
+    }
+
+    public ImportRange(BigDecimal a, BigDecimal b) {
+        this.a = a;
+        this.b = b;
+    }
+
+    private static final int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b == null) {
+            return 0;
+        }
+        if (a == null && b != null) {
+            return -1;
+        }
+        if (a != null && b == null) {
+            return +1;
+        }
+        return a.compareTo(b);
+    }
+
+    public int compareTo(ImportRange other) {
+        int cmp = compare(a, other.a);
+        if (cmp != 0) return cmp;
+        return compare(b, other.b);
+    }
+
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+
+    public Range getPeer(River river) {
+        if (peer == null) {
+            peer = ImporterSession.getInstance().getRange(river, a, b);
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,1298 @@
+package de.intevation.flys.importer;
+
+import de.intevation.artifacts.common.utils.FileTools.HashedFile;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.parsers.AnnotationClassifier;
+import de.intevation.flys.importer.parsers.AnnotationsParser;
+import de.intevation.flys.importer.parsers.BedHeightEpochParser;
+import de.intevation.flys.importer.parsers.BedHeightSingleParser;
+import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser;
+import de.intevation.flys.importer.parsers.FlowVelocityModelParser;
+import de.intevation.flys.importer.parsers.HYKParser;
+import de.intevation.flys.importer.parsers.MorphologicalWidthParser;
+import de.intevation.flys.importer.parsers.PRFParser;
+import de.intevation.flys.importer.parsers.PegelGltParser;
+import de.intevation.flys.importer.parsers.SedimentDensityParser;
+import de.intevation.flys.importer.parsers.SedimentYieldParser;
+import de.intevation.flys.importer.parsers.SQRelationParser;
+import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
+import de.intevation.flys.importer.parsers.WaterlevelParser;
+import de.intevation.flys.importer.parsers.WstParser;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.sql.SQLException;
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import org.hibernate.exception.ConstraintViolationException;
+
+public class ImportRiver
+{
+    private static Logger log = Logger.getLogger(ImportRiver.class);
+
+    public static final String PEGEL_GLT = "PEGEL.GLT";
+
+    public static final String FIXATIONS = "Fixierungen";
+
+    public static final String EXTRA_LONGITUDINALS =
+        "Zus.L\u00e4ngsschnitte";
+
+    public static final String [] OFFICIAL_LINES_FOLDERS = {
+        "Basisdaten",
+        "Fixierungen" };
+
+    public static final String OFFICIAL_LINES =
+        "Amtl_Linien.wst";
+
+    public static final String FLOOD_WATER = "HW-Marken";
+
+    public static final String FLOOD_PROTECTION =
+        "HW-Schutzanlagen";
+
+    public static final String MINFO_DIR = "Morphologie";
+
+    public static final String BED_HEIGHT_DIR = "Sohlhoehen";
+
+    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";
+
+    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";
+
+    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";
+
+    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";
+
+    public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";
+
+    public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";
+
+    public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";
+
+    public static final String SEDIMENT_YIELD_DIR = "Fracht";
+
+    public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";
+
+    public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";
+
+    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";
+
+    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";
+
+    public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";
+
+    public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung";
+
+
+    protected String name;
+
+    protected File   wstFile;
+
+    protected File   bbInfoFile;
+
+    protected List<ImportGauge> gauges;
+
+    protected List<ImportAnnotation> annotations;
+
+    protected List<ImportHYK> hyks;
+
+    protected List<ImportCrossSection> crossSections;
+
+    protected List<ImportWst> extraWsts;
+
+    protected List<ImportWst> fixations;
+
+    protected List<ImportWst> officialLines;
+
+    protected List<ImportWst> floodWater;
+
+    protected List<ImportWst> floodProtection;
+
+    protected List<ImportBedHeight> bedHeightSingles;
+
+    protected List<ImportBedHeight> bedHeightEpochs;
+
+    protected List<ImportSedimentDensity> sedimentDensities;
+
+    protected List<ImportMorphWidth> morphologicalWidths;
+
+    protected List<ImportFlowVelocityModel> flowVelocityModels;
+
+    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;
+
+    protected List<ImportSedimentYield> sedimentYields;
+
+    protected List<ImportWaterlevel> waterlevels;
+
+    protected List<ImportWaterlevelDifference> waterlevelDiffs;
+
+    protected List<ImportSQRelation> sqRelations;
+
+    protected ImportWst wst;
+
+    protected ImportUnit wstUnit;
+
+    protected AnnotationClassifier annotationClassifier;
+
+    protected River peer;
+
+    public ImportRiver() {
+        hyks                      = new ArrayList<ImportHYK>();
+        crossSections             = new ArrayList<ImportCrossSection>();
+        extraWsts                 = new ArrayList<ImportWst>();
+        fixations                 = new ArrayList<ImportWst>();
+        officialLines             = new ArrayList<ImportWst>();
+        floodWater                = new ArrayList<ImportWst>();
+        floodProtection           = new ArrayList<ImportWst>();
+        sedimentDensities         = new ArrayList<ImportSedimentDensity>();
+        morphologicalWidths       = new ArrayList<ImportMorphWidth>();
+        flowVelocityModels        = new ArrayList<ImportFlowVelocityModel>();
+        flowVelocityMeasurements  = new ArrayList<ImportFlowVelocityMeasurement>();
+        sedimentYields            = new ArrayList<ImportSedimentYield>();
+        waterlevels               = new ArrayList<ImportWaterlevel>();
+        waterlevelDiffs           = new ArrayList<ImportWaterlevelDifference>();
+        sqRelations               = new ArrayList<ImportSQRelation>();
+    }
+
+    public ImportRiver(
+        String               name,
+        File                 wstFile,
+        File                 bbInfoFile,
+        AnnotationClassifier annotationClassifier
+    ) {
+        this();
+        this.name                 = name;
+        this.wstFile              = wstFile;
+        this.bbInfoFile           = bbInfoFile;
+        this.annotationClassifier = annotationClassifier;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public File getWstFile() {
+        return wstFile;
+    }
+
+    public void setWstFile(File wstFile) {
+        this.wstFile = wstFile;
+    }
+
+    public File getBBInfo() {
+        return bbInfoFile;
+    }
+
+    public void setBBInfo(File bbInfoFile) {
+        this.bbInfoFile = bbInfoFile;
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
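+    /** The WST file lives in Hydrologie/Basisdaten below the river directory, so three parent steps lead back to it. */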
+    public File getMinfoDir() {
+        File riverDir  = wstFile.getParentFile().getParentFile().getParentFile();
+        return new File(riverDir, MINFO_DIR);
+    }
+
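+    /** Parses all data files belonging to this river; each step can be skipped individually via Config flags. */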
+    public void parseDependencies() throws IOException {
+        parseGauges();
+        parseAnnotations();
+        parsePRFs();
+        parseHYKs();
+        parseWst();
+        parseExtraWsts();
+        parseFixations();
+        parseOfficialLines();
+        parseFloodWater();
+        parseFloodProtection();
+        parseBedHeight();
+        parseSedimentDensity();
+        parseMorphologicalWidth();
+        parseFlowVelocity();
+        parseSedimentYield();
+        parseWaterlevels();
+        parseWaterlevelDifferences();
+        parseSQRelation();
+    }
+
+    public void parseFloodProtection() throws IOException {
+        if (Config.INSTANCE.skipFloodProtection()) {
+            log.info("skip parsing flood protection");
+            return;
+        }
+
+        log.info("Parse flood protection wst file");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));
+
+        if (!dir.isDirectory() || !dir.canRead()) {
+            log.info("no directory '" + dir + "' found");
+            return;
+        }
+
+        File [] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read '" + dir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.info("found file '" + file.getName() + "'");
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(5);
+            iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription());
+            floodProtection.add(iw);
+        }
+    }
+
+
+    public void parseBedHeight() throws IOException {
+        File minfoDir     = getMinfoDir();
+        File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
+        File singlesDir   = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
+        File epochDir     = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR);
+
+        if (Config.INSTANCE.skipBedHeightSingle()) {
+            log.info("skip parsing bed height single.");
+        }
+        else {
+            log.info("Parse bed height single.");
+            parseBedHeightSingles(singlesDir);
+        }
+
+        if (Config.INSTANCE.skipBedHeightEpoch()) {
+            log.info("skip parsing bed height epochs.");
+        }
+        else {
+            log.info("Parse bed height epochs.");
+            parseBedHeightEpochs(epochDir);
+        }
+    }
+
+
+    protected void parseSedimentDensity() throws IOException {
+        if (Config.INSTANCE.skipSedimentDensity()) {
+            log.info("skip parsing sediment density.");
+            return;
+        }
+
+        log.debug("Parse sediment density");
+
+        File minfoDir = getMinfoDir();
+        File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);
+
+        File[] files = sediment.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + sediment + "'");
+            return;
+        }
+
+        SedimentDensityParser parser = new SedimentDensityParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        sedimentDensities = parser.getSedimentDensities();
+
+        log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
+    }
+
+
+    protected void parseMorphologicalWidth() throws IOException {
+        if (Config.INSTANCE.skipMorphologicalWidth()) {
+            log.info("skip parsing morphological width.");
+            return;
+        }
+
+        log.debug("Parse morphological width");
+
+        File minfoDir = getMinfoDir();
+        File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);
+
+        File[] files = morphDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + morphDir + "'");
+            return;
+        }
+
+        MorphologicalWidthParser parser = new MorphologicalWidthParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        morphologicalWidths = parser.getMorphologicalWidths();
+
+        log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
+    }
+
+
+    protected void parseFlowVelocity() throws IOException {
+        if (Config.INSTANCE.skipFlowVelocity()) {
+            log.info("skip parsing flow velocity");
+            return;
+        }
+
+        log.debug("Parse flow velocity");
+
+        File minfoDir   = getMinfoDir();
+        File flowDir    = new File(minfoDir, FLOW_VELOCITY_DIR);
+        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
+        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);
+
+        File[] modelFiles   = modelDir.listFiles();
+        File[] measureFiles = measureDir.listFiles();
+
+        if (modelFiles == null) {
+            log.warn("Cannot read directory '" + modelDir + "'");
+        }
+        else {
+            FlowVelocityModelParser parser = new FlowVelocityModelParser();
+
+            for (File model: modelFiles) {
+                log.debug("Parse file '" + model + "'");
+                parser.parse(model);
+            }
+
+            flowVelocityModels = parser.getModels();
+        }
+
+        if (measureFiles == null) {
+            log.warn("Cannot read directory '" + measureDir + "'");
+        }
+        else {
+            FlowVelocityMeasurementParser parser =
+                new FlowVelocityMeasurementParser();
+
+            for (File measurement: measureFiles) {
+                log.debug("Parse file '" + measurement + "'");
+                parser.parse(measurement);
+            }
+
+            flowVelocityMeasurements = parser.getMeasurements();
+        }
+    }
+
+
+    protected void parseSedimentYield() throws IOException {
+        if (Config.INSTANCE.skipSedimentYield()) {
+            log.info("skip parsing sediment yield data");
+            return;
+        }
+
+        log.debug("Parse sediment yield data");
+
+        File minfoDir         = getMinfoDir();
+        File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);
+
+        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
+        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+
+        File[] singles = singleDir.listFiles();
+        File[] epochs  = epochDir.listFiles();
+
+        SedimentYieldParser parser = new SedimentYieldParser();
+
+        if (singles == null || singles.length == 0) {
+            log.warn("Cannot read directory '" + singleDir + "'");
+        }
+        else {
+            for (File file: singles) {
+                if (file.isDirectory()) {
+                    for (File child: file.listFiles()) {
+                        parser.parse(child);
+                    }
+                }
+                else {
+                    parser.parse(file);
+                }
+            }
+        }
+
+        if (epochs == null || epochs.length == 0) {
+            log.warn("Cannot read directory '" + epochDir + "'");
+        }
+        else {
+            for (File file: epochs) {
+                if (file.isDirectory()) {
+                    for (File child: file.listFiles()) {
+                        parser.parse(child);
+                    }
+                }
+                else {
+                    parser.parse(file);
+                }
+            }
+        }
+
+        sedimentYields = parser.getSedimentYields();
+    }
+
+
+    protected void parseWaterlevels() throws IOException {
+        if (Config.INSTANCE.skipWaterlevels()) {
+            log.info("skip parsing waterlevels");
+            return;
+        }
+
+        log.info("Parse waterlevels");
+
+        File minfo  = getMinfoDir();
+        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);
+
+        File[] files = wspDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + wspDir + "'");
+            return;
+        }
+
+        WaterlevelParser parser = new WaterlevelParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        waterlevels = parser.getWaterlevels();
+    }
+
+
+    protected void parseWaterlevelDifferences() throws IOException {
+        if (Config.INSTANCE.skipWaterlevelDifferences()) {
+            log.info("skip parsing waterlevel differences");
+            return;
+        }
+
+        log.info("Parse waterlevel differences");
+
+        File minfo  = getMinfoDir();
+        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);
+
+        File[] files = diffDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + diffDir + "'");
+            return;
+        }
+
+        WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        waterlevelDiffs = parser.getDifferences();
+    }
+
+
+    protected void parseSQRelation() throws IOException {
+        if (Config.INSTANCE.skipSQRelation()) {
+            log.info("skip parsing sq relation");
+            return;
+        }
+
+        log.info("Parse sq relations");
+
+        File minfo = getMinfoDir();
+        File sqDir = new File(minfo, MINFO_SQ_DIR);
+
+        File[] files = sqDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + sqDir + "'");
+            return;
+        }
+
+        SQRelationParser parser = new SQRelationParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        sqRelations = parser.getSQRelations();
+
+        log.debug("Parsed " + sqRelations.size() + " SQ relations.");
+    }
+
+
+    protected void parseBedHeightSingles(File dir) throws IOException {
+        log.debug("Parse bed height singles");
+
+        File[] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + dir + "'");
+            return;
+        }
+
+        BedHeightSingleParser parser = new BedHeightSingleParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        bedHeightSingles = parser.getBedHeights();
+    }
+
+
+    protected void parseBedHeightEpochs(File dir) throws IOException {
+        log.debug("Parse bed height epochs");
+
+        File[] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + dir + "'");
+            return;
+        }
+
+        BedHeightEpochParser parser = new BedHeightEpochParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        bedHeightEpochs = parser.getBedHeights();
+    }
+
+
+    public void parseFloodWater() throws IOException {
+        if (Config.INSTANCE.skipFloodWater()) {
+            log.info("skip parsing flod water");
+            return;
+        }
+
+        log.info("Parse flood water wst file");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));
+
+        if (!dir.isDirectory() || !dir.canRead()) {
+            log.info("no directory '" + dir + "' found");
+            return;
+        }
+
+        File [] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read '" + dir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.info("found file '" + file.getName() + "'");
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(4);
+            iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
+            floodWater.add(iw);
+        }
+    }
+
+    public void parseOfficialLines() throws IOException {
+        if (Config.INSTANCE.skipOfficialLines()) {
+            log.info("skip parsing official lines");
+            return;
+        }
+
+        log.info("Parse official wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        for (String folder: OFFICIAL_LINES_FOLDERS) {
+            File dir = FileTools.repair(new File(riverDir, folder));
+
+            if (!dir.isDirectory() || !dir.canRead()) {
+                log.info("no directory '" + folder + "' found");
+                continue;
+            }
+
+            File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
+            if (!file.isFile() || !file.canRead()) {
+                log.warn("no official lines wst file found");
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(3);
+            iw.setDescription(folder + "/" + iw.getDescription());
+            officialLines.add(iw);
+        } // for all folders
+
+    }
+
+    public void parseFixations() throws IOException {
+        if (Config.INSTANCE.skipFixations()) {
+            log.info("skip parsing fixations");
+            return;
+        }
+
+        log.info("Parse fixation wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File fixDir = FileTools.repair(
+            new File(riverDir, FIXATIONS));
+
+        if (!fixDir.isDirectory() || !fixDir.canRead()) {
+            log.info("no fixation wst file directory found");
+            return;
+        }
+
+        File [] files = fixDir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read fixations wst file directory");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!name.endsWith(".wst")) {
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(2);
+            iw.setDescription(FIXATIONS + "/" + iw.getDescription());
+            fixations.add(iw);
+        }
+    }
+
+    public void parseExtraWsts() throws IOException {
+        if (Config.INSTANCE.skipExtraWsts()) {
+            log.info("skip parsing extra WST files");
+            return;
+        }
+
+        log.info("Parse extra longitudinal wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File extraDir = FileTools.repair(
+            new File(riverDir, EXTRA_LONGITUDINALS));
+
+        if (!extraDir.isDirectory() || !extraDir.canRead()) {
+            log.info("no extra longitudinal wst file directory found");
+            return;
+        }
+
+        File [] files = extraDir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read extra longitudinal wst file directory");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(1);
+            iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription());
+            extraWsts.add(iw);
+        }
+
+    }
+
+    public void parseWst() throws IOException {
+        if (Config.INSTANCE.skipWst()) {
+            log.info("skip parsing WST file");
+            return;
+        }
+
+        WstParser wstParser = new WstParser();
+        wstParser.parse(wstFile);
+        wst = wstParser.getWst();
+    }
+
+    public void parseGauges() throws IOException {
+        if (Config.INSTANCE.skipGauges()) {
+            log.info("skip parsing gauges");
+            return;
+        }
+
+        File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
+        gltFile = FileTools.repair(gltFile);
+
+        if (!gltFile.isFile() || !gltFile.canRead()) {
+            log.warn("cannot read gauges from '" + gltFile + "'");
+            return;
+        }
+
+        PegelGltParser pgltp = new PegelGltParser();
+        pgltp.parse(gltFile);
+
+        gauges = pgltp.getGauges();
+
+        for (ImportGauge gauge: gauges) {
+            gauge.parseDependencies();
+        }
+    }
+
+    public void parseAnnotations() throws IOException {
+        if (Config.INSTANCE.skipAnnotations()) {
+            log.info("skip parsing annotations");
+            return;
+        }
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+        AnnotationsParser aparser =
+            new AnnotationsParser(annotationClassifier);
+        aparser.parse(riverDir);
+
+        annotations = aparser.getAnnotations();
+    }
+
+    public void parseHYKs() {
+        if (Config.INSTANCE.skipHYKs()) {
+            log.info("skip parsing HYK files");
+            return;
+        }
+
+        log.info("looking for HYK files");
+        HYKParser parser = new HYKParser();
+        File riverDir = wstFile
+            .getParentFile()  // Basisdaten
+            .getParentFile()  // Hydrologie
+            .getParentFile(); // <river>
+
+        parser.parseHYKs(riverDir, new HYKParser.Callback() {
+
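+            // Remember files already accepted so duplicates get skipped.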
+            Set<HashedFile> hfs = new HashSet<HashedFile>();
+
+            @Override
+            public boolean hykAccept(File file) {
+                HashedFile hf = new HashedFile(file);
+                boolean success = hfs.add(hf);
+                if (!success) {
+                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
+                }
+                return success;
+            }
+
+            @Override
+            public void hykParsed(HYKParser parser) {
+                log.debug("callback from HYK parser");
+                ImportHYK hyk = parser.getHYK();
+                hyk.setRiver(ImportRiver.this);
+                hyks.add(hyk);
+            }
+        });
+    }
+
+    public void parsePRFs() {
+        if (Config.INSTANCE.skipPRFs()) {
+            log.info("skip parsing PRFs");
+            return;
+        }
+
+        log.info("looking for PRF files");
+        PRFParser parser = new PRFParser();
+        File riverDir = wstFile
+            .getParentFile()  // Basisdaten
+            .getParentFile()  // Hydrologie
+            .getParentFile(); // <river>
+
+        parser.parsePRFs(riverDir, new PRFParser.Callback() {
+
+            Set<HashedFile> prfs = new HashSet<HashedFile>();
+
+            @Override
+            public boolean prfAccept(File file) {
+                HashedFile hf = new HashedFile(file);
+                boolean success = prfs.add(hf);
+                if (!success) {
+                    log.warn("PRF file '" + file + "' seems to be a duplicate.");
+                }
+                return success;
+            }
+
+            @Override
+            public void prfParsed(PRFParser parser) {
+                log.debug("callback from PRF parser");
+
+                String  description = parser.getDescription();
+                Integer year        = parser.getYear();
+                ImportTimeInterval ti = year != null
+                    ? new ImportTimeInterval(yearToDate(year))
+                    : null;
+
+                List<ImportCrossSectionLine> lines =
+                    new ArrayList<ImportCrossSectionLine>();
+
+                for (Map.Entry<Double, List<XY>> entry: parser.getData().entrySet()) {
+                    Double km         = entry.getKey();
+                    List<XY>   points = entry.getValue();
+                    lines.add(new ImportCrossSectionLine(km, points));
+                }
+
+                crossSections.add(new ImportCrossSection(
+                    ImportRiver.this, description, ti, lines));
+            }
+        });
+    }
+
+    public static Date yearToDate(int year) {
+        Calendar cal = Calendar.getInstance();
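+        // Calendar months are zero-based, so this is June 15th, 12:00 of the given year.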
+        cal.set(year, 5, 15, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
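+        // Zero the milliseconds to get a stable, second-precise date.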
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+
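+    /** Stores all parsed data; storeWstUnit() must run first because getPeer() needs the unit to create the river. */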
+    public void storeDependencies() {
+        storeWstUnit();
+        storeAnnotations();
+        storeHYKs();
+        storeCrossSections();
+        storeGauges();
+        storeWst();
+        storeExtraWsts();
+        storeFixations();
+        storeOfficialLines();
+        storeFloodWater();
+        storeFloodProtection();
+        storeBedHeight();
+        storeSedimentDensity();
+        storeMorphologicalWidth();
+        storeFlowVelocity();
+        storeSedimentYield();
+        storeWaterlevels();
+        storeWaterlevelDifferences();
+        storeSQRelations();
+    }
+
+    public void storeWstUnit() {
+        if (wst == null) {
+            wstUnit = new ImportUnit("NN + m");
+        }
+        else {
+            wstUnit = wst.getUnit();
+        }
+    }
+
+    public void storeHYKs() {
+        if (!Config.INSTANCE.skipHYKs()) {
+            log.info("store HYKs");
+            getPeer();
+            for (ImportHYK hyk: hyks) {
+                hyk.storeDependencies();
+            }
+        }
+    }
+
+    public void storeCrossSections() {
+        if (!Config.INSTANCE.skipPRFs()) {
+            log.info("store cross sections");
+            getPeer();
+            for (ImportCrossSection crossSection: crossSections) {
+                crossSection.storeDependencies();
+            }
+        }
+    }
+
+    public void storeWst() {
+        if (!Config.INSTANCE.skipWst()) {
+            River river = getPeer();
+            wst.storeDependencies(river);
+        }
+    }
+
+    public void storeFixations() {
+        if (!Config.INSTANCE.skipFixations()) {
+            log.info("store fixation wsts");
+            River river = getPeer();
+            for (ImportWst wst: fixations) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeExtraWsts() {
+        if (!Config.INSTANCE.skipExtraWsts()) {
+            log.info("store extra wsts");
+            River river = getPeer();
+            for (ImportWst wst: extraWsts) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeOfficialLines() {
+        if (!Config.INSTANCE.skipOfficialLines()) {
+            log.info("store official lines wsts");
+            River river = getPeer();
+            for (ImportWst wst: officialLines) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeFloodWater() {
+        if (!Config.INSTANCE.skipFloodWater()) {
+            log.info("store flood water wsts");
+            River river = getPeer();
+            for (ImportWst wst: floodWater) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeFloodProtection() {
+        if (!Config.INSTANCE.skipFloodProtection()) {
+            log.info("store flood protection wsts");
+            River river = getPeer();
+            for (ImportWst wst: floodProtection) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+
+    public void storeBedHeight() {
+        if (!Config.INSTANCE.skipBedHeightSingle()) {
+            log.info("store bed heights single");
+            storeBedHeightSingle();
+        }
+
+        if (!Config.INSTANCE.skipBedHeightEpoch()) {
+            log.info("store bed height epoch.");
+            storeBedHeightEpoch();
+        }
+    }
+
+
+    private void storeBedHeightSingle() {
+        River river = getPeer();
+
+        if (bedHeightSingles != null) {
+            for (ImportBedHeight tmp: bedHeightSingles) {
+                ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;
+
+                String desc = single.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    single.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+            }
+        }
+        else {
+            log.info("No single bed heights to store.");
+        }
+    }
+
+
+    private void storeBedHeightEpoch() {
+        River river = getPeer();
+
+        if (bedHeightEpochs != null) {
+            for (ImportBedHeight tmp: bedHeightEpochs) {
+                ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;
+
+                String desc = epoch.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    epoch.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+            }
+        }
+        else {
+            log.info("No epoch bed heights to store.");
+        }
+    }
+
+    public void storeSedimentDensity() {
+        if (!Config.INSTANCE.skipSedimentDensity()) {
+            log.info("store sediment density");
+
+            River river = getPeer();
+
+            for (ImportSedimentDensity density: sedimentDensities) {
+                String desc = density.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    density.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!");
+                }
+            }
+        }
+    }
+
+    public void storeMorphologicalWidth() {
+        if (!Config.INSTANCE.skipMorphologicalWidth()) {
+            log.info("store morphological width");
+
+            River river = getPeer();
+
+            for (ImportMorphWidth width: morphologicalWidths) {
+                try {
+                    width.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while parsing file for morph. width.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while parsing file for morph. width.", cve);
+                }
+            }
+        }
+    }
+
+    public void storeFlowVelocity() {
+        if (!Config.INSTANCE.skipFlowVelocity()) {
+            log.info("store flow velocity");
+
+            River river = getPeer();
+
+            for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){
+                try {
+                    flowVelocityModel.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing flow velocity model.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing flow velocity model.", cve);
+                }
+            }
+
+            for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) {
+                try {
+                    m.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing flow velocity measurement.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing flow velocity measurement.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeSedimentYield() {
+        if (!Config.INSTANCE.skipSedimentYield()) {
+            log.info("store sediment yield data");
+
+            River river = getPeer();
+
+            for (ImportSedimentYield sedimentYield: sedimentYields) {
+                try {
+                    sedimentYield.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing sediment yield.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing sediment yield.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeWaterlevels() {
+        if (!Config.INSTANCE.skipWaterlevels()) {
+            log.info("store waterlevels");
+
+            River river = getPeer();
+
+            for (ImportWaterlevel waterlevel: waterlevels) {
+                waterlevel.storeDependencies(river);
+            }
+        }
+    }
+
+
+    public void storeWaterlevelDifferences() {
+        if (!Config.INSTANCE.skipWaterlevelDifferences()) {
+            log.info("store waterlevel differences");
+
+            River river = getPeer();
+
+            for (ImportWaterlevelDifference diff: waterlevelDiffs) {
+                try {
+                    diff.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing waterlevel diff.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing waterlevel diff.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeSQRelations() {
+        if (!Config.INSTANCE.skipSQRelation()) {
+            log.info("store sq relations");
+
+            River river = getPeer();
+
+            int count = 0;
+
+            for (ImportSQRelation sqRelation: sqRelations) {
+                try {
+                    sqRelation.storeDependencies(river);
+                    count++;
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing sq relation.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing sq relation.", cve);
+                }
+            }
+
+            log.info("stored " + count + " sq relations.");
+        }
+    }
+
+
+    public void storeAnnotations() {
+        if (!Config.INSTANCE.skipAnnotations()) {
+            River river = getPeer();
+            for (ImportAnnotation annotation: annotations) {
+                annotation.getPeer(river);
+            }
+        }
+    }
+
+    public void storeGauges() {
+        if (!Config.INSTANCE.skipGauges()) {
+            log.info("store gauges:");
+            River river = getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            for (ImportGauge gauge: gauges) {
+                log.info("\tgauge: " + gauge.getName());
+                gauge.storeDependencies(river);
+                session.flush();
+            }
+        }
+    }
+
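+    /** Fetches the river from the database by name, or creates and saves it with the WST unit. */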
+    public River getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from River where name=:name");
+
+            Unit u = wstUnit.getPeer();
+
+            query.setString("name", name);
+            List<River> rivers = query.list();
+            if (rivers.isEmpty()) {
+                log.info("Store new river '" + name + "'");
+                peer = new River(name, u);
+                session.save(peer);
+            }
+            else {
+                peer = rivers.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,122 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.SQRelation;
+import de.intevation.flys.model.TimeInterval;
+
+
+public class ImportSQRelation {
+
+    private static Logger log = Logger.getLogger(ImportSQRelation.class);
+
+
+    private ImportTimeInterval timeInterval;
+
+    private String description;
+
+    private List<ImportSQRelationValue> values;
+
+    private SQRelation peer;
+
+
+    public ImportSQRelation() {
+        this.values = new ArrayList<ImportSQRelationValue>();
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        SQRelation peer = getPeer(river);
+
+        timeInterval.getPeer();
+
+        int count = 0;
+
+        for (ImportSQRelationValue value: values) {
+            try {
+                value.storeDependencies(peer);
+                count++;
+            }
+            catch (SQLException sqle) {
+                log.warn("ISQ: Unable to store sq relation value.", sqle);
+            }
+            catch (ConstraintViolationException cve) {
+                log.warn("ISQ: Unable to store sq relation value.", cve);
+            }
+        }
+
+        log.info("stored " + count + " sq relation values.");
+    }
+
+
+    public SQRelation getPeer(River river) {
+        log.debug("getPeer()");
+
+        if (peer == null) {
+            TimeInterval timeInter = timeInterval.getPeer();
+
+            if (timeInter == null) {
+                log.warn("ISQ: Cannot determine sq relation without time interval");
+                return null;
+            }
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "FROM SQRelation WHERE river=:river AND timeInterval=:timeInter"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("timeInter", timeInter);
+
+            List<SQRelation> sq = query.list();
+
+            if (sq.isEmpty()) {
+                log.info("create new SQ relation '" + description + "'");
+
+                peer = new SQRelation(
+                    river,
+                    timeInter,
+                    description
+                );
+                session.save(peer);
+            }
+            else {
+                peer = sq.get(0);
+            }
+        }
+
+        return peer;
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public void addValue(ImportSQRelationValue value) {
+        if (value != null) {
+            this.values.add(value);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,97 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.SQRelation;
+import de.intevation.flys.model.SQRelationValue;
+
+
+public class ImportSQRelationValue {
+
+    private static Logger log = Logger.getLogger(ImportSQRelationValue.class);
+
+
+    private SQRelationValue peer;
+
+    private String parameter;
+    private String fraction;
+    private String function;
+
+    private double km;
+    private double a;
+    private double b;
+
+
+    public ImportSQRelationValue(
+        String parameter,
+        String fraction,
+        String function,
+        double km,
+        double a,
+        double b
+    ) {
+        this.parameter = parameter;
+        this.fraction  = fraction;
+        this.function  = function;
+        this.km        = km;
+        this.a         = a;
+        this.b         = b;
+    }
+
+
+    public void storeDependencies(SQRelation owner)
+    throws SQLException, ConstraintViolationException
+    {
+        getPeer(owner);
+    }
+
+
+    public SQRelationValue getPeer(SQRelation owner) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from SQRelationValue " +
+                "   where sqRelation=:owner " +
+                "   and parameter=:parameter" +
+                "   and fraction=:fraction" +
+                "   and function=:function" +
+                "   and km=:km");
+
+            query.setParameter("owner", owner);
+            query.setString("parameter", parameter);
+            query.setString("fraction", fraction);
+            query.setString("function", function);
+            query.setDouble("km", km);
+
+            List<SQRelationValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new SQRelationValue(
+                    owner,
+                    parameter,
+                    fraction,
+                    function,
+                    km,
+                    a,
+                    b
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.SedimentDensity;
+
+
+public class ImportSedimentDensity {
+
+    private static Logger log = Logger.getLogger(ImportSedimentDensity.class);
+
+
+    protected SedimentDensity peer;
+
+    protected ImportDepth depth;
+
+    protected ImportUnit unit;
+
+    protected String description;
+
+    protected List<ImportSedimentDensityValue> values;
+
+
+    public ImportSedimentDensity(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportSedimentDensityValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+
+    public void setDepth(ImportDepth depth) {
+        this.depth = depth;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportSedimentDensityValue value) {
+        values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        if (depth != null) {
+            depth.storeDependencies();
+        }
+
+        log.info("store sediment density values.");
+
+        SedimentDensity peer = getPeer(river);
+
+        for (ImportSedimentDensityValue value: values) {
+            value.storeDependencies(peer);
+        }
+    }
+
+
+    public SedimentDensity getPeer(River river) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from SedimentDensity where " +
+                "   river=:river and " +
+                "   depth=:depth and " +
+                "   unit=:unit");
+
+            query.setParameter("river", river);
+            query.setParameter("depth", depth.getPeer());
+            query.setParameter("unit", unit.getPeer());
+
+            List<SedimentDensity> density = query.list();
+
+            if (density.isEmpty()) {
+                log.debug("Create new SedimentDensity DB instance.");
+
+                peer = new SedimentDensity(
+                    river,
+                    depth.getPeer(),
+                    unit.getPeer(),
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = density.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,87 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.SedimentDensity;
+import de.intevation.flys.model.SedimentDensityValue;
+
+
+public class ImportSedimentDensityValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportSedimentDensityValue.class);
+
+
+    protected SedimentDensityValue peer;
+
+    protected BigDecimal station;
+
+    protected BigDecimal density;
+
+    protected String description;
+
+
+    public ImportSedimentDensityValue(
+        BigDecimal station,
+        BigDecimal density,
+        String     description
+    ) {
+        this.station     = station;
+        this.density     = density;
+        this.description = description;
+    }
+
+
+    public void storeDependencies(SedimentDensity sedimentDensity) {
+        log.info("store dependencies");
+
+        getPeer(sedimentDensity);
+    }
+
+
+    public SedimentDensityValue getPeer(SedimentDensity sedimentDensity) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from SedimentDensityValue where " +
+                "   sedimentDensity=:sedimentDensity and " +
+                "   station=:station and " +
+                "   density=:density and " +
+                "   description=:description");
+
+            query.setParameter("sedimentDensity", sedimentDensity);
+            query.setParameter("station", station);
+            query.setParameter("density", density);
+            query.setParameter("description", description);
+
+            List<SedimentDensityValue> values = query.list();
+            if (values.isEmpty()) {
+                log.debug("Create new SedimentDensityValue DB instance.");
+
+                peer = new SedimentDensityValue(
+                    sedimentDensity,
+                    station,
+                    density,
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYield.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,133 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.TimeInterval;
+import de.intevation.flys.model.Unit;
+
+
+public class ImportSedimentYield {
+
+    private static Logger log = Logger.getLogger(ImportSedimentYield.class);
+
+    private ImportGrainFraction grainFraction;
+
+    private ImportUnit unit;
+
+    private ImportTimeInterval timeInterval;
+
+    private String description;
+
+    private List<ImportSedimentYieldValue> values;
+
+    private SedimentYield peer;
+
+
+    public ImportSedimentYield(String description) {
+        this.values      = new ArrayList<ImportSedimentYieldValue>();
+        this.description = description;
+    }
+
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void setGrainFraction(ImportGrainFraction grainFraction) {
+        this.grainFraction = grainFraction;
+    }
+
+
+    public void addValue(ImportSedimentYieldValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        if (grainFraction != null) {
+            grainFraction.storeDependencies();
+        }
+
+        SedimentYield peer = getPeer(river);
+
+        int i = 0;
+
+        for (ImportSedimentYieldValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " sediment yield values.");
+    }
+
+
+    public SedimentYield getPeer(River river) {
+        log.debug("get peer");
+
+        GrainFraction gf = grainFraction != null
+            ? grainFraction.getPeer()
+            : null;
+
+        Unit u = unit != null ? unit.getPeer() : null;
+
+        TimeInterval ti = timeInterval != null ? timeInterval.getPeer() : null;
+
+        if (ti == null || u == null) {
+            log.warn("Skip invalid SedimentYield: time interval or unit null!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from SedimentYield where " +
+                "   river=:river and " +
+                "   grainFraction=:grainFraction and " +
+                "   unit=:unit and " +
+                "   timeInterval=:timeInterval and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("grainFraction", gf);
+            query.setParameter("unit", u);
+            query.setParameter("timeInterval", ti);
+            query.setParameter("description", description);
+
+            List<SedimentYield> yields = query.list();
+            if (yields.isEmpty()) {
+                log.debug("create new SedimentYield");
+
+                peer = new SedimentYield(river, u, ti, gf, description);
+                session.save(peer);
+            }
+            else {
+                peer = yields.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYieldValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,58 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.SedimentYieldValue;
+
+
+public class ImportSedimentYieldValue {
+
+    private Double station;
+    private Double value;
+
+    private SedimentYieldValue peer;
+
+
+    public ImportSedimentYieldValue(Double station, Double value) {
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    public void storeDependencies(SedimentYield sedimentYield) {
+        getPeer(sedimentYield);
+    }
+
+
+    public SedimentYieldValue getPeer(SedimentYield sedimentYield) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from SedimentYieldValue where " +
+                "   sedimentYield=:sedimentYield and " +
+                "   station=:station and " +
+                "   value=:value"
+            );
+
+            query.setParameter("sedimentYield", sedimentYield);
+            query.setParameter("station", station);
+            query.setParameter("value", value);
+
+            List<SedimentYieldValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new SedimentYieldValue(sedimentYield, station, value);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportTimeInterval.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.Date;
+
+import org.apache.log4j.Logger;
+
+public class ImportTimeInterval
+{
+    private static Logger log = Logger.getLogger(ImportTimeInterval.class);
+
+    protected Date startTime;
+    protected Date stopTime;
+
+    protected TimeInterval peer;
+
+    public ImportTimeInterval() {
+    }
+
+    public ImportTimeInterval(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    public ImportTimeInterval(Date startTime, Date stopTime) {
+        this.startTime = startTime;
+        this.stopTime  = stopTime;
+    }
+
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    public Date getStopTime() {
+        return stopTime;
+    }
+
+    public void setStopTime(Date stopTime) {
+        this.stopTime = stopTime;
+    }
+
+    public TimeInterval getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from TimeInterval where startTime=:a and stopTime=:b");
+            query.setParameter("a", startTime);
+            query.setParameter("b",  stopTime);
+            List<TimeInterval> intervals = query.list();
+            if (intervals.isEmpty()) {
+                peer = new TimeInterval(startTime, stopTime);
+                session.save(peer);
+            }
+            else {
+                peer = intervals.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportUnit.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,52 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Unit;
+
+
+public class ImportUnit
+{
+    private static final Logger log = Logger.getLogger(ImportUnit.class);
+
+    protected String name;
+
+    protected Unit peer;
+
+
+    public ImportUnit(String name) {
+        this.name = name;
+    }
+
+
+    public String getName() {
+        return name;
+    }
+
+
+    public Unit getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Unit where name=:name");
+            query.setParameter("name", name);
+
+            List<Unit> units = query.list();
+            if (units.isEmpty()) {
+                log.info("Store new unit '" + name + "'");
+
+                peer = new Unit(name);
+                session.save(peer);
+            }
+            else {
+                peer = units.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,96 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.Waterlevel;
+
+
+public class ImportWaterlevel {
+
+    private static final Logger log = Logger.getLogger(ImportWaterlevel.class);
+
+
+    private ImportUnit unit;
+
+    private String description;
+
+    private List<ImportWaterlevelQRange> qRanges;
+
+    private Waterlevel peer;
+
+
+    public ImportWaterlevel(String description) {
+        this.qRanges = new ArrayList<ImportWaterlevelQRange>();
+
+        this.description = description;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportWaterlevelQRange qRange) {
+        this.qRanges.add(qRange);
+    }
+
+
+    public void storeDependencies(River river) {
+        log.info("store dependencies");
+
+        Waterlevel peer = getPeer(river);
+
+        if (peer == null) {
+            // Nothing to store: getPeer() already logged the reason.
+            return;
+        }
+
+        int i = 0;
+
+        for (ImportWaterlevelQRange qRange: qRanges) {
+            qRange.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel q ranges");
+    }
+
+
+    public Waterlevel getPeer(River river) {
+        Unit u = unit != null ? unit.getPeer() : null;
+        if (u == null) {
+            log.warn("skip invalid waterlevel - no unit set!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Waterlevel where " +
+                "   river=:river and " +
+                "   unit=:unit and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("unit", u);
+            query.setParameter("description", description);
+
+            List<Waterlevel> wsts = query.list();
+            if (wsts.isEmpty()) {
+                peer = new Waterlevel(river, u, description);
+                session.save(peer);
+            }
+            else {
+                peer = wsts.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.WaterlevelDifference;
+
+
+public class ImportWaterlevelDifference {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifference.class);
+
+
+    private ImportUnit unit;
+
+    private String description;
+
+    private List<ImportWaterlevelDifferenceColumn> columns;
+
+    private WaterlevelDifference peer;
+
+
+    public ImportWaterlevelDifference(String description) {
+        this.columns = new ArrayList<ImportWaterlevelDifferenceColumn>();
+
+        this.description = description;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportWaterlevelDifferenceColumn column) {
+        this.columns.add(column);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        WaterlevelDifference peer = getPeer(river);
+
+        if (peer == null) {
+            // Nothing to store: getPeer() already logged the reason.
+            return;
+        }
+
+        int i = 0;
+
+        for (ImportWaterlevelDifferenceColumn column: columns) {
+            column.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel difference columns");
+    }
+
+
+    public WaterlevelDifference getPeer(River river) {
+        Unit u = unit != null ? unit.getPeer() : null;
+        if (u == null) {
+            log.warn("IWD: skip invalid waterlevel difference - no unit set!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifference where " +
+                "   river=:river and " +
+                "   unit=:unit and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("unit", u);
+            query.setParameter("description", description);
+
+            List<WaterlevelDifference> diffs = query.list();
+            if (diffs.isEmpty()) {
+                peer = new WaterlevelDifference(river, u, description);
+                session.save(peer);
+            }
+            else {
+                peer = diffs.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelDifference;
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+
+
+public class ImportWaterlevelDifferenceColumn {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifferenceColumn.class);
+
+
+    private String description;
+
+    private List<ImportWaterlevelDifferenceValue> values;
+
+    private WaterlevelDifferenceColumn peer;
+
+
+    public ImportWaterlevelDifferenceColumn(String description) {
+        this.values = new ArrayList<ImportWaterlevelDifferenceValue>();
+
+        this.description = description;
+    }
+
+
+    public void addValue(ImportWaterlevelDifferenceValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(WaterlevelDifference difference) {
+        log.info("store dependencies");
+
+        WaterlevelDifferenceColumn peer = getPeer(difference);
+
+        int i = 0;
+
+        for (ImportWaterlevelDifferenceValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel difference values");
+    }
+
+
+    public WaterlevelDifferenceColumn getPeer(WaterlevelDifference diff) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifferenceColumn where " +
+                "   difference=:difference and " +
+                "   description=:description"
+            );
+
+            query.setParameter("difference", diff);
+            query.setParameter("description", description);
+
+            List<WaterlevelDifferenceColumn> cols = query.list();
+            if (cols.isEmpty()) {
+                peer = new WaterlevelDifferenceColumn(diff, description);
+                session.save(peer);
+            }
+            else {
+                peer = cols.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,64 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+import de.intevation.flys.model.WaterlevelDifferenceValue;
+
+
+public class ImportWaterlevelDifferenceValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifferenceValue.class);
+
+
+    private Double station;
+    private Double value;
+
+    private WaterlevelDifferenceValue peer;
+
+
+    public ImportWaterlevelDifferenceValue(Double station, Double value) {
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    public void storeDependencies(WaterlevelDifferenceColumn column) {
+        getPeer(column);
+    }
+
+
+    public WaterlevelDifferenceValue getPeer(WaterlevelDifferenceColumn column) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifferenceValue where " +
+                "   column=:column and " +
+                "   station=:station and " +
+                "   value=:value"
+            );
+
+            query.setParameter("column", column);
+            query.setParameter("station", station);
+            query.setParameter("value", value);
+
+            List<WaterlevelDifferenceValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new WaterlevelDifferenceValue(column, station, value);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,79 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Waterlevel;
+import de.intevation.flys.model.WaterlevelQRange;
+
+
+public class ImportWaterlevelQRange {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelQRange.class);
+
+
+    private Double q;
+
+    private List<ImportWaterlevelValue> values;
+
+    private WaterlevelQRange peer;
+
+
+    public ImportWaterlevelQRange(Double q) {
+        this.values = new ArrayList<ImportWaterlevelValue>();
+        this.q      = q;
+    }
+
+    public void addValue(ImportWaterlevelValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(Waterlevel waterlevel) {
+        log.info("store dependencies");
+
+        WaterlevelQRange peer = getPeer(waterlevel);
+
+        int i = 0;
+
+        for (ImportWaterlevelValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel values");
+    }
+
+
+    public WaterlevelQRange getPeer(Waterlevel waterlevel) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelQRange where " +
+                "   waterlevel=:waterlevel and " +
+                "   q=:q"
+            );
+
+            query.setParameter("waterlevel", waterlevel);
+            query.setParameter("q", q);
+
+            List<WaterlevelQRange> qRanges = query.list();
+            if (qRanges.isEmpty()) {
+                peer = new WaterlevelQRange(waterlevel, q);
+                session.save(peer);
+            }
+            else {
+                peer = qRanges.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelQRange;
+import de.intevation.flys.model.WaterlevelValue;
+
+
+public class ImportWaterlevelValue {
+
+    private Double station;
+    private Double w;
+
+    private WaterlevelValue peer;
+
+
+    public ImportWaterlevelValue(Double station, Double w) {
+        this.station = station;
+        this.w       = w;
+    }
+
+
+    public void storeDependencies(WaterlevelQRange qRange) {
+        getPeer(qRange);
+    }
+
+
+    public WaterlevelValue getPeer(WaterlevelQRange qRange) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelValue where " +
+                "   qrange=:qrange and " +
+                "   station=:station and " +
+                "   w=:w"
+            );
+
+            query.setParameter("qrange", qRange);
+            query.setParameter("station", station);
+            query.setParameter("w", w);
+
+            List<WaterlevelValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new WaterlevelValue(qRange, station, w);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,112 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.River;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ImportWst
+{
+    private static Logger log = Logger.getLogger(ImportWst.class);
+
+    protected String description;
+
+    protected Integer kind;
+
+    protected List<ImportWstColumn> columns;
+
+    protected ImportUnit unit;
+
+    protected Wst peer;
+
+    public ImportWst() {
+        kind = 0;
+        columns = new ArrayList<ImportWstColumn>();
+    }
+
+    public ImportWst(String description) {
+        this();
+        this.description = description;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
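+    // Pre-creates numColumns columns at positions 0..numColumns-1; names and
+    // descriptions are left empty here.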
+    public void setNumberColumns(int numColumns) {
+        for (int i = 0; i < numColumns; ++i) {
+            columns.add(new ImportWstColumn(this, null, null, i));
+        }
+    }
+
+    public int getNumberColumns() {
+        return columns.size();
+    }
+
+    public ImportWstColumn getColumn(int index) {
+        return columns.get(index);
+    }
+
+    public ImportUnit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+    public void storeDependencies(River river) {
+
+        log.info("store '" + description + "'");
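+        // getPeer() creates the Wst if necessary; the columns stored below attach to it.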
+        Wst wst = getPeer(river);
+
+        for (ImportWstColumn column: columns) {
+            column.storeDependencies(river);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+    public Wst getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Wst where " +
+                "river=:river and description=:description and kind=:kind");
+            query.setParameter("river",       river);
+            query.setParameter("description", description);
+            query.setParameter("kind",        kind);
+            List<Wst> wsts = query.list();
+            if (wsts.isEmpty()) {
+                peer = new Wst(river, description, kind);
+                session.save(peer);
+            }
+            else {
+                peer = wsts.get(0);
+            }
+
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,144 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportWstColumn
+{
+    private static Logger log = Logger.getLogger(ImportWstColumn.class);
+
+    protected ImportWst wst;
+    protected String    name;
+    protected String    description;
+    protected Integer   position;
+
+    protected ImportTimeInterval timeInterval;
+
+    protected List<ImportWstColumnQRange> columnQRanges;
+    protected List<ImportWstColumnValue>  columnValues;
+
+    protected WstColumn peer;
+
+    public ImportWstColumn() {
+        columnQRanges = new ArrayList<ImportWstColumnQRange>();
+        columnValues  = new ArrayList<ImportWstColumnValue>();
+    }
+
+    public ImportWstColumn(
+        ImportWst wst,
+        String    name,
+        String    description,
+        Integer   position
+    ) {
+        this();
+        this.wst         = wst;
+        this.name        = name;
+        this.description = description;
+        this.position    = position;
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public Integer getPosition() {
+        return position;
+    }
+
+    public void setPosition(Integer position) {
+        this.position = position;
+    }
+
+    public void addColumnValue(BigDecimal position, BigDecimal w) {
+        columnValues.add(
+            new ImportWstColumnValue(this, position, w));
+    }
+
+    public void addColumnQRange(ImportWstQRange columnQRange) {
+        columnQRanges.add(
+            new ImportWstColumnQRange(this, columnQRange));
+    }
+
+    public void storeDependencies(River river) {
+        log.info("store column '" + name + "'");
+        WstColumn column = getPeer(river);
+
+        for (ImportWstColumnQRange columnQRange: columnQRanges) {
+            columnQRange.getPeer(river);
+        }
+
+        for (ImportWstColumnValue columnValue: columnValues) {
+            columnValue.getPeer(river);
+        }
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    public WstColumn getPeer(River river) {
+        if (peer == null) {
+            Wst w = wst.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstColumn where " +
+                "wst=:wst and name=:name and description=:description" +
+                " and position=:position");
+            query.setParameter("wst",         w);
+            query.setParameter("name",        name);
+            query.setParameter("description", description);
+            query.setParameter("position",    position);
+
+            TimeInterval ti = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            List<WstColumn> columns = query.list();
+            if (columns.isEmpty()) {
+                peer = new WstColumn(w, name, description, position, ti);
+                session.save(peer);
+            }
+            else {
+                peer = columns.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.WstColumnQRange;
+import de.intevation.flys.model.WstQRange;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportWstColumnQRange
+{
+    protected ImportWstColumn wstColumn;
+    protected ImportWstQRange qRange;
+
+    protected WstColumnQRange peer;
+
+    public ImportWstColumnQRange() {
+    }
+
+    public ImportWstColumnQRange(
+        ImportWstColumn wstColumn,
+        ImportWstQRange qRange
+    ) {
+        this.wstColumn = wstColumn;
+        this.qRange    = qRange;
+    }
+
+    public ImportWstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(ImportWstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    public ImportWstQRange getQRange() {
+        return qRange;
+    }
+
+    public void setQRange(ImportWstQRange qRange) {
+        this.qRange = qRange;
+    }
+
+    public WstColumnQRange getPeer(River river) {
+        if (peer == null) {
+            WstColumn c = wstColumn.getPeer(river);
+            WstQRange q = qRange.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstColumnQRange where " +
+                "wstColumn=:c and wstQRange=:q");
+            query.setParameter("c", c);
+            query.setParameter("q", q);
+            List<WstColumnQRange> cols = query.list();
+            if (cols.isEmpty()) {
+                peer = new WstColumnQRange(c, q);
+                session.save(peer);
+            }
+            else {
+                peer = cols.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportWstColumnValue
+{
+    private static Logger log = Logger.getLogger(ImportWstColumnValue.class);
+
+    protected BigDecimal      position;
+    protected BigDecimal      w;
+    protected ImportWstColumn wstColumn;
+
+    protected WstColumnValue  peer;
+
+    public ImportWstColumnValue() {
+    }
+
+    public ImportWstColumnValue(
+        ImportWstColumn wstColumn,
+        BigDecimal      position,
+        BigDecimal      w
+    ) {
+        this.wstColumn = wstColumn;
+        this.position  = position;
+        this.w         = w;
+    }
+
+    public BigDecimal getPosition() {
+        return position;
+    }
+
+    public void setPosition(BigDecimal position) {
+        this.position = position;
+    }
+
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+
+    public ImportWstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(ImportWstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    public WstColumnValue getPeer(River river) {
+        if (peer == null) {
+            WstColumn c = wstColumn.getPeer(river);
+            peer = ImporterSession.getInstance().getWstColumnValue(
+                c, position, w);
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.WstQRange;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Range;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportWstQRange
+{
+    protected ImportRange range;
+    protected BigDecimal  q;
+
+    protected WstQRange peer;
+
+    public ImportWstQRange() {
+    }
+
+    public ImportWstQRange(
+        ImportRange range,
+        BigDecimal  q
+    ) {
+        this.range = range;
+        this.q     = q;
+    }
+
+    public ImportRange getRange() {
+        return range;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    public WstQRange getPeer(River river) {
+        if (peer == null) {
+            Range r = range.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstQRange where " +
+                "range=:range and q=:q");
+            query.setParameter("range", r);
+            query.setParameter("q",     q);
+            List<WstQRange> wstQRanges = query.list();
+            if (wstQRanges.isEmpty()) {
+                peer = new WstQRange(r, q);
+                session.save(peer);
+            }
+            else {
+                peer = wstQRanges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.importer;
+
+import de.intevation.artifacts.common.utils.XMLUtils;
+
+import de.intevation.flys.importer.parsers.InfoGewParser;
+import de.intevation.flys.importer.parsers.AnnotationClassifier;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.HibernateException;
+
+import org.w3c.dom.Document;
+
+public class Importer
+{
+    private static Logger log = Logger.getLogger(Importer.class);
+
+    protected List<ImportRiver> rivers;
+
+    public Importer() {
+    }
+
+    public Importer(List<ImportRiver> rivers) {
+        this.rivers = rivers;
+    }
+
+    public List<ImportRiver> getRivers() {
+        return rivers;
+    }
+
+    public void setRivers(List<ImportRiver> rivers) {
+        this.rivers = rivers;
+    }
+
+    public void writeRivers() {
+        log.debug("write rivers started");
+
+        for (ImportRiver river: rivers) {
+            log.debug("writing river '" + river.getName() + "'");
+            river.storeDependencies();
+            ImporterSession.getInstance().getDatabaseSession().flush();
+        }
+
+        log.debug("write rivers finished");
+    }
+
+    public void writeToDatabase() {
+
+        Transaction tx = null;
+
+        try {
+            tx = ImporterSession.getInstance()
+                .getDatabaseSession().beginTransaction();
+
+            try {
+                writeRivers();
+            }
+            catch (HibernateException he) {
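+                // Log the full chain of nested SQL exceptions before re-throwing.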
+                Throwable t = he.getCause();
+                while (t instanceof SQLException) {
+                    SQLException sqle = (SQLException)t;
+                    log.error("SQL exception chain:", sqle);
+                    t = sqle.getNextException();
+                }
+                throw he;
+            }
+
+            tx.commit();
+        }
+        catch (RuntimeException re) {
+            if (tx != null) {
+                tx.rollback();
+            }
+            throw re;
+        }
+    }
+
+    public static AnnotationClassifier getAnnotationClassifier() {
+        String annotationTypes = Config.INSTANCE.getAnnotationTypes();
+
+        if (annotationTypes == null) {
+            log.info("no annotation types file configured.");
+            return null;
+        }
+
+        File file = new File(annotationTypes);
+
+        log.info("use annotation types file '" + file + "'");
+
+        if (!(file.isFile() && file.canRead())) {
+            log.warn("annotation type file '" + file + "' is not readable.");
+            return null;
+        }
+
+        Document rules = XMLUtils.parseDocument(file);
+
+        if (rules == null) {
+            log.warn("cannot parse annotation types file.");
+            return null;
+        }
+
+        return new AnnotationClassifier(rules);
+    }
+
+    public static void main(String [] args) {
+
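+        // Parse every INFO gew file given on the command line plus the one from
+        // the configuration, then write the collected rivers unless this is a dry run.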
+        InfoGewParser infoGewParser = new InfoGewParser(
+            getAnnotationClassifier());
+
+        log.info("Start parsing rivers...");
+
+        for (String gew: args) {
+            log.info("parsing info gew file: " + gew);
+            try {
+                infoGewParser.parse(new File(gew));
+            }
+            catch (IOException ioe) {
+                log.error("error while parsing gew: " + gew);
+            }
+        }
+
+        String gew = Config.INSTANCE.getInfoGewFile();
+        if (gew != null && gew.length() > 0) {
+            log.info("parsing info gew file: " + gew);
+            try {
+                infoGewParser.parse(new File(gew));
+            }
+            catch (IOException ioe) {
+                log.error("error while parsing gew: " + gew);
+            }
+        }
+
+        if (!Config.INSTANCE.dryRun()) {
+            new Importer(infoGewParser.getRivers()).writeToDatabase();
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImporterSession.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,187 @@
+package de.intevation.flys.importer;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeMap;
+
+import java.math.BigDecimal;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.FlushMode;
+
+import de.intevation.flys.backend.SessionFactoryProvider;
+
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.DischargeTableValue;
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.LRUCache;
+
+public class ImporterSession
+{
+    private static Logger log = Logger.getLogger(ImporterSession.class);
+
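+    // Each importer thread gets its own ImporterSession (and Hibernate session).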
+    private static final ThreadLocal<ImporterSession> SESSION =
+        new ThreadLocal<ImporterSession>() {
+            @Override
+            protected ImporterSession initialValue() {
+                return new ImporterSession();
+            }
+        };
+
+    protected Session databaseSession;
+
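+    // LRU caches keyed by the id of the owning column/table/river; the inner
+    // maps compare values with ValueKey.EPSILON_COMPARATOR to tolerate rounding.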
+    protected LRUCache<Integer, Map<ValueKey, WstColumnValue>>
+        wstColumnValues;
+
+    protected LRUCache<Integer, Map<ValueKey, DischargeTableValue>>
+        dischargeTableValues;
+
+    protected LRUCache<Integer, Map<ValueKey, Range>>
+        ranges;
+
+    public static ImporterSession getInstance() {
+        return SESSION.get();
+    }
+
+    public ImporterSession() {
+        SessionFactory sessionFactory =
+            SessionFactoryProvider.createSessionFactory();
+        databaseSession = sessionFactory.openSession();
+        //databaseSession.setFlushMode(FlushMode.MANUAL);
+
+        wstColumnValues =
+            new LRUCache<Integer, Map<ValueKey, WstColumnValue>>();
+
+        dischargeTableValues =
+            new LRUCache<Integer, Map<ValueKey, DischargeTableValue>>();
+
+        ranges = new LRUCache<Integer, Map<ValueKey, Range>>();
+    }
+
+    public Session getDatabaseSession() {
+        return databaseSession;
+    }
+
+    public WstColumnValue getWstColumnValue(
+        WstColumn  column,
+        BigDecimal position,
+        BigDecimal w
+    ) {
+        Integer c = column.getId();
+
+        Map<ValueKey, WstColumnValue> map = wstColumnValues.get(c);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, WstColumnValue>(
+                ValueKey.EPSILON_COMPARATOR);
+            wstColumnValues.put(c, map);
+            Query query = databaseSession.createQuery(
+                "from WstColumnValue where wstColumn.id=:cid");
+            query.setParameter("cid", c);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                WstColumnValue wcv = (WstColumnValue)iter.next();
+                map.put(new ValueKey(wcv.getPosition(), wcv.getW()), wcv);
+            }
+        }
+
+        ValueKey key = new ValueKey(position, w);
+
+        WstColumnValue wcv = map.get(key);
+
+        if (wcv != null) {
+            return wcv;
+        }
+
+        wcv = new WstColumnValue(column, position, w);
+
+        databaseSession.save(wcv);
+
+        map.put(key, wcv);
+
+        return wcv;
+    }
+
+    public DischargeTableValue getDischargeTableValue(
+        DischargeTable table,
+        BigDecimal     q,
+        BigDecimal     w
+    ) {
+        Integer t = table.getId();
+
+        Map<ValueKey, DischargeTableValue> map =
+            dischargeTableValues.get(t);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, DischargeTableValue>(
+                ValueKey.EPSILON_COMPARATOR);
+            dischargeTableValues.put(t, map);
+            Query query = databaseSession.createQuery(
+                "from DischargeTableValue where dischargeTable.id=:tid");
+            query.setParameter("tid", t);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                DischargeTableValue dctv = (DischargeTableValue)iter.next();
+                map.put(new ValueKey(dctv.getQ(), dctv.getW()), dctv);
+            }
+        }
+
+        ValueKey key = new ValueKey(q, w);
+
+        DischargeTableValue dctv = map.get(key);
+
+        if (dctv != null) {
+            return dctv;
+        }
+
+        dctv = new DischargeTableValue(table, q, w);
+
+        databaseSession.save(dctv);
+
+        map.put(key, dctv);
+
+        return dctv;
+    }
+
+    public Range getRange(River river, BigDecimal a, BigDecimal b) {
+        Integer r = river.getId();
+
+        Map<ValueKey, Range> map = ranges.get(r);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, Range>(
+                ValueKey.EPSILON_COMPARATOR);
+            ranges.put(r, map);
+            Query query = databaseSession.createQuery(
+                "from Range where river.id=:rid");
+            query.setParameter("rid", r);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                Range range = (Range)iter.next();
+                map.put(new ValueKey(range.getA(), range.getB()), range);
+            }
+        }
+
+        ValueKey key = new ValueKey(a, b);
+
+        Range range = map.get(key);
+
+        if (range != null) {
+            return range;
+        }
+
+        range = new Range(a, b, river);
+
+        databaseSession.save(range);
+
+        map.put(key, range);
+
+        return range;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ValueKey.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,63 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.Comparator;
+
+public class ValueKey
+{
+    public static final double EPSILON = 1e-6;
+
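+    // Orders keys by (a, b); components that differ by no more than EPSILON count as equal.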
+    public static final Comparator<ValueKey> EPSILON_COMPARATOR =
+        new Comparator<ValueKey>()
+    {
+        public int compare(ValueKey x, ValueKey y) {
+            int cmp = ValueKey.compare(x.a, y.a);
+            if (cmp != 0) return cmp;
+            return ValueKey.compare(x.b, y.b);
+        }
+    };
+
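+    // null sorts before any non-null value; non-null values are compared numerically.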
+    public static int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b == null) return  0;
+        if (a != null && b == null) return +1;
+        if (a == null && b != null) return -1;
+
+        double diff = a.doubleValue() - b.doubleValue();
+        if (diff < -EPSILON) return -1;
+        return diff > EPSILON ? +1 : 0;
+    }
+
+    protected BigDecimal a;
+    protected BigDecimal b;
+
+    public ValueKey() {
+    }
+
+    public ValueKey(BigDecimal a, BigDecimal b) {
+        this.a = a;
+        this.b = b;
+    }
+
+    @Override
+    public int hashCode() {
+        return ((a != null ? a.hashCode() : 0) << 16)
+              | (b != null ? b.hashCode() : 0);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (!(other instanceof ValueKey)) {
+            return false;
+        }
+        ValueKey o = (ValueKey)other;
+        return !(
+               (a == null && o.a != null)
+            || (a != null && o.a == null)
+            || (a != null && !a.equals(o.a))
+            || (b == null && o.b != null)
+            || (b != null && o.b == null)
+            || (b != null && !b.equals(o.b)));
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/XY.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+public class XY
+implements   Comparable<XY>
+{
+    public static final double X_EPSILON = 1e-4;
+
+    protected double x;
+    protected double y;
+    protected int    index;
+
+    public XY() {
+    }
+
+    public XY(double x, double y, int index) {
+        this.x     = x;
+        this.y     = y;
+        this.index = index;
+    }
+
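+    // Orders by x; points whose x values differ by no more than X_EPSILON are
+    // ordered by their insertion index instead.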
+    @Override
+    public int compareTo(XY other) {
+        if (x + X_EPSILON < other.x) return -1;
+        if (x > other.x + X_EPSILON) return +1;
+        if (index < other.index)     return -1;
+        if (index > other.index)     return +1;
+        return 0;
+    }
+
+    public double getX() {
+        return x;
+    }
+
+    public void setX(double x) {
+        this.x = x;
+    }
+
+    public double getY() {
+        return y;
+    }
+
+    public void setY(double y) {
+        this.y = y;
+    }
+
+    public int getIndex() {
+        return index;
+    }
+
+    public void setIndex(int index) {
+        this.index = index;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationClassifier.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,232 @@
+package de.intevation.flys.importer.parsers;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Element;
+
+import javax.xml.xpath.XPathConstants;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.XMLUtils;
+
+import de.intevation.flys.importer.ImportAnnotationType;
+
+public class AnnotationClassifier
+{
+    private static Logger log = Logger.getLogger(AnnotationClassifier.class);
+
+    public static final String TYPES_XPATH =
+        "/annotation/types/type";
+
+    public static final String FILE_PATTERNS_XPATH =
+        "/annotation/patterns/file";
+
+    public static final String DESCRIPTION_PATTERNS_XPATH =
+        "/annotation/patterns/line";
+
+
+    public static class Pair {
+
+        protected Pattern              pattern;
+        protected ImportAnnotationType annType;
+
+        public Pair(Pattern pattern, ImportAnnotationType annType) {
+            this.pattern  = pattern;
+            this.annType = annType;
+        }
+
+        public ImportAnnotationType match(String s) {
+            Matcher m = pattern.matcher(s);
+            return m.matches() ? annType : null;
+        }
+    } // class Pair
+
+
+    protected Map<String, ImportAnnotationType> types;
+    protected List<Pair>                        filePatterns;
+    protected List<Pair>                        descPatterns;
+
+    protected ImportAnnotationType defaultType;
+
+    public AnnotationClassifier() {
+    }
+
+    public AnnotationClassifier(Document rules) {
+        types        = new HashMap<String, ImportAnnotationType>();
+        filePatterns = new ArrayList<Pair>();
+        descPatterns = new ArrayList<Pair>();
+
+        buildRules(rules);
+    }
+
+    protected void buildRules(Document rules) {
+        buildTypes(rules);
+        buildFilePatterns(rules);
+        buildDescriptionPatterns(rules);
+    }
+
+    protected void buildTypes(Document rules) {
+
+        NodeList typeList = (NodeList)XMLUtils.xpath(
+            rules,
+            TYPES_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (typeList == null) {
+            log.info("no rules found.");
+            return;
+        }
+
+        for (int i = 0, N = typeList.getLength(); i < N; ++i) {
+            Element typeElement = (Element)typeList.item(i);
+            String name = typeElement.getAttribute("name");
+            if (name.length() == 0) {
+                log.warn("ANNCLASS: rule has no name");
+                continue;
+            }
+
+            ImportAnnotationType aic = new ImportAnnotationType(name);
+
+            types.put(name, aic);
+
+            if (typeElement.getAttribute("default").equals("true")) {
+                defaultType = aic;
+            }
+        }
+    }
+
+    protected void buildFilePatterns(Document rules) {
+
+        NodeList patternList = (NodeList)XMLUtils.xpath(
+            rules,
+            FILE_PATTERNS_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (patternList == null) {
+            log.info("no file patterns found.");
+            return;
+        }
+
+        for (int i = 0, N = patternList.getLength(); i < N; ++i) {
+            Element element = (Element)patternList.item(i);
+            Pair pair = buildPair(element);
+            if (pair != null) {
+                filePatterns.add(pair);
+            }
+        }
+    }
+
+    protected void buildDescriptionPatterns(Document rules) {
+
+        NodeList patternList = (NodeList)XMLUtils.xpath(
+            rules,
+            DESCRIPTION_PATTERNS_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (patternList == null) {
+            log.info("no line patterns found.");
+            return;
+        }
+
+        for (int i = 0, N = patternList.getLength(); i < N; ++i) {
+            Element element = (Element)patternList.item(i);
+            Pair pair = buildPair(element);
+            if (pair != null) {
+                descPatterns.add(pair);
+            }
+        }
+    }
+
+    protected Pair buildPair(Element element) {
+        String pattern = element.getAttribute("pattern");
+        String type    = element.getAttribute("type");
+
+        if (pattern.length() == 0) {
+            log.warn("ANNCLASS: pattern has no 'pattern' attribute.");
+            return null;
+        }
+
+        if (type.length() == 0) {
+            log.warn("ANNCLASS: pattern has no 'type' attribute.");
+            return null;
+        }
+
+        ImportAnnotationType annType = types.get(type);
+
+        if (annType == null) {
+            log.warn("ANNCLASS: pattern has unknown type '" + type + "'");
+            return null;
+        }
+
+        Pattern p;
+
+        try {
+            p = Pattern.compile(pattern,
+                    Pattern.CASE_INSENSITIVE|Pattern.UNICODE_CASE);
+        }
+        catch (IllegalArgumentException iae) {
+            log.warn("ANNCLASS: pattern '" + pattern + "' is invalid.", iae);
+            return null;
+        }
+
+        return new Pair(p, annType);
+    }
+
+    public ImportAnnotationType getDefaultType() {
+        return defaultType;
+    }
+
+    public ImportAnnotationType classifyFile(String filename) {
+        return classifyFile(filename, null);
+    }
+
+    public ImportAnnotationType classifyFile(
+        String                filename,
+        ImportAnnotationType def
+    ) {
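+        // Strip a trailing ".km" extension before matching the file patterns.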
+        if (filename.toLowerCase().endsWith(".km")) {
+            filename = filename.substring(0, filename.length()-3);
+        }
+
+        for (Pair pair: filePatterns) {
+            ImportAnnotationType annType = pair.match(filename);
+            if (annType != null) {
+                return annType;
+            }
+        }
+
+        return def;
+    }
+
+    public ImportAnnotationType classifyDescription(String description) {
+        return classifyDescription(description, null);
+    }
+
+    public ImportAnnotationType classifyDescription(
+        String                description,
+        ImportAnnotationType def
+    ) {
+        for (Pair pair: descPatterns) {
+            ImportAnnotationType annType = pair.match(description);
+            if (annType != null) {
+                return annType;
+            }
+        }
+
+        return def;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,198 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.HashMap;
+import java.util.TreeSet;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.LineNumberReader;
+import java.io.InputStreamReader;
+import java.io.FileInputStream;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportAnnotation;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportEdge;
+import de.intevation.flys.importer.ImportAnnotationType;
+import de.intevation.flys.importer.ImportAttribute;
+import de.intevation.flys.importer.ImportPosition;
+
+public class AnnotationsParser
+{
+    private static Logger log = Logger.getLogger(AnnotationsParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
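+    // Sub directories (relative to the directory passed to parse()) that are
+    // scanned for *.km annotation files.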
+    public static final String [] TO_SCAN = {
+        "Basisdaten",
+        "Streckendaten",
+        ".." + File.separator +
+            "Morphologie" + File.separator + "Streckendaten"
+    };
+
+    protected HashMap<String, ImportAttribute> attributes;
+    protected HashMap<String, ImportPosition>  positions;
+    protected TreeSet<ImportAnnotation>        annotations;
+    protected AnnotationClassifier             classifier;
+
+    public AnnotationsParser() {
+        this(null);
+    }
+
+    public AnnotationsParser(AnnotationClassifier classifier) {
+        attributes  = new HashMap<String, ImportAttribute>();
+        positions   = new HashMap<String, ImportPosition>();
+        annotations = new TreeSet<ImportAnnotation>();
+        this.classifier = classifier;
+    }
+
+    public void parseFile(File file) throws IOException {
+        log.info("parsing km file: '" + file + "'");
+
+        ImportAnnotationType defaultIAT = null;
+
+        if (classifier != null) {
+            defaultIAT = classifier.classifyFile(
+                file.getName(),
+                classifier.getDefaultType());
+        }
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0
+                || line.startsWith("*")) {
+                    continue;
+                }
+
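+                // Expected columns: position ; attribute ; km range ("from # to")
+                // [; Unterkante [; Oberkante]]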
+                String [] parts = line.split("\\s*;\\s*");
+
+                if (parts.length < 3) {
+                    log.warn("ANN: not enough columns in line "
+                        + in.getLineNumber());
+                    continue;
+                }
+
+                ImportPosition position = positions.get(parts[0]);
+                if (position == null) {
+                    position = new ImportPosition(parts[0]);
+                    positions.put(parts[0], position);
+                }
+
+                ImportAttribute attribute = attributes.get(parts[1]);
+                if (attribute == null) {
+                    attribute = new ImportAttribute(parts[1]);
+                    attributes.put(parts[1], attribute);
+                }
+
+                String [] r = parts[2].replace(",", ".").split("\\s*#\\s*");
+
+                BigDecimal from, to;
+
+                try {
+                    from = new BigDecimal(r[0]);
+                    to   = r.length < 2 ? null : new BigDecimal(r[1]);
+                    if (to != null && from.compareTo(to) > 0) {
+                        BigDecimal t = from; from = to; to = t;
+                    }
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("ANN: invalid number in line " + in.getLineNumber());
+                    continue;
+                }
+
+                ImportEdge edge = null;
+
+                if (parts.length == 4) { // Only 'Unterkante'
+                    try {
+                        edge = new ImportEdge(
+                            null,
+                            new BigDecimal(parts[3].trim().replace(',', '.')));
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn("ANN: cannot parse 'Unterkante' in line " +
+                            in.getLineNumber());
+                    }
+                }
+                else if (parts.length > 4) { // 'Unterkante' and 'Oberkante'
+                    String bottom = parts[3].trim().replace(',', '.');
+                    String top    = parts[4].trim().replace(',', '.');
+                    try {
+                        BigDecimal b = bottom.length() == 0
+                            ? null
+                            : new BigDecimal(bottom);
+                        BigDecimal t = top.length() == 0
+                            ? null
+                            : new BigDecimal(top);
+                        edge = new ImportEdge(t, b);
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn(
+                            "ANN: cannot parse 'Unterkante' or 'Oberkante' in line "
+                            + in.getLineNumber());
+                    }
+                }
+
+                ImportRange range = new ImportRange(from, to);
+
+                ImportAnnotationType type = classifier != null
+                    ? classifier.classifyDescription(line, defaultIAT)
+                    : null;
+
+                ImportAnnotation annotation = new ImportAnnotation(
+                    attribute, position, range, edge, type);
+
+                if (!annotations.add(annotation)) {
+                    log.warn("ANN: duplicated annotation '" + parts[0] +
+                        "' in line " + in.getLineNumber());
+                }
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+    public void parse(File root) throws IOException {
+
+        for (String toScan: TO_SCAN) {
+            File directory = FileTools.repair(new File(root, toScan));
+            if (!directory.isDirectory()) {
+                log.warn("ANN: '" + directory + "' is not a directory.");
+                continue;
+            }
+            File [] files = directory.listFiles();
+            if (files == null) {
+                log.warn("ANN: cannot list directory '" + directory + "'");
+                continue;
+            }
+
+            for (File file: files) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".km")) {
+                    parseFile(file);
+                }
+            }
+        } // for all directories to scan
+    }
+
+    public List<ImportAnnotation> getAnnotations() {
+        return new ArrayList<ImportAnnotation>(annotations);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AtFileParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,206 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDischargeTable;
+import de.intevation.flys.importer.ImportDischargeTableValue;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.util.Date;
+import java.util.Calendar;
+
+import de.intevation.flys.importer.ImportTimeInterval;
+
+public class AtFileParser {
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    private static Logger logger = Logger.getLogger(AtFileParser.class);
+
+
+    // regular expression from hell to extract the time range
+    public static final Pattern DATE_LINE = Pattern.compile(
+        "^\\*\\s*Abflu[^t]+tafel?\\s*([^\\d]+)"  +
+        "(\\d{1,2})?\\.?(\\d{1,2})?\\.?(\\d{2,4})\\s*(?:(?:bis)|-)?\\s*" +
+        "(?:(\\d{1,2})?\\.?(\\d{1,2})?\\.?(\\d{2,4}))?\\s*.*$");
+
+    public AtFileParser() {
+    }
+
+
+    public ImportDischargeTable parse(File file) throws IOException {
+        return parse(file, "", 0);
+    }
+
+    public ImportDischargeTable parse(
+        File   file,
+        String prefix,
+        int    kind
+    )
+    throws IOException {
+
+        logger.info("parsing AT file: " + file);
+
+        BufferedReader br = null;
+
+        String line       = null;
+
+        boolean beginning = true;
+
+        ImportDischargeTable dischargeTable =
+            new ImportDischargeTable(kind, prefix + file.getName());
+
+        Date from = null;
+        Date to   = null;
+
+        try {
+            br = new BufferedReader(
+                 new InputStreamReader(
+                 new FileInputStream(file), ENCODING));
+
+            while ((line = br.readLine()) != null) {
+
+                String tmp = line.trim();
+
+                if (tmp.length() == 0) {
+                    continue;
+                }
+
+                Matcher m = DATE_LINE.matcher(tmp);
+                if (m.matches()) {
+                    from = guessDate(m.group(2), m.group(3), m.group(4));
+                    to   = guessDate(m.group(5), m.group(6), m.group(7));
+                    if (from == null) {
+                        Date t = from; from = to; to = t;
+                    }
+                    continue;
+                }
+
+                if (tmp.startsWith("#! name=")) {
+                    // XXX Skip the name, because we don't know where to save
+                    // it at the moment.
+
+                    //String name = tmp.substring(8);
+                    continue;
+                }
+
+                if (tmp.startsWith("#") || tmp.startsWith("*")) {
+                    continue;
+                }
+
+                String[] splits = tmp.replace(',', '.').split("\\s+");
+
+                if ((splits.length < 2) || (splits.length > 11)) {
+                    logger.warn("Found an invalid row in the AT file.");
+                    continue;
+                }
+
+                String strW = splits[0].trim();
+                double W    = Double.parseDouble(strW);
+
+                /* shift is used to differentiate between lines with
+                 * exactly 10 Qs and lines with fewer than 10 Qs. The shift
+                 * is only modified for the first line of the file.
+                 */
+                int shift = -1;
+
+                if (splits.length != 11 && beginning) {
+                    shift = 10 - splits.length;
+                }
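+
+                // Worked example (illustrative): a first line "200,0 5 10 15"
+                // has 4 columns, so shift = 10 - 4 = 6 and its three Qs map
+                // to W+7, W+8 and W+9; a full line with 10 Qs keeps shift at
+                // -1 and maps to W, W+1, ..., W+9.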
+
+
+                for (int i = 1; i < splits.length; i++) {
+                    double iW = W + shift + i;
+                    double iQ = Double.parseDouble(splits[i].trim());
+
+                    dischargeTable.addDischargeTableValue(
+                        new ImportDischargeTableValue(
+                            new BigDecimal(iQ/100.0),
+                            new BigDecimal(iW/100.0)));
+                }
+
+                beginning = false;
+            }
+        }
+        catch (NumberFormatException pe) {
+            logger.warn("AT: invalid number " + pe.getMessage());
+        }
+        finally {
+            if (br != null) {
+                br.close();
+            }
+        }
+
+        if (from != null) {
+            if (to != null && from.compareTo(to) > 0) {
+                Date t = from; from = to; to = t;
+            }
+            logger.info("from: " + from + " to: " + to);
+            ImportTimeInterval interval = new ImportTimeInterval(from, to);
+            dischargeTable.setTimeInterval(interval);
+        }
+
+        logger.info("Finished parsing AT file: " + file);
+
+        return dischargeTable;
+    }
+
+    public static Date guessDate(String day, String month, String year) {
+        if (day == null && month == null && year == null) {
+            return null;
+        }
+
+        logger.debug("day: " + day + " month: " + month + " year: " + year);
+
+        int dayI = 15;
+        if (day != null) {
+            try {
+                dayI = Integer.parseInt(day.trim());
+            }
+            catch (NumberFormatException nfe) {
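+                // keep the default day (15) if parsing fails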
+            }
+        }
+
+        int monthI = 6;
+        if (month != null) {
+            try {
+                monthI = Integer.parseInt(month.trim());
+            }
+            catch (NumberFormatException nfe) {
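+                // keep the default month (June) if parsing fails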
+            }
+        }
+
+        int yearI = 1900;
+        if (year != null) {
+            try {
+                yearI = Integer.parseInt(year.trim());
+                if (yearI < 100) {
+                    if (yearI < 20) {
+                        yearI += 2000;
+                    }
+                    else {
+                        yearI += 1900;
+                    }
+                }
+            }
+            catch (NumberFormatException nfe) {
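+                // keep the default year (1900) if parsing fails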
+            }
+        }
+
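+        // Build a date at noon of the (possibly defaulted) values and strip
+        // the milliseconds carried over from Calendar.getInstance().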
+        Calendar cal = Calendar.getInstance();
+        cal.set(yearI, monthI-1, dayI, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,52 @@
+package de.intevation.flys.importer.parsers;
+
+
+import java.math.BigDecimal;
+
+import java.text.ParseException;
+
+
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightEpoch;
+import de.intevation.flys.importer.ImportBedHeightEpochValue;
+
+
+public class BedHeightEpochParser extends BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightEpochParser.class);
+
+
+
+    @Override
+    protected ImportBedHeight newImportBedHeight(String description) {
+        return new ImportBedHeightEpoch(description);
+    }
+
+
+    @Override
+    protected void handleDataLine(ImportBedHeight obj, String line) {
+        String[] values = line.split(SEPERATOR_CHAR);
+
+        if (values == null || values.length < 2
+        || values[0].length() == 0 || values[1].length() == 0) {
+            //log.warn("Skip invalid data line: " + line);
+            return;
+        }
+
+        try {
+            ImportBedHeightEpochValue value = new ImportBedHeightEpochValue(
+                new BigDecimal(nf.parse(values[0]).doubleValue()),
+                new BigDecimal(nf.parse(values[1]).doubleValue())
+            );
+
+            obj.addValue(value);
+        }
+        catch (ParseException e) {
+            log.warn("Error while parsing number from data row: " + line);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,409 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightType;
+import de.intevation.flys.importer.ImportElevationModel;
+import de.intevation.flys.importer.ImportLocationSystem;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.model.BedHeightType;
+
+
+public abstract class BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Locale DEFAULT_LOCALE = Locale.GERMAN;
+
+    public static final String START_META_CHAR = "#";
+    public static final String SEPERATOR_CHAR  = ";";
+
+    public static final Pattern META_YEAR =
+        Pattern.compile("^Jahr: (\\d*).*");
+
+    public static final Pattern META_TIMEINTERVAL =
+        Pattern.compile("^Zeitraum: Epoche (\\d*)-(\\d*).*");
+
+    public static final Pattern META_TYPE =
+        Pattern.compile("^Aufnahmeart: (.*).*");
+
+    public static final Pattern META_LOCATION_SYSTEM =
+        Pattern.compile("^Lagesystem: (.*).*");
+
+    public static final Pattern META_CUR_ELEVATION_SYSTEM =
+        Pattern.compile("^H.hensystem:\\s(\\w++) (.* )??\\[(.*)\\].*");
+
+    public static final Pattern META_OLD_ELEVATION_SYSTEM =
+        Pattern.compile("^urspr.ngliches H.hensystem:\\s(\\w++) (.* )??\\[(.*)\\].*");
+
+    public static final Pattern META_SOUNDING_WIDTH =
+        Pattern.compile("^ausgewertete Peilbreite: (\\d*).*");
+
+    public static final Pattern META_RANGE =
+        Pattern.compile("^Strecke:\\D*(\\d++.\\d*)-(\\d++.\\d*).*");
+
+    public static final Pattern META_EVALUATION_BY =
+        Pattern.compile("^Auswerter: (.*).*");
+
+    public static final Pattern META_COMMENTS =
+        Pattern.compile("^Weitere Bemerkungen: (.*).*");
+
+
+    protected static NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    protected List<ImportBedHeight> bedHeights;
+
+
+    protected abstract ImportBedHeight newImportBedHeight(String description);
+
+    protected abstract void handleDataLine(
+        ImportBedHeight importBedHeight,
+        String          line
+    );
+
+
+
+    public BedHeightParser() {
+        this.bedHeights = new ArrayList<ImportBedHeight>();
+    }
+
+
+    public List<ImportBedHeight> getBedHeights() {
+        return bedHeights;
+    }
+
+
+    public void parse(File file) throws IOException {
+        log.info("Parsing bed height single file '" + file + "'");
+
+        ImportBedHeight obj = newImportBedHeight(file.getName());
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                if (line.startsWith(START_META_CHAR)) {
+                    handleMetaLine(obj, line);
+                }
+                else {
+                    handleDataLine(obj, line);
+                }
+            }
+
+            log.info("File contained " + obj.getValueCount() + " values.");
+            bedHeights.add(obj);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+
+    protected static String stripMetaLine(String line) {
+        String tmp = line.substring(1, line.length());
+
+        if (tmp.startsWith(" ")) {
+            return tmp.substring(1, tmp.length());
+        }
+        else {
+            return tmp;
+        }
+    }
+
+
+    public static Date getDateFromYear(int year) {
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1);
+
+        return cal.getTime();
+    }
+
+
+    protected void handleMetaLine(ImportBedHeight obj, String line) {
+        String meta = stripMetaLine(line);
+
+        if (handleMetaYear(obj, meta)) {
+            return;
+        }
+        else if (handleMetaTimeInterval(obj, meta)) {
+            return;
+        }
+        else if (handleMetaSoundingWidth(obj, meta)) {
+            return;
+        }
+        else if (handleMetaComment(obj, meta)) {
+            return;
+        }
+        else if (handleMetaEvaluationBy(obj, meta)) {
+            return;
+        }
+        else if (handleMetaRange(obj, meta)) {
+            return;
+        }
+        else if (handleMetaType(obj, meta)) {
+            return;
+        }
+        else if (handleMetaLocationSystem(obj, meta)) {
+            return;
+        }
+        else if (handleMetaCurElevationModel(obj, meta)) {
+            return;
+        }
+        else if (handleMetaOldElevationModel(obj, meta)) {
+            return;
+        }
+        else {
+            log.warn("BHP: Meta line did not match any known type: " + line);
+        }
+    }
+
+
+    protected boolean handleMetaYear(ImportBedHeight obj, String line) {
+        Matcher m = META_YEAR.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            try {
+                obj.setYear(Integer.valueOf(tmp));
+                return true;
+            }
+            catch (NumberFormatException e) {
+                log.warn("BHP: Error while parsing year!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaTimeInterval(ImportBedHeight obj, String line) {
+        Matcher m = META_TIMEINTERVAL.matcher(line);
+
+        if (m.matches()) {
+            String lo = m.group(1);
+            String up = m.group(2);
+
+            log.debug("Found time interval: " + lo + " - " + up);
+
+            try {
+                int lower = Integer.valueOf(lo);
+                int upper = Integer.valueOf(up);
+
+                Date fromYear = getDateFromYear(lower);
+                Date toYear   = getDateFromYear(upper);
+
+                obj.setTimeInterval(new ImportTimeInterval(fromYear, toYear));
+            }
+            catch (NumberFormatException e) {
+                log.warn("BHP: Error while parsing timeinterval!", e);
+            }
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaSoundingWidth(ImportBedHeight obj, String line) {
+        Matcher m = META_SOUNDING_WIDTH.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            try {
+                obj.setSoundingWidth(Integer.valueOf(tmp));
+                return true;
+            }
+            catch (NumberFormatException e) {
+                log.warn("BHP: Error while parsing sounding width!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaComment(ImportBedHeight obj, String line) {
+        Matcher m = META_COMMENTS.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            obj.setDescription(tmp);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaEvaluationBy(
+        ImportBedHeight obj,
+        String                line
+    ) {
+        Matcher m = META_EVALUATION_BY.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+            tmp = tmp.replace(";", "");
+
+            obj.setEvaluationBy(tmp);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaRange(ImportBedHeight obj, String line) {
+        Matcher m = META_RANGE.matcher(line);
+
+        if (m.matches() && m.groupCount() >= 2) {
+            String a = m.group(1).replace(";", "");
+            String b = m.group(2).replace(";", "");
+
+            try {
+                BigDecimal lower = new BigDecimal(nf.parse(a).doubleValue());
+                BigDecimal upper = new BigDecimal(nf.parse(b).doubleValue());
+
+                obj.setRange(new ImportRange(lower, upper));
+
+                return true;
+            }
+            catch (ParseException e) {
+                log.warn("BHP: Error while parsing range!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaType(ImportBedHeight obj, String line) {
+        Matcher m = META_TYPE.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1).replace(";", "");
+
+            String name = BedHeightType.getBedHeightName(tmp);
+
+            if (name != null) {
+                obj.setType(new ImportBedHeightType(name, tmp));
+                return true;
+            }
+            else {
+                log.warn("Unknown bed height type: '" + tmp + "'");
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaLocationSystem(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_LOCATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1).replace(";", "");
+
+            obj.setLocationSystem(new ImportLocationSystem(tmp, tmp));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaCurElevationModel(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_CUR_ELEVATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String num  = m.group(2);
+            String unit = m.group(3);
+
+            obj.setCurElevationModel(new ImportElevationModel(
+                name + " " + num,
+                new ImportUnit(unit)
+            ));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaOldElevationModel(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_OLD_ELEVATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String num  = m.group(2);
+            String unit = m.group(3);
+
+            obj.setOldElevationModel(new ImportElevationModel(
+                name + " " + num,
+                new ImportUnit(unit)
+            ));
+
+            return true;
+        }
+
+        return false;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer.parsers;
+
+
+import java.math.BigDecimal;
+
+import java.text.ParseException;
+
+
+
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightSingle;
+import de.intevation.flys.importer.ImportBedHeightSingleValue;
+
+
+public class BedHeightSingleParser extends BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightSingleParser.class);
+
+
+
+    @Override
+    protected ImportBedHeight newImportBedHeight(String description) {
+        return new ImportBedHeightSingle(description);
+    }
+
+
+
+    @Override
+    protected void handleDataLine(ImportBedHeight obj, String line) {
+        String[] values = line.split(SEPERATOR_CHAR);
+
+        if (values == null || values.length < 6) {
+            //log.warn("BSP: Error while parsing data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            ImportBedHeightSingleValue value = new ImportBedHeightSingleValue(
+                (ImportBedHeightSingle) obj,
+                new BigDecimal(nf.parse(values[0]).doubleValue()),
+                new BigDecimal(nf.parse(values[1]).doubleValue()),
+                new BigDecimal(nf.parse(values[2]).doubleValue()),
+                new BigDecimal(nf.parse(values[3]).doubleValue()),
+                new BigDecimal(nf.parse(values[4]).doubleValue()),
+                new BigDecimal(nf.parse(values[5]).doubleValue())
+            );
+
+            obj.addValue(value);
+        }
+        catch (ParseException e) {
+            log.warn("BSP: Error while parsing data row.", e);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.importer.parsers;
+
+import de.intevation.flys.importer.ImportFlowVelocityMeasurement;
+import de.intevation.flys.importer.ImportFlowVelocityMeasurementValue;
+
+import java.math.BigDecimal;
+
+import java.text.DateFormat;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+
+public class FlowVelocityMeasurementParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(FlowVelocityMeasurementParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+    private static final DateFormat df =
+        new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
+
+
+    private List<ImportFlowVelocityMeasurement> measurements;
+
+    private ImportFlowVelocityMeasurement current;
+
+
+    public FlowVelocityMeasurementParser() {
+        measurements = new ArrayList<ImportFlowVelocityMeasurement>();
+    }
+
+
+    public List<ImportFlowVelocityMeasurement> getMeasurements() {
+        return measurements;
+    }
+
+    @Override
+    protected void reset() {
+        current = new ImportFlowVelocityMeasurement();
+    }
+
+
+    @Override
+    protected void finish() {
+        measurements.add(current);
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    public void handleMetaLine(String line) {
+        line = line.replace(";", "");
+        current.setDescription(line);
+    }
+
+
+    public void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
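+        // Column layout (derived from the indices used below):
+        //   [1] km, [3] date, [4] time, [5] W, [6] Q, [7] v, [8] optional description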
+        if (cols.length < 8) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            double km     = nf.parse(cols[1]).doubleValue();
+            double w      = nf.parse(cols[5]).doubleValue();
+            double q      = nf.parse(cols[6]).doubleValue();
+            double v      = nf.parse(cols[7]).doubleValue();
+
+            String timestr     = cols[3] + " " + cols[4];
+            String description = cols.length > 8 ? cols[8] : null;
+
+            current.addValue(new ImportFlowVelocityMeasurementValue(
+                df.parse(timestr),
+                new BigDecimal(km),
+                new BigDecimal(w),
+                new BigDecimal(q),
+                new BigDecimal(v),
+                description
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing flow velocity values.", pe);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,239 @@
+package de.intevation.flys.importer.parsers;
+
+import java.math.BigDecimal;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDischargeZone;
+import de.intevation.flys.importer.ImportFlowVelocityModel;
+import de.intevation.flys.importer.ImportFlowVelocityModelValue;
+
+
+public class FlowVelocityModelParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(FlowVelocityModelParser.class);
+
+    private static final Pattern META_REGEX =
+        Pattern.compile(".*Rechnung (.*) \\(Pegel (.*)\\).*");
+
+    private static final Pattern META_GAUGE =
+        Pattern.compile("(.*) Q=(\\w*)m3/s");
+
+    private static final Pattern META_MAINVALUE_A =
+        Pattern.compile("([a-zA-Z]+)+(\\d+)*");
+
+    private static final Pattern META_MAINVALUE_B =
+        Pattern.compile("(([a-zA-Z]+)+(\\d+)*)\\s*-\\s*(([a-zA-Z]+)+(\\d+)*)");
+
+    private static final Pattern META_MAINVALUE_C =
+        Pattern.compile("([0-9]++)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*)");
+
+    private static final Pattern META_MAINVALUE_D =
+        Pattern.compile("(([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*)) bis (([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*))");
+
+    private static final Pattern META_MAINVALUE_E =
+        Pattern.compile("(([a-zA-Z]+)+(\\d+)*) bis (([a-zA-Z]+)+(\\d+)*)");
+
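+    // Illustrative meta line (not from real data) that META_REGEX is meant to
+    // match: "Rechnung MQ (Pegel Beispiel Q=2000m3/s)"; group(1) is the main
+    // value part, group(2) the gauge part handled by META_GAUGE.
+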
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    private List<ImportFlowVelocityModel> models;
+
+    private ImportFlowVelocityModel current;
+
+
+    public FlowVelocityModelParser() {
+        models = new ArrayList<ImportFlowVelocityModel>();
+    }
+
+
+    public List<ImportFlowVelocityModel> getModels() {
+        return models;
+    }
+
+    @Override
+    protected void reset() {
+        current = new ImportFlowVelocityModel();
+    }
+
+
+    @Override
+    protected void finish() {
+        models.add(current);
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        Matcher m = META_REGEX.matcher(line);
+
+        if (m.matches()) {
+            String mainValueStr = m.group(1);
+            String gaugeStr     = m.group(2);
+
+            Object[] valueData = handleMainValueString(mainValueStr);
+            Object[] gaugeData = handleGaugeString(gaugeStr);
+
+            if (valueData == null || valueData.length < 2) {
+                log.warn("skip invalid MainValue part: '" + line + "'");
+                return;
+            }
+
+            if (gaugeData == null || gaugeData.length < 2) {
+                log.warn("skip invalid gauge part: '" + line + "'");
+                return;
+            }
+
+            if (log.isDebugEnabled()) {
+                log.debug("Found meta information:");
+                log.debug("   Gauge: " + gaugeData[0]);
+                log.debug("   Value: " + gaugeData[1]);
+                log.debug("   Lower: " + valueData[0]);
+                log.debug("   upper: " + valueData[1]);
+            }
+
+            current.setDischargeZone(new ImportDischargeZone(
+                (String) gaugeData[0],
+                (BigDecimal) gaugeData[1],
+                (String) valueData[0],
+                (String) valueData[1]
+            ));
+        }
+    }
+
+
+    protected Object[] handleMainValueString(String mainValueStr) {
+        Matcher mA = META_MAINVALUE_A.matcher(mainValueStr);
+        if (mA.matches()) {
+            String name = mA.group(0);
+
+            return new Object[] { name, name };
+        }
+
+        Matcher mB = META_MAINVALUE_B.matcher(mainValueStr);
+        if (mB.matches()) {
+            String lower = mB.group(1);
+            String upper = mB.group(4);
+
+            return new Object[] { lower, upper };
+        }
+
+        Matcher mC = META_MAINVALUE_C.matcher(mainValueStr);
+        if (mC.matches()) {
+            String facA  = mC.group(1);
+            String nameA = mC.group(2);
+            String facB  = mC.group(3);
+            String nameB = mC.group(4);
+
+            String fac  = facA  != null ? facA  : facB;
+            String name = nameA != null ? nameA : nameB;
+
+            String mainValue = fac + " " + name;
+
+            return new Object[] { mainValue, mainValue };
+        }
+
+        Matcher mD = META_MAINVALUE_D.matcher(mainValueStr);
+        if (mD.matches()) {
+            String loFacA  = mD.group(2);
+            String loNameA = mD.group(3);
+            String loFacB  = mD.group(4);
+            String loNameB = mD.group(5);
+
+            String upFacA  = mD.group(7);
+            String upNameA = mD.group(8);
+            String upFacB  = mD.group(9);
+            String upNameB = mD.group(10);
+
+            String loFac  = loFacA  != null ? loFacA  : loFacB;
+            String loName = loNameA != null ? loNameA : loNameB;
+
+            String upFac  = upFacA  != null ? upFacA  : upFacB;
+            String upName = upNameA != null ? upNameA : upNameB;
+
+            String loMainValue = loFac + " " + loName;
+            String upMainValue = upFac + " " + upName;
+
+            return new Object[] { loMainValue, upMainValue };
+        }
+
+        Matcher mE = META_MAINVALUE_E.matcher(mainValueStr);
+        if (mE.matches()) {
+            String lower = mE.group(1);
+            String upper = mE.group(4);
+
+            return new Object[] { lower, upper };
+        }
+
+        return null;
+    }
+
+
+    protected Object[] handleGaugeString(String gaugeStr) {
+        Matcher m = META_GAUGE.matcher(gaugeStr);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String qStr = m.group(2);
+
+            try {
+                return new Object[] {
+                    name,
+                    new BigDecimal(nf.parse(qStr).doubleValue()) };
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing Q value: '" + qStr + "'");
+            }
+        }
+
+        return null;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
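+        // Column layout (derived from the variable names below):
+        //   [0] km, [1] Q, [2] total velocity, [3] main-channel velocity, [4] stress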
+        if (cols.length < 5) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            double km     = nf.parse(cols[0]).doubleValue();
+            double q      = nf.parse(cols[1]).doubleValue();
+            double total  = nf.parse(cols[2]).doubleValue();
+            double main   = nf.parse(cols[3]).doubleValue();
+            double stress = nf.parse(cols[4]).doubleValue();
+
+            current.addValue(new ImportFlowVelocityModelValue(
+                new BigDecimal(km),
+                new BigDecimal(q),
+                new BigDecimal(total),
+                new BigDecimal(main),
+                new BigDecimal(stress)
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing flow velocity values.", pe);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/HYKParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,344 @@
+package de.intevation.flys.importer.parsers;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportHYK;
+import de.intevation.flys.importer.ImportHYKEntry;
+import de.intevation.flys.importer.ImportHYKFormation;
+import de.intevation.flys.importer.ImportHYKFlowZone;
+import de.intevation.flys.importer.ImportHYKFlowZoneType;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Date;
+import java.util.Calendar;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class HYKParser
+{
+    private static Logger log = Logger.getLogger(HYKParser.class);
+
+    public interface Callback {
+        boolean hykAccept(File file);
+        void    hykParsed(HYKParser parser);
+    } // interface Callback
+
+    public static enum State {
+        LINE_1, LINE_2, LINE_3, LINE_4, LINE_5, LINE_6
+    };
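+
+    // Rough sketch of the record layout the state machine below walks through
+    // (derived from the parsing code; one ImportHYKEntry per LINE_1 record):
+    //   LINE_1: km [year] numFormations
+    //   LINE_2: numZones bottom top           (per formation)
+    //   LINE_3: one flow zone type per zone
+    //   LINE_4/5: zone coordinates, possibly continued on a second line
+    //   LINE_6: distanceVL distanceHF distanceVR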
+
+    private static final String ENCODING = "ISO-8859-1";
+
+    protected Map<String, ImportHYKFlowZoneType> flowZoneTypes;
+
+    protected ImportHYK hyk;
+
+    public HYKParser() {
+        flowZoneTypes = new HashMap<String, ImportHYKFlowZoneType>();
+    }
+
+    public ImportHYK getHYK() {
+        return hyk;
+    }
+
+    private static Date yearToDate(Integer year) {
+        if (year == null) {
+            return null;
+        }
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+
+    public boolean parse(File file) {
+
+        boolean debug = log.isDebugEnabled();
+
+        log.info("Parsing HYK file '" + file + "'");
+
+        LineNumberReader in = null;
+
+        String description =
+            file.getParentFile().getName() + "/" + file.getName();
+
+        hyk = new ImportHYK(null, description);
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line;
+
+            State state = State.LINE_1;
+
+            int numFormations = 0;
+
+            BigDecimal km         = null;
+            BigDecimal top        = null;
+            BigDecimal bottom     = null;
+            BigDecimal distanceVL = null;
+            BigDecimal distanceHF = null;
+            BigDecimal distanceVR = null;
+
+            Integer    year       = null;
+            int        numZones   = 0;
+
+            ImportHYKFlowZoneType [] fzts     = null;
+            BigDecimal            [] coords   = null;
+            int                      coordPos = 0;
+
+            ImportHYKEntry     entry     = null;
+            ImportHYKFormation formation = null;
+
+            while ((line = in.readLine()) != null) {
+
+                if (line.startsWith("*") || line.startsWith("----")) {
+                    continue;
+                }
+
+                line = line.trim();
+
+                if (state != State.LINE_5 && line.length() == 0) {
+                    continue;
+                }
+
+                String [] parts = line.split("\\s+");
+
+                if (debug) {
+                    log.debug("'" + line + "': " + state);
+                }
+
+                switch (state) {
+                    case LINE_1:
+                        if (parts.length < 2) {
+                            log.error("HYK 1: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+
+                        if (parts.length == 2) {
+                            // no year given
+                            year = null;
+                        }
+                        else {
+                            try {
+                                year = Integer.valueOf(parts[1]);
+                            }
+                            catch (NumberFormatException nfe) {
+                                log.error(
+                                    "year is not an integer in line " +
+                                    in.getLineNumber());
+                                return false;
+                            }
+                        }
+                        try {
+                            km = new BigDecimal(parts[0]);
+                            numFormations = Integer.parseInt(
+                                parts[parts.length > 2 ? 2 : 1]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error(
+                                "parsing number of formations " +
+                                "or km failed in line " + in.getLineNumber());
+                            return false;
+                        }
+                        entry = new ImportHYKEntry(hyk, km, yearToDate(year));
+                        hyk.addEntry(entry);
+
+                        state = State.LINE_2;
+                        break;
+
+                    case LINE_2:
+                        if (parts.length < 3) {
+                            log.error("HYK 2: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            numZones = Integer.parseInt(parts[0]);
+                            bottom   = new BigDecimal(parts[1]);
+                            top      = new BigDecimal(parts[2]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error(
+                                "HYK: parsing num zones, bottom or top height " +
+                                "failed in line " + in.getLineNumber());
+                            return false;
+                        }
+                        formation = new ImportHYKFormation();
+                        formation.setBottom(bottom);
+                        formation.setTop(top);
+                        entry.addFormation(formation);
+
+                        state = State.LINE_3;
+                        break;
+
+                    case LINE_3:
+                        if (parts.length != numZones) {
+                            log.error(
+                                "HYK: number of flow zones mismatches " +
+                                "in line " + in.getLineNumber());
+                            return false;
+                        }
+
+                        fzts = new ImportHYKFlowZoneType[parts.length];
+                        for (int i = 0; i < fzts.length; ++i) {
+                            fzts[i] = getFlowZoneType(parts[i]);
+                        }
+                        coords = new BigDecimal[numZones];
+                        state = State.LINE_4;
+                        break;
+
+                    case LINE_4:
+                        try {
+                            int N = Math.min(parts.length, coords.length);
+                            for (coordPos = 0; coordPos < N; ++coordPos) {
+                                coords[coordPos] =
+                                    new BigDecimal(parts[coordPos]);
+                            }
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("HYK: cannot parse number in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        state = State.LINE_5;
+                        break;
+
+                    case LINE_5:
+                        if (parts.length + coordPos < coords.length) {
+                            log.error("HYK 5: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            for (int i = 0;
+                                i < parts.length && coordPos < coords.length;
+                                ++i, ++coordPos
+                            ) {
+                                coords[coordPos] = new BigDecimal(parts[i]);
+                            }
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("HYK: cannot parse number in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        for (int i = 0; i < coords.length; ++i) {
+                            BigDecimal a = coords[i];
+                            BigDecimal b = coords[i == coords.length-1 ? i : i+1];
+                            if (a.compareTo(b) > 0) {
+                                log.warn("HYK: zone coordinates swapped in line " +
+                                    in.getLineNumber());
+                                BigDecimal c = a; a = b; b = c;
+                            }
+                            ImportHYKFlowZone zone = new ImportHYKFlowZone(
+                                formation, fzts[i], a, b);
+                            formation.addFlowZone(zone);
+                        }
+                        state = State.LINE_6;
+                        break;
+
+                    case LINE_6:
+                        if (parts.length < 3) {
+                            log.error("HYK 6: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            distanceVL = new BigDecimal(parts[0]);
+                            distanceHF = new BigDecimal(parts[1]);
+                            distanceVR = new BigDecimal(parts[2]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("HYK: cannot parse number in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        formation.setDistanceVL(distanceVL);
+                        formation.setDistanceHF(distanceHF);
+                        formation.setDistanceVR(distanceVR);
+
+                        // continue with next formation.
+                        state = --numFormations > 0 // formations left?
+                            ? State.LINE_2
+                            : State.LINE_1;
+                        break;
+                }
+            }
+        }
+        catch (IOException ioe) {
+            log.error("HYK: Error reading file.", ioe);
+            return false;
+        }
+        finally {
+            if (in != null) {
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {
+                    log.error("HYK: Error closing file.", ioe);
+                }
+            }
+        }
+        return true;
+    }
+
+    protected ImportHYKFlowZoneType getFlowZoneType(String name) {
+        name = name.toUpperCase();
+        ImportHYKFlowZoneType fzt = flowZoneTypes.get(name);
+        if (fzt == null) {
+            log.info("New flow zone type: " + name);
+            fzt = new ImportHYKFlowZoneType(name);
+            flowZoneTypes.put(name, fzt);
+        }
+        return fzt;
+    }
+
+    protected void reset() {
+        hyk = null;
+    }
+
+    public void parseHYKs(File root, final Callback callback) {
+
+        FileTools.walkTree(root, new FileTools.FileVisitor() {
+            @Override
+            public boolean visit(File file) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".hyk")
+                && (callback == null || callback.hykAccept(file))) {
+                    reset();
+                    boolean success = parse(file);
+                    log.info("parsing " + (success ? "succeeded" : "failed"));
+                    if (success && callback != null) {
+                        callback.hykParsed(HYKParser.this);
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    public static void main(String [] args) {
+
+        HYKParser parser = new HYKParser();
+
+        for (String arg: args) {
+            parser.parseHYKs(new File(arg), null);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,135 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportRiver;
+
+public class InfoGewParser
+{
+    private static Logger log = Logger.getLogger(InfoGewParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Pattern GEWAESSER =
+        Pattern.compile("^\\s*Gew\u00e4sser\\s*:\\s*(.+)");
+
+    public static final Pattern WST_DATEI =
+        Pattern.compile("^\\s*WSTDatei\\s*:\\s*(.+)");
+
+    public static final Pattern BB_INFO =
+        Pattern.compile("^\\s*B\\+B-Info\\s*:\\s*(.+)");
+
+    protected ArrayList<ImportRiver> rivers;
+
+    protected AnnotationClassifier annotationClassifier;
+
+    public InfoGewParser() {
+        this(null);
+    }
+
+    public InfoGewParser(AnnotationClassifier annotationClassifier) {
+        rivers = new ArrayList<ImportRiver>();
+        this.annotationClassifier = annotationClassifier;
+    }
+
+    public List<ImportRiver> getRivers() {
+        return rivers;
+    }
+
+    public static final String normalize(String f) {
+        return f.replace("\\", "/").replace("/", File.separator);
+    }
+
+    public void parse(File file) throws IOException {
+
+        LineNumberReader in = null;
+
+        File root = file.getParentFile();
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+
+            String riverName  = null;
+            File   wstFile    = null;
+            File   bbInfoFile = null;
+
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+                Matcher m = GEWAESSER.matcher(line);
+
+                if (m.matches()) {
+                    String river = m.group(1);
+                    log.info("Found river '" + river + "'");
+                    if (riverName != null) {
+                        rivers.add(new ImportRiver(
+                            riverName,
+                            wstFile,
+                            bbInfoFile,
+                            annotationClassifier));
+                    }
+                    riverName  = river;
+                    wstFile    = null;
+                    bbInfoFile = null;
+                }
+                else if ((m = WST_DATEI.matcher(line)).matches()) {
+                    String wstFilename = m.group(1);
+                    File wst = new File(wstFilename = normalize(wstFilename));
+                    if (!wst.isAbsolute()) {
+                        wst = new File(root, wstFilename);
+                    }
+                    wst = FileTools.repair(wst);
+                    log.info("Found wst file '" + wst + "'");
+                    if (!wst.isFile() || !wst.canRead()) {
+                        log.warn("cannot access WST file '" + wstFilename + "'");
+                        continue;
+                    }
+                    wstFile = wst;
+                }
+                else if ((m = BB_INFO.matcher(line)).matches()) {
+                    //TODO: Make it relative to the wst file.
+                    String bbInfo = m.group(1);
+                    bbInfoFile = new File(normalize(bbInfo));
+                }
+            }
+            if (riverName != null) {
+                rivers.add(new ImportRiver(
+                    riverName,
+                    wstFile,
+                    bbInfoFile,
+                    annotationClassifier));
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+
+        for (ImportRiver river: rivers) {
+            river.parseDependencies();
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/LineParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Locale;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+
+public abstract class LineParser {
+
+    private static final Logger log = Logger.getLogger(LineParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Locale DEFAULT_LOCALE = Locale.GERMAN;
+
+    public static final String START_META_CHAR = "#";
+    public static final String SEPERATOR_CHAR  = ";";
+
+
+    protected abstract void handleLine(String line);
+
+    protected abstract void reset();
+
+    protected abstract void finish();
+
+
+    /**
+     * This method reads each line of <i>file</i>. At the beginning,
+     * <i>reset()</i> is called; afterwards, for each line <i>handleLine()</i>
+     * is called; at the end <i>finish()</i> is called.
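+     *
+     * A minimal subclass sketch (illustrative only):
+     * <pre>
+     * class LineCounter extends LineParser {
+     *     private int count;
+     *     protected void reset()              { count = 0; }
+     *     protected void handleLine(String l) { count++; }
+     *     protected void finish()             { } // e.g. report the count
+     * }
+     * </pre>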
+     *
+     * @param file The file which should be parsed.
+     */
+    public void parse(File file) throws IOException {
+        log.info("Parsing file '" + file + "'");
+
+        reset();
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                handleLine(line);
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+
+        finish();
+    }
+
+
+    protected static String stripMetaLine(String line) {
+        String tmp = line.substring(1, line.length());
+
+        if (tmp.startsWith(" ")) {
+            return tmp.substring(1, tmp.length());
+        }
+        else {
+            return tmp;
+        }
+    }
+
+
+    public static Date getDateFromYear(int year) {
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1);
+
+        return cal.getTime();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,123 @@
+package de.intevation.flys.importer.parsers;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportMorphWidth;
+import de.intevation.flys.importer.ImportMorphWidthValue;
+import de.intevation.flys.importer.ImportUnit;
+
+
+public class MorphologicalWidthParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(MorphologicalWidthParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    protected List<ImportMorphWidth> morphWidths;
+
+    protected ImportMorphWidth current;
+
+
+    public MorphologicalWidthParser() {
+        morphWidths = new ArrayList<ImportMorphWidth>();
+    }
+
+
+    @Override
+    protected void reset() {
+        current = new ImportMorphWidth();
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            morphWidths.add(current);
+        }
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else {
+            log.warn("MWP: Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < 2) {
+            log.warn("MWP: skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            BigDecimal km    = new BigDecimal(nf.parse(vals[0]).doubleValue());
+            BigDecimal width = new BigDecimal(nf.parse(vals[1]).doubleValue());
+
+            String desc = vals.length > 2 ? vals[2] : null;
+
+            current.addValue(new ImportMorphWidthValue(
+                km,
+                width,
+                desc
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("MWP: Error while parsing numbers in '" + line + "'");
+        }
+    }
+
+
+    public List<ImportMorphWidth> getMorphologicalWidths() {
+        return morphWidths;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/PRFParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,423 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.io.File;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.XY;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+public class PRFParser
+{
+    private static Logger log = Logger.getLogger(PRFParser.class);
+
+    public static final String ENCODING =
+        System.getProperty("flys.backend.prf.encoding", "ISO-8859-1");
+
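+    // Matches a Fortran-like format descriptor, e.g. "(10x,50(f7.2,f8.2))":
+    // characters to delete, maximum repetitions and two fixed-width
+    // floating point fields (the example values are only illustrative).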
+    public static final Pattern DATA_PATTERN =
+        Pattern.compile(
+            "\\((\\d+)x\\s*,\\s*(\\d+)\\(" +
+            "\\s*f(\\d+)\\.(\\d+)\\s*,\\s*f(\\d+)\\.(\\d+)\\s*\\)?\\)?");
+
+    public static final Pattern KM_PATTERN =
+        Pattern.compile("\\((\\d+)x\\s*,\\s*f(\\d+)\\.(\\d+)\\s*\\)?");
+
+    public static final Pattern YEAR_PATTERN =
+        Pattern.compile("(\\d{4})");
+
+    public static final int MIN_YEAR = 1800;
+    public static final int MAX_YEAR = 2100;
+
+    public interface Callback {
+        boolean prfAccept(File file);
+        void    prfParsed(PRFParser parser);
+    } // interface Parser
+
+    public static class DataFormat {
+
+        protected int deleteChars;
+        protected int maxRepetitions;
+        protected int firstIntegerPlaces;
+        protected int firstFractionPlaces;
+        protected int secondIntegerPlaces;
+        protected int secondFractionPlaces;
+
+        protected double firstShift;
+        protected double secondShift;
+
+        public DataFormat() {
+        }
+
+        public DataFormat(Matcher m) {
+            deleteChars          = Integer.parseInt(m.group(1));
+            maxRepetitions       = Integer.parseInt(m.group(2));
+            firstIntegerPlaces   = Integer.parseInt(m.group(3));
+            firstFractionPlaces  = Integer.parseInt(m.group(4));
+            secondIntegerPlaces  = Integer.parseInt(m.group(5));
+            secondFractionPlaces = Integer.parseInt(m.group(6));
+
+            firstShift  = Math.pow(10, firstFractionPlaces);
+            secondShift = Math.pow(10, secondFractionPlaces);
+        }
+
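+        // Extracts up to maxRepetitions (x, y) pairs from a fixed-width data
+        // line. Returns 1 if the line was completely filled, 0 if it ended
+        // early and -1 if a value could not be parsed; the caller uses
+        // results < 1 to derive how many lines to skip next.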
+        public int extractData(String line, List<XY> kmData) {
+            int L = line.length();
+            if (L <= deleteChars) {
+                return -1;
+            }
+
+            int pos = deleteChars;
+
+            boolean debug = log.isDebugEnabled();
+
+
+            int rep = 0;
+            for (;rep < maxRepetitions; ++rep) {
+                if (pos >= L || pos + firstIntegerPlaces >= L) {
+                    break;
+                }
+                String first = line.substring(
+                    pos, pos + firstIntegerPlaces);
+
+                String second = line.substring(
+                    pos + firstIntegerPlaces,
+                    Math.min(L, pos+firstIntegerPlaces+secondIntegerPlaces));
+
+                double x, y;
+                try {
+                    x = Double.parseDouble(first);
+                    y = Double.parseDouble(second);
+                }
+                catch (NumberFormatException nfe) {
+                    // broken line -> subtract one from the data-set skip
+                    return -1;
+                }
+
+                if (first.indexOf('.') < 0) {
+                    x /= firstShift;
+                }
+
+                if (firstFractionPlaces > 0) {
+                    x = (int)(x*firstShift)/firstShift;
+                }
+
+                if (second.indexOf('.') < 0) {
+                    y /= secondShift;
+                }
+
+                if (secondFractionPlaces > 0) {
+                    y = (int)(y*secondShift)/secondShift;
+                }
+
+                kmData.add(new XY(x, y, kmData.size()));
+
+                pos += firstIntegerPlaces + secondIntegerPlaces;
+            }
+
+            return rep == maxRepetitions ? 1 : 0;
+        }
+    } // class DataFormat
+
+    public static class KMFormat {
+
+        protected int deleteChars;
+        protected int integerPlaces;
+        protected int fractionPlaces;
+
+        protected double shift;
+
+        public KMFormat() {
+        }
+
+        public KMFormat(Matcher m) {
+            deleteChars    = Integer.parseInt(m.group(1));
+            integerPlaces  = Integer.parseInt(m.group(2));
+            fractionPlaces = Integer.parseInt(m.group(3));
+
+            shift = Math.pow(10, fractionPlaces);
+        }
+
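+        // Extracts the station (km) from a fixed-width field; values without
+        // a decimal point are divided by 10^fractionPlaces (fixed-point
+        // notation).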
+        public double extractKm(String line) throws NumberFormatException {
+
+            if (line.length() <= deleteChars) {
+                throw new NumberFormatException("line too short");
+            }
+
+            String kmS =
+                line.substring(deleteChars, deleteChars+integerPlaces);
+
+            double km = Double.parseDouble(kmS.trim());
+
+            if (kmS.indexOf('.') < 0) {
+                km /= shift;
+            }
+
+            return fractionPlaces > 0
+                ? ((int)(km*shift))/shift
+                : km;
+        }
+    } // class KMFormat
+
+    protected Map<Double, List<XY>> data;
+
+    protected Integer year;
+
+    protected String description;
+
+
+    public PRFParser() {
+        data = new TreeMap<Double, List<XY>>();
+    }
+
+    public Integer getYear() {
+        return year;
+    }
+
+    public void setYear(Integer year) {
+        this.year = year;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public Map<Double, List<XY>> getData() {
+        return data;
+    }
+
+    public void setData(Map<Double, List<XY>> data) {
+        this.data = data;
+    }
+
+    protected void sortLists() {
+        for (List<XY> xy: data.values()) {
+            Collections.sort(xy);
+        }
+    }
+
+    public static final Integer findYear(String s) {
+        Matcher m = YEAR_PATTERN.matcher(s);
+        while (m.find()) {
+            int year = Integer.parseInt(m.group(1));
+            if (year >= MIN_YEAR && year <= MAX_YEAR) {
+                return Integer.valueOf(year);
+            }
+        }
+        return null;
+    }
+
+    private static final String removeExtension(String name) {
+        int index = name.lastIndexOf('.');
+        return index == -1
+            ? name
+            : name.substring(0, index);
+    }
+
+    public boolean parse(File file) {
+
+        if (!(file.isFile() && file.canRead())) {
+            log.warn("PRF: cannot open file '" + file + "'");
+            return false;
+        }
+
+        log.info("parsing PRF file: '" + file + "'");
+
+        description = removeExtension(file.getName());
+
+        year = findYear(file.getName());
+
+        if (year == null) {
+            File parent = file.getParentFile();
+            if (parent != null) {
+                description = parent.getName() + "/" + description;
+                year = findYear(parent.getName());
+            }
+        }
+
+        if (year != null) {
+            log.info("year of sounding: " + year);
+        }
+
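+        // Header as parsed here: a data format descriptor, the repetition
+        // count, a km extraction format and the number of lines to skip
+        // before each station block; the remaining lines are profile data.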
+        LineNumberReader in = null;
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = in.readLine();
+
+            if (line == null || (line = line.trim()).length() == 0) {
+                log.warn("PRF: file is empty.");
+                return false;
+            }
+
+            Matcher m = DATA_PATTERN.matcher(line);
+
+            if (!m.matches()) {
+                log.warn("PRF: First line does not look like a PRF data pattern.");
+                return false;
+            }
+
+            DataFormat dataFormat = new DataFormat(m);
+
+            if ((line = in.readLine()) == null
+            || (line = line.trim()).length() == 0) {
+                log.warn("PRF: premature EOF. Expected integer in line 2");
+                return false;
+            }
+
+            try {
+                if (Integer.parseInt(line) != dataFormat.maxRepetitions) {
+                    log.warn("PRF: Expected " +
+                        dataFormat.maxRepetitions + " in line 2");
+                    return false;
+                }
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("PRF: invalid integer in line 2", nfe);
+                return false;
+            }
+
+            if ((line = in.readLine()) == null) {
+                log.warn(
+                    "PRF: premature EOF. Expected pattern for km extraction");
+                return false;
+            }
+
+            m = KM_PATTERN.matcher(line);
+
+            if (!m.matches()) {
+                log.warn(
+                    "PRF: line 4 does not look like a PRF km extraction pattern.");
+                return false;
+            }
+
+            KMFormat kmFormat = new KMFormat(m);
+
+            if ((line = in.readLine()) == null
+            || (line = line.trim()).length() == 0) {
+                log.warn("PRF: premature EOF. Expected skip row count.");
+                return false;
+            }
+
+            int lineSkipCount;
+            try {
+                if ((lineSkipCount = Integer.parseInt(line)) < 0) {
+                    throw new IllegalArgumentException(lineSkipCount + " < 0");
+                }
+            }
+            catch (IllegalArgumentException iae) {
+                // also catches the NumberFormatException thrown by parseInt
+                log.warn(
+                    "PRF: line 5 is not a non-negative integer.");
+                return false;
+            }
+
+            int skip = lineSkipCount;
+
+            while ((line = in.readLine()) != null) {
+                if (skip > 0) {
+                    --skip;
+                    continue;
+                }
+                double km;
+                try {
+                    km = kmFormat.extractKm(line);
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("PRF: cannot extract km in line " + in.getLineNumber());
+                    return false;
+                }
+
+                Double station = Double.valueOf(km);
+
+                List<XY> kmData = data.get(station);
+
+                if (kmData == null) {
+                    //log.debug("found new km: " + station);
+                    kmData = new ArrayList<XY>();
+                    data.put(station, kmData);
+                }
+
+                int c = dataFormat.extractData(line, kmData);
+                if (c < 1) {
+                    skip = lineSkipCount + c;
+                }
+            }
+
+            // sort all the lists by x and index
+            sortLists();
+        }
+        catch (IOException ioe) {
+            log.error("Error reading PRF file.", ioe);
+            return false;
+        }
+        finally {
+            if (in != null) {
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {
+                    log.error("Error closing PRF file.", ioe);
+                }
+            }
+        }
+
+        return true;
+    }
+
+    public void reset() {
+        data.clear();
+        year        = null;
+        description = null;
+    }
+
+    public void parsePRFs(File root, final Callback callback) {
+
+        FileTools.walkTree(root, new FileTools.FileVisitor() {
+            @Override
+            public boolean visit(File file) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".prf")
+                && (callback == null || callback.prfAccept(file))) {
+                    reset();
+                    boolean success = parse(file);
+                    log.info("parsing " + (success ? "succeeded" : "failed"));
+                    if (success && callback != null) {
+                        callback.prfParsed(PRFParser.this);
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    public static void main(String [] args) {
+
+        PRFParser parser = new PRFParser();
+
+        for (String arg: args) {
+            parser.parsePRFs(new File(arg), null);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/PegelGltParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,102 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportGauge;
+import de.intevation.flys.importer.ImportRange;
+
+public class PegelGltParser
+{
+    private static Logger log = Logger.getLogger(PegelGltParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final String KM = "km:";
+
+    protected List<ImportGauge> gauges;
+
+    public PegelGltParser() {
+        gauges = new ArrayList<ImportGauge>();
+    }
+
+    public List<ImportGauge> getGauges() {
+        return gauges;
+    }
+
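+    // Each GLT line is expected as
+    // "<gauge name> km: <from> <to> <sta file> <at file>";
+    // the STA and AT paths are resolved relative to the GLT file's directory.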
+    public void parse(File file) throws IOException {
+
+        File parent = file.getParentFile();
+
+        log.info("parsing GLT file '" + file + "'");
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                int kmPos = line.indexOf(KM);
+                if (kmPos < 0) {
+                    log.warn("GLT: no gauge found in line " + in.getLineNumber());
+                    continue;
+                }
+
+                String gaugeName = line.substring(0, kmPos).trim();
+                log.info("Found gauge '" + gaugeName + "'");
+
+                line = line.substring(kmPos + KM.length()).trim();
+
+                String [] parts = line.split("\\s+");
+                if (parts.length < 4) {
+                    log.warn("GLT: line " + in.getLineNumber()
+                        + " does not have enough columns.");
+                    continue;
+                }
+
+                BigDecimal from = new BigDecimal(parts[0].replace(",", "."));
+                BigDecimal to   = new BigDecimal(parts[1].replace(",", "."));
+                if (from.compareTo(to) > 0) {
+                    BigDecimal t = from; from = to; to = t;
+                }
+                ImportRange range = new ImportRange(from, to);
+                File staFile = FileTools.repair(new File(parent, parts[2]));
+                File atFile  = FileTools.repair(new File(parent, parts[3]));
+
+                if (log.isDebugEnabled()) {
+                    log.debug("\tfrom: " + from);
+                    log.debug("\tto: " + to);
+                    log.debug("\tsta: " + staFile);
+                    log.debug("\tat: " + atFile);
+                }
+
+                gauges.add(new ImportGauge(range, staFile, atFile));
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,128 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportSQRelation;
+import de.intevation.flys.importer.ImportSQRelationValue;
+import de.intevation.flys.importer.ImportTimeInterval;
+
+
+public class SQRelationParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(SQRelationParser.class);
+
+    private static final Pattern TIMERANGE_REGEX =
+        Pattern.compile(".*Zeitraum.*\\s(\\w*)-(\\w*).*");
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    private List<ImportSQRelation> relations;
+
+    private ImportSQRelation current;
+
+    private String currentDescription;
+
+
+    public SQRelationParser() {
+        relations = new ArrayList<ImportSQRelation>();
+    }
+
+
+    public List<ImportSQRelation> getSQRelations() {
+        return relations;
+    }
+
+    @Override
+    public void parse(File file) throws IOException {
+        this.currentDescription = file.getName();
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        current = new ImportSQRelation();
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            current.setDescription(currentDescription);
+            relations.add(current);
+        }
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        Matcher m = TIMERANGE_REGEX.matcher(line);
+
+        if (m.matches()) {
+            String lo = m.group(1);
+            String hi = m.group(2);
+
+            log.debug("Found timerange " + lo + " - " + hi);
+
+            try {
+                int low  = nf.parse(lo).intValue();
+                int high = nf.parse(hi).intValue();
+
+                current.setTimeInterval(new ImportTimeInterval(
+                    getDateFromYear(low),
+                    getDateFromYear(high)
+                ));
+            }
+            catch (ParseException nfe) {
+                log.warn("Cannot parse time range.", nfe);
+            }
+        }
+    }
+
+
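+    // Data lines need at least 8 semicolon separated columns; columns 1, 2
+    // and 4 (zero based) are taken as strings, columns 3, 6 and 7 as numbers.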
+    protected void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols.length < 8) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            current.addValue(new ImportSQRelationValue(
+                cols[1],
+                cols[2],
+                cols[4],
+                nf.parse(cols[3]).doubleValue(),
+                nf.parse(cols[6]).doubleValue(),
+                nf.parse(cols[7]).doubleValue()
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing sq relation row: '" + line + "'", pe);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,169 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDepth;
+import de.intevation.flys.importer.ImportSedimentDensity;
+import de.intevation.flys.importer.ImportSedimentDensityValue;
+import de.intevation.flys.importer.ImportUnit;
+
+
+public class SedimentDensityParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(SedimentDensityParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
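+    // Matches depth ranges like "Tiefe: 0-30 cm": lower bound, upper bound
+    // and an optional unit.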
+    public static final Pattern META_DEPTH =
+        Pattern.compile("^Tiefe: (\\w++)-(\\w++)( (\\w++))?.*");
+
+
+    protected List<ImportSedimentDensity> sedimentDensities;
+
+    protected ImportSedimentDensity current;
+
+    protected String currentDescription;
+
+
+    public SedimentDensityParser() {
+        sedimentDensities = new ArrayList<ImportSedimentDensity>();
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        currentDescription = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        current = new ImportSedimentDensity(currentDescription);
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            sedimentDensities.add(current);
+        }
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else if (handleMetaDepth(line)) {
+            return;
+        }
+        else {
+            log.warn("Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaDepth(String line) {
+        Matcher m = META_DEPTH.matcher(line);
+
+        if (m.matches()) {
+            String lo   = m.group(1);
+            String up   = m.group(2);
+            String unit = m.group(4);
+
+            try {
+                ImportDepth depth = new ImportDepth(
+                    new BigDecimal(nf.parse(lo).doubleValue()),
+                    new BigDecimal(nf.parse(up).doubleValue()),
+                    new ImportUnit(unit)
+                );
+
+                current.setDepth(depth);
+
+                return true;
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing numbers in: '" + line + "'");
+            }
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < 3) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            BigDecimal km      = new BigDecimal(nf.parse(vals[0]).doubleValue());
+            BigDecimal density = new BigDecimal(nf.parse(vals[1]).doubleValue());
+
+            current.addValue(new ImportSedimentDensityValue(
+                km,
+                density,
+                vals[2])
+            );
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing numbers in '" + line + "'");
+        }
+    }
+
+
+    public List<ImportSedimentDensity> getSedimentDensities() {
+        return sedimentDensities;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,390 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportGrainFraction;
+import de.intevation.flys.importer.ImportSedimentYield;
+import de.intevation.flys.importer.ImportSedimentYieldValue;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.model.GrainFraction;
+
+
+public class SedimentYieldParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(SedimentYieldParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    public static final String FRAKTION_START = "Fraktion:";
+
+    public static final String FRACTION_COARSE_STR =
+        "_Grobkorn.csv";
+
+    public static final String FRACTION_FINE_MIDDLE_STR =
+        "_Fein-Mittel-Kies.csv";
+
+    public static final String FRACTION_SAND =
+        "_Sand.csv";
+
+    public static final String FRACTION_SUSP_SAND =
+        "_susp_Sand.csv";
+
+    public static final String FRACTION_SUSP_SAND_BED =
+        "_susp_Sand_bettbildAnteil.csv";
+
+    public static final String FRACTION_SUSPENDED_SEDIMENT =
+        "_Schwebstoff.csv";
+
+
+    public static final Pattern TIMEINTERVAL_SINGLE =
+        Pattern.compile("\\D*([0-9]+?)\\D*");
+
+    public static final Pattern TIMEINTERVAL_EPOCH =
+        Pattern.compile("\\D*([0-9]+?)\\s*-\\s*([0-9]+?)\\D*");
+
+    public static final Pattern META_FRACTION =
+        Pattern.compile("^Fraktion: (.*)");
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+    public static final Pattern META_COLUMN_NAMES =
+        Pattern.compile("^Fluss-km.*");
+
+    public static final Pattern META_GRAIN_FRACTION_A =
+        Pattern.compile("\\D*(([0-9]+?,[0-9]+?)\\s*-|([0-9]++)\\s*-)(([0-9]+?,[0-9]+?)|([0-9]++))\\s*([a-zA-Z]+?)\\W*\\D*");
+
+    public static final Pattern META_GRAIN_FRACTION_B =
+        Pattern.compile("(<|>){1}\\s*(\\w++)\\s*(([0-9]+?,[0-9]+?)\\s*-|([0-9]++)\\s*-)(([0-9]+?,[0-9]+?)|([0-9]++))\\s*([a-zA-Z]+?)");
+
+    public static final Pattern META_GRAIN_FRACTION_C =
+        Pattern.compile("(<|>){1}\\s*((([0-9]+?,[0-9]+?)|([0-9]++))\\s*(\\w+))");
+
+
+    protected List<ImportSedimentYield> sedimentYields;
+
+    protected ImportSedimentYield[] current;
+
+    protected ImportGrainFraction grainFraction;
+
+    protected ImportUnit unit;
+
+    protected String description;
+
+    protected String[] columnNames;
+
+
+    public SedimentYieldParser() {
+        sedimentYields = new ArrayList<ImportSedimentYield>();
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        description = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        current       = null;
+        grainFraction = null;
+        unit          = null;
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            for (ImportSedimentYield isy: current) {
+                sedimentYields.add(isy);
+            }
+        }
+
+        description = null;
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else if (handleMetaFraction(line)) {
+            return;
+        }
+        else if (handleColumnNames(line)) {
+            return;
+        }
+        else {
+            log.warn("SYP: Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            unit = new ImportUnit(m.group(1));
+            return true;
+        }
+
+        return false;
+    }
+
+
+    public boolean handleMetaFraction(String line) {
+        Matcher m = META_FRACTION.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            this.grainFraction = buildGrainFraction(tmp);
+
+            return true;
+        }
+        else if (line.startsWith(FRAKTION_START)) {
+            String newLine = line.replace(FRAKTION_START, "").trim();
+            if (newLine.length() == 0) {
+                log.debug("Found total grain fraction.");
+                this.grainFraction = new ImportGrainFraction(GrainFraction.TOTAL);
+
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+
+    public boolean handleColumnNames(String line) {
+        Matcher m = META_COLUMN_NAMES.matcher(line);
+
+        if (m.matches()) {
+            columnNames = line.split(SEPERATOR_CHAR);
+
+            initializeSedimentYields();
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < columnNames.length-1) {
+            log.warn("SYP: skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            Double km = nf.parse(vals[0]).doubleValue();
+
+            for (int i = 1, n = columnNames.length-1; i < n; i++) {
+                String curVal = vals[i];
+
+                if (curVal != null && curVal.length() > 0) {
+                    current[i-1].addValue(new ImportSedimentYieldValue(
+                        km, nf.parse(vals[i]).doubleValue()
+                    ));
+                }
+            }
+        }
+        catch (ParseException pe) {
+            log.warn("SYP: Error while parsing numbers in '" + line + "':", pe);
+        }
+    }
+
+
+    private void initializeSedimentYields() {
+        // skip first column (Fluss-km) and last column (Hinweise)
+        current = new ImportSedimentYield[columnNames.length-2];
+
+        for (int i = 0, n = columnNames.length; i < n-2; i++) {
+            current[i] = new ImportSedimentYield(this.description);
+            current[i].setTimeInterval(getTimeInterval(columnNames[i+1]));
+            current[i].setUnit(unit);
+            current[i].setGrainFraction(grainFraction);
+        }
+    }
+
+
+    private ImportTimeInterval getTimeInterval(String column) {
+        try {
+            Matcher a = TIMEINTERVAL_EPOCH.matcher(column);
+            if (a.matches()) {
+                int yearA = nf.parse(a.group(1)).intValue();
+                int yearB = nf.parse(a.group(2)).intValue();
+
+                return new ImportTimeInterval(
+                    getDateFromYear(yearA),
+                    getDateFromYear(yearB)
+                );
+            }
+
+            Matcher b = TIMEINTERVAL_SINGLE.matcher(column);
+            if (b.matches()) {
+                int year = nf.parse(b.group(1)).intValue();
+
+                return new ImportTimeInterval(getDateFromYear(year));
+            }
+
+            log.warn("SYP: Unknown time interval string: '" + column + "'");
+        }
+        catch (ParseException pe) {
+            log.warn("SYP: Error while parsing years: " + column, pe);
+        }
+
+        return null;
+    }
+
+
+    private ImportGrainFraction buildGrainFraction(String gfStr) {
+        Matcher a = META_GRAIN_FRACTION_A.matcher(gfStr);
+        if (a.matches()) {
+            String lowerA = a.group(2);
+            String lowerB = a.group(3);
+
+            String upperA = a.group(4);
+            String upperB = a.group(5);
+
+            String unitStr = a.group(7);
+            String lower = lowerA != null ? lowerA : lowerB;
+            String upper = upperA != null ? upperA : upperB;
+
+            try {
+                return new ImportGrainFraction(
+                    getGrainFractionTypeName(this.description),
+                    nf.parse(lower).doubleValue(),
+                    nf.parse(upper).doubleValue(),
+                    new ImportUnit(unitStr)
+                );
+            }
+            catch (ParseException pe) {
+                log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        Matcher b = META_GRAIN_FRACTION_B.matcher(gfStr);
+        if (b.matches()) {
+            String lowerA  = b.group(4);
+            String lowerB  = b.group(5);
+            String upperA  = b.group(6);
+            String upperB  = b.group(7);
+            String unitStr = b.group(9);
+
+            String lower = lowerA != null ? lowerA : lowerB;
+            String upper = upperA != null ? upperA : upperB;
+
+            try {
+                return new ImportGrainFraction(
+                    getGrainFractionTypeName(this.description),
+                    nf.parse(lower).doubleValue(),
+                    nf.parse(upper).doubleValue(),
+                    new ImportUnit(unitStr)
+                );
+            }
+            catch (ParseException pe) {
+                log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        Matcher c = META_GRAIN_FRACTION_C.matcher(gfStr);
+        if (c.matches()) {
+            String oper     = c.group(1);
+            String valueStr = c.group(3);
+            String unitStr  = c.group(6);
+
+            try {
+                Double value = nf.parse(valueStr).doubleValue();
+
+                if (oper.equals(">")) {
+                    return new ImportGrainFraction(
+                        getGrainFractionTypeName(this.description),
+                        value,
+                        null,
+                        new ImportUnit(unitStr)
+                    );
+                }
+                else {
+                    return new ImportGrainFraction(
+                        getGrainFractionTypeName(this.description),
+                        null,
+                        value,
+                        new ImportUnit(unitStr)
+                    );
+                }
+            }
+            catch (ParseException pe) {
+                log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        log.warn("SYP: Unknown grain fraction: '" + gfStr + "'");
+
+        return null;
+    }
+
+
+    public static String getGrainFractionTypeName(String filename) {
+        if (filename.endsWith(FRACTION_COARSE_STR)) {
+            return GrainFraction.COARSE;
+        }
+        else if (filename.endsWith(FRACTION_FINE_MIDDLE_STR)) {
+            return GrainFraction.FINE_MIDDLE;
+        }
+        else if (filename.endsWith(FRACTION_SAND)) {
+            return GrainFraction.SAND;
+        }
+        else if (filename.endsWith(FRACTION_SUSP_SAND)) {
+            return GrainFraction.SUSP_SAND;
+        }
+        else if (filename.endsWith(FRACTION_SUSP_SAND_BED)) {
+            return GrainFraction.SUSP_SAND_BED;
+        }
+        else if (filename.endsWith(FRACTION_SUSPENDED_SEDIMENT)) {
+            return GrainFraction.SUSPENDED_SEDIMENT;
+        }
+        else {
+            log.warn("SYP: Unknown grain fraction '" + filename + "'");
+            return "unknown";
+        }
+    }
+
+
+    public List<ImportSedimentYield> getSedimentYields() {
+        return sedimentYields;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,190 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import java.math.BigDecimal;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.util.HashMap;
+import java.util.ArrayList;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportMainValueType;
+import de.intevation.flys.importer.ImportMainValue;
+import de.intevation.flys.importer.ImportNamedMainValue;
+import de.intevation.flys.importer.ImportGauge;
+
+public class StaFileParser
+{
+    private static Logger log = Logger.getLogger(StaFileParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final String TYPES =
+        System.getProperty("flys.backend.main.value.types", "QWTD");
+
+    public static final boolean PARSE_GAUGE_NUMBERS =
+        Boolean.getBoolean("flys.backend.sta.parse.gauge.numbers");
+
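+    // Matches main value rows: a name, a value and a one letter type taken
+    // from the configured TYPES (default "QWTD").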
+    public static final Pattern QWTD_ =
+        Pattern.compile("\\s*([^\\s]+)\\s+([^\\s]+)\\s+([" +
+            Pattern.quote(TYPES) + "]).*");
+
+    public StaFileParser() {
+    }
+
+    public boolean parse(ImportGauge gauge) throws IOException {
+
+        File file = gauge.getStaFile();
+
+        log.info("parsing STA file: " + file);
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = in.readLine();
+
+            if (line == null) {
+                log.warn("STA file is empty.");
+                return false;
+            }
+
+            if (line.length() < 37) {
+                log.warn("First line in STA file is too short.");
+                return false;
+            }
+
+            String gaugeName = line.substring(16, 37).trim();
+
+            Long gaugeNumber = null;
+
+            if (PARSE_GAUGE_NUMBERS) {
+                String gaugeNumberString = line.substring(0, 16).trim();
+
+                try {
+                    gaugeNumber = Long.parseLong(gaugeNumberString);
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("STA: '" + gaugeNumberString +
+                        "' is not a valid long number.");
+                }
+            }
+
+            gauge.setName(gaugeName);
+            gauge.setOfficialNumber(gaugeNumber);
+
+            if (log.isDebugEnabled()) {
+                log.debug(
+                    "name/number: '" + gaugeName + "' '" + gaugeNumber + "'");
+            }
+
+            String [] values = line.substring(38).trim().split("\\s+", 2);
+
+            if (values.length < 2) {
+                log.warn("STA: Not enough columns for aeo and datum.");
+                return false;
+            }
+            try {
+                gauge.setAeo(new BigDecimal(values[0].replace(",", ".")));
+                gauge.setDatum(new BigDecimal(values[1].replace(",", ".")));
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("STA: cannot parse aeo or datum.");
+                return false;
+            }
+
+            line = in.readLine();
+
+            if (line == null) {
+                log.warn("STA file does not have enough lines");
+                return false;
+            }
+
+            if (line.length() < 36) {
+                log.warn("STA: second line is too short");
+                return false;
+            }
+
+            try {
+                gauge.setStation(
+                    new BigDecimal(line.substring(29, 36).trim()));
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("STA: parsing of the station of the gauge failed");
+                return false;
+            }
+
+            // skip the next six lines
+            for (int i = 0; i < 6; ++i) {
+                if ((line = in.readLine()) == null) {
+                    log.warn("STA file is too short");
+                    return false;
+                }
+            }
+
+            HashMap<String, ImportMainValueType> types =
+                new HashMap<String, ImportMainValueType>();
+
+            ArrayList<ImportNamedMainValue> namedMainValues =
+                new ArrayList<ImportNamedMainValue>();
+
+            ArrayList<ImportMainValue> mainValues =
+                new ArrayList<ImportMainValue>();
+
+            while ((line = in.readLine()) != null) {
+                Matcher m = QWTD_.matcher(line);
+                if (m.matches()) {
+                    BigDecimal value;
+                    try {
+                        value = new BigDecimal(m.group(2).replace(",", "."));
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn("STA: value not parseable in line "
+                            + in.getLineNumber());
+                        continue;
+                    }
+                    String typeString = m.group(3);
+                    log.debug("\t type: " + typeString);
+                    ImportMainValueType type = types.get(typeString);
+                    if (type == null) {
+                        type = new ImportMainValueType(typeString);
+                        types.put(typeString, type);
+                    }
+                    String name = m.group(1);
+                    ImportNamedMainValue namedMainValue =
+                        new ImportNamedMainValue(type, name);
+                    namedMainValues.add(namedMainValue);
+
+                    ImportMainValue mainValue =
+                        new ImportMainValue(gauge, namedMainValue, value);
+
+                    mainValues.add(mainValue);
+                }
+                else {
+                    // TODO: treat as a comment
+                }
+            }
+            gauge.setMainValueTypes(
+                new ArrayList<ImportMainValueType>(types.values()));
+            gauge.setNamedMainValues(namedMainValues);
+            gauge.setMainValues(mainValues);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+        log.info("finished parsing STA file: " + file);
+        return true;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,174 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWaterlevelDifference;
+import de.intevation.flys.importer.ImportWaterlevelDifferenceColumn;
+import de.intevation.flys.importer.ImportWaterlevelDifferenceValue;
+
+
+public class WaterlevelDifferencesParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(WaterlevelDifferencesParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    private List<ImportWaterlevelDifference> differences;
+
+    private ImportWaterlevelDifferenceColumn[] columns;
+
+    private ImportWaterlevelDifference current;
+
+
+    public WaterlevelDifferencesParser() {
+        differences = new ArrayList<ImportWaterlevelDifference>();
+    }
+
+
+    public List<ImportWaterlevelDifference> getDifferences() {
+        return differences;
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        current = new ImportWaterlevelDifference(file.getName());
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+    }
+
+
+    @Override
+    protected void finish() {
+        if (columns != null && current != null) {
+            for (ImportWaterlevelDifferenceColumn col: columns) {
+                current.addValue(col);
+            }
+
+            differences.add(current);
+        }
+
+        current = null;
+        columns = null;
+    }
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    private void handleMetaLine(String meta) {
+        if (handleMetaUnit(meta)) {
+            return;
+        }
+        else {
+            handleMetaColumnNames(meta);
+        }
+    }
+
+
+    private boolean handleMetaUnit(String meta) {
+        Matcher m = META_UNIT.matcher(meta);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+            log.debug("Found unit: '" + unit + "'");
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    private boolean handleMetaColumnNames(String meta) {
+        Pattern META_COLUMN_NAMES = Pattern.compile("Fluss-km;(.*)");
+        Matcher m = META_COLUMN_NAMES.matcher(meta);
+
+        if (m.matches()) {
+            String colStr = m.group(1);
+            String[] cols = colStr.split(SEPERATOR_CHAR);
+
+            log.debug("Found " + cols.length + " columns.");
+
+            initColumns(cols);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    private void initColumns(String[] cols) {
+        columns = new ImportWaterlevelDifferenceColumn[cols.length];
+
+        for (int i = 0; i < cols.length; i++) {
+            String name = cols[i].replace("\"", "");
+
+            log.debug("Create new column '" + name + "'");
+            columns[i] = new ImportWaterlevelDifferenceColumn(name);
+        }
+    }
+
+
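+    // Data lines: the station in the first column followed by one difference
+    // value per column declared in the "Fluss-km;..." header line.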
+    private void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols == null || cols.length < 2) {
+            log.warn("skip invalid waterlevel line: '" + line + "'");
+            return;
+        }
+
+        try {
+            Double station = nf.parse(cols[0]).doubleValue();
+
+            for (int i = 0; i < columns.length && i + 1 < cols.length; i++) {
+                String value = cols[i+1];
+
+                try {
+                    columns[i].addValue(new ImportWaterlevelDifferenceValue(
+                        station,
+                        nf.parse(value).doubleValue()
+                    ));
+                }
+                catch (ParseException pe) {
+                    log.warn("Error while parsing value: '" + value + "'");
+                }
+            }
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing station: '" + line + "'");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,160 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWaterlevel;
+import de.intevation.flys.importer.ImportWaterlevelQRange;
+import de.intevation.flys.importer.ImportWaterlevelValue;
+
+
+public class WaterlevelParser extends LineParser {
+
+    private static final Logger log = Logger.getLogger(WaterlevelParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
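+    // Matches Q range separator lines like "Abfluss [m³/s];500"; the data
+    // lines that follow belong to this discharge.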
+    private static final Pattern META_Q_RANGE =
+        Pattern.compile("Abfluss\\s\\[(.*)\\];(.*)");
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    private List<ImportWaterlevel> waterlevels;
+
+    private ImportWaterlevel current;
+
+    private ImportWaterlevelQRange currentQ;
+
+    private String currentDescription;
+
+
+    public WaterlevelParser() {
+        waterlevels = new ArrayList<ImportWaterlevel>();
+    }
+
+
+    public List<ImportWaterlevel> getWaterlevels() {
+        return waterlevels;
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        currentDescription = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        currentQ = null;
+        current  = new ImportWaterlevel(currentDescription);
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            if (currentQ != null) {
+                current.addValue(currentQ);
+            }
+
+            waterlevels.add(current);
+        }
+    }
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+            return;
+        }
+        else if (handleQRange(line)) {
+            return;
+        }
+        else {
+            handleDataLine(line);
+            return;
+        }
+    }
+
+
+    private void handleMetaLine(String meta) {
+        Matcher m = META_UNIT.matcher(meta);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+            log.debug("Found unit: '" + unit + "'");
+
+            current.setUnit(new ImportUnit(unit));
+        }
+    }
+
+
+    private boolean handleQRange(String line) {
+        Matcher m = META_Q_RANGE.matcher(line);
+
+        if (m.matches()) {
+            String unitStr  = m.group(1);
+            String valueStr = m.group(2);
+
+            if (currentQ != null) {
+                if (current != null) {
+                    current.addValue(currentQ);
+                }
+                else {
+                    // this should never happen
+                    log.warn("Tried to add a Q range without a waterlevel!");
+                }
+            }
+
+            try {
+                log.debug("Found new Q range: Q=" + valueStr);
+
+                currentQ = new ImportWaterlevelQRange(
+                    nf.parse(valueStr).doubleValue());
+
+                return true;
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing Q range: '" + line + "'");
+            }
+        }
+
+        return false;
+    }
+
+
+    private void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols == null || cols.length < 2) {
+            log.warn("skip invalid waterlevel line: '" + line + "'");
+            return;
+        }
+
+        if (currentQ == null) {
+            log.warn(
+                "waterlevel data line without preceding Q range: '"
+                + line + "'");
+            return;
+        }
+
+        try {
+            Double station = nf.parse(cols[0]).doubleValue();
+            Double value   = nf.parse(cols[1]).doubleValue();
+
+            currentQ.addValue(new ImportWaterlevelValue(station, value));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing number values: '" + line + "'");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WstParser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,448 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.InputStreamReader;
+import java.io.FileInputStream;
+
+import java.text.NumberFormat;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.utils.StringUtil;
+import de.intevation.flys.utils.DateGuesser;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.importer.ImportWstQRange;
+import de.intevation.flys.importer.ImportWstColumn;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWst;
+
+public class WstParser
+{
+    private static Logger log = Logger.getLogger(WstParser.class);
+
+    public static final String COLUMN_BEZ_TEXT   = "column-bez-text";
+    public static final String COLUMN_BEZ_BREITE = "column-bez-breite";
+    public static final String COLUMN_QUELLE     = "column-quelle";
+    public static final String COLUMN_DATUM      = "column-datum";
+
+    public static final BigDecimal UNDEFINED_ZERO =
+        new BigDecimal(0.0);
+    public static final BigDecimal MIN_RANGE =
+        new BigDecimal(-Double.MAX_VALUE);
+    public static final BigDecimal MAX_RANGE =
+        new BigDecimal(Double.MAX_VALUE);
+
+    public static final String ENCODING = "ISO-8859-1";
+
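+    // Matches comment lines of the form "* KM <text>"; the text typically
+    // carries the water level unit in square brackets (see UNIT below).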
+    public static final Pattern UNIT_COMMENT =
+        Pattern.compile("\\*\\s*[kK][mM]\\s+(.+)");
+
+    public static final Pattern UNIT =
+        Pattern.compile("[^\\[]*\\[([^]]+)\\].*");
+
+    public static final BigDecimal INTERVAL_GAP =
+        new BigDecimal(0.00001);
+
+    protected ImportWst wst;
+
+    protected ImportRange lastRange;
+
+    public WstParser() {
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
+    public ImportTimeInterval guessDate(String string) {
+        try {
+            return new ImportTimeInterval(
+                DateGuesser.guessDate(string));
+        }
+        catch (IllegalArgumentException iae) {
+        }
+        return null;
+    }
+
+    public void parse(File file) throws IOException {
+
+        log.info("Parsing WST file '" + file + "'");
+
+        wst = new ImportWst(file.getName());
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String input;
+            boolean first = true;
+            int columnCount = 0;
+
+            String [] lsBezeichner   = null;
+            String [] langBezeichner = null;
+            int    [] colNaWidths    = null;
+            String [] quellen        = null;
+            String [] daten          = null;
+
+            BigDecimal [] aktAbfluesse   = null;
+            BigDecimal [] firstAbfluesse = null;
+
+            BigDecimal minKm = MAX_RANGE;
+            BigDecimal maxKm = MIN_RANGE;
+
+            boolean columnHeaderChecked = false;
+
+            String einheit = "Wasserstand [NN + m]";
+
+            HashSet<BigDecimal> kms = new HashSet<BigDecimal>();
+
+            while ((input = in.readLine()) != null) {
+                String line = input;
+                if (first) { // fetch number of columns
+                    if ((line = line.trim()).length() == 0) {
+                        continue;
+                    }
+                    try {
+                        columnCount = Integer.parseInt(line);
+                        if (columnCount <= 0) {
+                            throw new NumberFormatException(
+                                "number columns <= 0");
+                        }
+                        log.debug("Number of columns: " + columnCount);
+                        wst.setNumberColumns(columnCount);
+                        lsBezeichner = new String[columnCount];
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn("WST: invalid number.", nfe);
+                        continue;
+                    }
+                    first = false;
+                    continue;
+                }
+
+                line = line.replace(',', '.');
+
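+                // '*' followed by the unit separator (\u001f) starts a new
+                // block and carries one discharge (Q) value per column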
+                if (line.startsWith("*\u001f")) {
+                    BigDecimal [] data =
+                        parseLineAsDouble(line, columnCount, false, true);
+
+                    if (aktAbfluesse != null) {
+                        addInterval(minKm, maxKm, aktAbfluesse);
+                        minKm = MAX_RANGE;
+                        maxKm = MIN_RANGE;
+                    }
+
+                    aktAbfluesse = new BigDecimal[columnCount];
+                    log.debug("new q range: " + columnCount);
+                    for (int i = 0; i < Math.min(columnCount, data.length); ++i) {
+                        if (data[i] != null) {
+                            log.debug("  column: " + data[i]);
+                            aktAbfluesse[i] = data[i];
+                        }
+                    }
+
+                    if (firstAbfluesse == null) {
+                        firstAbfluesse = (BigDecimal [])aktAbfluesse.clone();
+                    }
+                    continue;
+                }
+
+                if (line.startsWith("*!")) {
+                    String spezial = line.substring(2).trim();
+
+                    if (spezial.length() == 0) {
+                        continue;
+                    }
+
+                    if (spezial.startsWith(COLUMN_BEZ_TEXT)) {
+                        spezial = spezial.substring(COLUMN_BEZ_TEXT.length()).trim();
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        langBezeichner = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    else if (spezial.startsWith(COLUMN_BEZ_BREITE)) {
+                        spezial = spezial.substring(COLUMN_BEZ_BREITE.length()).trim();
+
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+
+                        String[] split = spezial.split("\\s+");
+
+                        colNaWidths = new int[split.length];
+                        for (int i=0; i < split.length; i++) {
+                            colNaWidths[i] = Integer.parseInt(split[i]);
+                        }
+                    }
+                    else if (spezial.startsWith(COLUMN_QUELLE)) {
+                        spezial = spezial.substring(COLUMN_QUELLE.length()).trim();
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        quellen = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    else if (spezial.startsWith(COLUMN_DATUM)) {
+                        spezial = spezial.substring(COLUMN_DATUM.length()).trim();
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        daten = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    continue;
+                }
+
+                if (line.length() < 11) {
+                    continue;
+                }
+
+                if (line.startsWith("*")) {
+                    Matcher m = UNIT_COMMENT.matcher(line);
+                    if (m.matches()) {
+                        log.debug("unit comment found");
+                        // XXX: This hack is needed because desktop
+                        // FLYS is broken figuring out the unit
+                        String [] units = m.group(1).split("\\s{2,}");
+                        m = UNIT.matcher(units[0]);
+                        einheit = m.matches() ? m.group(1) : units[0];
+                        log.debug("unit: " + einheit);
+                    }
+                    continue;
+                }
+
+                if (firstAbfluesse != null) {
+                    if (!columnHeaderChecked) {
+                        int unknownCount = 0;
+                        HashSet<String> uniqueColumnNames =
+                            new HashSet<String>();
+                        for (int i = 0; i < lsBezeichner.length; ++i) {
+                            if (lsBezeichner[i] == null
+                            || lsBezeichner[i].length() == 0) {
+                                double q = firstAbfluesse[i].doubleValue();
+                                if (q < 0.001) {
+                                    lsBezeichner[i] =
+                                        "<unbekannt #" + unknownCount + ">";
+                                    ++unknownCount;
+                                }
+                                else {
+                                    lsBezeichner[i] = "Q="+format(q);
+                                }
+                            }
+                            String candidate = lsBezeichner[i];
+                            int collision = 1;
+                            while (!uniqueColumnNames.add(candidate)) {
+                                candidate = lsBezeichner[i] +
+                                    " (" + collision + ")";
+                                ++collision;
+                            }
+                            ImportWstColumn iwc = wst.getColumn(i);
+                            iwc.setName(candidate);
+                            iwc.setTimeInterval(guessDate(candidate));
+                        }
+                        columnHeaderChecked = true;
+                    }
+
+                    BigDecimal [] data =
+                        parseLineAsDouble(line, columnCount, true, false);
+
+                    BigDecimal kaem = data[0];
+
+                    if (!kms.add(kaem)) {
+                        log.warn(
+                            "WST: km " + kaem +
+                            " (line " + in.getLineNumber() +
+                            ") found more than once. -> ignored");
+                        continue;
+                    }
+
+                    if (kaem.compareTo(minKm) < 0) {
+                        minKm = kaem;
+                    }
+                    if (kaem.compareTo(maxKm) > 0) {
+                        maxKm = kaem;
+                    }
+
+                    // extract values
+                    for (int i = 0; i < columnCount; ++i) {
+                        addValue(kaem, data[i+1], i);
+                    }
+
+                }
+                else { // firstAbfluesse == null
+                    if (langBezeichner != null) {
+                        lsBezeichner = StringUtil.fitArray(
+                            langBezeichner, lsBezeichner);
+                    }
+                    else if (colNaWidths != null) {
+                        for (int j = 0, i = 0, N = input.length();
+                             j < colNaWidths.length && i < N;
+                             i += colNaWidths[j++]
+                        ) {
+                            lsBezeichner[j] = input.substring(
+                                i, i+colNaWidths[j]).trim();
+                        }
+                    }
+                    else {
+                        // first column begins at position 8 in line
+                        for (int i = 8, col = 0; i < input.length(); i += 9) {
+                            if ((i + 9) > input.length()) {
+                                i = input.length() - 10;
+                            }
+                            // one column header is 9 chars wide
+                            lsBezeichner[col++] =
+                                input.substring(i, i + 9).trim();
+
+                            if (col == lsBezeichner.length) {
+                                break;
+                            }
+                        }
+                    }
+                }
+
+            }
+
+            wst.setUnit(new ImportUnit(einheit));
+
+            addInterval(minKm, maxKm, aktAbfluesse);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+    protected void addValue(BigDecimal km, BigDecimal w, int index) {
+        if (w != null) {
+            ImportWstColumn column = wst.getColumn(index);
+            column.addColumnValue(km, w);
+        }
+    }
+
+    private static final NumberFormat NF = getNumberFormat();
+
+    private static final NumberFormat getNumberFormat() {
+        NumberFormat nf = NumberFormat.getInstance();
+        nf.setMinimumFractionDigits(2);
+        nf.setMaximumFractionDigits(2);
+        return nf;
+    }
+
+    protected static String format(double value) {
+        return NF.format(value);
+    }
+
+    protected void addInterval(
+        BigDecimal    from,
+        BigDecimal    to,
+        BigDecimal [] values
+    ) {
+        log.debug("addInterval: " + from + " " + to);
+
+        if (values == null || from == MAX_RANGE) {
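+            // reference comparison against MAX_RANGE is intentional: the
+            // sentinel means no km has been collected for this block yet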
+            return;
+        }
+
+        if (to.compareTo(from) < 0) {
+            BigDecimal t = from; from = to; to = t;
+        }
+
+        ImportRange range = new ImportRange(from, to);
+
+        // little workaround to make the q ranges tightly fit.
+        // Leave a very small gap to ensure that the range queries
+        // still work.
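+        // Illustrative example (not taken from real data): if the previous
+        // block ended at km 100.0 and the next block starts at km 100.0,
+        // the nearer boundary is pulled back by INTERVAL_GAP, i.e.
+        // 100.0 -> 99.99999, so a 'between' query matches exactly one range.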
+
+        if (lastRange != null) {
+            double d1 = Math.abs(
+                lastRange.getB().doubleValue() - range.getA().doubleValue());
+            double d2 = Math.abs(
+                range.getB().doubleValue() - lastRange.getA().doubleValue());
+
+            if (d1 < d2) {
+                lastRange.setB(range.getA().subtract(INTERVAL_GAP));
+            }
+            else {
+                range.setA(lastRange.getB().subtract(INTERVAL_GAP));
+            }
+        }
+
+        for (int i = 0; i < values.length; ++i) {
+            ImportWstColumn column = wst.getColumn(i);
+            ImportWstQRange wstQRange = new ImportWstQRange(range, values[i]);
+            column.addColumnQRange(wstQRange);
+        }
+
+        lastRange = range;
+    }
+
+    private static final BigDecimal [] parseLineAsDouble(
+        String  line,
+        int     count,
+        boolean bStation,
+        boolean bParseEmptyAsZero
+    ) {
+        String [] tokens = parseLine(line, count, bStation);
+
+        BigDecimal [] doubles = new BigDecimal[tokens.length];
+
+        for (int i = 0; i < doubles.length; ++i) {
+            String token = tokens[i].trim();
+            if (token.length() != 0) {
+                doubles[i] = new BigDecimal(token);
+            }
+            else if (bParseEmptyAsZero) {
+                doubles[i] = UNDEFINED_ZERO;
+            }
+        }
+
+        return doubles;
+    }
+
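+    /*
+     * Fixed-width layout handled below: the first 8 characters hold the
+     * station (km); each value occupies 8 characters and column i starts
+     * at character 9 + i * 9.
+     */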
+    private static String [] parseLine(
+        String  line,
+        int     tokenCount,
+        boolean bParseStation
+    ) {
+        ArrayList<String> strings = new ArrayList<String>();
+
+        if (bParseStation) {
+            if (line.length() < 8) {
+                throw new IllegalArgumentException("station too short");
+            }
+            strings.add(line.substring(0, 8));
+        }
+
+        int pos = 9;
+        for (int i = 0; i < tokenCount; ++i) {
+            if (line.length() >= pos + 8) {
+                strings.add(line.substring(pos, pos + 8));
+            }
+            else {
+                strings.add("");
+            }
+            pos += 9;
+        }
+
+        return strings.toArray(new String[strings.size()]);
+    }
+}
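+
+/* Usage sketch (illustrative only, not wired into the importer here):
+ *
+ *     WstParser parser = new WstParser();
+ *     parser.parse(new File("path/to/river.wst"));
+ *     ImportWst wst = parser.getWst();
+ *     // wst now holds the parsed columns, their Q ranges and W values
+ */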
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Annotation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,111 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "annotations")
+public class Annotation
+implements   Serializable
+{
+    private Integer        id;
+    private Range          range;
+    private Attribute      attribute;
+    private Position       position;
+    private Edge           edge;
+    private AnnotationType type;
+
+    public Annotation() {
+    }
+
+    public Annotation(
+        Range          range,
+        Attribute      attribute,
+        Position       position,
+        Edge           edge,
+        AnnotationType type
+    ) {
+        this.range     = range;
+        this.attribute = attribute;
+        this.position  = position;
+        this.edge      = edge;
+        this.type      = type;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ANNOTATIONS_ID_SEQ",
+        sequenceName   = "ANNOTATIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ANNOTATIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "attribute_id")
+    public Attribute getAttribute() {
+        return attribute;
+    }
+
+    public void setAttribute(Attribute attribute) {
+        this.attribute = attribute;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "position_id")
+    public Position getPosition() {
+        return position;
+    }
+
+    public void setPosition(Position position) {
+        this.position = position;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "edge_id")
+    public Edge getEdge() {
+        return edge;
+    }
+
+    public void setEdge(Edge edge) {
+        this.edge = edge;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public AnnotationType getType() {
+        return type;
+    }
+
+    public void setType(AnnotationType type) {
+        this.type = type;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/AnnotationType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "annotation_types")
+public class AnnotationType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+
+    public AnnotationType() {
+    }
+
+    public AnnotationType(String name) {
+        this.name = name;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ANNOTATION_TYPES_ID_SEQ",
+        sequenceName   = "ANNOTATION_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ANNOTATION_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Attribute.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,55 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "attributes")
+public class Attribute
+implements   Serializable
+{
+    private Integer id;
+
+    private String  value;
+
+    public Attribute() {
+    }
+
+    public Attribute(String value) {
+        this.value = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ATTRIBUTES_ID_SEQ",
+        sequenceName   = "ATTRIBUTES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ATTRIBUTES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "value")
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightEpoch.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,211 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_epoch")
+public class BedHeightEpoch implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private TimeInterval timeInterval;
+
+    private ElevationModel curElevationModel;
+    private ElevationModel oldElevationModel;
+
+    private Range range;
+
+    private String evaluationBy;
+    private String description;
+
+    private List<BedHeightEpochValue> values;
+
+
+    public BedHeightEpoch() {
+    }
+
+
+    public BedHeightEpoch(
+        River          river,
+        TimeInterval   timeInterval,
+        Range          range,
+        ElevationModel curElevationModel,
+        ElevationModel oldElevationModel,
+        String         evaluationBy,
+        String         description
+    ) {
+        this.river             = river;
+        this.timeInterval      = timeInterval;
+        this.range             = range;
+        this.curElevationModel = curElevationModel;
+        this.oldElevationModel = oldElevationModel;
+        this.evaluationBy      = evaluationBy;
+        this.description       = description;
+        this.values            = new ArrayList<BedHeightEpochValue>();
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_EPOCH_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_EPOCH_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_EPOCH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cur_elevation_model_id")
+    public ElevationModel getCurElevationModel() {
+        return curElevationModel;
+    }
+
+    public void setCurElevationModel(ElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "old_elevation_model_id")
+    public ElevationModel getOldElevationModel() {
+        return oldElevationModel;
+    }
+
+    public void setOldElevationModel(ElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluationBy() {
+        return evaluationBy;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "bed_height_epoch_id")
+    public List<BedHeightEpochValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<BedHeightEpochValue> values) {
+        this.values = values;
+    }
+
+
+    public static List<BedHeightEpoch> getBedHeightEpochs(
+        River  river,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpoch where river=:river");
+
+        query.setParameter("river", river);
+
+        // TODO Do km range filtering in SQL statement
+
+        List<BedHeightEpoch> epochs = query.list();
+        List<BedHeightEpoch> good   = new ArrayList<BedHeightEpoch>();
+
+        OUTER: for (BedHeightEpoch e: epochs) {
+            for (BedHeightEpochValue value: e.getValues()) {
+                double station = value.getStation().doubleValue();
+
+                if (station >= kmLo && station <= kmHi) {
+                    good.add(e);
+                    continue OUTER;
+                }
+            }
+        }
+
+        return good;
+    }
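+
+    // Possible HQL for the TODO above (untested sketch, based on the
+    // mappings declared in this class):
+    //
+    //   select distinct e from BedHeightEpoch e join e.values v
+    //    where e.river = :river and v.station between :kmLo and :kmHi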
+
+
+    public static BedHeightEpoch getBedHeightEpochById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpoch where id=:id");
+
+        query.setParameter("id", id);
+
+        List<BedHeightEpoch> epochs = query.list();
+
+        // query.list() returns an empty list (never null) if no row matches.
+        return epochs.isEmpty() ? null : epochs.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightEpochValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_epoch_values")
+public class BedHeightEpochValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(BedHeightEpochValue.class);
+
+    private Integer id;
+
+    private BedHeightEpoch bedHeight;
+
+    private BigDecimal station;
+    private BigDecimal height;
+
+
+    public BedHeightEpochValue() {
+    }
+
+    public BedHeightEpochValue(
+        BedHeightEpoch bedHeight,
+        BigDecimal station,
+        BigDecimal height
+    ) {
+        this.bedHeight = bedHeight;
+        this.station   = station;
+        this.height    = height;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_EPOCH_VALUE_ID_SEQ",
+        sequenceName   = "BED_EPOCH_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_EPOCH_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "bed_height_epoch_id" )
+    public BedHeightEpoch getBedHeight() {
+        return bedHeight;
+    }
+
+    public void setBedHeight(BedHeightEpoch bedHeight) {
+        this.bedHeight = bedHeight;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "height")
+    public BigDecimal getHeight() {
+        return height;
+    }
+
+    public void setHeight(BigDecimal height) {
+        this.height = height;
+    }
+
+
+    public static List<BedHeightEpochValue> getBedHeightEpochValues(
+        BedHeightEpoch epoch,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpochValue where bedHeight=:epoch " +
+            "   and station >= :kmLo and station <= :kmHi");
+
+        query.setParameter("epoch", epoch);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightSingle.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,272 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_single")
+public class BedHeightSingle implements Serializable {
+
+    private Integer id;
+    private Integer year;
+    private Integer soundingWidth;
+
+    private String evaluationBy;
+    private String description;
+
+    private River river;
+
+    private BedHeightType  type;
+
+    private LocationSystem locationSystem;
+
+    private ElevationModel curElevationModel;
+
+    private ElevationModel oldElevationModel;
+
+    private Range range;
+
+    private List<BedHeightSingleValue> values;
+
+
+    public BedHeightSingle() {
+    }
+
+
+    public BedHeightSingle(
+        River          river,
+        Integer        year,
+        Integer        soundingWidth,
+        BedHeightType  type,
+        LocationSystem locationSystem,
+        ElevationModel curElevationModel,
+        Range          range
+    ) {
+        this(
+            river,
+            year,
+            soundingWidth,
+            type,
+            locationSystem,
+            curElevationModel,
+            null,
+            range,
+            null,
+            null);
+    }
+
+
+    public BedHeightSingle(
+        River          river,
+        Integer        year,
+        Integer        soundingWidth,
+        BedHeightType  type,
+        LocationSystem locationSystem,
+        ElevationModel curElevationModel,
+        ElevationModel oldElevationModel,
+        Range          range,
+        String         evaluationBy,
+        String         description
+    ) {
+        this.river             = river;
+        this.year              = year;
+        this.soundingWidth     = soundingWidth;
+        this.type              = type;
+        this.locationSystem    = locationSystem;
+        this.curElevationModel = curElevationModel;
+        this.oldElevationModel = oldElevationModel;
+        this.range             = range;
+        this.evaluationBy      = evaluationBy;
+        this.description       = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_SINGLE_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_SINGLE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_SINGLE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return year;
+    }
+
+    public void setYear(Integer year) {
+        this.year = year;
+    }
+
+    @Column(name = "sounding_width")
+    public Integer getSoundingWidth() {
+        return soundingWidth;
+    }
+
+    public void setSoundingWidth(Integer soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public BedHeightType getType() {
+        return type;
+    }
+
+    public void setType(BedHeightType type) {
+        this.type = type;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "location_system_id")
+    public LocationSystem getLocationSystem() {
+        return locationSystem;
+    }
+
+    public void setLocationSystem(LocationSystem locationSystem) {
+        this.locationSystem = locationSystem;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cur_elevation_model_id")
+    public ElevationModel getCurElevationModel() {
+        return curElevationModel;
+    }
+
+    public void setCurElevationModel(ElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "old_elevation_model_id")
+    public ElevationModel getOldElevationModel() {
+        return oldElevationModel;
+    }
+
+    public void setOldElevationModel(ElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluationBy() {
+        return evaluationBy;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "bed_height_single_id")
+    public List<BedHeightSingleValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<BedHeightSingleValue> values) {
+        this.values = values;
+    }
+
+
+    public static List<BedHeightSingle> getBedHeightSingles(
+        River  river,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingle where river=:river");
+
+        query.setParameter("river", river);
+
+        // TODO Do km range filtering in SQL statement
+
+        List<BedHeightSingle> singles = query.list();
+        List<BedHeightSingle> good    = new ArrayList<BedHeightSingle>();
+
+        OUTER: for (BedHeightSingle s: singles) {
+            for (BedHeightSingleValue value: s.getValues()) {
+                double station = value.getStation().doubleValue();
+
+                if (station >= kmLo && station <= kmHi) {
+                    good.add(s);
+                    continue OUTER;
+                }
+            }
+        }
+
+        return good;
+    }
+
+
+    public static BedHeightSingle getBedHeightSingleById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingle where id=:id");
+
+        query.setParameter("id", id);
+
+        List<BedHeightSingle> singles = query.list();
+
+        // query.list() returns an empty list (never null) if no row matches.
+        return singles.isEmpty() ? null : singles.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightSingleValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,167 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_single_values")
+public class BedHeightSingleValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(BedHeightSingleValue.class);
+
+    private Integer id;
+
+    private BedHeightSingle bedHeight;
+
+    private BigDecimal station;
+    private BigDecimal height;
+    private BigDecimal uncertainty;
+    private BigDecimal dataGap;
+    private BigDecimal soundingWidth;
+    private BigDecimal width;
+
+
+    public BedHeightSingleValue() {
+    }
+
+    public BedHeightSingleValue(
+        BedHeightSingle bedHeight,
+        BigDecimal station,
+        BigDecimal height,
+        BigDecimal uncertainty,
+        BigDecimal dataGap,
+        BigDecimal soundingWidth,
+        BigDecimal width
+    ) {
+        this.bedHeight     = bedHeight;
+        this.station       = station;
+        this.height        = height;
+        this.uncertainty   = uncertainty;
+        this.dataGap       = dataGap;
+        this.soundingWidth = soundingWidth;
+        this.width         = width;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_SINGLE_VALUE_ID_SEQ",
+        sequenceName   = "BED_SINGLE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_SINGLE_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "bed_height_single_id" )
+    public BedHeightSingle getBedHeight() {
+        return bedHeight;
+    }
+
+    public void setBedHeight(BedHeightSingle bedHeight) {
+        this.bedHeight = bedHeight;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "height")
+    public BigDecimal getHeight() {
+        return height;
+    }
+
+    public void setHeight(BigDecimal height) {
+        this.height = height;
+    }
+
+    @Column(name="uncertainty")
+    public BigDecimal getUncertainty() {
+        return uncertainty;
+    }
+
+    public void setUncertainty(BigDecimal uncertainty) {
+        this.uncertainty = uncertainty;
+    }
+
+    @Column(name="data_gap")
+    public BigDecimal getDataGap() {
+        return dataGap;
+    }
+
+    public void setDataGap(BigDecimal dataGap) {
+        this.dataGap = dataGap;
+    }
+
+    @Column(name="sounding_width")
+    public BigDecimal getSoundingWidth() {
+        return soundingWidth;
+    }
+
+    public void setSoundingWidth(BigDecimal soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    @Column(name="width")
+    public BigDecimal getWidth() {
+        return width;
+    }
+
+    public void setWidth(BigDecimal width) {
+        this.width = width;
+    }
+
+
+    public static List<BedHeightSingleValue> getBedHeightSingleValues(
+        BedHeightSingle single,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingleValue where bedHeight=:single " +
+            "   and station >= :kmLo and station <= :kmHi");
+
+        query.setParameter("single", single);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,91 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "bed_height_type")
+public class BedHeightType
+implements   Serializable
+{
+    private static Logger log = Logger.getLogger(BedHeightType.class);
+
+    private Integer id;
+    private String  name;
+    private String  description;
+
+
+    public BedHeightType() {
+    }
+
+    public BedHeightType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_TYPE_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_TYPE_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_TYPE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public static String getBedHeightName(String description) {
+        if (description.equals("Flächenpeilung")) {
+            return "FP";
+        }
+        else if ("Querprofile".equals(description)) {
+            return "QP";
+        }
+        else if ("TIN".equals(description)) {
+            return "TIN";
+        }
+        else if ("Flächen- u. Querprofilpeilungen".equals(description)) {
+            return "FP-QP";
+        }
+        else {
+            log.warn("Unknown bed height type: " + description);
+            return null;
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Building.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "buildings")
+public class Building
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private String     name;
+    private LineString geom;
+
+    public Building() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Building> getBuildings(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Building where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Catchment.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,107 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "catchment")
+public class Catchment
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal area;
+    private String     name;
+    private River      river;
+    private Geometry   geom;
+
+    public Catchment() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "area")
+    public BigDecimal getArea() {
+        return area;
+    }
+
+
+    public void setArea(BigDecimal area) {
+        this.area = area;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Catchment> getCatchments(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Catchment where river.id =:river_id AND name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSection.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,198 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.awt.geom.Point2D;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+import java.math.MathContext;
+import java.math.BigDecimal;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+import org.hibernate.Query;
+
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+@Entity
+@Table(name = "cross_sections")
+public class CrossSection
+implements   Serializable
+{
+    public static final MathContext PRECISION = new MathContext(6);
+
+    public static final String SQL_FAST_CROSS_SECTION_LINES =
+        "SELECT km, x, y, csl.id AS csl_id " +
+        "FROM cross_section_lines csl JOIN cross_section_points csp " +
+        "ON csp.cross_section_line_id = csl.id " +
+        "WHERE csl.cross_section_id = :cs_id AND " +
+        "km between :from_km AND :to_km " +
+        "ORDER BY csl.id, csp.col_pos";
+
+    private Integer                id;
+    private River                  river;
+    private TimeInterval           timeInterval;
+    private String                 description;
+    private List<CrossSectionLine> lines;
+
+    public CrossSection() {
+    }
+
+    public CrossSection(
+        River        river,
+        TimeInterval timeInterval,
+        String       description
+    ) {
+        this.river        = river;
+        this.timeInterval = timeInterval;
+        this.description  = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTIONS_ID_SEQ",
+        sequenceName   = "CROSS_SECTIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @OrderBy("km")
+    @JoinColumn(name="cross_section_id")
+    public List<CrossSectionLine> getLines() {
+        return lines;
+    }
+
+    public void setLines(List<CrossSectionLine> lines) {
+        this.lines = lines;
+    }
+
+    public List<CrossSectionLine> getLines(double startKm, double endKm) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from CrossSectionLine where crossSection=:crossSection " +
+            "and km between :startKm and :endKm order by km");
+        query.setParameter("crossSection", this);
+        query.setParameter("startKm", new BigDecimal(startKm, PRECISION));
+        query.setParameter("endKm", new BigDecimal(endKm, PRECISION));
+
+        return query.list();
+    }
+
+    public List<FastCrossSectionLine> getFastLines(
+        double startKm,
+        double endKm
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        SQLQuery sqlQuery = session.createSQLQuery(SQL_FAST_CROSS_SECTION_LINES)
+            .addScalar("km",     StandardBasicTypes.DOUBLE)
+            .addScalar("x",      StandardBasicTypes.DOUBLE)
+            .addScalar("y",      StandardBasicTypes.DOUBLE)
+            .addScalar("csl_id", StandardBasicTypes.INTEGER);
+
+        sqlQuery
+            .setInteger("cs_id",  getId())
+            .setDouble("from_km", startKm)
+            .setDouble("to_km",   endKm);
+
+        List<Object []> results = sqlQuery.list();
+
+        ArrayList<Point2D> points = new ArrayList<Point2D>(500);
+        ArrayList<FastCrossSectionLine> lines =
+            new ArrayList<FastCrossSectionLine>();
+
+        Integer lastId = null;
+        Double  lastKm = null;
+
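+        // rows come ordered by line id and column position; when the id
+        // changes, the points collected so far form one complete line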
+        for (Object [] result: results) {
+            Double  km = (Double)result[0];
+            Double  x  = (Double)result[1];
+            Double  y  = (Double)result[2];
+            Integer id = (Integer)result[3];
+
+            if (lastId != null && !lastId.equals(id)) {
+                points.trimToSize();
+                FastCrossSectionLine line =
+                    new FastCrossSectionLine(lastKm, points);
+                lines.add(line);
+                points = new ArrayList<Point2D>(500);
+            }
+
+            Point2D p = new Point2D.Double(x, y);
+
+            if (CrossSectionLine.isValid(p)) {
+                points.add(p);
+            }
+
+            lastKm = km;
+            lastId = id;
+        }
+
+        if (lastId != null) {
+            points.trimToSize();
+            FastCrossSectionLine line =
+                new FastCrossSectionLine(lastKm, points);
+            lines.add(line);
+        }
+
+        lines.trimToSize();
+
+        return lines;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionLine.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,174 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collections;
+import java.util.Comparator;
+
+import java.awt.geom.Point2D;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.JoinColumn;
+
+import org.apache.log4j.Logger;
+
+@Entity
+@Table(name = "cross_section_lines")
+public class CrossSectionLine
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(CrossSectionLine.class);
+
+    public static final double EPSILON   = 1e-4;
+
+    public static final double TOO_SMALL = 0.2;
+    public static final double TOO_BIG   = 2500;
+
+    private Integer                 id;
+    private Double                  km;
+    private CrossSection            crossSection;
+
+    private List<CrossSectionPoint> points;
+
+    public static final Comparator<CrossSectionPoint> COL_POS_CMP =
+        new Comparator<CrossSectionPoint>() {
+            @Override
+            public int compare(CrossSectionPoint a, CrossSectionPoint b) {
+                double xa = a.getX().doubleValue();
+                double xb = b.getX().doubleValue();
+                double d = xa - xb;
+                if (d < -EPSILON) return -1;
+                if (d > +EPSILON) return +1;
+                int diff = a.getColPos() - b.getColPos();
+                return diff < 0 ? -1 : diff > 0 ? +1 : 0;
+            }
+        };
+
+
+    public static final boolean isValid(double x) {
+        x = Math.abs(x);
+        return x > TOO_SMALL && x < TOO_BIG;
+    }
+
+    public static final boolean isValid(Point2D p) {
+        return isValid(p.getX()) && isValid(p.getY());
+    }
+
+
+    public CrossSectionLine() {
+    }
+
+    public CrossSectionLine(CrossSection crossSection, Double km) {
+        this.crossSection = crossSection;
+        this.km           = km;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTION_LINES_ID_SEQ",
+        sequenceName   = "CROSS_SECTION_LINES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTION_LINES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "km")
+    public Double getKm() {
+        return km;
+    }
+
+    public void setKm(Double km) {
+        this.km = km;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cross_section_id")
+    public CrossSection getCrossSection() {
+        return crossSection;
+    }
+
+    public void setCrossSection(CrossSection crossSection) {
+        this.crossSection = crossSection;
+    }
+
+    @OneToMany
+    @JoinColumn(name="cross_section_line_id")
+    public List<CrossSectionPoint> getPoints() {
+        return points;
+    }
+
+    public void setPoints(List<CrossSectionPoint> points) {
+        this.points = points;
+    }
+
+
+    public List<Point2D> fetchCrossSectionLinesPoints() {
+
+        List<CrossSectionPoint> linePoints =
+            new ArrayList<CrossSectionPoint>(getPoints());
+
+        Collections.sort(linePoints, COL_POS_CMP);
+
+        List<Point2D> points = new ArrayList<Point2D>(linePoints.size());
+        for (CrossSectionPoint p: linePoints) {
+            double x = p.getX().doubleValue();
+            double y = p.getY().doubleValue();
+            if (isValid(x) && isValid(y)) {
+                points.add(new Point2D.Double(x, y));
+            }
+        }
+
+        return points;
+    }
+
+    public double [][] fetchCrossSectionProfile() {
+        return fetchCrossSectionProfile(fetchCrossSectionLinesPoints());
+    }
+
+    public static double [][] fetchCrossSectionProfile(List<Point2D> points) {
+
+        int P = points.size();
+
+        double [] xs = new double[P];
+        double [] ys = new double[P];
+
+        if (P > 0) {
+            xs[0] = points.get(0).getX();
+            ys[0] = points.get(0).getY();
+
+            for (int i = 1; i < P; i++) {
+                Point2D p = points.get(i);
+                double x = p.getX();
+                double y = p.getY();
+
+                if (x <= xs[i-1]) {
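+                    // keep the x values strictly increasing; non-increasing
+                    // values are nudged forward by EPSILON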
+                    x = xs[i-1] + EPSILON;
+                }
+
+                xs[i] = x;
+                ys[i] = y;
+            }
+        }
+
+        return new double [][] { xs, ys };
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionPoint.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,95 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "cross_section_points")
+public class CrossSectionPoint
+implements   Serializable
+{
+    private Integer          id;
+    private CrossSectionLine crossSectionLine;
+    private Integer          colPos;
+    private Double           x;
+    private Double           y;
+
+    public CrossSectionPoint() {
+    }
+
+    public CrossSectionPoint(
+        CrossSectionLine crossSectionLine,
+        Integer          colPos,
+        Double           x,
+        Double           y
+    ) {
+        this.crossSectionLine = crossSectionLine;
+        this.colPos           = colPos;
+        this.x                = x;
+        this.y                = y;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTION_POINTS_ID_SEQ",
+        sequenceName   = "CROSS_SECTION_POINTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTION_POINTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cross_section_line_id")
+    public CrossSectionLine getCrossSectionLine() {
+        return crossSectionLine;
+    }
+
+    public void setCrossSectionLine(CrossSectionLine crossSectionLine) {
+        this.crossSectionLine = crossSectionLine;
+    }
+
+    @Column(name = "col_pos")
+    public Integer getColPos() {
+        return colPos;
+    }
+
+    public void setColPos(Integer colPos) {
+        this.colPos = colPos;
+    }
+
+    @Column(name = "x")
+    public Double getX() {
+        return x;
+    }
+
+    public void setX(Double x) {
+        this.x = x;
+    }
+
+    @Column(name = "y")
+    public Double getY() {
+        return y;
+    }
+
+    public void setY(Double y) {
+        this.y = y;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,164 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "cross_section_tracks")
+public class CrossSectionTrack
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private String     name;
+    private LineString geom;
+    private BigDecimal km;
+    private BigDecimal z;
+
+    public CrossSectionTrack() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "z")
+    public BigDecimal getZ() {
+        return z;
+    }
+
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+
+
+    public static List<CrossSectionTrack> getCrossSectionTrack(
+        String river)
+    {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river.name =:river");
+        query.setParameter("river", river);
+
+        return query.list();
+    }
+
+
+    public static List<CrossSectionTrack> getCrossSectionTrack(
+        String river,
+        String name
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from CrossSectionTrack as cst " +
+            "    where river.name =:river" +
+            "      and cst.name=:name");
+        query.setParameter("river", river);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+
+
+    /**
+     * Returns the nearest CrossSectionTrack of <i>river</i> to a given
+     * <i>km</i>.
+     *
+     * @param river The name of a river.
+     * @param km The kilometer value.
+     *
+     * @return the nearest CrossSectionTrack to <i>km</i> of river <i>river</i>.
+     */
+    public static CrossSectionTrack getCrossSectionTrack(
+        String river,
+        double km
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river.name =:river " +
+            "order by abs( km - :mykm)");
+        query.setParameter("river", river);
+        query.setParameter("mykm", new BigDecimal(km));
+
+        List<CrossSectionTrack> cst = query.list();
+
+        return cst != null && !cst.isEmpty() ? cst.get(0) : null;
+    }
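+
+    // Usage sketch (hypothetical river name; assumes a Hibernate session is
+    // bound via SessionHolder):
+    //
+    //   CrossSectionTrack nearest =
+    //       CrossSectionTrack.getCrossSectionTrack("Mosel", 12.3);
+    //   BigDecimal z = nearest != null ? nearest.getZ() : null;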
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,115 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+import java.math.BigDecimal;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "dem")
+public class DGM implements Serializable {
+
+    private Integer    id;
+
+    private River      river;
+
+    private BigDecimal lower;
+    private BigDecimal upper;
+
+    private String     path;
+
+
+    public DGM() {
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setLower(BigDecimal lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "lower")
+    public BigDecimal getLower() {
+        return lower;
+    }
+
+    public void setUpper(BigDecimal upper) {
+        this.upper = upper;
+    }
+
+    @Column(name = "upper")
+    public BigDecimal getUpper() {
+        return upper;
+    }
+
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    @Column(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+
+    public static DGM getDGM(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DGM where id =:id");
+        query.setParameter("id", id);
+
+        List<DGM> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+
+
+    public static DGM getDGM(String river, double lower, double upper) {
+        Session session = SessionHolder.HOLDER.get();
+
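+        // Select a DEM whose km range fully covers [lower, upper].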
+        Query query = session.createQuery(
+            "from DGM where river.name =:river and " +
+            "lower <=:lower and upper >=:lower and " +
+            "lower <=:upper and upper >=:upper");
+        query.setParameter("river", river);
+        query.setParameter("lower", new BigDecimal(lower));
+        query.setParameter("upper", new BigDecimal(upper));
+
+        List<DGM> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Depth.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "depths")
+public class Depth implements Serializable {
+
+    private Integer id;
+
+    private BigDecimal lower;
+    private BigDecimal upper;
+
+    private Unit unit;
+
+
+    public Depth() {
+    }
+
+
+    public Depth(BigDecimal lower, BigDecimal upper, Unit unit) {
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DEPTHS_ID_SEQ",
+        sequenceName   = "DEPTHS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DEPTHS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "lower")
+    public BigDecimal getLower() {
+        return lower;
+    }
+
+    public void setLower(BigDecimal lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "upper")
+    public BigDecimal getUpper() {
+        return upper;
+    }
+
+    public void setUpper(BigDecimal upper) {
+        this.upper = upper;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeTable.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,120 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+import java.util.List;
+
+@Entity
+@Table(name = "discharge_tables")
+public class DischargeTable
+implements   Serializable
+{
+    private Integer      id;
+    private Gauge        gauge;
+    private String       description;
+    private Integer      kind;
+    private TimeInterval timeInterval;
+
+    private List<DischargeTableValue> dischargeTableValues;
+
+    public DischargeTable() {
+        kind = 0;
+    }
+
+    public DischargeTable(Gauge gauge) {
+        this(gauge, null, 0, null);
+    }
+
+    public DischargeTable(
+        Gauge        gauge,
+        String       description,
+        Integer      kind,
+        TimeInterval timeInterval
+    ) {
+        this.gauge        = gauge;
+        this.description  = description;
+        this.kind         = kind;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_TABLES_ID_SEQ",
+        sequenceName   = "DISCHARGE_TABLES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_TABLES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "gauge_id" )
+    public Gauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id" )
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "table_id")
+    @OrderBy("q")
+    public List<DischargeTableValue> getDischargeTableValues() {
+        return dischargeTableValues;
+    }
+
+    public void setDischargeTableValues(
+        List<DischargeTableValue> dischargeTableValues
+    ) {
+        this.dischargeTableValues = dischargeTableValues;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeTableValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "discharge_table_values")
+public class DischargeTableValue
+implements   Serializable
+{
+    private Integer        id;
+    private DischargeTable dischargeTable;
+    private BigDecimal     q;
+    private BigDecimal     w;
+
+    public DischargeTableValue() {
+    }
+
+    public DischargeTableValue(
+        DischargeTable dischargeTable, BigDecimal q, BigDecimal w)
+    {
+        this.dischargeTable = dischargeTable;
+        this.q              = q;
+        this.w              = w;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_TABLE_VALUES_ID_SEQ",
+        sequenceName   = "DISCHARGE_TABLE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_TABLE_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "table_id" )
+    public DischargeTable getDischargeTable() {
+        return dischargeTable;
+    }
+
+    public void setDischargeTable(DischargeTable dischargeTable) {
+        this.dischargeTable = dischargeTable;
+    }
+
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "w")
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeZone.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,152 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "discharge_zone")
+public class DischargeZone
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(DischargeZone.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private String gaugeName;
+
+    private BigDecimal value;
+
+    private String lowerDischarge;
+    private String upperDischarge;
+
+
+    public DischargeZone() {
+    }
+
+
+    public DischargeZone(
+        River       river,
+        String      gaugeName,
+        BigDecimal  value,
+        String      lowerDischarge,
+        String      upperDischarge
+    ) {
+        this.river          = river;
+        this.gaugeName      = gaugeName;
+        this.value          = value;
+        this.lowerDischarge = lowerDischarge;
+        this.upperDischarge = upperDischarge;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_ZONE_ID_SEQ",
+        sequenceName   = "DISCHARGE_ZONE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_ZONE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "value")
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
+    @Column(name = "gauge_name")
+    public String getGaugeName() {
+        return gaugeName;
+    }
+
+    public void setGaugeName(String gaugeName) {
+        this.gaugeName = gaugeName;
+    }
+
+    @Column(name = "lower_discharge")
+    public String getLowerDischarge() {
+        return lowerDischarge;
+    }
+
+    public void setLowerDischarge(String lowerDischarge) {
+        this.lowerDischarge = lowerDischarge;
+    }
+
+    @Column(name = "upper_discharge")
+    public String getUpperDischarge() {
+        return upperDischarge;
+    }
+
+    public void setUpperDischarge(String upperDischarge) {
+        this.upperDischarge = upperDischarge;
+    }
+
+
+    public static List<DischargeZone> getDischargeZones(River river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DischargeZone where river=:river");
+
+        query.setParameter("river", river);
+
+        return query.list();
+    }
+
+
+    public static DischargeZone getDischargeZoneById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DischargeZone where id=:id");
+
+        query.setParameter("id", id);
+
+        List<DischargeZone> zones = query.list();
+
+        return zones.isEmpty() ? null : zones.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Edge.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,67 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "edges")
+public class Edge
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal top;
+    private BigDecimal bottom;
+
+    public Edge() {
+    }
+
+    public Edge(BigDecimal top, BigDecimal bottom) {
+        this.top    = top;
+        this.bottom = bottom;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_EDGES_ID_SEQ",
+        sequenceName   = "EDGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_EDGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "top")
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    @Column(name = "bottom")
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/ElevationModel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,78 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "elevation_model")
+public class ElevationModel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(ElevationModel.class);
+
+    protected Integer id;
+
+    protected String name;
+
+    protected Unit unit;
+
+
+    public ElevationModel() {
+    }
+
+
+    public ElevationModel(String name, Unit unit) {
+        this.name = name;
+        this.unit = unit;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ELEVATION_MODE_ID_SEQ",
+        sequenceName   = "ELEVATION_MODEL_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ELEVATION_MODE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FastAnnotations.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,335 @@
+package de.intevation.flys.model;
+
+import java.util.Comparator;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+import java.io.Serializable;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+public class FastAnnotations
+implements   Serializable
+{
+    public static final String SQL_BY_RIVER_NAME =
+        "SELECT r.a AS a, r.b AS b, p.value AS position, " +
+                "at.value AS attribute, ant.name AS name, " +
+                "e.top AS top, e.bottom AS bottom " +
+        "FROM annotations an " +
+            "JOIN ranges r " +
+                "ON an.range_id = r.id " +
+            "JOIN attributes at " +
+                "ON an.attribute_id = at.id " +
+            "JOIN positions p " +
+                "ON an.position_id = p.id " +
+            "JOIN rivers riv " +
+                "ON r.river_id = riv.id " +
+            "LEFT JOIN annotation_types ant " +
+                "ON an.type_id = ant.id " +
+            "LEFT JOIN edges e " +
+                "ON an.edge_id = e.id " +
+            "WHERE riv.name = :river_name " +
+                "ORDER BY r.a";
+
+    public static final String SQL_BY_RIVER_ID =
+        "SELECT r.a AS a, r.b AS b, p.value AS position, " +
+                "at.value AS attribute, ant.name AS name, " +
+                "e.top AS top, e.bottom AS bottom " +
+        "FROM annotations an " +
+            "JOIN ranges r " +
+                "ON an.range_id = r.id " +
+            "JOIN attributes at " +
+                "ON an.attribute_id = at.id " +
+            "JOIN positions p " +
+                "ON an.position_id = p.id " +
+            "LEFT JOIN annotation_types ant " +
+                "ON an.type_id = ant.id " +
+            "LEFT JOIN edges e " +
+                "ON an.edge_id = e.id " +
+            "WHERE r.river_id = :river_id " +
+                "ORDER BY r.a";
+
+    public static final double EPSILON = 1e-5;
+
+    public static final Comparator<Annotation> KM_CMP =
+        new Comparator<Annotation>() {
+            @Override
+            public int compare(Annotation a, Annotation b) {
+                double diff = a.a - b.a;
+                if (diff < -EPSILON) return -1;
+                if (diff > +EPSILON) return +1;
+                return 0;
+            }
+        };
+
+    public static final class Annotation
+    implements                Serializable
+    {
+        private double a;
+        private double b;
+        private String position;
+        private String attribute;
+        private String name;
+        private double top;
+        private double bottom;
+
+        public Annotation() {
+        }
+
+        public Annotation(double a) {
+            this.a = a;
+        }
+
+        public Annotation(
+            double a,
+            double b,
+            String position,
+            String attribute,
+            String name,
+            double top,
+            double bottom
+        ) {
+            this.a         = a;
+            this.b         = b;
+            this.position  = position;
+            this.attribute = attribute;
+            this.name      = name;
+            this.top       = top;
+            this.bottom    = bottom;
+        }
+
+        public double getA() {
+            return a;
+        }
+
+        public double getB() {
+            return b;
+        }
+
+        public String getPosition() {
+            return position;
+        }
+
+        public String getAttribute() {
+            return attribute;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public double getTop() {
+            return top;
+        }
+
+        public double getBottom() {
+            return bottom;
+        }
+
+        @Override
+        public String toString() {
+            return "[a=" + a + ";b=" + b +
+                ";pos=" + position + ";attr=" + attribute +
+                ";name=" + name + ";top=" + top +
+                ";bot=" + bottom + "]";
+        }
+    } // class Annotation
+
+    public interface Filter {
+
+        boolean accept(Annotation annotation);
+
+    } // interface Filter
+
+    public static class NameFilter implements Filter {
+
+        private String name;
+
+        public NameFilter(String name) {
+            this.name = name;
+        }
+
+        @Override
+        public boolean accept(Annotation annotation) {
+            return annotation.getName().contains(name);
+        }
+    } // class NameFilter
+
+    public static final Filter ALL = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return true;
+        }
+    };
+
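+    // Annotations without an end km (b is NaN) are points; those with both
+    // a and b are ranges.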
+    public static final Filter IS_POINT = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return Double.isNaN(annotation.getB());
+        }
+    };
+
+    public static final Filter IS_RANGE = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return !Double.isNaN(annotation.getB());
+        }
+    };
+
+    private Annotation [] annotations;
+
+    public FastAnnotations() {
+    }
+
+    public FastAnnotations(Annotation [] annotations) {
+        this.annotations = annotations;
+    }
+
+    public FastAnnotations(String riverName) {
+        this(loadByRiverName(riverName));
+    }
+
+    public FastAnnotations(int riverId) {
+        this(loadByRiverId(riverId));
+    }
+
+    public FastAnnotations(Iterator<Annotation> iter) {
+        this(toArray(iter));
+    }
+
+    public int size() {
+        return annotations.length;
+    }
+
+    public Iterator<Annotation> filter(final Filter filter) {
+        return new Iterator<Annotation>() {
+
+            private int idx;
+            private Annotation current = findNext();
+
+            @Override
+            public boolean hasNext() {
+                return current != null;
+            }
+
+            @Override
+            public Annotation next() {
+                if (current == null) {
+                    throw new NoSuchElementException();
+                }
+                Annotation result = current;
+                current = findNext();
+                return result;
+            }
+
+            private Annotation findNext() {
+
+                while (idx < annotations.length) {
+                    Annotation annotation = annotations[idx++];
+                    if (filter.accept(annotation)) {
+                        return annotation;
+                    }
+                }
+
+                return null;
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
+    }
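+
+    // Usage sketch (hypothetical river name): iterate all point annotations
+    // of a river.
+    //
+    //   FastAnnotations fas = new FastAnnotations("Mosel");
+    //   Iterator<FastAnnotations.Annotation> it =
+    //       fas.filter(FastAnnotations.IS_POINT);
+    //   while (it.hasNext()) {
+    //       System.out.println(it.next());
+    //   }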
+
+    public static Annotation [] toArray(Iterator<Annotation> iter) {
+
+        ArrayList<Annotation> list = new ArrayList<Annotation>();
+
+        while (iter.hasNext()) {
+            list.add(iter.next());
+        }
+
+        return list.toArray(new Annotation[list.size()]);
+    }
+
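+    // Exact-match lookup: only annotations starting within EPSILON of the
+    // given km are found; otherwise null is returned.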
+    public Annotation findByKm(double km) {
+        Annotation key = new Annotation(km);
+        int idx = Arrays.binarySearch(annotations, key, KM_CMP);
+        return idx < 0 ? null : annotations[idx];
+    }
+
+    private static SQLQuery createQuery(String query) {
+        Session session = SessionHolder.HOLDER.get();
+
+        return session.createSQLQuery(query)
+            .addScalar("a",         StandardBasicTypes.DOUBLE)
+            .addScalar("b",         StandardBasicTypes.DOUBLE)
+            .addScalar("position",  StandardBasicTypes.STRING)
+            .addScalar("attribute", StandardBasicTypes.STRING)
+            .addScalar("name",      StandardBasicTypes.STRING)
+            .addScalar("top",       StandardBasicTypes.DOUBLE)
+            .addScalar("bottom",    StandardBasicTypes.DOUBLE);
+    }
+
+    private static Annotation [] buildAnnotations(List<Object []> list) {
+        Annotation [] anns = new Annotation[list.size()];
+
+        // Names are often identical because they denote a type
+        // like 'Pegel' or 'Hafen', so identical names share one String
+        // instance via the map below.
+        HashMap<String, String> names = new HashMap<String, String>();
+
+        for (int i = 0; i < anns.length; ++i) {
+            Object [] data   = list.get(i);
+            double a         = ((Double)data[0]);
+            double b         = data[1] != null ? (Double)data[1] : Double.NaN;
+            String position  = (String)data[2];
+            String attribute = (String)data[3];
+            String name      = (String)data[4];
+            double top       = data[5] != null ? (Double)data[5] : Double.NaN;
+            double bottom    = data[6] != null ? (Double)data[6] : Double.NaN;
+
+            if (name != null) {
+                String old = names.get(name);
+                if (old != null) {
+                    name = old;
+                }
+                else {
+                    names.put(name, name);
+                }
+            }
+
+            anns[i] = new Annotation(
+                a, b, position, attribute, name, top, bottom);
+        }
+
+        return anns;
+    }
+
+    public static Annotation [] loadByRiverName(String riverName) {
+
+        SQLQuery query = createQuery(SQL_BY_RIVER_NAME);
+
+        query.setString("river_name", riverName);
+
+        return buildAnnotations(query.list());
+    }
+
+    public static Annotation [] loadByRiverId(int riverId) {
+
+        SQLQuery query = createQuery(SQL_BY_RIVER_ID);
+
+        query.setInteger("river_id", riverId);
+
+        return buildAnnotations(query.list());
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FastCrossSectionLine.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+import java.util.Comparator;
+
+import java.io.Serializable;
+
+import java.awt.geom.Point2D;
+
+public class FastCrossSectionLine
+implements   Serializable
+{
+    public static final double EPSILON = 1e-5;
+
+    public static final Comparator<FastCrossSectionLine> KM_CMP =
+        new Comparator<FastCrossSectionLine>() {
+            public int compare(
+                FastCrossSectionLine a,
+                FastCrossSectionLine b
+            ) {
+                double diff = a.km - b.km;
+                if (diff < -EPSILON) return -1;
+                return diff > +EPSILON ? +1 : 0;
+            }
+        };
+
+    protected double km;
+    protected List<Point2D> points;
+
+    public FastCrossSectionLine() {
+    }
+
+    public FastCrossSectionLine(double km) {
+        this.km = km;
+    }
+
+    public FastCrossSectionLine(double km, List<Point2D> points) {
+        this(km);
+        this.points = points;
+    }
+
+    public FastCrossSectionLine(CrossSectionLine csl) {
+        Double kmBD = csl.getKm();
+        km = kmBD != null ? kmBD.doubleValue() : 0d;
+        points = csl.fetchCrossSectionLinesPoints();
+    }
+
+    public double getKm() {
+        return km;
+    }
+
+    public void setKm(double km) {
+        this.km = km;
+    }
+
+    public List<Point2D> getPoints() {
+        return points;
+    }
+
+    public void setPoints(List<Point2D> points) {
+        this.points = points;
+    }
+
+    public double [][] fetchCrossSectionProfile() {
+        return CrossSectionLine.fetchCrossSectionProfile(points);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Fixpoint.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,130 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "fixpoints")
+public class Fixpoint
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private Integer    x;
+    private Integer    y;
+    private BigDecimal km;
+    private String     hpgp;
+    private Point      geom;
+
+    public Fixpoint() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "x")
+    public Integer getX() {
+        return x;
+    }
+
+
+    public void setX(Integer x) {
+        this.x = x;
+    }
+
+
+    @Column(name = "y")
+    public Integer getY() {
+        return y;
+    }
+
+
+    public void setY(Integer y) {
+        this.y = y;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "hpgp")
+    public String getHpgp() {
+        return hpgp;
+    }
+
+
+    public void setHpgp(String hpgp) {
+        this.hpgp = hpgp;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Fixpoint> getFixpoints(int riverId) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Fixpoint where river.id =:river_id");
+        query.setParameter("river_id", riverId);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodmaps.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,155 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "floodmaps")
+public class Floodmaps
+implements   Serializable
+{
+    private Integer      id;
+    private River        river;
+    private String       name;
+    private Integer      kind;
+    private Integer      count;
+    private BigDecimal   diff;
+    private BigDecimal   area;
+    private BigDecimal   perimeter;
+    private Geometry     geom;
+
+    public Floodmaps() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "count")
+    public Integer getCount() {
+        return count;
+    }
+
+
+    public void setCount(Integer count) {
+        this.count = count;
+    }
+
+
+    @Column(name = "diff")
+    public BigDecimal getDiff() {
+        return diff;
+    }
+
+
+    public void setDiff(BigDecimal diff) {
+        this.diff = diff;
+    }
+
+
+    @Column(name = "area")
+    public BigDecimal getArea() {
+        return area;
+    }
+
+
+    public void setArea(BigDecimal area) {
+        this.area = area;
+    }
+
+
+    @Column(name = "perimeter")
+    public BigDecimal getPerimeter() {
+        return perimeter;
+    }
+
+
+    public void setPerimeter(BigDecimal perimeter) {
+        this.perimeter = perimeter;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Floodmaps> getFloodmaps(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Floodmaps where river.id =:river_id AND name =:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Polygon;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "floodplain")
+public class Floodplain
+implements   Serializable
+{
+    private Integer id;
+
+    private River   river;
+
+    private Polygon geom;
+
+
+    public Floodplain() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Polygon getGeom() {
+        return geom;
+    }
+
+    public void setGeom(Polygon geom) {
+        this.geom = geom;
+    }
+
+
+    public static Floodplain getFloodplain(String river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Floodplain where river.name =:river");
+        query.setParameter("river", river);
+
+        List<Floodplain> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityMeasurement.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,97 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "flow_velocity_measurements")
+public class FlowVelocityMeasurement
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityMeasurement.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private String description;
+
+    private List<FlowVelocityMeasurementValue> values;
+
+
+    public FlowVelocityMeasurement() {
+    }
+
+
+    public FlowVelocityMeasurement(River river, String description) {
+        this.river       = river;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FV_MEASURE_ID_SEQ",
+        sequenceName   = "FV_MEASURE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FV_MEASURE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "measurements_id")
+    public List<FlowVelocityMeasurementValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<FlowVelocityMeasurementValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(FlowVelocityMeasurementValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityMeasurementValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "flow_velocity_measure_values")
+public class FlowVelocityMeasurementValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityMeasurementValue.class);
+
+
+    private Integer id;
+
+    private FlowVelocityMeasurement measurement;
+
+    private BigDecimal station;
+    private BigDecimal w;
+    private BigDecimal q;
+    private BigDecimal v;
+
+    private Date datetime;
+
+    private String description;
+
+
+    public FlowVelocityMeasurementValue() {
+    }
+
+
+    public FlowVelocityMeasurementValue(
+        FlowVelocityMeasurement measurement,
+        Date                    datetime,
+        BigDecimal              station,
+        BigDecimal              w,
+        BigDecimal              q,
+        BigDecimal              v,
+        String                  description
+    ) {
+        this.measurement = measurement;
+        this.datetime    = datetime;
+        this.station     = station;
+        this.w           = w;
+        this.q           = q;
+        this.v           = v;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FV_MEASURE_VALUES_ID_SEQ",
+        sequenceName   = "FV_MEASURE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FV_MEASURE_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "measurements_id")
+    public FlowVelocityMeasurement getMeasurement() {
+        return measurement;
+    }
+
+    public void setMeasurement(FlowVelocityMeasurement measurement) {
+        this.measurement = measurement;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "datetime")
+    public Date getDatetime() {
+        return datetime;
+    }
+
+    public void setDatetime(Date datetime) {
+        this.datetime = datetime;
+    }
+
+    @Column(name = "w")
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "v")
+    public BigDecimal getV() {
+        return v;
+    }
+
+    public void setV(BigDecimal v) {
+        this.v = v;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityModel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,124 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "flow_velocity_model")
+public class FlowVelocityModel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(FlowVelocityModel.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private DischargeZone dischargeZone;
+
+    private List<FlowVelocityModelValue> values;
+
+    private String description;
+
+
+    public FlowVelocityModel() {
+    }
+
+
+    public FlowVelocityModel(River river, DischargeZone dischargeZone) {
+        this(river, dischargeZone, null);
+    }
+
+
+    public FlowVelocityModel(
+        River         river,
+        DischargeZone dischargeZone,
+        String        description
+    ) {
+        this.river         = river;
+        this.dischargeZone = dischargeZone;
+        this.description   = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FLOW_VELOCITY_MODEL_ID_SEQ",
+        sequenceName   = "FLOW_VELOCITY_MODEL_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FLOW_VELOCITY_MODEL_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "discharge_zone_id")
+    public DischargeZone getDischargeZone() {
+        return dischargeZone;
+    }
+
+    public void setDischargeZone(DischargeZone dischargeZone) {
+        this.dischargeZone = dischargeZone;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public static List<FlowVelocityModel> getModels(
+        River         river,
+        DischargeZone zone
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from FlowVelocityModel where river=:river and dischargeZone=:zone");
+
+        query.setParameter("river", river);
+        query.setParameter("zone", zone);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,158 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "flow_velocity_model_values")
+public class FlowVelocityModelValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityModelValue.class);
+
+
+    private Integer id;
+
+    private FlowVelocityModel flowVelocity;
+
+    private BigDecimal station;
+    private BigDecimal q;
+    private BigDecimal totalChannel;
+    private BigDecimal mainChannel;
+    private BigDecimal shearStress;
+
+
+    public FlowVelocityModelValue() {
+    }
+
+
+    public FlowVelocityModelValue(
+        FlowVelocityModel flowVelocity,
+        BigDecimal        station,
+        BigDecimal        q,
+        BigDecimal        totalChannel,
+        BigDecimal        mainChannel,
+        BigDecimal        shearStress
+    ) {
+        this.flowVelocity = flowVelocity;
+        this.station      = station;
+        this.q            = q;
+        this.totalChannel = totalChannel;
+        this.mainChannel  = mainChannel;
+        this.shearStress  = shearStress;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FLOW_VELOCITY_M_VALUES_ID_SEQ",
+        sequenceName   = "FLOW_VELOCITY_M_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FLOW_VELOCITY_M_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "flow_velocity_model_id")
+    public FlowVelocityModel getFlowVelocity() {
+        return flowVelocity;
+    }
+
+    public void setFlowVelocity(FlowVelocityModel flowVelocity) {
+        this.flowVelocity = flowVelocity;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "total_channel")
+    public BigDecimal getTotalChannel() {
+        return totalChannel;
+    }
+
+    public void setTotalChannel(BigDecimal totalChannel) {
+        this.totalChannel = totalChannel;
+    }
+
+    @Column(name = "main_channel")
+    public BigDecimal getMainChannel() {
+        return mainChannel;
+    }
+
+    public void setMainChannel(BigDecimal mainChannel) {
+        this.mainChannel = mainChannel;
+    }
+
+    @Column(name = "shear_stress")
+    public BigDecimal getShearStress() {
+        return shearStress;
+    }
+
+    public void setShearStress(BigDecimal shearStress) {
+        this.shearStress = shearStress;
+    }
+
+
+    public static List<FlowVelocityModelValue> getValues(
+        FlowVelocityModel model,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from FlowVelocityModelValue where " +
+            "   flowVelocity=:model and" +
+            "   station >= :kmLo and " +
+            "   station <= :kmHi");
+
+        query.setParameter("model", model);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Gauge.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,330 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+@Entity
+@Table(name = "gauges")
+public class Gauge
+implements   Serializable
+{
+    public static final int DEFAULT_SCALE = 100;
+
+    public static final int MASTER_DISCHARGE_TABLE = 0;
+
+
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private BigDecimal station;
+    private BigDecimal aeo;
+    private BigDecimal datum;
+    private Long       officialNumber;
+    private Range      range;
+
+    private List<DischargeTable> dischargeTables;
+
+    /** MainValues at this Gauge. */
+    protected List<MainValue> mainValues;
+
+    public Gauge() {
+    }
+
+    public Gauge(
+        String     name,
+        River      river,
+        BigDecimal station,
+        BigDecimal aeo,
+        BigDecimal datum,
+        Long       officialNumber,
+        Range      range
+    ) {
+        this.name            = name;
+        this.river           = river;
+        this.station         = station;
+        this.aeo             = aeo;
+        this.datum           = datum;
+        this.officialNumber  = officialNumber;
+        this.range           = range;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_GAUGES_ID_SEQ",
+        sequenceName   = "GAUGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_GAUGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "station") // FIXME: type mapping needed
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "aeo") // FIXME: type mapping needed
+    public BigDecimal getAeo() {
+        return aeo;
+    }
+
+    public void setAeo(BigDecimal aeo) {
+        this.aeo = aeo;
+    }
+
+    @Column(name = "datum") // FIXME: type mapping needed
+    public BigDecimal getDatum() {
+        return datum;
+    }
+
+    public void setDatum(BigDecimal datum) {
+        this.datum = datum;
+    }
+
+    @Column(name = "official_number")
+    public Long getOfficialNumber() {
+        return officialNumber;
+    }
+
+    public void setOfficialNumber(Long officialNumber) {
+        this.officialNumber = officialNumber;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id" )
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "gauge_id")
+    public List<DischargeTable> getDischargeTables() {
+        return dischargeTables;
+    }
+
+    public void setDischargeTables(List<DischargeTable> dischargeTables) {
+        this.dischargeTables = dischargeTables;
+    }
+
+
+    /**
+     * Returns the min and max W values of this gauge, scaled by DEFAULT_SCALE.
+     *
+     * @return min and max W value of this gauge [min,max].
+     */
+    public double[] determineMinMaxW() {
+        return determineMinMaxW(DEFAULT_SCALE);
+    }
+
+
+    /**
+     * Returns the min and max W values of this gauge, scaled by the given
+     * factor.
+     *
+     * @param scale the factor the raw W values are multiplied with.
+     *
+     * @return the min and max W value of this gauge [min,max], or null if
+     * the gauge has no master discharge table.
+     */
+    public double[] determineMinMaxW(int scale) {
+        Session session = SessionHolder.HOLDER.get();
+
+        List<DischargeTable> tables   = getDischargeTables();
+        DischargeTable dischargeTable = null;
+
+        for (DischargeTable tmp: tables) {
+            if (tmp.getKind() == MASTER_DISCHARGE_TABLE) {
+                dischargeTable = tmp;
+                break;
+            }
+        }
+
+        if (dischargeTable == null) {
+            return null;
+        }
+
+        Query query  = session.createQuery(
+            "select min(w) as min, max(w) as max from DischargeTableValue " +
+            "where table_id =:table");
+        query.setParameter("table", dischargeTable.getId());
+
+        List     results = query.list();
+        Object[] result  = (Object[]) results.get(0);
+
+        return result != null
+            ? new double[] {
+                ((BigDecimal) result[0]).doubleValue() * scale,
+                ((BigDecimal) result[1]).doubleValue() * scale}
+            : null;
+    }
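+
+    /* Usage sketch (illustrative only, not part of the original code):
+     * the raw W values of the master discharge table are multiplied by
+     * the given scale, so
+     *
+     *   double[] scaled = gauge.determineMinMaxW();  // DEFAULT_SCALE = 100
+     *   double[] raw    = gauge.determineMinMaxW(1); // unscaled
+     *
+     * yields scaled[0] == raw[0] * 100 and scaled[1] == raw[1] * 100,
+     * provided a master discharge table with values exists.
+     */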
+
+    @OneToMany
+    @JoinColumn(name = "gauge_id")
+    public List<MainValue> getMainValues() {
+        return mainValues;
+    }
+
+    public void setMainValues(List<MainValue> mainValues) {
+        this.mainValues = mainValues;
+    }
+
+
+    public static Gauge getGaugeByOfficialNumber(long number) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Gauge where officialNumber=:number");
+
+        query.setParameter("number", number);
+
+        List<Gauge> results = query.list();
+
+        return results.isEmpty() ? null : results.get(0);
+    }
+
+
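+    /**
+     * Returns the discharge table of kind MASTER_DISCHARGE_TABLE of this
+     * gauge, or null if there is none.
+     */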
+    public DischargeTable fetchMasterDischargeTable() {
+        for (DischargeTable dt: dischargeTables) {
+            if (dt.getKind() == MASTER_DISCHARGE_TABLE) {
+                return dt;
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Returns an array of [days, qs] necessary to create duration curves.
+     *
+     * @return a two-dimensional array [days, qs] where days is an int[]
+     * and qs is a double[].
+     */
+    public Object[] fetchDurationCurveData() {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "select cast(nmv.name as integer) as days, mv.value as q " +
+            "from MainValue as mv " +
+            "join mv.mainValue as nmv " +
+            "join nmv.type mvt " +
+            "where mvt.name = 'D' and mv.gauge.id = :gauge_id " +
+            "order by days");
+
+        query.setParameter("gauge_id", getId());
+
+        List<Object> results = query.list();
+        int[]        days    = new int[results.size()];
+        double[]     qs      = new double[results.size()];
+
+        int idx = 0;
+
+        for (Object obj: results) {
+            Object[] arr = (Object[]) obj;
+
+            try {
+                int  day = ((Integer)    arr[0]).intValue();
+                double q = ((BigDecimal) arr[1]).doubleValue();
+
+                days[idx] = day;
+                qs[idx++] = q;
+            }
+            catch (NumberFormatException nfe) {
+                // Skip entries whose day count or Q value cannot be converted.
+            }
+        }
+
+        return new Object[] { days, qs };
+    }
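+
+    /* Usage sketch (illustrative only, not part of the original code):
+     * the two returned arrays are parallel and can be unpacked like this:
+     *
+     *   Object[] dc   = gauge.fetchDurationCurveData();
+     *   int[]    days = (int[])    dc[0];
+     *   double[] qs   = (double[]) dc[1];
+     *   // days[i] and qs[i] describe the same duration curve point
+     */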
+
+    /**
+     * Calculates the maximum and minimum W and Q values of this gauge.
+     *
+     * @return a MinMaxWQ object representing the calculated values.
+     */
+    public MinMaxWQ fetchMaxMinWQ() {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "select mvt.name, max(mv.value) as max, min(mv.value) as min " +
+            "from MainValue as mv " +
+            "join mv.mainValue as nmv " +
+            "join nmv.type mvt " +
+            "where mvt.name in ('W', 'Q') " +
+            "and mv.gauge.id = :gauge_id " +
+            "group by mvt.name"
+            );
+
+        query.setParameter("gauge_id", getId());
+
+        List<Object> results = query.list();
+        if (results.isEmpty()) {
+            // No values found
+            return new MinMaxWQ();
+        }
+
+        BigDecimal minw = null;
+        BigDecimal maxw = null;
+        BigDecimal minq = null;
+        BigDecimal maxq = null;
+
+        // Assign the aggregates by main value type instead of relying on
+        // the row order ('Q' sorts before 'W').
+        for (Object result: results) {
+            Object[] arr = (Object[]) result;
+            if ("W".equals(arr[0])) {
+                maxw = (BigDecimal) arr[1];
+                minw = (BigDecimal) arr[2];
+            }
+            else {
+                maxq = (BigDecimal) arr[1];
+                minq = (BigDecimal) arr[2];
+            }
+        }
+
+        return new MinMaxWQ(minw, maxw, minq, maxq);
+    }
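+
+    /* Usage sketch (illustrative only, not part of the original code):
+     *
+     *   MinMaxWQ minMax = gauge.fetchMaxMinWQ();
+     *   BigDecimal minW = minMax.getMinW(); // null if no W main values exist
+     *   BigDecimal maxQ = minMax.getMaxQ(); // null if no Q main values exist
+     */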
+
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/GaugeLocation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,106 @@
+package de.intevation.flys.model;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import org.hibernate.annotations.Type;
+
+
+@Entity
+@Table(name = "gauge_location")
+public class GaugeLocation
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private String     name;
+    private Point      geom;
+
+
+    public GaugeLocation() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    /**
+     * Returns a list of GaugeLocation objects for a given river and name.
+     *
+     * @param riverid The ID of a river in the database.
+     * @param name    The name of the gauge location.
+     *
+     * @return a list of GaugeLocation objects.
+     */
+    public static List<GaugeLocation> getGaugeLocations(int riverid, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from GaugeLocation where river.id =:riverid and name=:name");
+        query.setParameter("riverid", riverid);
+        query.setParameter("name", name);
+
+        List<GaugeLocation> list = query.list();
+
+        return list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/GrainFraction.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,108 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "grain_fraction")
+public class GrainFraction
+implements   Serializable
+{
+    public static final String TOTAL              = "total";
+    public static final String COARSE             = "coarse";
+    public static final String FINE_MIDDLE        = "fine_middle";
+    public static final String SAND               = "sand";
+    public static final String SUSP_SAND          = "susp_sand";
+    public static final String SUSP_SAND_BED      = "susp_sand_bed";
+    public static final String SUSPENDED_SEDIMENT = "suspended_sediment";
+
+
+    private static Logger logger = Logger.getLogger(GrainFraction.class);
+
+    private Integer id;
+
+    private String name;
+
+    private Double lower;
+    private Double upper;
+
+    private Unit unit;
+
+
+    public GrainFraction() {
+    }
+
+    public GrainFraction(String name, Double lower, Double upper, Unit unit) {
+        this.name  = name;
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_GRAIN_FRACTION_ID_SEQ",
+        sequenceName   = "GRAIN_FRACTION_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_GRAIN_FRACTION_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name" )
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "lower")
+    public Double getLower() {
+        return lower;
+    }
+
+    public void setLower(Double lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "upper")
+    public Double getUpper() {
+        return upper;
+    }
+
+    public void setUpper(Double upper) {
+        this.upper = upper;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYK.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,85 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyks")
+public class HYK
+implements   Serializable
+{
+    private Integer id;
+    private River   river;
+    private String  description;
+
+    private List<HYKEntry> entries;
+
+    public HYK() {
+    }
+
+    public HYK(River river, String description) {
+        this.river       = river;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYKS_ID_SEQ",
+        sequenceName   = "HYKS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYKS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @OrderBy("km")
+    @JoinColumn(name="hyk_id")
+    public List<HYKEntry> getEntries() {
+        return entries;
+    }
+
+    public void setEntries(List<HYKEntry> entries) {
+        this.entries = entries;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKEntry.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,99 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import java.util.Date;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_entries")
+public class HYKEntry
+implements   Serializable
+{
+    private Integer    id;
+    private HYK        hyk;
+    private BigDecimal km;
+    private Date       measure;
+
+    private List<HYKFormation> formations;
+
+    public HYKEntry() {
+    }
+
+    public HYKEntry(HYK hyk, BigDecimal km, Date measure) {
+        this.hyk     = hyk;
+        this.km      = km;
+        this.measure = measure;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_ENTRIES_ID_SEQ",
+        sequenceName   = "HYK_ENTRIES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_ENTRIES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "hyk_id")
+    public HYK getHYK() {
+        return hyk;
+    }
+
+    public void setHYK(HYK hyk) {
+        this.hyk = hyk;
+    }
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+    @Column(name = "measure")
+    public Date getMeasure() {
+        return measure;
+    }
+
+    public void setMeasure(Date measure) {
+        this.measure = measure;
+    }
+
+    @OneToMany
+    @OrderBy("formationNum")
+    @JoinColumn(name="hyk_entry_id")
+    public List<HYKFormation> getFormations() {
+        return formations;
+    }
+
+    public void setFormations(List<HYKFormation> formations) {
+        this.formations = formations;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFlowZone.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,98 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_flow_zones")
+public class HYKFlowZone
+implements   Serializable
+{
+    private Integer         id;
+    private HYKFormation    formation;
+    private HYKFlowZoneType type;
+    private BigDecimal      a;
+    private BigDecimal      b;
+
+    public HYKFlowZone() {
+    }
+
+    public HYKFlowZone(
+        HYKFormation    formation,
+        HYKFlowZoneType type,
+        BigDecimal      a,
+        BigDecimal      b
+    ) {
+        this.formation = formation;
+        this.type      = type;
+        this.a         = a;
+        this.b         = b;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FLOW_ZONES_ID_SEQ",
+        sequenceName   = "HYK_FLOW_ZONES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FLOW_ZONES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "formation_id")
+    public HYKFormation getFormation() {
+        return formation;
+    }
+
+    public void setFormation(HYKFormation formation) {
+        this.formation = formation;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public HYKFlowZoneType getType() {
+        return type;
+    }
+
+    public void setType(HYKFlowZoneType type) {
+        this.type = type;
+    }
+
+    @Column(name = "a")
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    @Column(name = "b")
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFlowZoneType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "hyk_flow_zone_types")
+public class HYKFlowZoneType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+    private String  description;
+
+    public HYKFlowZoneType() {
+    }
+
+    public HYKFlowZoneType(String name) {
+        this.name = name;
+    }
+
+    public HYKFlowZoneType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FLOW_ZONE_TYPES_ID_SEQ",
+        sequenceName   = "HYK_FLOW_ZONE_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FLOW_ZONE_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFormation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,151 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_formations")
+public class HYKFormation
+implements   Serializable
+{
+    private Integer    id;
+    private Integer    formationNum;
+    private HYKEntry   entry;
+    private BigDecimal top;
+    private BigDecimal bottom;
+    private BigDecimal distanceVL;
+    private BigDecimal distanceHF;
+    private BigDecimal distanceVR;
+
+    private List<HYKFlowZone> zones;
+
+    public HYKFormation() {
+    }
+
+    public HYKFormation(
+        Integer    formationNum,
+        HYKEntry   entry,
+        BigDecimal top,
+        BigDecimal bottom,
+        BigDecimal distanceVL,
+        BigDecimal distanceHF,
+        BigDecimal distanceVR
+    ) {
+        this.formationNum = formationNum;
+        this.entry        = entry;
+        this.top          = top;
+        this.bottom       = bottom;
+        this.distanceVL   = distanceVL;
+        this.distanceHF   = distanceHF;
+        this.distanceVR   = distanceVR;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FORMATIONS_ID_SEQ",
+        sequenceName   = "HYK_FORMATIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FORMATIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "formation_num")
+    public Integer getFormationNum() {
+        return formationNum;
+    }
+
+    public void setFormationNum(Integer formationNum) {
+        this.formationNum = formationNum;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "hyk_entry_id")
+    public HYKEntry getEntry() {
+        return entry;
+    }
+
+    public void setEntry(HYKEntry entry) {
+        this.entry = entry;
+    }
+
+    @Column(name = "top")
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    @Column(name = "bottom")
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
+    @Column(name = "distance_vl")
+    public BigDecimal getDistanceVL() {
+        return distanceVL;
+    }
+
+    public void setDistanceVL(BigDecimal distanceVL) {
+        this.distanceVL = distanceVL;
+    }
+
+    @Column(name = "distance_hf")
+    public BigDecimal getDistanceHF() {
+        return distanceHF;
+    }
+
+    public void setDistanceHF(BigDecimal distanceHF) {
+        this.distanceHF = distanceHF;
+    }
+
+    @Column(name = "distance_vr")
+    public BigDecimal getDistanceVR() {
+        return distanceVR;
+    }
+
+    public void setDistanceVR(BigDecimal distanceVR) {
+        this.distanceVR = distanceVR;
+    }
+
+
+    @OneToMany
+    @OrderBy("a")
+    @JoinColumn(name="formation_id")
+    public List<HYKFlowZone> getZones() {
+        return zones;
+    }
+
+    public void setZones(List<HYKFlowZone> zones) {
+        this.zones = zones;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Hws.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,106 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hws")
+public class Hws
+implements   Serializable
+{
+    private Integer    id;
+    private String     facility;
+    private String     type;
+    private River      river;
+    private LineString geom;
+
+    public Hws() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "hws_facility")
+    public String getFacility() {
+        return facility;
+    }
+
+
+    public void setFacility(String facility) {
+        this.facility = facility;
+    }
+
+
+    @Column(name = "type")
+    public String getType() {
+        return type;
+    }
+
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Hws> getHws(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
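+        // FIXME: Hws maps no 'name' property (only 'facility' and 'type'),
+        //        so this HQL will most likely not resolve as written.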
+        Query query = session.createQuery(
+            "from Hws where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hydr_boundaries")
+public class HydrBoundary
+implements   Serializable
+{
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private LineString geom;
+
+    public HydrBoundary() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<HydrBoundary> getHydrBoundaries(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from HydrBoundary where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hydr_boundaries_poly")
+public class HydrBoundaryPoly
+implements   Serializable
+{
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private Geometry   geom;
+
+    public HydrBoundaryPoly() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<HydrBoundaryPoly> getHydrBoundaries(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from HydrBoundaryPoly where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Line.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,108 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "lines")
+public class Line
+implements   Serializable
+{
+    private Integer    id;
+    private String     kind;
+    private River      river;
+    private LineString geom;
+    private BigDecimal z;
+
+    public Line() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "kind")
+    public String getKind() {
+        return kind;
+    }
+
+
+    public void setKind(String kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    @Column(name = "z")
+    public BigDecimal getZ() {
+        return z;
+    }
+
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+
+
+    public static List<Line> getLines(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
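+        // FIXME: Line maps no 'name' property (only 'kind'), so this HQL
+        //        will most likely not resolve as written.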
+        Query query = session.createQuery(
+            "from Line where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/LocationSystem.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+
+@Entity
+@Table(name = "location_system")
+public class LocationSystem implements Serializable {
+
+    protected Integer id;
+
+    protected String name;
+    protected String description;
+
+
+    public LocationSystem() {
+    }
+
+
+    public LocationSystem(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_LOCATION_SYSTEM_ID_SEQ",
+        sequenceName   = "LOCATION_SYSTEM_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_LOCATION_SYSTEM_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MainValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.math.BigDecimal;
+
+@Entity
+@Table(name = "main_values")
+public class MainValue
+implements   Serializable
+{
+    private Integer        id;
+
+    private Gauge          gauge;
+
+    private NamedMainValue mainValue;
+
+    private BigDecimal     value;
+
+    private TimeInterval   timeInterval;
+
+    public MainValue() {
+    }
+
+    public MainValue(
+        Gauge          gauge,
+        NamedMainValue mainValue,
+        BigDecimal     value,
+        TimeInterval   timeInterval
+    ) {
+        this.gauge        = gauge;
+        this.mainValue    = mainValue;
+        this.value        = value;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MAIN_VALUES_ID_SEQ",
+        sequenceName   = "MAIN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MAIN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "gauge_id")
+    public Gauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "named_value_id")
+    public NamedMainValue getMainValue() {
+        return mainValue;
+    }
+
+    public void setMainValue(NamedMainValue mainValue) {
+        this.mainValue = mainValue;
+    }
+
+    @Column(name = "value") // FIXME: type mapping needed?
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MainValueType.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "main_value_types")
+public class MainValueType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+
+    public MainValueType() {
+    }
+
+    public MainValueType(String name) {
+        this.name = name;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MAIN_VALUE_TYPES_ID_SEQ",
+        sequenceName   = "MAIN_VALUE_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MAIN_VALUE_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name") // FIXME: Type conversion needed?
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MinMaxWQ.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,58 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+/**
+ * Represents minimum and maximum values for W and Q.
+ */
+public class MinMaxWQ implements Serializable {
+
+    private BigDecimal minw;
+    private BigDecimal maxw;
+    private BigDecimal minq;
+    private BigDecimal maxq;
+
+    /**
+     * Default constructor to indicate that no min and max W and Q values
+     * are available.
+     */
+    public MinMaxWQ() {
+    }
+
+    /**
+     * Constructor for a new MinMaxWQ value
+     *
+     * @param minw Minimum W
+     * @param maxw Maximum W
+     * @param minq Minimum Q
+     * @param maxq Maximum Q
+     */
+    public MinMaxWQ(
+            BigDecimal minw,
+            BigDecimal maxw,
+            BigDecimal minq,
+            BigDecimal maxq)
+    {
+        this.minw = minw;
+        this.maxw = maxw;
+        this.minq = minq;
+        this.maxq = maxq;
+    }
+
+    public BigDecimal getMinW() {
+        return this.minw;
+    }
+
+    public BigDecimal getMaxW() {
+        return this.maxw;
+    }
+
+    public BigDecimal getMinQ() {
+        return this.minq;
+    }
+
+    public BigDecimal getMaxQ() {
+        return this.maxq;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MorphologicalWidth.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,88 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "morphologic_width")
+public class MorphologicalWidth implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private List<MorphologicalWidthValue> values;
+
+
+    public MorphologicalWidth() {
+    }
+
+
+    public MorphologicalWidth(River river, Unit unit) {
+        this.river = river;
+        this.unit  = unit;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MORPHOLOGIC_WIDTH_ID_SEQ",
+        sequenceName   = "MORPHOLOGIC_WIDTH_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MORPHOLOGIC_WIDTH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "morphologic_width_id")
+    public List<MorphologicalWidthValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<MorphologicalWidthValue> values) {
+        this.values = values;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "morphologic_width_values")
+public class MorphologicalWidthValue implements Serializable {
+
+    private Integer id;
+
+    private MorphologicalWidth morphologicalWidth;
+
+    private BigDecimal station;
+    private BigDecimal width;
+
+    private String description;
+
+
+    public MorphologicalWidthValue() {
+    }
+
+
+    public MorphologicalWidthValue(
+        MorphologicalWidth morphologicalWidth,
+        BigDecimal         station,
+        BigDecimal         width,
+        String             description
+    ) {
+        this.morphologicalWidth = morphologicalWidth;
+        this.station            = station;
+        this.width              = width;
+        this.description        = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MORPH_WIDTH_VALUES_ID_SEQ",
+        sequenceName   = "MORPH_WIDTH_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MORPH_WIDTH_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "morphologic_width_id")
+    public MorphologicalWidth getMorphologicalWidth() {
+        return morphologicalWidth;
+    }
+
+    public void setMorphologicalWidth(MorphologicalWidth width) {
+        this.morphologicalWidth = width;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "width")
+    public BigDecimal getWidth() {
+        return width;
+    }
+
+    public void setWidth(BigDecimal width) {
+        this.width = width;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/NamedMainValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "named_main_values")
+public class NamedMainValue
+implements   Serializable
+{
+    private Integer       id;
+    private String        name;
+    private MainValueType type;
+
+    public NamedMainValue() {
+    }
+
+    public NamedMainValue(String name, MainValueType type) {
+        this.name = name;
+        this.type = type;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_NAMED_MAIN_VALUES_ID_SEQ",
+        sequenceName   = "NAMED_MAIN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_NAMED_MAIN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id" )
+    public MainValueType getType() {
+        return type;
+    }
+
+    public void setType(MainValueType type) {
+        this.type = type;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Position.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,71 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToMany;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.util.List;
+
+@Entity
+@Table(name = "positions")
+public class Position
+implements   Serializable
+{
+    private Integer id;
+
+    private String  value;
+
+    private List<Annotation> annotations;
+
+    public Position() {
+    }
+
+    public Position(String value) {
+        this.value = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_POSITIONS_ID_SEQ",
+        sequenceName   = "POSITIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_POSITIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "value")
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    @OneToMany
+    @JoinColumn(name="position_id")
+    public List<Annotation> getAnnotations() {
+        return annotations;
+    }
+
+    public void setAnnotations(List<Annotation> annotations) {
+        this.annotations = annotations;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Range.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,144 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+@Entity
+@Table(name = "ranges")
+public class Range
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal a;
+    private BigDecimal b;
+
+    private River      river;
+
+    public Range() {
+    }
+
+    public Range(double a, double b, River river) {
+        this(new BigDecimal(a), new BigDecimal(b), river);
+    }
+
+    public Range(BigDecimal a, BigDecimal b, River river) {
+        this.a     = a;
+        this.b     = b;
+        this.river = river;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_RANGES_ID_SEQ",
+        sequenceName   = "RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "a") // FIXME: type mapping needed?
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    @Column(name = "b") // FIXME: type mapping needed?
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    public int code() {
+        int code = 0;
+        if (a != null) code  = 1;
+        if (b != null) code |= 2;
+        return code;
+    }
+
+    public boolean intersects(BigDecimal c) {
+        return !(a.compareTo(c) > 0 || b.compareTo(c) < 0);
+    }
+
+    public boolean intersects(Range other) {
+
+        int code  = code();
+        int ocode = other.code();
+
+        if (code == 0 || ocode == 0) {
+            return false;
+        }
+
+        switch (code) {
+            case 1: // has a
+                switch (ocode) {
+                    case 1: // has a
+                        return a.compareTo(other.a) == 0;
+                    case 2: // has b
+                        return a.compareTo(other.b) == 0;
+                    case 3: // has range
+                        return other.intersects(a);
+                }
+                break;
+            case 2: // has b
+                switch (ocode) {
+                    case 1: // has a
+                        return b.compareTo(other.a) == 0;
+                    case 2: // has b
+                        return b.compareTo(other.b) == 0;
+                    case 3: // has range
+                        return other.intersects(b);
+                }
+                break;
+            case 3: // has range
+                switch (ocode) {
+                    case 1: // has a
+                        return intersects(other.a);
+                    case 2: // has b
+                        return intersects(other.b);
+                    case 3: // has range
+                        return !(other.b.compareTo(a) < 0
+                               || other.a.compareTo(b) > 0);
+                }
+                break;
+
+        }
+
+        return false;
+    }
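+
+    /* Illustrative example (not part of the original code): a range with
+     * only 'a' set (code 1) intersects a full range (code 3) exactly if
+     * 'a' lies inside it. Assuming some River instance 'river':
+     *
+     *   Range full = new Range(new BigDecimal(10), new BigDecimal(20), river);
+     *   Range open = new Range(new BigDecimal(15), null, river);
+     *   // open.code() == 1, full.code() == 3
+     *   // open.intersects(full) == true, because 10 <= 15 <= 20
+     */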
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/River.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,320 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+import java.math.MathContext;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.util.List;
+import java.util.Comparator;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "rivers")
+public class River
+implements   Serializable
+{
+    public static final MathContext PRECISION = new MathContext(6);
+
+    public static final double EPSILON = 1e-5;
+
+    public static final Comparator<Double> KM_CMP = new Comparator<Double>() {
+        @Override
+        public int compare(Double a, Double b) {
+            double diff = a - b;
+            if (diff < -EPSILON) return -1;
+            if (diff >  EPSILON) return +1;
+            return 0;
+        }
+    };
+
+    private Integer id;
+
+    private String  name;
+
+    private boolean kmUp;
+
+    private List<Gauge> gauges;
+
+    private Unit wstUnit;
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_RIVERS_ID_SEQ",
+        sequenceName   = "RIVERS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_RIVERS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "km_up")
+    public boolean getKmUp() {
+        return kmUp;
+    }
+
+    public void setKmUp(boolean kmUp) {
+        this.kmUp = kmUp;
+    }
+
+    public River() {
+    }
+
+    public River(String name, Unit wstUnit) {
+        this.name    = name;
+        this.wstUnit = wstUnit;
+    }
+
+    @OneToMany
+    @JoinColumn(name="river_id")
+    public List<Gauge> getGauges() {
+        return gauges;
+    }
+
+    public void setGauges(List<Gauge> gauges) {
+        this.gauges = gauges;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "wst_unit_id" )
+    public Unit getWstUnit() {
+        return wstUnit;
+    }
+
+    public void setWstUnit(Unit wstUnit) {
+        this.wstUnit = wstUnit;
+    }
+
+
+
+    public String toString() {
+        return name != null ? name : "";
+    }
+
+
+    /**
+     * Returns the gauges of this river whose ranges intersect the interval
+     * between <i>a</i> and <i>b</i>.
+     *
+     * @param a A start point.
+     * @param b An end point.
+     *
+     * @return the intersecting gauges.
+     */
+    public List<Gauge> determineGauges(double a, double b) {
+        Session session = SessionHolder.HOLDER.get();
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        Query query = session.createQuery(
+            "from Gauge where river=:river " +
+            "and not (range.a > :b or range.b < :a) order by a");
+        query.setParameter("river", this);
+        query.setParameter("a", new BigDecimal(a, PRECISION));
+        query.setParameter("b", new BigDecimal(b, PRECISION));
+
+        return query.list();
+    }
+
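+    /**
+     * Determines the gauge whose range has the largest overlap with the
+     * interval between <i>a</i> and <i>b</i>.
+     *
+     * @param a A start point.
+     * @param b An end point.
+     *
+     * @return the gauge with the largest overlap or null if no gauge intersects.
+     */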
+    public Gauge maxOverlap(double a, double b) {
+        List<Gauge> gauges = determineGauges(a, b);
+        if (gauges == null) {
+            return null;
+        }
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        double max = -Double.MAX_VALUE;
+
+        Gauge result = null;
+
+        for (Gauge gauge: gauges) {
+            Range  r = gauge.getRange();
+            double c = r.getA().doubleValue();
+            double d = r.getB().doubleValue();
+
+            double start = c >= a ? c : a;
+            double stop  = d <= b ? d : b;
+
+            double length = stop - start;
+
+            if (length > max) {
+                max = length;
+                result = gauge;
+            }
+        }
+
+        return result;
+    }
+
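+    /**
+     * Returns the gauge of this river with the given name or null if no such
+     * gauge exists.
+     */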
+    public Gauge determineGaugeByName(String name) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from Gauge where river=:river and name=:name");
+        query.setParameter("river", this);
+        query.setParameter("name", name);
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
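+    /**
+     * Returns the gauge whose range contains the station <i>p</i> or null if
+     * no gauge is found at that position.
+     */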
+    public Gauge determineGaugeByPosition(double p) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from Gauge g where river=:river "  +
+            "and :p between g.range.a and g.range.b");
+        query.setParameter("river", this);
+        query.setParameter("p", new BigDecimal(p, PRECISION));
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
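+    /**
+     * Returns the first gauge whose station lies between <i>a</i> and
+     * <i>b</i> or null if there is none.
+     */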
+    public Gauge determineGaugeByStation(double a, double b) {
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Gauge where river.id=:river " +
+            "and station between :a and :b");
+        query.setParameter("river", getId());
+        query.setParameter("a", new BigDecimal(a));
+        query.setParameter("b", new BigDecimal(b));
+
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
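+    /**
+     * Determines the min and max Q values over the kind-0 WSTs of this river.
+     * The min value is placed in the first field of the resulting array, the
+     * max value in the second field.
+     */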
+    public double[] determineMinMaxQ() {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "select min(wqr.q) as min, max(wqr.q) as max " +
+            "from Wst as w " +
+            "join w.columns as wc " +
+            "join wc.columnQRanges as wcqr " +
+            "join wcqr.wstQRange as wqr " +
+            "where w.kind = 0 and river_id = :river");
+
+        query.setParameter("river", getId());
+
+        double minmax[] = new double[] { Double.MAX_VALUE, Double.MIN_VALUE };
+
+        List<Object> results = query.list();
+
+        if (!results.isEmpty()) {
+            Object[] arr = (Object[]) results.get(0);
+            BigDecimal minq = (BigDecimal)arr[0];
+            BigDecimal maxq = (BigDecimal)arr[1];
+            minmax[0] = minq.doubleValue();
+            minmax[1] = maxq.doubleValue();
+        }
+
+        return minmax;
+    }
+
+
+    /**
+     * Returns the first gauge that intersects the interval between <i>a</i>
+     * and <i>b</i>.
+     *
+     * @param a A start point.
+     * @param b An end point.
+     *
+     * @return the first intersecting gauge.
+     */
+    public Gauge determineGauge(double a, double b) {
+        List<Gauge> gauges = determineGauges(a, b);
+
+        int idx = a < b ? 0 : gauges.size() - 1;
+
+        return gauges.isEmpty() ? null : gauges.get(idx);
+    }
+
+    /**
+     * Returns the min and max distance of this river. The first position in the
+     * resulting array contains the min distance, the second position the max
+     * distance.
+     *
+     * @return the min and max distance of this river.
+     */
+    public double[] determineMinMaxDistance() {
+        List<Gauge> gauges = getGauges();
+
+        if (gauges == null || gauges.isEmpty()) {
+            return null;
+        }
+
+        double minmax[] = new double[] { Double.MAX_VALUE, Double.MIN_VALUE };
+
+        for (Gauge g: gauges) {
+            Range r = g.getRange();
+
+            if (r == null) {
+                continue;
+            }
+
+            double a  = r.getA().doubleValue();
+            minmax[0] = minmax[0] < a ? minmax[0] : a;
+
+            BigDecimal bigB = r.getB();
+            if (bigB != null) {
+                double b  = bigB.doubleValue();
+                minmax[1] = minmax[1] > b ? minmax[1] : b;
+            }
+        }
+
+        return minmax;
+    }
+
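+    /**
+     * Maps the station (km) of each gauge of this river to its datum. Gauges
+     * without station or datum are skipped. Keys are compared with KM_CMP.
+     */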
+    public Map<Double, Double> queryGaugeDatumsKMs() {
+        List<Gauge> gauges = getGauges();
+        Map<Double, Double> result = new TreeMap<Double, Double>(KM_CMP);
+
+        for (Gauge gauge: gauges) {
+            BigDecimal km    = gauge.getStation();
+            BigDecimal datum = gauge.getDatum();
+            if (km != null && datum != null) {
+                result.put(km.doubleValue(), datum.doubleValue());
+            }
+        }
+
+        return result;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,102 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+/**
+ * There is a modeling problem with the RiverAxis. The initial idea was that a
+ * river has a river axis consisting of exactly one geometry. It has since
+ * turned out that a single geometry is not enough for a river axis (arms of
+ * a river, inflows, ...). As a workaround, we still expect a river to have
+ * just a single river axis.
+ */
+@Entity
+@Table(name = "river_axes")
+public class RiverAxis
+implements   Serializable
+{
+    private Integer    id;
+    private Integer    kind;
+    private River      river;
+    private LineString geom;
+
+    public RiverAxis() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
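+    /**
+     * Returns the river axes of the river with the given name or null if no
+     * axis is found.
+     */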
+    public static List<RiverAxis> getRiverAxis(String river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from RiverAxis where river.name =:river");
+        query.setParameter("river", river);
+
+        List<RiverAxis> list = query.list();
+
+        return list.isEmpty() ? null : list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxisKm.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,104 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "river_axes_km")
+public class RiverAxisKm
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private BigDecimal km;
+    private Point      geom;
+
+
+    public RiverAxisKm() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    /**
+     * Returns a list of RiverAxisKm objects for a given river.
+     *
+     * @param riverid The ID of a river in the database.
+     *
+     * @return a list of RiverAxisKm objects.
+     */
+    public static List<RiverAxisKm> getRiverAxisKms(int riverid) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from RiverAxisKm where river.id =:riverid");
+        query.setParameter("riverid", riverid);
+
+        List<RiverAxisKm> list = query.list();
+
+        return list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SQRelation.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,105 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+
+@Entity
+@Table(name = "sq_relation")
+public class SQRelation implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private TimeInterval timeInterval;
+
+    private String description;
+
+    private List<SQRelationValue> values;
+
+
+    protected SQRelation() {
+    }
+
+
+    public SQRelation(River river, TimeInterval timeInterval, String desc) {
+        this.river        = river;
+        this.timeInterval = timeInterval;
+        this.description  = desc;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SQ_ID_SEQ",
+        sequenceName   = "SQ_RELATION_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SQ_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    @OneToMany
+    @JoinColumn(name = "sq_relation_id")
+    public List<SQRelationValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<SQRelationValue> values) {
+        this.values = values;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SQRelationValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,144 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.JoinColumn;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToOne;
+import javax.persistence.GenerationType;
+
+
+@Entity
+@Table(name = "sq_relation_value")
+public class SQRelationValue implements Serializable {
+
+    private Integer id;
+
+    private SQRelation sqRelation;
+
+    private String parameter;
+    private String fraction;
+    private String function;
+
+    private double km;
+    private double a;
+    private double b;
+
+
+    protected SQRelationValue() {
+    }
+
+
+    public SQRelationValue(
+        SQRelation sqRelation,
+        String     parameter,
+        String     fraction,
+        String     function,
+        double     km,
+        double     a,
+        double     b
+    ) {
+        this.sqRelation = sqRelation;
+        this.parameter  = parameter;
+        this.fraction   = fraction;
+        this.function   = function;
+        this.km         = km;
+        this.a          = a;
+        this.b          = b;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SQ_VALUE_ID_SEQ",
+        sequenceName   = "SQ_RELATION_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SQ_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "sq_relation_id")
+    public SQRelation getSqRelation() {
+        return sqRelation;
+    }
+
+    public void setSqRelation(SQRelation sqRelation) {
+        this.sqRelation = sqRelation;
+    }
+
+
+    @Column(name = "parameter")
+    public String getParameter() {
+        return parameter;
+    }
+
+    public void setParameter(String parameter) {
+        this.parameter = parameter;
+    }
+
+
+    @Column(name = "fraction")
+    public String getFraction() {
+        return fraction;
+    }
+
+    public void setFraction(String fraction) {
+        this.fraction = fraction;
+    }
+
+
+    @Column(name = "function")
+    public String getFunction() {
+        return function;
+    }
+
+    public void setFunction(String function) {
+        this.function = function;
+    }
+
+
+    @Column(name = "km")
+    public double getKm() {
+        return km;
+    }
+
+    public void setKm(double km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "a")
+    public double getA() {
+        return a;
+    }
+
+    public void setA(double a) {
+        this.a = a;
+    }
+
+
+    @Column(name = "b")
+    public double getB() {
+        return b;
+    }
+
+    public void setB(double b) {
+        this.b = b;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensity.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+
+@Entity
+@Table(name = "sediment_density")
+public class SedimentDensity implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private Depth depth;
+
+    private Unit unit;
+
+    private List<SedimentDensityValue> values;
+
+    private String description;
+
+
+    public SedimentDensity() {
+        this.values = new ArrayList<SedimentDensityValue>();
+    }
+
+
+    public SedimentDensity(River river, Depth depth, Unit unit, String desc) {
+        this();
+        this.river       = river;
+        this.depth       = depth;
+        this.unit        = unit;
+        this.description = desc;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_DENSITY_ID_SEQ",
+        sequenceName   = "SEDIMENT_DENSITY_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_DENSITY_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "depth_id")
+    public Depth getDepth() {
+        return depth;
+    }
+
+    public void setDepth(Depth depth) {
+        this.depth = depth;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name="sediment_density_id")
+    public List<SedimentDensityValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<SedimentDensityValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(SedimentDensityValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensityValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "sediment_density_values")
+public class SedimentDensityValue implements Serializable {
+
+    private Integer id;
+
+    private SedimentDensity sedimentDensity;
+
+    private BigDecimal station;
+    private BigDecimal density;
+
+    private String description;
+
+
+    public SedimentDensityValue() {
+    }
+
+
+    public SedimentDensityValue(
+        SedimentDensity sedimentDensity,
+        BigDecimal      station,
+        BigDecimal      density,
+        String          desc
+    ) {
+        this.sedimentDensity = sedimentDensity;
+        this.station         = station;
+        this.density         = density;
+        this.description     = desc;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_DENSITY_VALUES_ID_SEQ",
+        sequenceName   = "SEDIMENT_DENSITY_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_DENSITY_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "sediment_density_id")
+    public SedimentDensity getSedimentDensity() {
+        return sedimentDensity;
+    }
+
+    public void setSedimentDensity(SedimentDensity sedimentDensity) {
+        this.sedimentDensity = sedimentDensity;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "density")
+    public BigDecimal getDensity() {
+        return density;
+    }
+
+    public void setDensity(BigDecimal density) {
+        this.density = density;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentYield.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,145 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "sediment_yield")
+public class SedimentYield
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(SedimentYield.class);
+
+    private Integer id;
+
+    private River river;
+
+    private GrainFraction grainFraction;
+
+    private Unit unit;
+
+    private TimeInterval timeInterval;
+
+    private String description;
+
+    private List<SedimentYieldValue> values;
+
+
+    public SedimentYield() {
+        this.values = new ArrayList<SedimentYieldValue>();
+    }
+
+    public SedimentYield(River river, Unit unit, TimeInterval timeInterval) {
+        this();
+
+        this.river        = river;
+        this.unit         = unit;
+        this.timeInterval = timeInterval;
+    }
+
+
+    public SedimentYield(
+        River         river,
+        Unit          unit,
+        TimeInterval  timeInterval,
+        GrainFraction grainFraction
+    ) {
+        this(river, unit, timeInterval);
+
+        this.grainFraction = grainFraction;
+    }
+
+
+    public SedimentYield(
+        River         river,
+        Unit          unit,
+        TimeInterval  timeInterval,
+        GrainFraction grainFraction,
+        String        description
+    ) {
+        this(river, unit, timeInterval, grainFraction);
+
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_YIELD_ID_SEQ",
+        sequenceName   = "SEDIMENT_YIELD_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_YIELD_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name="grain_fraction_id")
+    public GrainFraction getGrainFraction() {
+        return grainFraction;
+    }
+
+    public void setGrainFraction(GrainFraction grainFraction) {
+        this.grainFraction = grainFraction;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentYieldValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "sediment_yield_values")
+public class SedimentYieldValue
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(SedimentYieldValue.class);
+
+    private Integer id;
+
+    private SedimentYield sedimentYield;
+
+    private Double station;
+    private Double value;
+
+    private Unit unit;
+
+
+    public SedimentYieldValue() {
+    }
+
+    public SedimentYieldValue(
+        SedimentYield sedimentYield,
+        Double        station,
+        Double        value
+    ) {
+        this.sedimentYield = sedimentYield;
+        this.station       = station;
+        this.value         = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_YIELD_VALUES_ID_SEQ",
+        sequenceName   = "SEDIMENT_YIELD_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_YIELD_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "sediment_yield_id" )
+    public SedimentYield getSedimentYield() {
+        return sedimentYield;
+    }
+
+    public void setSedimentYield(SedimentYield sedimentYield) {
+        this.sedimentYield = sedimentYield;
+    }
+
+    @Column(name="station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "value")
+    public Double getValue() {
+        return value;
+    }
+
+    public void setValue(Double value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/TimeInterval.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,67 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "time_intervals")
+public class TimeInterval
+implements   Serializable
+{
+    private Integer id;
+    private Date    startTime;
+    private Date    stopTime;
+
+    public TimeInterval() {
+    }
+
+    public TimeInterval(Date startTime, Date stopTime) {
+        this.startTime = startTime;
+        this.stopTime  = stopTime;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_TIME_INTERVALS_ID_SEQ",
+        sequenceName   = "TIME_INTERVALS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_TIME_INTERVALS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "start_time") // FIXME: type mapping needed?
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    @Column(name = "stop_time") // FIXME: type mapping needed?
+    public Date getStopTime() {
+        return stopTime;
+    }
+
+    public void setStopTime(Date stopTime) {
+        this.stopTime = stopTime;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Unit.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+@Entity
+@Table(name = "units")
+public class Unit
+implements   Serializable
+{
+    protected Integer id;
+    protected String  name;
+
+
+    public Unit() {
+    }
+
+
+    public Unit(String name) {
+        this.name = name;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_UNITS_ID_SEQ",
+        sequenceName   = "UNITS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_UNITS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Waterlevel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,113 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel")
+public class Waterlevel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(Waterlevel.class);
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private String  description;
+
+    private List<WaterlevelQRange> qRanges;
+
+
+    public Waterlevel() {
+        this.qRanges = new ArrayList<WaterlevelQRange>();
+    }
+
+    public Waterlevel(River river, Unit unit) {
+        this();
+        this.river = river;
+        this.unit  = unit;
+    }
+
+    public Waterlevel(River river, Unit unit, String description) {
+        this(river, unit);
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_ID_SEQ",
+        sequenceName   = "WATERLEVEL_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name="waterlevel_id")
+    public List<WaterlevelQRange> getQRanges() {
+        return qRanges;
+    }
+
+    public void setQRanges(List<WaterlevelQRange> qRanges) {
+        this.qRanges = qRanges;
+    }
+
+    public void addQRange(WaterlevelQRange qRange) {
+        qRanges.add(qRange);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifference.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference")
+public class WaterlevelDifference
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelDifference.class);
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private List<WaterlevelDifferenceColumn> columns;
+
+    private String description;
+
+
+    public WaterlevelDifference() {
+        columns = new ArrayList<WaterlevelDifferenceColumn>();
+    }
+
+
+    public WaterlevelDifference(River river, Unit unit) {
+        this();
+
+        this.river = river;
+        this.unit  = unit;
+    }
+
+
+    public WaterlevelDifference(River river, Unit unit, String description) {
+        this(river, unit);
+
+        this.description = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFFERENCE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "difference_id")
+    public List<WaterlevelDifferenceColumn> getColumns() {
+        return columns;
+    }
+
+    public void setColumns(List<WaterlevelDifferenceColumn> columns) {
+        this.columns = columns;
+    }
+
+    public void addColumn(WaterlevelDifferenceColumn column) {
+        this.columns.add(column);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceColumn.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,104 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference_column")
+public class WaterlevelDifferenceColumn
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(WaterlevelDifferenceColumn.class);
+
+
+    private Integer id;
+
+    private WaterlevelDifference difference;
+
+    private List<WaterlevelDifferenceValue> values;
+
+    private String description;
+
+
+    public WaterlevelDifferenceColumn() {
+        values = new ArrayList<WaterlevelDifferenceValue>();
+    }
+
+    public WaterlevelDifferenceColumn(
+        WaterlevelDifference difference,
+        String               description
+    ) {
+        this();
+
+        this.difference  = difference;
+        this.description = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFF_COLUMN_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "difference_id" )
+    public WaterlevelDifference getDifference() {
+        return difference;
+    }
+
+    public void setDifference(WaterlevelDifference difference) {
+        this.difference = difference;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "column_id")
+    public List<WaterlevelDifferenceValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<WaterlevelDifferenceValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(WaterlevelDifferenceValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference_values")
+public class WaterlevelDifferenceValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(WaterlevelDifferenceValue.class);
+
+
+    private Integer id;
+
+    private WaterlevelDifferenceColumn column;
+
+    private Double station;
+    private Double value;
+
+
+    public WaterlevelDifferenceValue() {
+    }
+
+    public WaterlevelDifferenceValue(
+        WaterlevelDifferenceColumn column,
+        Double                     station,
+        Double                     value
+    ) {
+        this.column  = column;
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFF_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "column_id" )
+    public WaterlevelDifferenceColumn getColumn() {
+        return column;
+    }
+
+    public void setColumn(WaterlevelDifferenceColumn column) {
+        this.column = column;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "value")
+    public Double getValue() {
+        return value;
+    }
+
+    public void setValue(Double value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,100 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel_q_range")
+public class WaterlevelQRange
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelQRange.class);
+
+    private Integer id;
+
+    private Waterlevel waterlevel;
+
+    private Double q;
+
+    private List<WaterlevelValue> values;
+
+
+    public WaterlevelQRange() {
+        this.values = new ArrayList<WaterlevelValue>();
+    }
+
+    public WaterlevelQRange(Waterlevel waterlevel, Double q) {
+        this();
+        this.q          = q;
+        this.waterlevel = waterlevel;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ",
+        sequenceName   = "WATERLEVEL_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "waterlevel_id" )
+    public Waterlevel getWaterlevel() {
+        return waterlevel;
+    }
+
+    public void setWaterlevel(Waterlevel waterlevel) {
+        this.waterlevel = waterlevel;
+    }
+
+    @Column(name = "q")
+    public Double getQ() {
+        return q;
+    }
+
+    public void setQ(Double q) {
+        this.q = q;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "waterlevel_q_range_id")
+    public List<WaterlevelValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<WaterlevelValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(WaterlevelValue value) {
+        values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,90 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel_values")
+public class WaterlevelValue
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelValue.class);
+
+    private Integer id;
+
+    private WaterlevelQRange qrange;
+
+    private Double station;
+    private Double w;
+
+
+    public WaterlevelValue() {
+    }
+
+    public WaterlevelValue(WaterlevelQRange qrange, Double station, Double w) {
+        this.qrange  = qrange;
+        this.station = station;
+        this.w       = w;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ",
+        sequenceName   = "WATERLEVEL_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "waterlevel_q_range_id" )
+    public WaterlevelQRange getQrange() {
+        return qrange;
+    }
+
+    public void setQrange(WaterlevelQRange qrange) {
+        this.qrange = qrange;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "w")
+    public Double getW() {
+        return w;
+    }
+
+    public void setW(Double w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Wst.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,215 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+import org.hibernate.Query;
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "wsts")
+public class Wst
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(Wst.class);
+
+    private Integer id;
+    private River   river;
+    private String  description;
+    private Integer kind;
+
+    private List<WstColumn> columns;
+
+
+    public static final String SQL_SELECT_MINMAX =
+        "select min(q) as minQ, max(q) as maxQ from wst_q_values " +
+        "where wst_id = :wst and not (a > :km or b < :km)";
+
+    public Wst() {
+    }
+
+    public Wst(River river, String description) {
+        this(river, description, 0);
+    }
+
+    public Wst(River river, String description, Integer kind) {
+        this.river       = river;
+        this.description = description;
+        this.kind        = kind;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WSTS_ID_SEQ",
+        sequenceName   = "WSTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WSTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_id")
+    public List<WstColumn> getColumns() {
+        return columns;
+    }
+
+    public void setColumns(List<WstColumn> columns) {
+        this.columns = columns;
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST. The min value is placed
+     * in the first field of the resulting array - the max value is placed in
+     * the second field.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ() {
+        double[] ab = river.determineMinMaxDistance();
+        return determineMinMaxQ(new Range(ab[0], ab[1], river));
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST in the given range. The
+     * min value is placed in the first field of the resulting array - the max
+     * value is placed in the second field.
+     *
+     * @param range The range used for querying the Q values.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ(Range range) {
+        if (range != null) {
+            return determineMinMaxQ(
+                range.getA().doubleValue(),
+                range.getB().doubleValue());
+        }
+
+        return null;
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST in the given range. The
+     * min value is placed in the first field of the resulting array - the max
+     * value is placed in the second field.
+     *
+     * @param fromKm the lower km value.
+     * @param toKm the upper km value.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ(double fromKm, double toKm) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+          "select min(q), max(q) from WstQRange where " +
+          " id in " +
+          "  (select wstQRange.id from WstColumnQRange where " +
+          "    wstColumn.id in (select id from WstColumn where wst.id = :wst)) " +
+          " and range.id in " +
+          "  (select id from Range where not (a > :end or b < :start))");
+
+        query.setParameter("wst",   getId());
+        query.setParameter("start", new BigDecimal(fromKm));
+        query.setParameter("end",   new BigDecimal(toKm));
+
+        List<Object []> results = query.list();
+
+        if (results.isEmpty()) {
+            return null;
+        }
+
+        Object [] result = results.get(0);
+
+        return new double [] {
+            ((BigDecimal)result[0]).doubleValue(),
+            ((BigDecimal)result[1]).doubleValue() };
+    }
+
+
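+    /**
+     * Determines the min and max Q values of this WST at the given km via a
+     * native SQL query on wst_q_values. Returns null if nothing is found.
+     */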
+    public double[] determineMinMaxQFree(double km) {
+        Session session = SessionHolder.HOLDER.get();
+
+        SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_MINMAX)
+            .addScalar("minQ", StandardBasicTypes.DOUBLE)
+            .addScalar("maxQ", StandardBasicTypes.DOUBLE);
+
+        sqlQuery.setInteger("wst", getId());
+        sqlQuery.setDouble("km", km);
+
+        List<Object[]> minmaxQ = sqlQuery.list();
+
+
+        if (minmaxQ.isEmpty()) {
+            return null;
+        }
+
+        Object[] mm = minmaxQ.get(0);
+
+        if (mm[0] == null || mm[1] == null) {
+            logger.warn("No min/max Q for km " + km + " found.");
+            return null;
+        }
+
+        return new double[] { (Double) mm[0], (Double) mm[1] };
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumn.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,134 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+@Entity
+@Table(name = "wst_columns")
+public class WstColumn
+implements   Serializable
+{
+    private Integer               id;
+    private Wst                   wst;
+    private String                name;
+    private String                description;
+    private Integer               position;
+    private TimeInterval          timeInterval;
+
+    private List<WstColumnQRange> columnQRanges;
+    private List<WstColumnValue>  columnValues;
+
+    public WstColumn() {
+    }
+
+    public WstColumn(
+        Wst          wst,
+        String       name,
+        String       description,
+        Integer      position,
+        TimeInterval timeInterval
+    ) {
+        this.wst          = wst;
+        this.name         = name;
+        this.description  = description;
+        this.position     = position;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_COLUMNS_ID_SEQ",
+        sequenceName   = "WST_COLUMNS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_COLUMNS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_id" )
+    public Wst getWst() {
+        return wst;
+    }
+
+    public void setWst(Wst wst) {
+        this.wst = wst;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "position")
+    public Integer getPosition() {
+        return position;
+    }
+
+    public void setPosition(Integer position) {
+        this.position = position;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id" )
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_column_id")
+    public List<WstColumnQRange> getColumnQRanges() {
+        return columnQRanges;
+    }
+
+    public void setColumnQRanges(List<WstColumnQRange> columnQRanges) {
+        this.columnQRanges = columnQRanges;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_column_id")
+    public List<WstColumnValue> getColumnValues() {
+        return columnValues;
+    }
+
+    public void setColumnValues(List<WstColumnValue> columnValues) {
+        this.columnValues = columnValues;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumnQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,73 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
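+/**
+ * Association between a WstColumn and a WstQRange
+ * (table 'wst_column_q_ranges').
+ */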
+@Entity
+@Table(name = "wst_column_q_ranges")
+public class WstColumnQRange
+implements   Serializable
+{
+    private Integer   id;
+    private WstColumn wstColumn;
+    private WstQRange wstQRange;
+
+    public WstColumnQRange() {
+    }
+
+    public WstColumnQRange(
+        WstColumn wstColumn,
+        WstQRange wstQRange
+    ) {
+        this.wstColumn = wstColumn;
+        this.wstQRange = wstQRange;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_Q_RANGES_ID_SEQ",
+        sequenceName   = "WST_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_Q_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_column_id" )
+    public WstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(WstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_q_range_id" )
+    public WstQRange getWstQRange() {
+        return wstQRange;
+    }
+
+    public void setWstQRange(WstQRange wstQRange) {
+        this.wstQRange = wstQRange;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumnValue.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,85 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
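+/**
+ * A single W value of a WST column at a given station
+ * (table 'wst_column_values').
+ */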
+@Entity
+@Table(name = "wst_column_values")
+public class WstColumnValue
+implements   Serializable
+{
+    private Integer    id;
+    private WstColumn  wstColumn;
+    private BigDecimal position;
+    private BigDecimal w;
+
+    public WstColumnValue() {
+    }
+
+    public WstColumnValue(
+        WstColumn  wstColumn,
+        BigDecimal position,
+        BigDecimal w
+    ) {
+        this.wstColumn = wstColumn;
+        this.position  = position;
+        this.w         = w;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_COLUMN_VALUES_ID_SEQ",
+        sequenceName   = "WST_COLUMN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_COLUMN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_column_id")
+    public WstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(WstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    @Column(name = "position") // FIXME: type mapping needed?
+    public BigDecimal getPosition() {
+        return position;
+    }
+
+    public void setPosition(BigDecimal position) {
+        this.position = position;
+    }
+
+    @Column(name = "w") // FIXME: type mapping needed?
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstQRange.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,70 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
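+/**
+ * A Q value that is valid within a km range (table 'wst_q_ranges').
+ */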
+@Entity
+@Table(name = "wst_q_ranges")
+public class WstQRange
+implements   Serializable
+{
+    private Integer    id;
+    private Range      range;
+    private BigDecimal q;
+
+    public WstQRange() {
+    }
+
+    public WstQRange(Range range, BigDecimal q) {
+        this.range = range;
+        this.q     = q;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_Q_RANGES_ID_SEQ",
+        sequenceName   = "WST_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_Q_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id" )
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "q") // FIXME: type mapping needed?!
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2004 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package de.intevation.flys.utils;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Map;
+
+import org.apache.commons.dbcp.BasicDataSource;
+import org.apache.commons.dbcp.BasicDataSourceFactory;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.HibernateException;
+
+import org.hibernate.connection.ConnectionProviderFactory;
+import org.hibernate.connection.ConnectionProvider;
+
+import org.hibernate.cfg.Environment;
+
+/**
+ * <p>A connection provider that uses an Apache commons DBCP connection pool.</p>
+ *
+ * <p>To use this connection provider set:<br>
+ * <code>hibernate.connection.provider_class&nbsp;de.intevation.flys.utils.DBCPConnectionProvider</code></p>
+ *
+ * <pre>Supported Hibernate properties:
+ *   hibernate.connection.driver_class
+ *   hibernate.connection.url
+ *   hibernate.connection.username
+ *   hibernate.connection.password
+ *   hibernate.connection.isolation
+ *   hibernate.connection.autocommit
+ *   hibernate.connection.pool_size
+ *   hibernate.connection (JDBC driver properties)</pre>
+ * <br>
+ * All DBCP properties are also supported by using the hibernate.dbcp prefix.
+ * A complete list can be found on the DBCP configuration page:
+ * <a href="http://jakarta.apache.org/commons/dbcp/configuration.html">http://jakarta.apache.org/commons/dbcp/configuration.html</a>.
+ * <br>
+ * <pre>Example:
+ *   hibernate.connection.provider_class de.intevation.flys.utils.DBCPConnectionProvider
+ *   hibernate.connection.driver_class org.hsqldb.jdbcDriver
+ *   hibernate.connection.username sa
+ *   hibernate.connection.password
+ *   hibernate.connection.url jdbc:hsqldb:test
+ *   hibernate.connection.pool_size 20
+ *   hibernate.dbcp.initialSize 10
+ *   hibernate.dbcp.maxWait 3000
+ *   hibernate.dbcp.validationQuery select 1 from dual</pre>
+ *
+ * <p>More information about configuring/using DBCP can be found on the
+ * <a href="http://jakarta.apache.org/commons/dbcp/">DBCP website</a>.
+ * There you will also find the DBCP wiki, mailing lists, issue tracking
+ * and other support facilities.</p>
+ *
+ * @see org.hibernate.connection.ConnectionProvider
+ * @author Dirk Verbeeck
+ */
+public class DBCPConnectionProvider
+implements   ConnectionProvider
+{
+    private static Logger log = Logger.getLogger(DBCPConnectionProvider.class);
+
+    private static final String PREFIX = "hibernate.dbcp.";
+
+    private BasicDataSource ds;
+
+    // Old Environment property for backward-compatibility
+    // (property removed in Hibernate3)
+    private static final String DBCP_PS_MAXACTIVE =
+        "hibernate.dbcp.ps.maxActive";
+
+    // Property doesn't exist in Hibernate2
+    private static final String AUTOCOMMIT =
+        "hibernate.connection.autocommit";
+
+    public void configure(Properties props) throws HibernateException {
+        try {
+            log.debug("Configure DBCPConnectionProvider");
+
+            // DBCP properties used to create the BasicDataSource
+            Properties dbcpProperties = new Properties();
+
+            // DriverClass & url
+            String jdbcDriverClass = props.getProperty(Environment.DRIVER);
+            String jdbcUrl = props.getProperty(Environment.URL);
+            dbcpProperties.put("driverClassName", jdbcDriverClass);
+            dbcpProperties.put("url", jdbcUrl);
+
+            // Username / password
+            String username = props.getProperty(Environment.USER);
+            String password = props.getProperty(Environment.PASS);
+            dbcpProperties.put("username", username);
+            dbcpProperties.put("password", password);
+
+            // Isolation level
+            String isolationLevel = props.getProperty(Environment.ISOLATION);
+            if (isolationLevel != null
+            && (isolationLevel = isolationLevel.trim()).length() > 0) {
+                dbcpProperties.put("defaultTransactionIsolation", isolationLevel);
+            }
+
+            // Turn off autocommit (unless autocommit property is set)
+            String autocommit = props.getProperty(AUTOCOMMIT);
+            if (autocommit != null
+            && (autocommit = autocommit.trim()).length() > 0) {
+                dbcpProperties.put("defaultAutoCommit", autocommit);
+            } else {
+                dbcpProperties.put("defaultAutoCommit", String.valueOf(Boolean.FALSE));
+            }
+
+            // Pool size
+            String poolSize = props.getProperty(Environment.POOL_SIZE);
+            if (poolSize != null
+            && (poolSize = poolSize.trim()).length() > 0
+            && Integer.parseInt(poolSize) > 0)  {
+                dbcpProperties.put("maxActive", poolSize);
+            }
+
+            // Copy all "driver" properties into "connectionProperties"
+            Properties driverProps =
+                ConnectionProviderFactory.getConnectionProperties(props);
+
+            if (driverProps.size() > 0) {
+                StringBuilder connectionProperties = new StringBuilder();
+                for (Iterator iter = driverProps.entrySet().iterator();
+                    iter.hasNext();
+                ) {
+                    Map.Entry entry = (Map.Entry)iter.next();
+                    String    key   = (String)entry.getKey();
+                    String    value = (String)entry.getValue();
+                    connectionProperties
+                        .append(key)
+                        .append('=')
+                        .append(value);
+                    if (iter.hasNext()) {
+                        connectionProperties.append(';');
+                    }
+                }
+                dbcpProperties.put(
+                    "connectionProperties", connectionProperties.toString());
+            }
+
+            // Copy all DBCP properties removing the prefix
+            for (Iterator iter = props.entrySet().iterator() ; iter.hasNext() ;) {
+                Map.Entry entry = (Map.Entry)iter.next();
+                String    key   = (String)entry.getKey();
+                if (key.startsWith(PREFIX)) {
+                    String property = key.substring(PREFIX.length());
+                    String value    = (String)entry.getValue();
+                    dbcpProperties.put(property, value);
+                }
+            }
+
+            // Backward-compatibility
+            if (props.getProperty(DBCP_PS_MAXACTIVE) != null) {
+                dbcpProperties.put(
+                    "poolPreparedStatements",
+                    String.valueOf(Boolean.TRUE));
+                dbcpProperties.put(
+                    "maxOpenPreparedStatements",
+                    props.getProperty(DBCP_PS_MAXACTIVE));
+            }
+
+            // Some debug info
+            /* // commented out, because it leaks the password
+            if (log.isDebugEnabled()) {
+                log.debug("Creating a DBCP BasicDataSource" +
+                          " with the following DBCP factory properties:");
+                StringWriter sw = new StringWriter();
+                dbcpProperties.list(new PrintWriter(sw, true));
+                log.debug(sw.toString());
+            }
+            */
+
+            // Let the factory create the pool
+            ds = (BasicDataSource)BasicDataSourceFactory
+                .createDataSource(dbcpProperties);
+
+            // The BasicDataSource is initialized lazily: borrowing a
+            // connection would start the DataSource and make sure it is
+            // configured correctly.
+
+            // Connection conn = ds.getConnection();
+            // conn.close();
+        }
+        catch (Exception e) {
+            String message = "Could not create a DBCP pool";
+            log.fatal(message, e);
+            if (ds != null) {
+                BasicDataSource x = ds; ds = null;
+                try {
+                    x.close();
+                }
+                catch (SQLException sqle) {
+                    // ignored: the original failure is reported below
+                }
+            }
+            throw new HibernateException(message, e);
+        }
+        log.debug("Configure DBCPConnectionProvider complete");
+    }
+
+    public Connection getConnection() throws SQLException {
+        return ds.getConnection();
+    }
+
+    public void closeConnection(Connection conn) throws SQLException {
+        conn.close();
+    }
+
+    public void close() throws HibernateException {
+        try {
+            if (ds != null) {
+                BasicDataSource x = ds; ds = null;
+                x.close();
+            }
+        }
+        catch (SQLException sqle) {
+            throw new HibernateException("Could not close DBCP pool", sqle);
+        }
+    }
+
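+    // Returning false keeps Hibernate from releasing connections after
+    // every statement; the DBCP pool gets them back via closeConnection().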
+    public boolean supportsAggressiveRelease() {
+        return false;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/DateGuesser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,226 @@
+package de.intevation.flys.utils;
+
+/**
+ * Copyright (c) 2006 by Intevation GmbH
+ *
+ * @author Sascha L. Teichmann (teichmann@intevation.de)
+ *
+ * This program is free software under the LGPL (&gt;=v2.1)
+ * Read the file LGPL coming with FLYS for details.
+ */
+
+import java.util.Date;
+import java.util.Calendar;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+public final class DateGuesser {
+    public static final String [] MONTH = {
+        "jan", "feb", "mrz", "apr", "mai", "jun",
+        "jul", "aug", "sep", "okt", "nov", "dez"
+    };
+
+    public static final int guessMonth(String s) {
+        s = s.toLowerCase();
+        for (int i = 0; i < MONTH.length; ++i)
+            if (MONTH[i].equals(s)) {
+                return i;
+            }
+        return -1;
+    }
+
+    public static final Pattern YYYY_MM_DD =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYY =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})$");
+
+    public static final Pattern MMM_YYYY =
+        Pattern.compile("^(\\d{0,2})\\.?(\\w{3})\\.?(\\d{2,4})$");
+
+    public static final Pattern GARBAGE_YYYY =
+        Pattern.compile("^\\D*(\\d{2,4})$");
+
+    public static final Pattern YYYY_MM_DDThh_mm =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2})$");
+
+    public static final Pattern YYYY_MM_DDThh_mm_ss =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYYThh_mm =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})T(\\d{1,2}):(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYYThh_mm_ss =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})T(\\d{1,2}):(\\d{2}):(\\d{2})$");
+
+    private DateGuesser() {
+    }
+
+    public static final int calendarMonth(String month) {
+        return calendarMonth(Integer.parseInt(month));
+    }
+
+    public static final int calendarMonth(int month) {
+        return Math.max(Math.min(month-1, 11), 0);
+    }
+
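+    /**
+     * Tries the patterns above one after another and builds a Date from
+     * the first one that matches. Missing parts are filled with defaults
+     * (noon, mid-month); two digit years are read as 19xx.
+     *
+     * @throws IllegalArgumentException if s is null, empty or matches
+     *         no pattern.
+     */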
+    public static Date guessDate(String s) {
+        if (s == null || (s = s.trim()).length() == 0) {
+            throw new IllegalArgumentException();
+        }
+
+        Matcher m;
+
+        m = YYYY_MM_DD.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year  = m.group(1);
+            String month = m.group(2);
+            String day   = m.group(3);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYY.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year  = m.group(3);
+            String month = m.group(2);
+            String day   = m.group(1);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        m = MMM_YYYY.matcher(s);
+
+        if (m.matches()) {
+            int month = guessMonth(m.group(2));
+            if (month >= 0) {
+                Calendar cal = Calendar.getInstance();
+                String year = m.group(3);
+                String day  = m.group(1);
+                cal.set(
+                    Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                    month,
+                    day.length() == 0 ? 15 : Integer.parseInt(day),
+                    12, 0, 0);
+                return cal.getTime();
+            }
+        }
+
+        m = YYYY_MM_DDThh_mm.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            String month = m.group(2);
+            String day = m.group(3);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                0
+            );
+            return cal.getTime();
+        }
+
+        m = YYYY_MM_DDThh_mm_ss.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            String month = m.group(2);
+            String day = m.group(3);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            String second = m.group(6);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                Integer.parseInt(second)
+            );
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYYThh_mm.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(3);
+            String month = m.group(2);
+            String day = m.group(1);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                0
+            );
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYYThh_mm_ss.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(3);
+            String month = m.group(2);
+            String day = m.group(1);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            String second = m.group(6);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                Integer.parseInt(second)
+            );
+            return cal.getTime();
+        }
+
+        m = GARBAGE_YYYY.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                5,  // month
+                15, // day
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        throw new IllegalArgumentException();
+    }
+
+    public static void main(String [] args) {
+        for (int i = 0; i < args.length; ++i) {
+            System.out.println(args[i] + ": " + guessDate(args[i]));
+        }
+    }
+}
+// end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/StringUtil.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,823 @@
+package de.intevation.flys.utils;
+
+/**
+ * Copyright (c) 2006 by Intevation GmbH
+ *
+ * @author Sascha L. Teichmann (teichmann@intevation.de)
+ * @author Ludwig Reiter       (ludwig@intevation.de)
+ *
+ * This program is free software under the LGPL (&gt;=v2.1)
+ * Read the file LGPL coming with FLYS for details.
+ */
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Locale;
+
+import java.net.URLEncoder;
+import java.net.URLDecoder;
+
+import java.io.UnsupportedEncodingException;
+import java.io.IOException;
+import java.io.BufferedReader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.PrintWriter;
+
+
+public final class StringUtil {
+    final static String NUMBER_SEPERATOR = ";";
+    final static String LINE_SEPERATOR = ":";
+
+    private StringUtil() {
+    }
+
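+    /**
+     * Serializes a double[][] into a string: values of a row are joined
+     * with ';', rows are joined with ':', e.g. {{1.0, 2.0}, {3.0}} becomes
+     * "1.0;2.0:3.0". stringToDouble2DArray() parses that format back.
+     */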
+    public static final String double2DArrayToString(double[][] values) {
+
+        if (values == null) {
+            throw new IllegalArgumentException("keine double[][]-Werte");
+        }
+
+        StringBuilder strbuf = new StringBuilder();
+
+        for (int i=0; i < values.length; i++) {
+            if (i>0) {
+                strbuf.append(LINE_SEPERATOR);
+            }
+            for (int j=0; j < values[i].length; j++) {
+                if (j > 0) {
+                    strbuf.append(NUMBER_SEPERATOR);
+                }
+                strbuf.append(values[i][j]);
+            }
+        }
+
+        return strbuf.toString();
+    }
+
+    public static final double[][] stringToDouble2DArray(String str) {
+        if (str == null || str.length() == 0) {
+            return null;
+        }
+
+        String[] lineSplit = str.split(LINE_SEPERATOR);
+        double[][] array2D = new double[lineSplit.length][];
+        for (int i=0; i < lineSplit.length; i++) {
+            String[] numberSplit =  lineSplit[i].split(NUMBER_SEPERATOR);
+
+            double[] numbers = new double[numberSplit.length];
+            for (int j=0; j < numberSplit.length; j++) {
+                numbers[j] = Double.valueOf(numberSplit[j]).doubleValue();
+            }
+
+            array2D[i] = numbers;
+        }
+
+        return array2D;
+    }
+
+    /**
+     * Remove first occurrence of "[" and "]" (if both do occur).
+     * @param value String to be stripped of [] (might be null).
+     * @return input string but with [ and ] removed, or input string if no
+     *         brackets were found.
+     */
+    public static final String unbracket(String value) {
+        // null guard
+        if (value == null) return value;
+
+        int start = value.indexOf("[");
+        int end   = value.indexOf("]");
+
+        if (start < 0 || end < 0) {
+            return value;
+        }
+
+        value = value.substring(start + 1, end);
+
+        return value;
+    }
+
+
+    /**
+     * From "Q=1" make "W(Q=1)".
+     * @return original string wrapped in "W()" if it contains a "Q",
+     *         original string otherwise.
+     */
+    public static String wWrap(String wOrQ) {
+        return (wOrQ != null && wOrQ.indexOf("Q") >= 0)
+               ? "W(" + wOrQ + ")"
+               : wOrQ;
+    }
+
+
+    public static final String [] splitLines(String s) {
+        if (s == null) {
+            return null;
+        }
+        ArrayList<String> list = new ArrayList<String>();
+
+        BufferedReader in = null;
+
+        try {
+            in =
+                new BufferedReader(
+                new StringReader(s));
+
+            String line;
+
+            while ((line = in.readLine()) != null) {
+                list.add(line);
+            }
+        }
+        catch (IOException ioe) {
+            return null;
+        }
+        finally {
+            if (in != null)
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {}
+        }
+
+        return list.toArray(new String[list.size()]);
+    }
+
+    public static final String concat(String [] s) {
+        return concat(s, null);
+    }
+
+    public static final String concat(String [] s, String glue) {
+        if (s == null) {
+            return null;
+        }
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < s.length; ++i) {
+            if (i > 0 && glue != null) {
+                sb.append(glue);
+            }
+            sb.append(s[i]);
+        }
+        return sb.toString();
+    }
+
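+    /**
+     * Splits every entry of src into chunks of at most N characters;
+     * null and empty entries are passed through unchanged.
+     */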
+    public static final String [] splitAfter(String [] src, int N) {
+        if (src == null) {
+            return null;
+        }
+
+        ArrayList<String> list = new ArrayList<String>(src.length);
+        for (int i = 0; i < src.length; ++i) {
+            String s = src[i];
+            int R;
+            if (s == null || (R = s.length()) == 0) {
+                list.add(s);
+            }
+            else {
+                while (R > N) {
+                    list.add(s.substring(0, N));
+                    s = s.substring(N);
+                    R = s.length();
+                }
+                list.add(s);
+            }
+        }
+        return list.toArray(new String[list.size()]);
+    }
+
+    public static final String [] splitQuoted(String s) {
+        return splitQuoted(s, '"');
+    }
+
+    public static final String[] fitArray(String [] src, String [] dst) {
+        if (src == null) {
+            return dst;
+        }
+        if (dst == null) {
+            return src;
+        }
+
+        if (src.length == dst.length) {
+            return src;
+        }
+
+        System.arraycopy(src, 0, dst, 0, Math.min(dst.length, src.length));
+
+        return dst;
+    }
+
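+    /**
+     * Splits s at whitespace into tokens; text enclosed in quoteChar is
+     * kept together as one token and a backslash escapes the next
+     * character.
+     */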
+    public static final String [] splitQuoted(String s, char quoteChar) {
+        if (s == null) {
+            return null;
+        }
+        ArrayList<String> l = new ArrayList<String>();
+        int mode = 0, last_mode = 0;
+        StringBuilder sb = new StringBuilder();
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            switch (mode) {
+                case 0: // unquoted mode
+                    if (c == quoteChar) {
+                        mode = 1; // to quoted mode
+                        if (sb.length() > 0) {
+                            l.add(sb.toString());
+                            sb.setLength(0);
+                        }
+                    }
+                    else if (c == '\\') {
+                        last_mode = 0;
+                        mode = 2; // escape mode
+                    }
+                    else if (!Character.isWhitespace(c)) {
+                        sb.append(c);
+                    }
+                    else if (sb.length() > 0) {
+                        l.add(sb.toString());
+                        sb.setLength(0);
+                    }
+                    break;
+                case 1: // quote mode
+                    if (c == '\\') {
+                        last_mode = 1;
+                        mode = 2; // escape mode
+                    }
+                    else if (c == quoteChar) { // leave quote mode
+                        l.add(sb.toString());
+                        sb.setLength(0);
+                        mode = 0; // to unquoted mode
+                    }
+                    else {
+                        sb.append(c);
+                    }
+                    break;
+                case 2: // escape mode
+                    sb.append(c);
+                    mode = last_mode;
+                    break;
+            }
+        }
+        if (sb.length() > 0) {
+            l.add(sb.toString());
+        }
+        return l.toArray(new String[l.size()]);
+    }
+
+    public static final String [] splitUnique(String s) {
+        return splitUnique(s, "[\\s,]+");
+    }
+
+    public static final String [] splitUnique(String s, String sep) {
+        return s != null ? unique(s.split(sep)) : null;
+    }
+
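+    /**
+     * Sorts str in place and returns an array without duplicates; if no
+     * duplicates are found the (now sorted) input array itself is returned.
+     */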
+    public static final String [] unique(String [] str) {
+        if (str == null || str.length == 1) {
+            return str;
+        }
+
+        Arrays.sort(str);
+
+        for (int i = 1; i < str.length; ++i)
+            if (str[i].equals(str[i-1])) {
+                ArrayList<String> list = new ArrayList<String>(str.length);
+
+                for (int j = 0; j < i; ++j) {
+                    list.add(str[j]);
+                }
+
+                String last = str[i];
+
+                for (++i; i < str.length; ++i)
+                    if (!last.equals(str[i])) {
+                        list.add(last = str[i]);
+                    }
+
+                return list.toArray(new String[list.size()]);
+            }
+
+        return str;
+    }
+
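+    /**
+     * Ensures that the returned array has an empty string at index 0:
+     * an existing empty entry is swapped to the front, otherwise a new
+     * one is prepended.
+     */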
+    public static final String [] ensureEmptyExistence(String [] str) {
+        if (str == null) {
+            return null;
+        }
+
+        for (int i = 0; i < str.length; ++i)
+            if (str[i].length() == 0) {
+                if (i != 0) { // copy to front
+                    String t = str[0];
+                    str[0] = str[i];
+                    str[i] = t;
+                }
+                return str;
+            }
+
+        String [] n = new String[str.length+1];
+        n[0] = "";
+        System.arraycopy(str, 0, n, 1, str.length);
+        return n;
+    }
+
+    public static final String ensureWidthPadLeft(String s, int width, char pad) {
+        int N = s.length();
+        if (N >= width) {
+            return s;
+        }
+        StringBuilder sb = new StringBuilder(width);
+        for (; N < width; ++N) {
+            sb.append(pad);
+        }
+        sb.append(s);
+        return sb.toString();
+    }
+
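+    /**
+     * Splits s at whitespace and inserts one pad entry into the result
+     * for every N consecutive whitespace characters.
+     */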
+    public static final String [] splitWhiteSpaceWithNAsPad(
+        String s,
+        int    N,
+        String pad
+    ) {
+        if (s == null) {
+            return null;
+        }
+
+        boolean copyChars = true;
+        int     count     = 0; // number of WS
+
+        int S = s.length();
+
+        ArrayList<String> parts = new ArrayList<String>();
+
+        StringBuilder part = new StringBuilder(S);
+
+        for (int i = 0; i < S; ++i) {
+            char c = s.charAt(i);
+            if (copyChars) { // char mode
+                if (Character.isWhitespace(c)) {
+                    if (part.length() > 0) {
+                        parts.add(part.toString());
+                        part.setLength(0);
+                    }
+                    count     = 1;
+                    copyChars = false; // to WS mode
+                }
+                else {
+                    part.append(c);
+                }
+            }
+            else { // counting WS
+                if (Character.isWhitespace(c)) {
+                    ++count;
+                }
+                else {
+                    while (count >= N) {// enough to insert pad?
+                        parts.add(pad);
+                        count -= N;
+                    }
+                    part.append(c);
+                    count     = 0;
+                    copyChars = true; // back to char mode
+                }
+            }
+        } // for all chars
+
+        if (copyChars) {
+            if (part.length() > 0) {
+                parts.add(part.toString());
+            }
+        }
+        else {
+            while (count >= N) { // enough to insert pad?
+                parts.add(pad);
+                count -= N;
+            }
+        }
+
+        return parts.toArray(new String[parts.size()]);
+    }
+
+    public static final String encodeURL(String url) {
+        try {
+            return url != null
+                   ? URLEncoder.encode(url, "UTF-8")
+                   : "";
+        }
+        catch (UnsupportedEncodingException usee) {
+            throw new RuntimeException(usee.getLocalizedMessage());
+        }
+    }
+
+    public static final String decodeURL(String url) {
+        try {
+            return url != null
+                   ? URLDecoder.decode(url, "UTF-8")
+                   : "";
+        }
+        catch (UnsupportedEncodingException usee) {
+            throw new RuntimeException(usee.getLocalizedMessage());
+        }
+    }
+
+    public static final boolean isEmpty(String s) {
+        return s == null || s.length() == 0;
+    }
+
+    public static final String empty(String s) {
+        return s == null ? "" : s;
+    }
+
+
+    public static final String trim(String s) {
+        return s != null ? s.trim() : null;
+    }
+
+    public static final String uniqueWhitespaces(String s) {
+        if (s == null) {
+            return null;
+        }
+
+        boolean wasWS = false;
+        StringBuilder sb = new StringBuilder();
+
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            if (Character.isWhitespace(c)) {
+                if (!wasWS) {
+                    sb.append(c);
+                    wasWS = true;
+                }
+            }
+            else {
+                sb.append(c);
+                wasWS = false;
+            }
+        }
+
+        return sb.toString();
+    }
+
+    public static final String replaceNewlines(String s) {
+        return s == null
+               ? null
+               : s.replace('\r', ' ').replace('\n', ' ');
+    }
+
+    /*
+    public static final String quoteReplacement(String s) {
+
+        if (s == null || (s.indexOf('\\') == -1 && s.indexOf('$') == -1))
+            return s;
+
+        StringBuilder sb = new StringBuilder();
+
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            if (c == '\\' || c == '$') sb.append('\\');
+            sb.append(c);
+        }
+
+        return sb.toString();
+    }
+    */
+
+    public static final String quoteReplacement(String s) {
+
+        if (s == null) {
+            return null;
+        }
+
+        for (int N = s.length(), i = 0; i < N; ++i) { // plain check loop
+            char c = s.charAt(i);
+            if (c == '$' || c == '\\') { // first special -> StringBuilder
+                StringBuilder sb = new StringBuilder(s.substring(0, i))
+                .append('\\')
+                .append(c);
+                for (++i; i < N; ++i) { // build StringBuilder with rest
+                    if ((c = s.charAt(i)) == '$' || c == '\\') {
+                        sb.append('\\');
+                    }
+                    sb.append(c);
+                }
+                return sb.toString();
+            }
+        }
+
+        return s;
+    }
+
+    public static final String repeat(String what, int times) {
+        return repeat(what, times, new StringBuilder()).toString();
+    }
+
+    public static final StringBuilder repeat(String what, int times, StringBuilder sb) {
+        while (times-- > 0) {
+            sb.append(what);
+        }
+        return sb;
+    }
+
+    /**
+     * Returns the file name without extension.
+     */
+    public static final String cutExtension(String s) {
+        if (s == null) {
+            return null;
+        }
+        int dot = s.lastIndexOf('.');
+        return dot >= 0
+               ? s.substring(0, dot)
+               : s;
+    }
+
+    public static final String extension(String s) {
+        if (s == null) {
+            return null;
+        }
+        int dot = s.lastIndexOf('.');
+        return dot >= 0
+               ? s.substring(dot+1)
+               : s;
+    }
+
+    public static final String [] splitExtension(String x) {
+        if (x == null) {
+            return null;
+        }
+        int i = x.lastIndexOf('.');
+        return i < 0
+               ? new String[] { x, null }
+               : new String[] { x.substring(0, Math.max(0, i)), x.substring(i+1).toLowerCase() };
+    }
+
+    public static String entityEncode(String s) {
+        if (s == null || s.length() == 0) {
+            return s;
+        }
+
+        StringBuilder sb = new StringBuilder();
+        for (int i=0, N =s.length(); i < N; i++) {
+            char c = s.charAt(i);
+            switch (c) {
+                case '<':
+                    sb.append("&lt;");
+                    break;
+                case '>':
+                    sb.append("&gt;");
+                    break;
+                case '&':
+                    sb.append("&amp;");
+                    break;
+                default:
+                    sb.append(c);
+            }
+        }
+        return sb.toString();
+    }
+
+    public static String entityDecode(String s) {
+        if (s == null || s.length() == 0) {
+            return s;
+        }
+
+        boolean amp = false;
+        StringBuilder sb = new StringBuilder();
+        StringBuilder ampbuf = new StringBuilder();
+        for (int i=0, N =s.length(); i < N; i++) {
+            char c = s.charAt(i);
+            if (amp) {
+                if (c == ';') {
+                    amp = false;
+                    String str = ampbuf.toString();
+                    ampbuf.setLength(0);
+                    if (str.equals("lt")) {
+                        sb.append('<');
+                    }
+                    else if (str.equals("gt")) {
+                        sb.append('>');
+                    }
+                    else if (str.equals("amp")) {
+                        sb.append('&');
+                    }
+                    else {
+                        sb.append('&').append(str).append(';');
+                    }
+                }
+                else {
+                    ampbuf.append(c);
+                }
+            }
+            else if (c=='&') {
+                amp = true;
+            }
+            else {
+                sb.append(c);
+            }
+
+        }
+        return sb.toString();
+    }
+
+    public static final String quote(String s) {
+        return quote(s, '"');
+    }
+
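+    /**
+     * Wraps s in quoteChar if it contains whitespace or quoteChar,
+     * escaping embedded quote characters and backslashes; other strings
+     * are returned unchanged, the empty string becomes two quote chars.
+     */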
+    public static final String quote(String s, char quoteChar) {
+        if (s == null) {
+            return null;
+        }
+
+        int N = s.length();
+
+        if (N == 0)
+            return new StringBuilder(2)
+                   .append(quoteChar)
+                   .append(quoteChar)
+                   .toString();
+
+        StringBuilder sb = null;
+
+        int i = 0;
+
+        for (; i < N; ++i) {
+            char c = s.charAt(i);
+
+            if (Character.isWhitespace(c)) {
+                sb = new StringBuilder()
+                .append(quoteChar)
+                .append(s.substring(0, i+1));
+                break;
+            }
+            else if (c == quoteChar) {
+                sb = new StringBuilder()
+                .append(quoteChar)
+                .append(s.substring(0, i))
+                .append('\\')
+                .append(quoteChar);
+                break;
+            }
+        }
+
+        if (sb == null) {
+            return s;
+        }
+
+        for (++i; i < N; ++i) {
+            char c = s.charAt(i);
+            if (c == quoteChar || c == '\\') {
+                sb.append('\\');
+            }
+
+            sb.append(c);
+        }
+
+        return sb.append(quoteChar).toString();
+    }
+
+    /*
+    public static String sprintf(String format, Object... args) {
+        return sprintf(null, format, args);
+    }
+    */
+
+    public static String sprintf(Locale locale, String format, Object ... args) {
+        StringWriter sw = new StringWriter();
+        PrintWriter pw = new PrintWriter(sw);
+        pw.printf(locale, format, args);
+        pw.flush();
+        return sw.toString();
+    }
+
+
+    public static void testQuote() {
+        System.err.println("testing quote:");
+
+        String cases []  = {
+            "",          "''",
+            "test",      "test",
+            "test test", "'test test'",
+            "  test",    "'  test'",
+            "test   ",   "'test   '",
+            " test ",    "' test '",
+            "'test",     "'\\'test'",
+            "'",         "'\\''",
+            " ' ' ",     "' \\' \\' '",
+            "te'st",     "'te\\'st'"
+        };
+
+        int failed = 0;
+
+        for (int i = 0; i < cases.length; i += 2) {
+            String in  = cases[i];
+            String out = cases[i+1];
+
+            String res = quote(in, '\'');
+            if (!res.equals(out)) {
+                ++failed;
+                System.err.println(
+                    "quote failed on: >" + in +
+                    "< result: >" + res +
+                    "< expected: >" + out + "<");
+            }
+        }
+
+        int T = cases.length/2;
+
+        System.err.println("tests total: "  + T);
+        System.err.println("tests failed: " + failed);
+        System.err.println("tests passed: " + (T - failed));
+    }
+
+    public static void testQuoteReplacement() {
+        System.err.println("testing quoteReplacement:");
+
+        String cases []  = {
+            "",          "",
+            "test",      "test",
+            "$",         "\\$",
+            "\\",        "\\\\",
+            "\\$",       "\\\\\\$",
+            "test\\$",   "test\\\\\\$",
+            "\\test",    "\\\\test",
+            "test$",     "test\\$",
+            "test$test", "test\\$test",
+            "$test$",    "\\$test\\$"
+        };
+
+        int failed = 0;
+
+        for (int i = 0; i < cases.length; i += 2) {
+            String in  = cases[i];
+            String out = cases[i+1];
+
+            String res = quoteReplacement(in);
+            if (!res.equals(out)) {
+                ++failed;
+                System.err.println(
+                    "quoteReplacement failed on: '" + in +
+                    "' result: '" + res +
+                    "' expected: '" + out + "'");
+            }
+        }
+
+        int T = cases.length/2;
+
+        System.err.println("tests total: "  + T);
+        System.err.println("tests failed: " + failed);
+        System.err.println("tests passed: " + (T - failed));
+    }
+
+    public static void testStringArray2D() {
+        int total = 0;
+        int fail = 0;
+        int passed = 0;
+
+        System.err.println("testing StringArray2D:");
+
+        double[][] testarray = {{1.0, 2.0, 3.0},
+            {1.1, 2.1, 3.1},
+            {100.2, 200.2}
+        };
+        String str = double2DArrayToString(testarray);
+
+        total += 1;
+        if (str.equals("1.0;2.0;3.0:1.1;2.1;3.1:100.2;200.2")) {
+            passed +=1;
+        }
+        else {
+            fail +=1;
+            System.err.println("Der Ergebnis-String ist nicht richtig:");
+            System.err.println(str);
+        }
+
+
+
+        double[][] testarray2 = stringToDouble2DArray(str);
+        boolean failed = false;
+
+        total +=1;
+        for (int i=0; i < testarray.length; i++)
+            for (int j=0; j < testarray[i].length; j++)
+                if (testarray[i][j] != testarray2[i][j]) {
+                    System.err.println("Test scheitert bei i=" +i +" j=" +j);
+                    System.err.println("alter Wert=" + testarray[i][j] +" neuer Wert=" +testarray2[i][j]);
+                    failed = true;
+                }
+        if (failed) {
+            fail +=1;
+        }
+        else {
+            passed +=1;
+        }
+        System.err.println("tests total: "+ total);
+        System.err.println("tests failed: "+ fail);
+        System.err.println("tests passed: "+ passed);
+    }
+
+    public static void main(String [] args) {
+
+        testQuoteReplacement();
+        testQuote();
+        testStringArray2D();
+    }
+}
+// end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/hibernate/MapResultTransformer.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,26 @@
+package de.intevation.hibernate;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.transform.BasicTransformerAdapter;
+
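+/**
+ * Turns each result tuple of a Hibernate query into a Map keyed by the
+ * column aliases, e.g. via query.setResultTransformer(
+ * MapResultTransformer.INSTANCE).
+ */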
+public class MapResultTransformer
+extends      BasicTransformerAdapter
+{
+    public static final MapResultTransformer INSTANCE =
+        new MapResultTransformer();
+
+    public MapResultTransformer() {
+    }
+
+    @Override
+    public Object transformTuple(Object [] tuple, String [] aliases) {
+        Map<String, Object> map = new HashMap<String, Object>();
+        for (int i = 0; i < tuple.length; ++i) {
+            map.put(aliases[i], tuple[i]);
+        }
+        return map;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Bezugspegel.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,286 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * Bezugspegel generated by hbm2java
+ */
+@Entity
+@Table(name="BEZUGSPEGEL"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames={"GEWAESSERID", "NAME"})
+)
+public class Bezugspegel  implements java.io.Serializable {
+
+
+     private long bezugspegelid;
+     private Gewaesser gewaesser;
+     private BigDecimal km;
+     private String name;
+     private BigDecimal nullpunkt;
+     private String kennung;
+     private BigDecimal einzugsgebiet;
+     private Date inbetrieb;
+     private String fliesscharakter;
+     private BigDecimal laufentwicklung;
+     private BigDecimal mnq;
+     private BigDecimal mq;
+     private BigDecimal mhq;
+     private BigDecimal nnq;
+     private BigDecimal hhq;
+     private Date nnqdatum;
+     private Date hhqdatum;
+     private String jahresreihe;
+     private String bemerkung;
+     private Set<Station> stations = new HashSet<Station>(0);
+
+    public Bezugspegel() {
+    }
+
+    public Bezugspegel(long bezugspegelid, Gewaesser gewaesser, BigDecimal km, String name, BigDecimal nullpunkt) {
+        this.bezugspegelid = bezugspegelid;
+        this.gewaesser = gewaesser;
+        this.km = km;
+        this.name = name;
+        this.nullpunkt = nullpunkt;
+    }
+    public Bezugspegel(
+        long bezugspegelid, Gewaesser gewaesser, BigDecimal km, String name,
+        BigDecimal nullpunkt, String kennung, BigDecimal einzugsgebiet,
+        Date inbetrieb, String fliesscharakter, BigDecimal laufentwicklung,
+        BigDecimal mnq, BigDecimal mq, BigDecimal mhq, BigDecimal nnq,
+        BigDecimal hhq, Date nnqdatum, Date hhqdatum, String jahresreihe,
+        String bemerkung, Set<Station> stations) {
+       this.bezugspegelid = bezugspegelid;
+       this.gewaesser = gewaesser;
+       this.km = km;
+       this.name = name;
+       this.nullpunkt = nullpunkt;
+       this.kennung = kennung;
+       this.einzugsgebiet = einzugsgebiet;
+       this.inbetrieb = inbetrieb;
+       this.fliesscharakter = fliesscharakter;
+       this.laufentwicklung = laufentwicklung;
+       this.mnq = mnq;
+       this.mq = mq;
+       this.mhq = mhq;
+       this.nnq = nnq;
+       this.hhq = hhq;
+       this.nnqdatum = nnqdatum;
+       this.hhqdatum = hhqdatum;
+       this.jahresreihe = jahresreihe;
+       this.bemerkung = bemerkung;
+       this.stations = stations;
+    }
+
+    @Id
+    @Column(name="BEZUGSPEGELID", unique=true, nullable=false, precision=11, scale=0)
+    public long getBezugspegelid() {
+        return this.bezugspegelid;
+    }
+
+    public void setBezugspegelid(long bezugspegelid) {
+        this.bezugspegelid = bezugspegelid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GEWAESSERID", nullable=false)
+    public Gewaesser getGewaesser() {
+        return this.gewaesser;
+    }
+
+    public void setGewaesser(Gewaesser gewaesser) {
+        this.gewaesser = gewaesser;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="NULLPUNKT", nullable=false, precision=8, scale=3)
+    public BigDecimal getNullpunkt() {
+        return this.nullpunkt;
+    }
+
+    public void setNullpunkt(BigDecimal nullpunkt) {
+        this.nullpunkt = nullpunkt;
+    }
+
+
+    @Column(name="KENNUNG", length=16)
+    public String getKennung() {
+        return this.kennung;
+    }
+
+    public void setKennung(String kennung) {
+        this.kennung = kennung;
+    }
+
+
+    @Column(name="EINZUGSGEBIET", precision=10)
+    public BigDecimal getEinzugsgebiet() {
+        return this.einzugsgebiet;
+    }
+
+    public void setEinzugsgebiet(BigDecimal einzugsgebiet) {
+        this.einzugsgebiet = einzugsgebiet;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="INBETRIEB", length=7)
+    public Date getInbetrieb() {
+        return this.inbetrieb;
+    }
+
+    public void setInbetrieb(Date inbetrieb) {
+        this.inbetrieb = inbetrieb;
+    }
+
+
+    @Column(name="FLIESSCHARAKTER", length=3)
+    public String getFliesscharakter() {
+        return this.fliesscharakter;
+    }
+
+    public void setFliesscharakter(String fliesscharakter) {
+        this.fliesscharakter = fliesscharakter;
+    }
+
+
+    @Column(name="LAUFENTWICKLUNG", precision=7, scale=3)
+    public BigDecimal getLaufentwicklung() {
+        return this.laufentwicklung;
+    }
+
+    public void setLaufentwicklung(BigDecimal laufentwicklung) {
+        this.laufentwicklung = laufentwicklung;
+    }
+
+
+    @Column(name="MNQ", precision=8, scale=3)
+    public BigDecimal getMnq() {
+        return this.mnq;
+    }
+
+    public void setMnq(BigDecimal mnq) {
+        this.mnq = mnq;
+    }
+
+
+    @Column(name="MQ", precision=8, scale=3)
+    public BigDecimal getMq() {
+        return this.mq;
+    }
+
+    public void setMq(BigDecimal mq) {
+        this.mq = mq;
+    }
+
+
+    @Column(name="MHQ", precision=8, scale=3)
+    public BigDecimal getMhq() {
+        return this.mhq;
+    }
+
+    public void setMhq(BigDecimal mhq) {
+        this.mhq = mhq;
+    }
+
+
+    @Column(name="NNQ", precision=8, scale=3)
+    public BigDecimal getNnq() {
+        return this.nnq;
+    }
+
+    public void setNnq(BigDecimal nnq) {
+        this.nnq = nnq;
+    }
+
+
+    @Column(name="HHQ", precision=8, scale=3)
+    public BigDecimal getHhq() {
+        return this.hhq;
+    }
+
+    public void setHhq(BigDecimal hhq) {
+        this.hhq = hhq;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="NNQDATUM", length=7)
+    public Date getNnqdatum() {
+        return this.nnqdatum;
+    }
+
+    public void setNnqdatum(Date nnqdatum) {
+        this.nnqdatum = nnqdatum;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="HHQDATUM", length=7)
+    public Date getHhqdatum() {
+        return this.hhqdatum;
+    }
+
+    public void setHhqdatum(Date hhqdatum) {
+        this.hhqdatum = hhqdatum;
+    }
+
+
+    @Column(name="JAHRESREIHE", length=12)
+    public String getJahresreihe() {
+        return this.jahresreihe;
+    }
+
+    public void setJahresreihe(String jahresreihe) {
+        this.jahresreihe = jahresreihe;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="bezugspegel")
+    public Set<Station> getStations() {
+        return this.stations;
+    }
+
+    public void setStations(Set<Station> stations) {
+        this.stations = stations;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Bezugspegelgew.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,61 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Bezugspegelgew generated by hbm2java
+ */
+@Entity
+@Table(name="BEZUGSPEGELGEW"
+    ,schema="SEDDB"
+)
+public class Bezugspegelgew  implements java.io.Serializable {
+
+
+     private BezugspegelgewId id;
+
+    public Bezugspegelgew() {
+    }
+
+    public Bezugspegelgew(BezugspegelgewId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="bezugspegelid", column=@Column(name="BEZUGSPEGELID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gewaesserid", column=@Column(name="GEWAESSERID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="km", column=@Column(name="KM", nullable=false, precision=8, scale=3) ),
+        @AttributeOverride(name="name", column=@Column(name="NAME", nullable=false, length=50) ),
+        @AttributeOverride(name="nullpunkt", column=@Column(name="NULLPUNKT", nullable=false, precision=8, scale=3) ),
+        @AttributeOverride(name="kennung", column=@Column(name="KENNUNG", length=16) ),
+        @AttributeOverride(name="einzugsgebiet", column=@Column(name="EINZUGSGEBIET", precision=10) ),
+        @AttributeOverride(name="inbetrieb", column=@Column(name="INBETRIEB", length=7) ),
+        @AttributeOverride(name="fliesscharakter", column=@Column(name="FLIESSCHARAKTER", length=3) ),
+        @AttributeOverride(name="laufentwicklung", column=@Column(name="LAUFENTWICKLUNG", precision=7, scale=3) ),
+        @AttributeOverride(name="mnq", column=@Column(name="MNQ", precision=8, scale=3) ),
+        @AttributeOverride(name="mq", column=@Column(name="MQ", precision=8, scale=3) ),
+        @AttributeOverride(name="mhq", column=@Column(name="MHQ", precision=8, scale=3) ),
+        @AttributeOverride(name="nnq", column=@Column(name="NNQ", precision=8, scale=3) ),
+        @AttributeOverride(name="hhq", column=@Column(name="HHQ", precision=8, scale=3) ),
+        @AttributeOverride(name="nnqdatum", column=@Column(name="NNQDATUM", length=7) ),
+        @AttributeOverride(name="hhqdatum", column=@Column(name="HHQDATUM", length=7) ),
+        @AttributeOverride(name="jahresreihe", column=@Column(name="JAHRESREIHE", length=12) ),
+        @AttributeOverride(name="bemerkung", column=@Column(name="BEMERKUNG", length=240) ),
+        @AttributeOverride(name="gewname", column=@Column(name="GEWNAME", nullable=false, length=20) ) } )
+    public BezugspegelgewId getId() {
+        return this.id;
+    }
+
+    public void setId(BezugspegelgewId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/BezugspegelgewId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,327 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * BezugspegelgewId generated by hbm2java
+ */
+@Embeddable
+public class BezugspegelgewId  implements java.io.Serializable {
+
+
+     private long bezugspegelid;
+     private long gewaesserid;
+     private BigDecimal km;
+     private String name;
+     private BigDecimal nullpunkt;
+     private String kennung;
+     private BigDecimal einzugsgebiet;
+     private Date inbetrieb;
+     private String fliesscharakter;
+     private BigDecimal laufentwicklung;
+     private BigDecimal mnq;
+     private BigDecimal mq;
+     private BigDecimal mhq;
+     private BigDecimal nnq;
+     private BigDecimal hhq;
+     private Date nnqdatum;
+     private Date hhqdatum;
+     private String jahresreihe;
+     private String bemerkung;
+     private String gewname;
+
+    public BezugspegelgewId() {
+    }
+
+    public BezugspegelgewId(long bezugspegelid, long gewaesserid, BigDecimal km, String name, BigDecimal nullpunkt, String gewname) {
+        this.bezugspegelid = bezugspegelid;
+        this.gewaesserid = gewaesserid;
+        this.km = km;
+        this.name = name;
+        this.nullpunkt = nullpunkt;
+        this.gewname = gewname;
+    }
+    public BezugspegelgewId(long bezugspegelid, long gewaesserid, BigDecimal km, String name, BigDecimal nullpunkt, String kennung, BigDecimal einzugsgebiet, Date inbetrieb, String fliesscharakter, BigDecimal laufentwicklung, BigDecimal mnq, BigDecimal mq, BigDecimal mhq, BigDecimal nnq, BigDecimal hhq, Date nnqdatum, Date hhqdatum, String jahresreihe, String bemerkung, String gewname) {
+       this.bezugspegelid = bezugspegelid;
+       this.gewaesserid = gewaesserid;
+       this.km = km;
+       this.name = name;
+       this.nullpunkt = nullpunkt;
+       this.kennung = kennung;
+       this.einzugsgebiet = einzugsgebiet;
+       this.inbetrieb = inbetrieb;
+       this.fliesscharakter = fliesscharakter;
+       this.laufentwicklung = laufentwicklung;
+       this.mnq = mnq;
+       this.mq = mq;
+       this.mhq = mhq;
+       this.nnq = nnq;
+       this.hhq = hhq;
+       this.nnqdatum = nnqdatum;
+       this.hhqdatum = hhqdatum;
+       this.jahresreihe = jahresreihe;
+       this.bemerkung = bemerkung;
+       this.gewname = gewname;
+    }
+
+
+
+    @Column(name="BEZUGSPEGELID", nullable=false, precision=11, scale=0)
+    public long getBezugspegelid() {
+        return this.bezugspegelid;
+    }
+
+    public void setBezugspegelid(long bezugspegelid) {
+        this.bezugspegelid = bezugspegelid;
+    }
+
+
+    @Column(name="GEWAESSERID", nullable=false, precision=11, scale=0)
+    public long getGewaesserid() {
+        return this.gewaesserid;
+    }
+
+    public void setGewaesserid(long gewaesserid) {
+        this.gewaesserid = gewaesserid;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="NULLPUNKT", nullable=false, precision=8, scale=3)
+    public BigDecimal getNullpunkt() {
+        return this.nullpunkt;
+    }
+
+    public void setNullpunkt(BigDecimal nullpunkt) {
+        this.nullpunkt = nullpunkt;
+    }
+
+
+    @Column(name="KENNUNG", length=16)
+    public String getKennung() {
+        return this.kennung;
+    }
+
+    public void setKennung(String kennung) {
+        this.kennung = kennung;
+    }
+
+
+    @Column(name="EINZUGSGEBIET", precision=10)
+    public BigDecimal getEinzugsgebiet() {
+        return this.einzugsgebiet;
+    }
+
+    public void setEinzugsgebiet(BigDecimal einzugsgebiet) {
+        this.einzugsgebiet = einzugsgebiet;
+    }
+
+
+    @Column(name="INBETRIEB", length=7)
+    public Date getInbetrieb() {
+        return this.inbetrieb;
+    }
+
+    public void setInbetrieb(Date inbetrieb) {
+        this.inbetrieb = inbetrieb;
+    }
+
+
+    @Column(name="FLIESSCHARAKTER", length=3)
+    public String getFliesscharakter() {
+        return this.fliesscharakter;
+    }
+
+    public void setFliesscharakter(String fliesscharakter) {
+        this.fliesscharakter = fliesscharakter;
+    }
+
+
+    @Column(name="LAUFENTWICKLUNG", precision=7, scale=3)
+    public BigDecimal getLaufentwicklung() {
+        return this.laufentwicklung;
+    }
+
+    public void setLaufentwicklung(BigDecimal laufentwicklung) {
+        this.laufentwicklung = laufentwicklung;
+    }
+
+
+    @Column(name="MNQ", precision=8, scale=3)
+    public BigDecimal getMnq() {
+        return this.mnq;
+    }
+
+    public void setMnq(BigDecimal mnq) {
+        this.mnq = mnq;
+    }
+
+
+    @Column(name="MQ", precision=8, scale=3)
+    public BigDecimal getMq() {
+        return this.mq;
+    }
+
+    public void setMq(BigDecimal mq) {
+        this.mq = mq;
+    }
+
+
+    @Column(name="MHQ", precision=8, scale=3)
+    public BigDecimal getMhq() {
+        return this.mhq;
+    }
+
+    public void setMhq(BigDecimal mhq) {
+        this.mhq = mhq;
+    }
+
+
+    @Column(name="NNQ", precision=8, scale=3)
+    public BigDecimal getNnq() {
+        return this.nnq;
+    }
+
+    public void setNnq(BigDecimal nnq) {
+        this.nnq = nnq;
+    }
+
+
+    @Column(name="HHQ", precision=8, scale=3)
+    public BigDecimal getHhq() {
+        return this.hhq;
+    }
+
+    public void setHhq(BigDecimal hhq) {
+        this.hhq = hhq;
+    }
+
+
+    @Column(name="NNQDATUM", length=7)
+    public Date getNnqdatum() {
+        return this.nnqdatum;
+    }
+
+    public void setNnqdatum(Date nnqdatum) {
+        this.nnqdatum = nnqdatum;
+    }
+
+
+    @Column(name="HHQDATUM", length=7)
+    public Date getHhqdatum() {
+        return this.hhqdatum;
+    }
+
+    public void setHhqdatum(Date hhqdatum) {
+        this.hhqdatum = hhqdatum;
+    }
+
+
+    @Column(name="JAHRESREIHE", length=12)
+    public String getJahresreihe() {
+        return this.jahresreihe;
+    }
+
+    public void setJahresreihe(String jahresreihe) {
+        this.jahresreihe = jahresreihe;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="GEWNAME", nullable=false, length=20)
+    public String getGewname() {
+        return this.gewname;
+    }
+
+    public void setGewname(String gewname) {
+        this.gewname = gewname;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof BezugspegelgewId) ) return false;
+         BezugspegelgewId castOther = ( BezugspegelgewId ) other;
+
+         return (this.getBezugspegelid()==castOther.getBezugspegelid())
+ && (this.getGewaesserid()==castOther.getGewaesserid())
+ && ( (this.getKm()==castOther.getKm()) || ( this.getKm()!=null && castOther.getKm()!=null && this.getKm().equals(castOther.getKm()) ) )
+ && ( (this.getName()==castOther.getName()) || ( this.getName()!=null && castOther.getName()!=null && this.getName().equals(castOther.getName()) ) )
+ && ( (this.getNullpunkt()==castOther.getNullpunkt()) || ( this.getNullpunkt()!=null && castOther.getNullpunkt()!=null && this.getNullpunkt().equals(castOther.getNullpunkt()) ) )
+ && ( (this.getKennung()==castOther.getKennung()) || ( this.getKennung()!=null && castOther.getKennung()!=null && this.getKennung().equals(castOther.getKennung()) ) )
+ && ( (this.getEinzugsgebiet()==castOther.getEinzugsgebiet()) || ( this.getEinzugsgebiet()!=null && castOther.getEinzugsgebiet()!=null && this.getEinzugsgebiet().equals(castOther.getEinzugsgebiet()) ) )
+ && ( (this.getInbetrieb()==castOther.getInbetrieb()) || ( this.getInbetrieb()!=null && castOther.getInbetrieb()!=null && this.getInbetrieb().equals(castOther.getInbetrieb()) ) )
+ && ( (this.getFliesscharakter()==castOther.getFliesscharakter()) || ( this.getFliesscharakter()!=null && castOther.getFliesscharakter()!=null && this.getFliesscharakter().equals(castOther.getFliesscharakter()) ) )
+ && ( (this.getLaufentwicklung()==castOther.getLaufentwicklung()) || ( this.getLaufentwicklung()!=null && castOther.getLaufentwicklung()!=null && this.getLaufentwicklung().equals(castOther.getLaufentwicklung()) ) )
+ && ( (this.getMnq()==castOther.getMnq()) || ( this.getMnq()!=null && castOther.getMnq()!=null && this.getMnq().equals(castOther.getMnq()) ) )
+ && ( (this.getMq()==castOther.getMq()) || ( this.getMq()!=null && castOther.getMq()!=null && this.getMq().equals(castOther.getMq()) ) )
+ && ( (this.getMhq()==castOther.getMhq()) || ( this.getMhq()!=null && castOther.getMhq()!=null && this.getMhq().equals(castOther.getMhq()) ) )
+ && ( (this.getNnq()==castOther.getNnq()) || ( this.getNnq()!=null && castOther.getNnq()!=null && this.getNnq().equals(castOther.getNnq()) ) )
+ && ( (this.getHhq()==castOther.getHhq()) || ( this.getHhq()!=null && castOther.getHhq()!=null && this.getHhq().equals(castOther.getHhq()) ) )
+ && ( (this.getNnqdatum()==castOther.getNnqdatum()) || ( this.getNnqdatum()!=null && castOther.getNnqdatum()!=null && this.getNnqdatum().equals(castOther.getNnqdatum()) ) )
+ && ( (this.getHhqdatum()==castOther.getHhqdatum()) || ( this.getHhqdatum()!=null && castOther.getHhqdatum()!=null && this.getHhqdatum().equals(castOther.getHhqdatum()) ) )
+ && ( (this.getJahresreihe()==castOther.getJahresreihe()) || ( this.getJahresreihe()!=null && castOther.getJahresreihe()!=null && this.getJahresreihe().equals(castOther.getJahresreihe()) ) )
+ && ( (this.getBemerkung()==castOther.getBemerkung()) || ( this.getBemerkung()!=null && castOther.getBemerkung()!=null && this.getBemerkung().equals(castOther.getBemerkung()) ) )
+ && ( (this.getGewname()==castOther.getGewname()) || ( this.getGewname()!=null && castOther.getGewname()!=null && this.getGewname().equals(castOther.getGewname()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getBezugspegelid();
+         result = 37 * result + (int) this.getGewaesserid();
+         result = 37 * result + ( getKm() == null ? 0 : this.getKm().hashCode() );
+         result = 37 * result + ( getName() == null ? 0 : this.getName().hashCode() );
+         result = 37 * result + ( getNullpunkt() == null ? 0 : this.getNullpunkt().hashCode() );
+         result = 37 * result + ( getKennung() == null ? 0 : this.getKennung().hashCode() );
+         result = 37 * result + ( getEinzugsgebiet() == null ? 0 : this.getEinzugsgebiet().hashCode() );
+         result = 37 * result + ( getInbetrieb() == null ? 0 : this.getInbetrieb().hashCode() );
+         result = 37 * result + ( getFliesscharakter() == null ? 0 : this.getFliesscharakter().hashCode() );
+         result = 37 * result + ( getLaufentwicklung() == null ? 0 : this.getLaufentwicklung().hashCode() );
+         result = 37 * result + ( getMnq() == null ? 0 : this.getMnq().hashCode() );
+         result = 37 * result + ( getMq() == null ? 0 : this.getMq().hashCode() );
+         result = 37 * result + ( getMhq() == null ? 0 : this.getMhq().hashCode() );
+         result = 37 * result + ( getNnq() == null ? 0 : this.getNnq().hashCode() );
+         result = 37 * result + ( getHhq() == null ? 0 : this.getHhq().hashCode() );
+         result = 37 * result + ( getNnqdatum() == null ? 0 : this.getNnqdatum().hashCode() );
+         result = 37 * result + ( getHhqdatum() == null ? 0 : this.getHhqdatum().hashCode() );
+         result = 37 * result + ( getJahresreihe() == null ? 0 : this.getJahresreihe().hashCode() );
+         result = 37 * result + ( getBemerkung() == null ? 0 : this.getBemerkung().hashCode() );
+         result = 37 * result + ( getGewname() == null ? 0 : this.getGewname().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Bild.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,194 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Bild generated by hbm2java
+ */
+@Entity
+@Table(name="BILD"
+    ,schema="SEDDB"
+)
+public class Bild  implements java.io.Serializable {
+
+
+     private long bildid;
+     private Zzthema zzthema;
+     private Sohltest sohltest;
+     private int lfdnr;
+     private boolean istdigital;
+     private String pfad;
+     private String standort;
+     private String medium;
+     private String medpfad;
+     private String bemerkung;
+     private String typklein;
+     private String typmittel;
+     private String typgross;
+
+    public Bild() {
+    }
+
+    public Bild(long bildid, Zzthema zzthema, Sohltest sohltest, int lfdnr, boolean istdigital) {
+        this.bildid = bildid;
+        this.zzthema = zzthema;
+        this.sohltest = sohltest;
+        this.lfdnr = lfdnr;
+        this.istdigital = istdigital;
+    }
+    public Bild(long bildid, Zzthema zzthema, Sohltest sohltest, int lfdnr, boolean istdigital, String pfad, String standort, String medium, String medpfad, String bemerkung, String typklein, String typmittel, String typgross) {
+       this.bildid = bildid;
+       this.zzthema = zzthema;
+       this.sohltest = sohltest;
+       this.lfdnr = lfdnr;
+       this.istdigital = istdigital;
+       this.pfad = pfad;
+       this.standort = standort;
+       this.medium = medium;
+       this.medpfad = medpfad;
+       this.bemerkung = bemerkung;
+       this.typklein = typklein;
+       this.typmittel = typmittel;
+       this.typgross = typgross;
+    }
+
+     @Id
+
+
+    @Column(name="BILDID", unique=true, nullable=false, precision=11, scale=0)
+    public long getBildid() {
+        return this.bildid;
+    }
+
+    public void setBildid(long bildid) {
+        this.bildid = bildid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="THEMAID", nullable=false)
+    public Zzthema getZzthema() {
+        return this.zzthema;
+    }
+
+    public void setZzthema(Zzthema zzthema) {
+        this.zzthema = zzthema;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SOHLTESTID", nullable=false)
+    public Sohltest getSohltest() {
+        return this.sohltest;
+    }
+
+    public void setSohltest(Sohltest sohltest) {
+        this.sohltest = sohltest;
+    }
+
+
+    @Column(name="LFDNR", nullable=false, precision=5, scale=0)
+    public int getLfdnr() {
+        return this.lfdnr;
+    }
+
+    public void setLfdnr(int lfdnr) {
+        this.lfdnr = lfdnr;
+    }
+
+
+    @Column(name="ISTDIGITAL", nullable=false, precision=1, scale=0)
+    public boolean isIstdigital() {
+        return this.istdigital;
+    }
+
+    public void setIstdigital(boolean istdigital) {
+        this.istdigital = istdigital;
+    }
+
+
+    @Column(name="PFAD", length=512)
+    public String getPfad() {
+        return this.pfad;
+    }
+
+    public void setPfad(String pfad) {
+        this.pfad = pfad;
+    }
+
+
+    @Column(name="STANDORT", length=50)
+    public String getStandort() {
+        return this.standort;
+    }
+
+    public void setStandort(String standort) {
+        this.standort = standort;
+    }
+
+
+    @Column(name="MEDIUM", length=50)
+    public String getMedium() {
+        return this.medium;
+    }
+
+    public void setMedium(String medium) {
+        this.medium = medium;
+    }
+
+
+    @Column(name="MEDPFAD", length=50)
+    public String getMedpfad() {
+        return this.medpfad;
+    }
+
+    public void setMedpfad(String medpfad) {
+        this.medpfad = medpfad;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="TYPKLEIN", length=8)
+    public String getTypklein() {
+        return this.typklein;
+    }
+
+    public void setTypklein(String typklein) {
+        this.typklein = typklein;
+    }
+
+
+    @Column(name="TYPMITTEL", length=8)
+    public String getTypmittel() {
+        return this.typmittel;
+    }
+
+    public void setTypmittel(String typmittel) {
+        this.typmittel = typmittel;
+    }
+
+
+    @Column(name="TYPGROSS", length=8)
+    public String getTypgross() {
+        return this.typgross;
+    }
+
+    public void setTypgross(String typgross) {
+        this.typgross = typgross;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gewaesser.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,139 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * Gewaesser generated by hbm2java
+ */
+@Entity
+@Table(name="GEWAESSER"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="NAME")
+)
+public class Gewaesser  implements java.io.Serializable {
+
+
+     private long gewaesserid;
+     private String name;
+     private String kennung;
+     private BigDecimal laenge;
+     private BigDecimal einzugsgebiet;
+     private String bemerkung;
+     private Set<Bezugspegel> bezugspegels = new HashSet<Bezugspegel>(0);
+     private Set<Station> stations = new HashSet<Station>(0);
+
+    public Gewaesser() {
+    }
+
+    public Gewaesser(long gewaesserid, String name) {
+        this.gewaesserid = gewaesserid;
+        this.name = name;
+    }
+    public Gewaesser(long gewaesserid, String name, String kennung, BigDecimal laenge, BigDecimal einzugsgebiet, String bemerkung, Set<Bezugspegel> bezugspegels, Set<Station> stations) {
+       this.gewaesserid = gewaesserid;
+       this.name = name;
+       this.kennung = kennung;
+       this.laenge = laenge;
+       this.einzugsgebiet = einzugsgebiet;
+       this.bemerkung = bemerkung;
+       this.bezugspegels = bezugspegels;
+       this.stations = stations;
+    }
+
+     @Id
+
+
+    @Column(name="GEWAESSERID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGewaesserid() {
+        return this.gewaesserid;
+    }
+
+    public void setGewaesserid(long gewaesserid) {
+        this.gewaesserid = gewaesserid;
+    }
+
+
+    @Column(name="NAME", unique=true, nullable=false, length=20)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="KENNUNG", length=11)
+    public String getKennung() {
+        return this.kennung;
+    }
+
+    public void setKennung(String kennung) {
+        this.kennung = kennung;
+    }
+
+
+    @Column(name="LAENGE", precision=7, scale=3)
+    public BigDecimal getLaenge() {
+        return this.laenge;
+    }
+
+    public void setLaenge(BigDecimal laenge) {
+        this.laenge = laenge;
+    }
+
+
+    @Column(name="EINZUGSGEBIET", precision=10)
+    public BigDecimal getEinzugsgebiet() {
+        return this.einzugsgebiet;
+    }
+
+    public void setEinzugsgebiet(BigDecimal einzugsgebiet) {
+        this.einzugsgebiet = einzugsgebiet;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+@OneToMany(fetch=FetchType.LAZY, mappedBy="gewaesser")
+    public Set<Bezugspegel> getBezugspegels() {
+        return this.bezugspegels;
+    }
+
+    public void setBezugspegels(Set<Bezugspegel> bezugspegels) {
+        this.bezugspegels = bezugspegels;
+    }
+
+@OneToMany(fetch=FetchType.LAZY, mappedBy="gewaesser")
+    public Set<Station> getStations() {
+        return this.stations;
+    }
+
+    public void setStations(Set<Station> stations) {
+        this.stations = stations;
+    }
+
+
+
+
+}
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gfaenger.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,125 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * Gfaenger generated by hbm2java
+ */
+@Entity
+@Table(name="GFAENGER"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="NAME")
+)
+public class Gfaenger  implements java.io.Serializable {
+
+
+     private long gfaengerid;
+     private boolean istaktiv;
+     private String name;
+     private BigDecimal breite;
+     private BigDecimal maschenweite;
+     private String bemerkung;
+     private Set<Messung> messungs = new HashSet<Messung>(0);
+
+    public Gfaenger() {
+    }
+
+    public Gfaenger(long gfaengerid, boolean istaktiv, String name, BigDecimal breite, BigDecimal maschenweite) {
+        this.gfaengerid = gfaengerid;
+        this.istaktiv = istaktiv;
+        this.name = name;
+        this.breite = breite;
+        this.maschenweite = maschenweite;
+    }
+    public Gfaenger(long gfaengerid, boolean istaktiv, String name, BigDecimal breite, BigDecimal maschenweite, String bemerkung, Set<Messung> messungs) {
+       this.gfaengerid = gfaengerid;
+       this.istaktiv = istaktiv;
+       this.name = name;
+       this.breite = breite;
+       this.maschenweite = maschenweite;
+       this.bemerkung = bemerkung;
+       this.messungs = messungs;
+    }
+
+     @Id
+
+
+    @Column(name="GFAENGERID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGfaengerid() {
+        return this.gfaengerid;
+    }
+
+    public void setGfaengerid(long gfaengerid) {
+        this.gfaengerid = gfaengerid;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="NAME", unique=true, nullable=false, length=30)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="BREITE", nullable=false, precision=6, scale=3)
+    public BigDecimal getBreite() {
+        return this.breite;
+    }
+
+    public void setBreite(BigDecimal breite) {
+        this.breite = breite;
+    }
+
+
+    @Column(name="MASCHENWEITE", nullable=false, precision=7, scale=4)
+    public BigDecimal getMaschenweite() {
+        return this.maschenweite;
+    }
+
+    public void setMaschenweite(BigDecimal maschenweite) {
+        this.maschenweite = maschenweite;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+@OneToMany(fetch=FetchType.LAZY, mappedBy="gfaenger")
+    public Set<Messung> getMessungs() {
+        return this.messungs;
+    }
+
+    public void setMessungs(Set<Messung> messungs) {
+        this.messungs = messungs;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Glotlinks.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,72 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Glotlinks generated by hbm2java
+ */
+@Entity
+@Table(name="GLOTLINKS"
+    ,schema="SEDDB"
+)
+public class Glotlinks  implements java.io.Serializable {
+
+
+     private GlotlinksId id;
+
+    public Glotlinks() {
+    }
+
+    public Glotlinks(GlotlinksId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="glotrechteid", column=@Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="messungid", column=@Column(name="MESSUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="uferabst", column=@Column(name="UFERABST", nullable=false, precision=8, scale=3) ),
+        @AttributeOverride(name="tgeschiebe", column=@Column(name="TGESCHIEBE", precision=8, scale=3) ),
+        @AttributeOverride(name="dm", column=@Column(name="DM", precision=7, scale=4) ),
+        @AttributeOverride(name="sk", column=@Column(name="SK", precision=8, scale=3) ),
+        @AttributeOverride(name="so", column=@Column(name="SO", precision=8, scale=3) ),
+        @AttributeOverride(name="u", column=@Column(name="U", precision=8, scale=3) ),
+        @AttributeOverride(name="d90", column=@Column(name="D90", precision=7, scale=4) ),
+        @AttributeOverride(name="d84", column=@Column(name="D84", precision=7, scale=4) ),
+        @AttributeOverride(name="d80", column=@Column(name="D80", precision=7, scale=4) ),
+        @AttributeOverride(name="d75", column=@Column(name="D75", precision=7, scale=4) ),
+        @AttributeOverride(name="d70", column=@Column(name="D70", precision=7, scale=4) ),
+        @AttributeOverride(name="d60", column=@Column(name="D60", precision=7, scale=4) ),
+        @AttributeOverride(name="d50", column=@Column(name="D50", precision=7, scale=4) ),
+        @AttributeOverride(name="d40", column=@Column(name="D40", precision=7, scale=4) ),
+        @AttributeOverride(name="d30", column=@Column(name="D30", precision=7, scale=4) ),
+        @AttributeOverride(name="d25", column=@Column(name="D25", precision=7, scale=4) ),
+        @AttributeOverride(name="d20", column=@Column(name="D20", precision=7, scale=4) ),
+        @AttributeOverride(name="d16", column=@Column(name="D16", precision=7, scale=4) ),
+        @AttributeOverride(name="d10", column=@Column(name="D10", precision=7, scale=4) ),
+        @AttributeOverride(name="dmin", column=@Column(name="DMIN", precision=7, scale=4) ),
+        @AttributeOverride(name="durchdmin", column=@Column(name="DURCHDMIN", precision=6, scale=3) ),
+        @AttributeOverride(name="dmax", column=@Column(name="DMAX", precision=7, scale=4) ),
+        @AttributeOverride(name="durchdmax", column=@Column(name="DURCHDMAX", precision=6, scale=3) ),
+        @AttributeOverride(name="stdabw", column=@Column(name="STDABW", precision=8, scale=3) ),
+        @AttributeOverride(name="stdfehler", column=@Column(name="STDFEHLER", precision=8, scale=3) ),
+        @AttributeOverride(name="bemerkung", column=@Column(name="BEMERKUNG", length=240) ),
+        @AttributeOverride(name="uferablinks", column=@Column(name="UFERABLINKS", precision=8, scale=3) ),
+        @AttributeOverride(name="linksabst", column=@Column(name="LINKSABST", precision=22, scale=0) ) } )
+    public GlotlinksId getId() {
+        return this.id;
+    }
+
+    public void setId(GlotlinksId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/GlotlinksId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,464 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * GlotlinksId generated by hbm2java
+ */
+@Embeddable
+public class GlotlinksId  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+     private long messungid;
+     private BigDecimal uferabst;
+     private BigDecimal tgeschiebe;
+     private BigDecimal dm;
+     private BigDecimal sk;
+     private BigDecimal so;
+     private BigDecimal u;
+     private BigDecimal d90;
+     private BigDecimal d84;
+     private BigDecimal d80;
+     private BigDecimal d75;
+     private BigDecimal d70;
+     private BigDecimal d60;
+     private BigDecimal d50;
+     private BigDecimal d40;
+     private BigDecimal d30;
+     private BigDecimal d25;
+     private BigDecimal d20;
+     private BigDecimal d16;
+     private BigDecimal d10;
+     private BigDecimal dmin;
+     private BigDecimal durchdmin;
+     private BigDecimal dmax;
+     private BigDecimal durchdmax;
+     private BigDecimal stdabw;
+     private BigDecimal stdfehler;
+     private String bemerkung;
+     private BigDecimal uferablinks;
+     private BigDecimal linksabst;
+
+    public GlotlinksId() {
+    }
+
+
+    public GlotlinksId(long glotrechteid, long messungid, BigDecimal uferabst) {
+        this.glotrechteid = glotrechteid;
+        this.messungid = messungid;
+        this.uferabst = uferabst;
+    }
+    public GlotlinksId(long glotrechteid, long messungid, BigDecimal uferabst, BigDecimal tgeschiebe, BigDecimal dm, BigDecimal sk, BigDecimal so, BigDecimal u, BigDecimal d90, BigDecimal d84, BigDecimal d80, BigDecimal d75, BigDecimal d70, BigDecimal d60, BigDecimal d50, BigDecimal d40, BigDecimal d30, BigDecimal d25, BigDecimal d20, BigDecimal d16, BigDecimal d10, BigDecimal dmin, BigDecimal durchdmin, BigDecimal dmax, BigDecimal durchdmax, BigDecimal stdabw, BigDecimal stdfehler, String bemerkung, BigDecimal uferablinks, BigDecimal linksabst) {
+       this.glotrechteid = glotrechteid;
+       this.messungid = messungid;
+       this.uferabst = uferabst;
+       this.tgeschiebe = tgeschiebe;
+       this.dm = dm;
+       this.sk = sk;
+       this.so = so;
+       this.u = u;
+       this.d90 = d90;
+       this.d84 = d84;
+       this.d80 = d80;
+       this.d75 = d75;
+       this.d70 = d70;
+       this.d60 = d60;
+       this.d50 = d50;
+       this.d40 = d40;
+       this.d30 = d30;
+       this.d25 = d25;
+       this.d20 = d20;
+       this.d16 = d16;
+       this.d10 = d10;
+       this.dmin = dmin;
+       this.durchdmin = durchdmin;
+       this.dmax = dmax;
+       this.durchdmax = durchdmax;
+       this.stdabw = stdabw;
+       this.stdfehler = stdfehler;
+       this.bemerkung = bemerkung;
+       this.uferablinks = uferablinks;
+       this.linksabst = linksabst;
+    }
+
+
+
+    @Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+
+
+    @Column(name="MESSUNGID", nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+
+
+    @Column(name="UFERABST", nullable=false, precision=8, scale=3)
+    public BigDecimal getUferabst() {
+        return this.uferabst;
+    }
+
+    public void setUferabst(BigDecimal uferabst) {
+        this.uferabst = uferabst;
+    }
+
+
+    @Column(name="TGESCHIEBE", precision=8, scale=3)
+    public BigDecimal getTgeschiebe() {
+        return this.tgeschiebe;
+    }
+
+    public void setTgeschiebe(BigDecimal tgeschiebe) {
+        this.tgeschiebe = tgeschiebe;
+    }
+
+
+    @Column(name="DM", precision=7, scale=4)
+    public BigDecimal getDm() {
+        return this.dm;
+    }
+
+    public void setDm(BigDecimal dm) {
+        this.dm = dm;
+    }
+
+
+    @Column(name="SK", precision=8, scale=3)
+    public BigDecimal getSk() {
+        return this.sk;
+    }
+
+    public void setSk(BigDecimal sk) {
+        this.sk = sk;
+    }
+
+
+    @Column(name="SO", precision=8, scale=3)
+    public BigDecimal getSo() {
+        return this.so;
+    }
+
+    public void setSo(BigDecimal so) {
+        this.so = so;
+    }
+
+
+    @Column(name="U", precision=8, scale=3)
+    public BigDecimal getU() {
+        return this.u;
+    }
+
+    public void setU(BigDecimal u) {
+        this.u = u;
+    }
+
+
+    @Column(name="D90", precision=7, scale=4)
+    public BigDecimal getD90() {
+        return this.d90;
+    }
+
+    public void setD90(BigDecimal d90) {
+        this.d90 = d90;
+    }
+
+
+    @Column(name="D84", precision=7, scale=4)
+    public BigDecimal getD84() {
+        return this.d84;
+    }
+
+    public void setD84(BigDecimal d84) {
+        this.d84 = d84;
+    }
+
+
+    @Column(name="D80", precision=7, scale=4)
+    public BigDecimal getD80() {
+        return this.d80;
+    }
+
+    public void setD80(BigDecimal d80) {
+        this.d80 = d80;
+    }
+
+
+    @Column(name="D75", precision=7, scale=4)
+    public BigDecimal getD75() {
+        return this.d75;
+    }
+
+    public void setD75(BigDecimal d75) {
+        this.d75 = d75;
+    }
+
+
+    @Column(name="D70", precision=7, scale=4)
+    public BigDecimal getD70() {
+        return this.d70;
+    }
+
+    public void setD70(BigDecimal d70) {
+        this.d70 = d70;
+    }
+
+
+    @Column(name="D60", precision=7, scale=4)
+    public BigDecimal getD60() {
+        return this.d60;
+    }
+
+    public void setD60(BigDecimal d60) {
+        this.d60 = d60;
+    }
+
+
+    @Column(name="D50", precision=7, scale=4)
+    public BigDecimal getD50() {
+        return this.d50;
+    }
+
+    public void setD50(BigDecimal d50) {
+        this.d50 = d50;
+    }
+
+
+    @Column(name="D40", precision=7, scale=4)
+    public BigDecimal getD40() {
+        return this.d40;
+    }
+
+    public void setD40(BigDecimal d40) {
+        this.d40 = d40;
+    }
+
+
+    @Column(name="D30", precision=7, scale=4)
+    public BigDecimal getD30() {
+        return this.d30;
+    }
+
+    public void setD30(BigDecimal d30) {
+        this.d30 = d30;
+    }
+
+
+    @Column(name="D25", precision=7, scale=4)
+    public BigDecimal getD25() {
+        return this.d25;
+    }
+
+    public void setD25(BigDecimal d25) {
+        this.d25 = d25;
+    }
+
+
+    @Column(name="D20", precision=7, scale=4)
+    public BigDecimal getD20() {
+        return this.d20;
+    }
+
+    public void setD20(BigDecimal d20) {
+        this.d20 = d20;
+    }
+
+
+    @Column(name="D16", precision=7, scale=4)
+    public BigDecimal getD16() {
+        return this.d16;
+    }
+
+    public void setD16(BigDecimal d16) {
+        this.d16 = d16;
+    }
+
+
+    @Column(name="D10", precision=7, scale=4)
+    public BigDecimal getD10() {
+        return this.d10;
+    }
+
+    public void setD10(BigDecimal d10) {
+        this.d10 = d10;
+    }
+
+
+    @Column(name="DMIN", precision=7, scale=4)
+    public BigDecimal getDmin() {
+        return this.dmin;
+    }
+
+    public void setDmin(BigDecimal dmin) {
+        this.dmin = dmin;
+    }
+
+
+    @Column(name="DURCHDMIN", precision=6, scale=3)
+    public BigDecimal getDurchdmin() {
+        return this.durchdmin;
+    }
+
+    public void setDurchdmin(BigDecimal durchdmin) {
+        this.durchdmin = durchdmin;
+    }
+
+
+    @Column(name="DMAX", precision=7, scale=4)
+    public BigDecimal getDmax() {
+        return this.dmax;
+    }
+
+    public void setDmax(BigDecimal dmax) {
+        this.dmax = dmax;
+    }
+
+
+    @Column(name="DURCHDMAX", precision=6, scale=3)
+    public BigDecimal getDurchdmax() {
+        return this.durchdmax;
+    }
+
+    public void setDurchdmax(BigDecimal durchdmax) {
+        this.durchdmax = durchdmax;
+    }
+
+
+    @Column(name="STDABW", precision=8, scale=3)
+    public BigDecimal getStdabw() {
+        return this.stdabw;
+    }
+
+    public void setStdabw(BigDecimal stdabw) {
+        this.stdabw = stdabw;
+    }
+
+
+    @Column(name="STDFEHLER", precision=8, scale=3)
+    public BigDecimal getStdfehler() {
+        return this.stdfehler;
+    }
+
+    public void setStdfehler(BigDecimal stdfehler) {
+        this.stdfehler = stdfehler;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="UFERABLINKS", precision=8, scale=3)
+    public BigDecimal getUferablinks() {
+        return this.uferablinks;
+    }
+
+    public void setUferablinks(BigDecimal uferablinks) {
+        this.uferablinks = uferablinks;
+    }
+
+
+    @Column(name="LINKSABST", precision=22, scale=0)
+    public BigDecimal getLinksabst() {
+        return this.linksabst;
+    }
+
+    public void setLinksabst(BigDecimal linksabst) {
+        this.linksabst = linksabst;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof GlotlinksId) ) return false;
+         GlotlinksId castOther = ( GlotlinksId ) other;
+
+         return (this.getGlotrechteid()==castOther.getGlotrechteid())
+ && (this.getMessungid()==castOther.getMessungid())
+ && ( (this.getUferabst()==castOther.getUferabst()) || ( this.getUferabst()!=null && castOther.getUferabst()!=null && this.getUferabst().equals(castOther.getUferabst()) ) )
+ && ( (this.getTgeschiebe()==castOther.getTgeschiebe()) || ( this.getTgeschiebe()!=null && castOther.getTgeschiebe()!=null && this.getTgeschiebe().equals(castOther.getTgeschiebe()) ) )
+ && ( (this.getDm()==castOther.getDm()) || ( this.getDm()!=null && castOther.getDm()!=null && this.getDm().equals(castOther.getDm()) ) )
+ && ( (this.getSk()==castOther.getSk()) || ( this.getSk()!=null && castOther.getSk()!=null && this.getSk().equals(castOther.getSk()) ) )
+ && ( (this.getSo()==castOther.getSo()) || ( this.getSo()!=null && castOther.getSo()!=null && this.getSo().equals(castOther.getSo()) ) )
+ && ( (this.getU()==castOther.getU()) || ( this.getU()!=null && castOther.getU()!=null && this.getU().equals(castOther.getU()) ) )
+ && ( (this.getD90()==castOther.getD90()) || ( this.getD90()!=null && castOther.getD90()!=null && this.getD90().equals(castOther.getD90()) ) )
+ && ( (this.getD84()==castOther.getD84()) || ( this.getD84()!=null && castOther.getD84()!=null && this.getD84().equals(castOther.getD84()) ) )
+ && ( (this.getD80()==castOther.getD80()) || ( this.getD80()!=null && castOther.getD80()!=null && this.getD80().equals(castOther.getD80()) ) )
+ && ( (this.getD75()==castOther.getD75()) || ( this.getD75()!=null && castOther.getD75()!=null && this.getD75().equals(castOther.getD75()) ) )
+ && ( (this.getD70()==castOther.getD70()) || ( this.getD70()!=null && castOther.getD70()!=null && this.getD70().equals(castOther.getD70()) ) )
+ && ( (this.getD60()==castOther.getD60()) || ( this.getD60()!=null && castOther.getD60()!=null && this.getD60().equals(castOther.getD60()) ) )
+ && ( (this.getD50()==castOther.getD50()) || ( this.getD50()!=null && castOther.getD50()!=null && this.getD50().equals(castOther.getD50()) ) )
+ && ( (this.getD40()==castOther.getD40()) || ( this.getD40()!=null && castOther.getD40()!=null && this.getD40().equals(castOther.getD40()) ) )
+ && ( (this.getD30()==castOther.getD30()) || ( this.getD30()!=null && castOther.getD30()!=null && this.getD30().equals(castOther.getD30()) ) )
+ && ( (this.getD25()==castOther.getD25()) || ( this.getD25()!=null && castOther.getD25()!=null && this.getD25().equals(castOther.getD25()) ) )
+ && ( (this.getD20()==castOther.getD20()) || ( this.getD20()!=null && castOther.getD20()!=null && this.getD20().equals(castOther.getD20()) ) )
+ && ( (this.getD16()==castOther.getD16()) || ( this.getD16()!=null && castOther.getD16()!=null && this.getD16().equals(castOther.getD16()) ) )
+ && ( (this.getD10()==castOther.getD10()) || ( this.getD10()!=null && castOther.getD10()!=null && this.getD10().equals(castOther.getD10()) ) )
+ && ( (this.getDmin()==castOther.getDmin()) || ( this.getDmin()!=null && castOther.getDmin()!=null && this.getDmin().equals(castOther.getDmin()) ) )
+ && ( (this.getDurchdmin()==castOther.getDurchdmin()) || ( this.getDurchdmin()!=null && castOther.getDurchdmin()!=null && this.getDurchdmin().equals(castOther.getDurchdmin()) ) )
+ && ( (this.getDmax()==castOther.getDmax()) || ( this.getDmax()!=null && castOther.getDmax()!=null && this.getDmax().equals(castOther.getDmax()) ) )
+ && ( (this.getDurchdmax()==castOther.getDurchdmax()) || ( this.getDurchdmax()!=null && castOther.getDurchdmax()!=null && this.getDurchdmax().equals(castOther.getDurchdmax()) ) )
+ && ( (this.getStdabw()==castOther.getStdabw()) || ( this.getStdabw()!=null && castOther.getStdabw()!=null && this.getStdabw().equals(castOther.getStdabw()) ) )
+ && ( (this.getStdfehler()==castOther.getStdfehler()) || ( this.getStdfehler()!=null && castOther.getStdfehler()!=null && this.getStdfehler().equals(castOther.getStdfehler()) ) )
+ && ( (this.getBemerkung()==castOther.getBemerkung()) || ( this.getBemerkung()!=null && castOther.getBemerkung()!=null && this.getBemerkung().equals(castOther.getBemerkung()) ) )
+ && ( (this.getUferablinks()==castOther.getUferablinks()) || ( this.getUferablinks()!=null && castOther.getUferablinks()!=null && this.getUferablinks().equals(castOther.getUferablinks()) ) )
+ && ( (this.getLinksabst()==castOther.getLinksabst()) || ( this.getLinksabst()!=null && castOther.getLinksabst()!=null && this.getLinksabst().equals(castOther.getLinksabst()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getGlotrechteid();
+         result = 37 * result + (int) this.getMessungid();
+         result = 37 * result + ( getUferabst() == null ? 0 : this.getUferabst().hashCode() );
+         result = 37 * result + ( getTgeschiebe() == null ? 0 : this.getTgeschiebe().hashCode() );
+         result = 37 * result + ( getDm() == null ? 0 : this.getDm().hashCode() );
+         result = 37 * result + ( getSk() == null ? 0 : this.getSk().hashCode() );
+         result = 37 * result + ( getSo() == null ? 0 : this.getSo().hashCode() );
+         result = 37 * result + ( getU() == null ? 0 : this.getU().hashCode() );
+         result = 37 * result + ( getD90() == null ? 0 : this.getD90().hashCode() );
+         result = 37 * result + ( getD84() == null ? 0 : this.getD84().hashCode() );
+         result = 37 * result + ( getD80() == null ? 0 : this.getD80().hashCode() );
+         result = 37 * result + ( getD75() == null ? 0 : this.getD75().hashCode() );
+         result = 37 * result + ( getD70() == null ? 0 : this.getD70().hashCode() );
+         result = 37 * result + ( getD60() == null ? 0 : this.getD60().hashCode() );
+         result = 37 * result + ( getD50() == null ? 0 : this.getD50().hashCode() );
+         result = 37 * result + ( getD40() == null ? 0 : this.getD40().hashCode() );
+         result = 37 * result + ( getD30() == null ? 0 : this.getD30().hashCode() );
+         result = 37 * result + ( getD25() == null ? 0 : this.getD25().hashCode() );
+         result = 37 * result + ( getD20() == null ? 0 : this.getD20().hashCode() );
+         result = 37 * result + ( getD16() == null ? 0 : this.getD16().hashCode() );
+         result = 37 * result + ( getD10() == null ? 0 : this.getD10().hashCode() );
+         result = 37 * result + ( getDmin() == null ? 0 : this.getDmin().hashCode() );
+         result = 37 * result + ( getDurchdmin() == null ? 0 : this.getDurchdmin().hashCode() );
+         result = 37 * result + ( getDmax() == null ? 0 : this.getDmax().hashCode() );
+         result = 37 * result + ( getDurchdmax() == null ? 0 : this.getDurchdmax().hashCode() );
+         result = 37 * result + ( getStdabw() == null ? 0 : this.getStdabw().hashCode() );
+         result = 37 * result + ( getStdfehler() == null ? 0 : this.getStdfehler().hashCode() );
+         result = 37 * result + ( getBemerkung() == null ? 0 : this.getBemerkung().hashCode() );
+         result = 37 * result + ( getUferablinks() == null ? 0 : this.getUferablinks().hashCode() );
+         result = 37 * result + ( getLinksabst() == null ? 0 : this.getLinksabst().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Glotrechte.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,412 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+/**
+ * Glotrechte generated by hbm2java
+ */
+@Entity
+@Table(name="GLOTRECHTE"
+    ,schema="SEDDB"
+)
+public class Glotrechte  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+     private Messung messung;
+     private BigDecimal uferabst;
+     private BigDecimal tgeschiebe;
+     private BigDecimal dm;
+     private BigDecimal sk;
+     private BigDecimal so;
+     private BigDecimal u;
+     private BigDecimal d90;
+     private BigDecimal d84;
+     private BigDecimal d80;
+     private BigDecimal d75;
+     private BigDecimal d70;
+     private BigDecimal d60;
+     private BigDecimal d50;
+     private BigDecimal d40;
+     private BigDecimal d30;
+     private BigDecimal d25;
+     private BigDecimal d20;
+     private BigDecimal d16;
+     private BigDecimal d10;
+     private BigDecimal dmin;
+     private BigDecimal durchdmin;
+     private BigDecimal dmax;
+     private BigDecimal durchdmax;
+     private BigDecimal stdabw;
+     private BigDecimal stdfehler;
+     private String bemerkung;
+     private BigDecimal uferablinks;
+     private Gsiebung gsiebung;
+     private Set<Gprobe> gprobes = new HashSet<Gprobe>(0);
+
+    public Glotrechte() {
+    }
+
+
+    public Glotrechte(long glotrechteid, Messung messung, BigDecimal uferabst) {
+        this.glotrechteid = glotrechteid;
+        this.messung = messung;
+        this.uferabst = uferabst;
+    }
+    public Glotrechte(long glotrechteid, Messung messung, BigDecimal uferabst, BigDecimal tgeschiebe, BigDecimal dm, BigDecimal sk, BigDecimal so, BigDecimal u, BigDecimal d90, BigDecimal d84, BigDecimal d80, BigDecimal d75, BigDecimal d70, BigDecimal d60, BigDecimal d50, BigDecimal d40, BigDecimal d30, BigDecimal d25, BigDecimal d20, BigDecimal d16, BigDecimal d10, BigDecimal dmin, BigDecimal durchdmin, BigDecimal dmax, BigDecimal durchdmax, BigDecimal stdabw, BigDecimal stdfehler, String bemerkung, BigDecimal uferablinks, Gsiebung gsiebung, Set<Gprobe> gprobes) {
+       this.glotrechteid = glotrechteid;
+       this.messung = messung;
+       this.uferabst = uferabst;
+       this.tgeschiebe = tgeschiebe;
+       this.dm = dm;
+       this.sk = sk;
+       this.so = so;
+       this.u = u;
+       this.d90 = d90;
+       this.d84 = d84;
+       this.d80 = d80;
+       this.d75 = d75;
+       this.d70 = d70;
+       this.d60 = d60;
+       this.d50 = d50;
+       this.d40 = d40;
+       this.d30 = d30;
+       this.d25 = d25;
+       this.d20 = d20;
+       this.d16 = d16;
+       this.d10 = d10;
+       this.dmin = dmin;
+       this.durchdmin = durchdmin;
+       this.dmax = dmax;
+       this.durchdmax = durchdmax;
+       this.stdabw = stdabw;
+       this.stdfehler = stdfehler;
+       this.bemerkung = bemerkung;
+       this.uferablinks = uferablinks;
+       this.gsiebung = gsiebung;
+       this.gprobes = gprobes;
+    }
+
+     @Id
+
+
+    @Column(name="GLOTRECHTEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="MESSUNGID", nullable=false)
+    public Messung getMessung() {
+        return this.messung;
+    }
+
+    public void setMessung(Messung messung) {
+        this.messung = messung;
+    }
+
+
+    @Column(name="UFERABST", nullable=false, precision=8, scale=3)
+    public BigDecimal getUferabst() {
+        return this.uferabst;
+    }
+
+    public void setUferabst(BigDecimal uferabst) {
+        this.uferabst = uferabst;
+    }
+
+
+    @Column(name="TGESCHIEBE", precision=8, scale=3)
+    public BigDecimal getTgeschiebe() {
+        return this.tgeschiebe;
+    }
+
+    public void setTgeschiebe(BigDecimal tgeschiebe) {
+        this.tgeschiebe = tgeschiebe;
+    }
+
+
+    @Column(name="DM", precision=7, scale=4)
+    public BigDecimal getDm() {
+        return this.dm;
+    }
+
+    public void setDm(BigDecimal dm) {
+        this.dm = dm;
+    }
+
+
+    @Column(name="SK", precision=8, scale=3)
+    public BigDecimal getSk() {
+        return this.sk;
+    }
+
+    public void setSk(BigDecimal sk) {
+        this.sk = sk;
+    }
+
+
+    @Column(name="SO", precision=8, scale=3)
+    public BigDecimal getSo() {
+        return this.so;
+    }
+
+    public void setSo(BigDecimal so) {
+        this.so = so;
+    }
+
+
+    @Column(name="U", precision=8, scale=3)
+    public BigDecimal getU() {
+        return this.u;
+    }
+
+    public void setU(BigDecimal u) {
+        this.u = u;
+    }
+
+
+    @Column(name="D90", precision=7, scale=4)
+    public BigDecimal getD90() {
+        return this.d90;
+    }
+
+    public void setD90(BigDecimal d90) {
+        this.d90 = d90;
+    }
+
+
+    @Column(name="D84", precision=7, scale=4)
+    public BigDecimal getD84() {
+        return this.d84;
+    }
+
+    public void setD84(BigDecimal d84) {
+        this.d84 = d84;
+    }
+
+
+    @Column(name="D80", precision=7, scale=4)
+    public BigDecimal getD80() {
+        return this.d80;
+    }
+
+    public void setD80(BigDecimal d80) {
+        this.d80 = d80;
+    }
+
+
+    @Column(name="D75", precision=7, scale=4)
+    public BigDecimal getD75() {
+        return this.d75;
+    }
+
+    public void setD75(BigDecimal d75) {
+        this.d75 = d75;
+    }
+
+
+    @Column(name="D70", precision=7, scale=4)
+    public BigDecimal getD70() {
+        return this.d70;
+    }
+
+    public void setD70(BigDecimal d70) {
+        this.d70 = d70;
+    }
+
+
+    @Column(name="D60", precision=7, scale=4)
+    public BigDecimal getD60() {
+        return this.d60;
+    }
+
+    public void setD60(BigDecimal d60) {
+        this.d60 = d60;
+    }
+
+
+    @Column(name="D50", precision=7, scale=4)
+    public BigDecimal getD50() {
+        return this.d50;
+    }
+
+    public void setD50(BigDecimal d50) {
+        this.d50 = d50;
+    }
+
+
+    @Column(name="D40", precision=7, scale=4)
+    public BigDecimal getD40() {
+        return this.d40;
+    }
+
+    public void setD40(BigDecimal d40) {
+        this.d40 = d40;
+    }
+
+
+    @Column(name="D30", precision=7, scale=4)
+    public BigDecimal getD30() {
+        return this.d30;
+    }
+
+    public void setD30(BigDecimal d30) {
+        this.d30 = d30;
+    }
+
+
+    @Column(name="D25", precision=7, scale=4)
+    public BigDecimal getD25() {
+        return this.d25;
+    }
+
+    public void setD25(BigDecimal d25) {
+        this.d25 = d25;
+    }
+
+
+    @Column(name="D20", precision=7, scale=4)
+    public BigDecimal getD20() {
+        return this.d20;
+    }
+
+    public void setD20(BigDecimal d20) {
+        this.d20 = d20;
+    }
+
+
+    @Column(name="D16", precision=7, scale=4)
+    public BigDecimal getD16() {
+        return this.d16;
+    }
+
+    public void setD16(BigDecimal d16) {
+        this.d16 = d16;
+    }
+
+
+    @Column(name="D10", precision=7, scale=4)
+    public BigDecimal getD10() {
+        return this.d10;
+    }
+
+    public void setD10(BigDecimal d10) {
+        this.d10 = d10;
+    }
+
+
+    @Column(name="DMIN", precision=7, scale=4)
+    public BigDecimal getDmin() {
+        return this.dmin;
+    }
+
+    public void setDmin(BigDecimal dmin) {
+        this.dmin = dmin;
+    }
+
+
+    @Column(name="DURCHDMIN", precision=6, scale=3)
+    public BigDecimal getDurchdmin() {
+        return this.durchdmin;
+    }
+
+    public void setDurchdmin(BigDecimal durchdmin) {
+        this.durchdmin = durchdmin;
+    }
+
+
+    @Column(name="DMAX", precision=7, scale=4)
+    public BigDecimal getDmax() {
+        return this.dmax;
+    }
+
+    public void setDmax(BigDecimal dmax) {
+        this.dmax = dmax;
+    }
+
+
+    @Column(name="DURCHDMAX", precision=6, scale=3)
+    public BigDecimal getDurchdmax() {
+        return this.durchdmax;
+    }
+
+    public void setDurchdmax(BigDecimal durchdmax) {
+        this.durchdmax = durchdmax;
+    }
+
+
+    @Column(name="STDABW", precision=8, scale=3)
+    public BigDecimal getStdabw() {
+        return this.stdabw;
+    }
+
+    public void setStdabw(BigDecimal stdabw) {
+        this.stdabw = stdabw;
+    }
+
+
+    @Column(name="STDFEHLER", precision=8, scale=3)
+    public BigDecimal getStdfehler() {
+        return this.stdfehler;
+    }
+
+    public void setStdfehler(BigDecimal stdfehler) {
+        this.stdfehler = stdfehler;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="UFERABLINKS", precision=8, scale=3)
+    public BigDecimal getUferablinks() {
+        return this.uferablinks;
+    }
+
+    public void setUferablinks(BigDecimal uferablinks) {
+        this.uferablinks = uferablinks;
+    }
+
+    @OneToOne(fetch=FetchType.LAZY, mappedBy="glotrechte")
+    public Gsiebung getGsiebung() {
+        return this.gsiebung;
+    }
+
+    public void setGsiebung(Gsiebung gsiebung) {
+        this.gsiebung = gsiebung;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="glotrechte")
+    public Set<Gprobe> getGprobes() {
+        return this.gprobes;
+    }
+
+    public void setGprobes(Set<Gprobe> gprobes) {
+        this.gprobes = gprobes;
+    }
+}
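Note (editorial, not part of the changeset): both associations above are declared with FetchType.LAZY, so they are only initialized while the Hibernate Session that loaded the Glotrechte is still open. A minimal sketch under that assumption; the class and method names are made up:

import java.util.Set;

import de.intevation.seddb.model.Glotrechte;
import de.intevation.seddb.model.Gprobe;

public final class GlotrechteNavigationExample {

    // getGsiebung() and getGprobes() are both FetchType.LAZY; calling them
    // outside the Session that loaded 'lotrechte' would fail with a
    // LazyInitializationException.
    public static boolean hasSievingAndProbes(Glotrechte lotrechte) {
        Set<Gprobe> probes = lotrechte.getGprobes(); // first access triggers a select
        return lotrechte.getGsiebung() != null       // may be null if no GSIEBUNG row exists
            && !probes.isEmpty();
    }
}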
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gprobe.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,138 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Gprobe generated by hbm2java
+ */
+@Entity
+@Table(name="GPROBE"
+    ,schema="SEDDB"
+)
+public class Gprobe  implements java.io.Serializable {
+
+
+     private GprobeId id;
+     private Glotrechte glotrechte;
+     private int messdauer;
+     private BigDecimal menge;
+     private BigDecimal gtrieb;
+     private BigDecimal faktor;
+     private BigDecimal mengeF;
+     private BigDecimal gtriebF;
+
+    public Gprobe() {
+    }
+
+    public Gprobe(GprobeId id, Glotrechte glotrechte, int messdauer, BigDecimal menge) {
+        this.id = id;
+        this.glotrechte = glotrechte;
+        this.messdauer = messdauer;
+        this.menge = menge;
+    }
+    public Gprobe(GprobeId id, Glotrechte glotrechte, int messdauer, BigDecimal menge, BigDecimal gtrieb, BigDecimal faktor, BigDecimal mengeF, BigDecimal gtriebF) {
+       this.id = id;
+       this.glotrechte = glotrechte;
+       this.messdauer = messdauer;
+       this.menge = menge;
+       this.gtrieb = gtrieb;
+       this.faktor = faktor;
+       this.mengeF = mengeF;
+       this.gtriebF = gtriebF;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="glotrechteid", column=@Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="lfdnr", column=@Column(name="LFDNR", nullable=false, precision=5, scale=0) ) } )
+    public GprobeId getId() {
+        return this.id;
+    }
+
+    public void setId(GprobeId id) {
+        this.id = id;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GLOTRECHTEID", nullable=false, insertable=false, updatable=false)
+    public Glotrechte getGlotrechte() {
+        return this.glotrechte;
+    }
+
+    public void setGlotrechte(Glotrechte glotrechte) {
+        this.glotrechte = glotrechte;
+    }
+
+
+    @Column(name="MESSDAUER", nullable=false, precision=5, scale=0)
+    public int getMessdauer() {
+        return this.messdauer;
+    }
+
+    public void setMessdauer(int messdauer) {
+        this.messdauer = messdauer;
+    }
+
+
+    @Column(name="MENGE", nullable=false, precision=9, scale=3)
+    public BigDecimal getMenge() {
+        return this.menge;
+    }
+
+    public void setMenge(BigDecimal menge) {
+        this.menge = menge;
+    }
+
+
+    @Column(name="GTRIEB", precision=8, scale=3)
+    public BigDecimal getGtrieb() {
+        return this.gtrieb;
+    }
+
+    public void setGtrieb(BigDecimal gtrieb) {
+        this.gtrieb = gtrieb;
+    }
+
+
+    @Column(name="FAKTOR", precision=4, scale=3)
+    public BigDecimal getFaktor() {
+        return this.faktor;
+    }
+
+    public void setFaktor(BigDecimal faktor) {
+        this.faktor = faktor;
+    }
+
+
+    @Column(name="MENGE_F", precision=9, scale=3)
+    public BigDecimal getMengeF() {
+        return this.mengeF;
+    }
+
+    public void setMengeF(BigDecimal mengeF) {
+        this.mengeF = mengeF;
+    }
+
+
+    @Column(name="GTRIEB_F", precision=8, scale=3)
+    public BigDecimal getGtriebF() {
+        return this.gtriebF;
+    }
+
+    public void setGtriebF(BigDecimal gtriebF) {
+        this.gtriebF = gtriebF;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/GprobeId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * GprobeId generated by hbm2java
+ */
+@Embeddable
+public class GprobeId  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+     private int lfdnr;
+
+    public GprobeId() {
+    }
+
+    public GprobeId(long glotrechteid, int lfdnr) {
+       this.glotrechteid = glotrechteid;
+       this.lfdnr = lfdnr;
+    }
+
+
+
+    @Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+
+
+    @Column(name="LFDNR", nullable=false, precision=5, scale=0)
+    public int getLfdnr() {
+        return this.lfdnr;
+    }
+
+    public void setLfdnr(int lfdnr) {
+        this.lfdnr = lfdnr;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof GprobeId) ) return false;
+         GprobeId castOther = ( GprobeId ) other;
+
+         return (this.getGlotrechteid()==castOther.getGlotrechteid())
+ && (this.getLfdnr()==castOther.getLfdnr());
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getGlotrechteid();
+         result = 37 * result + this.getLfdnr();
+         return result;
+   }
+}
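Note (editorial, not part of the changeset): Gprobe uses GprobeId as an @EmbeddedId, so lookups go through the composite key (GLOTRECHTEID + LFDNR). A minimal sketch, assuming a configured Hibernate SessionFactory; the class and method names are made up:

import org.hibernate.Session;
import org.hibernate.SessionFactory;

import de.intevation.seddb.model.Gprobe;
import de.intevation.seddb.model.GprobeId;

public final class GprobeLookupExample {

    // Load one GPROBE row by its composite key.
    public static Gprobe load(SessionFactory sessionFactory, long glotrechteId, int lfdnr) {
        Session session = sessionFactory.openSession();
        try {
            return (Gprobe) session.get(Gprobe.class, new GprobeId(glotrechteId, lfdnr));
        }
        finally {
            session.close();
        }
    }
}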
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gsiebsatz.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,363 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * Gsiebsatz generated by hbm2java
+ */
+@Entity
+@Table(name="GSIEBSATZ"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="NAME")
+)
+public class Gsiebsatz  implements java.io.Serializable {
+
+
+     private long gsiebsatzid;
+     private String name;
+     private boolean istaktiv;
+     private BigDecimal sieb01;
+     private BigDecimal sieb02;
+     private BigDecimal sieb03;
+     private BigDecimal sieb04;
+     private BigDecimal sieb05;
+     private BigDecimal sieb06;
+     private BigDecimal sieb07;
+     private BigDecimal sieb08;
+     private BigDecimal sieb09;
+     private BigDecimal sieb10;
+     private BigDecimal sieb11;
+     private BigDecimal sieb12;
+     private BigDecimal sieb13;
+     private BigDecimal sieb14;
+     private BigDecimal sieb15;
+     private BigDecimal sieb16;
+     private BigDecimal sieb17;
+     private BigDecimal sieb18;
+     private BigDecimal sieb19;
+     private BigDecimal sieb20;
+     private BigDecimal sieb21;
+     private String bemerkung;
+     private Set<Ssiebung> ssiebungs = new HashSet<Ssiebung>(0);
+     private Set<Messung> messungs = new HashSet<Messung>(0);
+
+    public Gsiebsatz() {
+    }
+
+
+    public Gsiebsatz(long gsiebsatzid, String name, boolean istaktiv) {
+        this.gsiebsatzid = gsiebsatzid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+    }
+    public Gsiebsatz(long gsiebsatzid, String name, boolean istaktiv, BigDecimal sieb01, BigDecimal sieb02, BigDecimal sieb03, BigDecimal sieb04, BigDecimal sieb05, BigDecimal sieb06, BigDecimal sieb07, BigDecimal sieb08, BigDecimal sieb09, BigDecimal sieb10, BigDecimal sieb11, BigDecimal sieb12, BigDecimal sieb13, BigDecimal sieb14, BigDecimal sieb15, BigDecimal sieb16, BigDecimal sieb17, BigDecimal sieb18, BigDecimal sieb19, BigDecimal sieb20, BigDecimal sieb21, String bemerkung, Set<Ssiebung> ssiebungs, Set<Messung> messungs) {
+       this.gsiebsatzid = gsiebsatzid;
+       this.name = name;
+       this.istaktiv = istaktiv;
+       this.sieb01 = sieb01;
+       this.sieb02 = sieb02;
+       this.sieb03 = sieb03;
+       this.sieb04 = sieb04;
+       this.sieb05 = sieb05;
+       this.sieb06 = sieb06;
+       this.sieb07 = sieb07;
+       this.sieb08 = sieb08;
+       this.sieb09 = sieb09;
+       this.sieb10 = sieb10;
+       this.sieb11 = sieb11;
+       this.sieb12 = sieb12;
+       this.sieb13 = sieb13;
+       this.sieb14 = sieb14;
+       this.sieb15 = sieb15;
+       this.sieb16 = sieb16;
+       this.sieb17 = sieb17;
+       this.sieb18 = sieb18;
+       this.sieb19 = sieb19;
+       this.sieb20 = sieb20;
+       this.sieb21 = sieb21;
+       this.bemerkung = bemerkung;
+       this.ssiebungs = ssiebungs;
+       this.messungs = messungs;
+    }
+
+     @Id
+
+
+    @Column(name="GSIEBSATZID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGsiebsatzid() {
+        return this.gsiebsatzid;
+    }
+
+    public void setGsiebsatzid(long gsiebsatzid) {
+        this.gsiebsatzid = gsiebsatzid;
+    }
+
+
+    @Column(name="NAME", unique=true, nullable=false, length=20)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="SIEB01", precision=7, scale=4)
+    public BigDecimal getSieb01() {
+        return this.sieb01;
+    }
+
+    public void setSieb01(BigDecimal sieb01) {
+        this.sieb01 = sieb01;
+    }
+
+
+    @Column(name="SIEB02", precision=7, scale=4)
+    public BigDecimal getSieb02() {
+        return this.sieb02;
+    }
+
+    public void setSieb02(BigDecimal sieb02) {
+        this.sieb02 = sieb02;
+    }
+
+
+    @Column(name="SIEB03", precision=7, scale=4)
+    public BigDecimal getSieb03() {
+        return this.sieb03;
+    }
+
+    public void setSieb03(BigDecimal sieb03) {
+        this.sieb03 = sieb03;
+    }
+
+
+    @Column(name="SIEB04", precision=7, scale=4)
+    public BigDecimal getSieb04() {
+        return this.sieb04;
+    }
+
+    public void setSieb04(BigDecimal sieb04) {
+        this.sieb04 = sieb04;
+    }
+
+
+    @Column(name="SIEB05", precision=7, scale=4)
+    public BigDecimal getSieb05() {
+        return this.sieb05;
+    }
+
+    public void setSieb05(BigDecimal sieb05) {
+        this.sieb05 = sieb05;
+    }
+
+
+    @Column(name="SIEB06", precision=7, scale=4)
+    public BigDecimal getSieb06() {
+        return this.sieb06;
+    }
+
+    public void setSieb06(BigDecimal sieb06) {
+        this.sieb06 = sieb06;
+    }
+
+
+    @Column(name="SIEB07", precision=7, scale=4)
+    public BigDecimal getSieb07() {
+        return this.sieb07;
+    }
+
+    public void setSieb07(BigDecimal sieb07) {
+        this.sieb07 = sieb07;
+    }
+
+
+    @Column(name="SIEB08", precision=7, scale=4)
+    public BigDecimal getSieb08() {
+        return this.sieb08;
+    }
+
+    public void setSieb08(BigDecimal sieb08) {
+        this.sieb08 = sieb08;
+    }
+
+
+    @Column(name="SIEB09", precision=7, scale=4)
+    public BigDecimal getSieb09() {
+        return this.sieb09;
+    }
+
+    public void setSieb09(BigDecimal sieb09) {
+        this.sieb09 = sieb09;
+    }
+
+
+    @Column(name="SIEB10", precision=7, scale=4)
+    public BigDecimal getSieb10() {
+        return this.sieb10;
+    }
+
+    public void setSieb10(BigDecimal sieb10) {
+        this.sieb10 = sieb10;
+    }
+
+
+    @Column(name="SIEB11", precision=7, scale=4)
+    public BigDecimal getSieb11() {
+        return this.sieb11;
+    }
+
+    public void setSieb11(BigDecimal sieb11) {
+        this.sieb11 = sieb11;
+    }
+
+
+    @Column(name="SIEB12", precision=7, scale=4)
+    public BigDecimal getSieb12() {
+        return this.sieb12;
+    }
+
+    public void setSieb12(BigDecimal sieb12) {
+        this.sieb12 = sieb12;
+    }
+
+
+    @Column(name="SIEB13", precision=7, scale=4)
+    public BigDecimal getSieb13() {
+        return this.sieb13;
+    }
+
+    public void setSieb13(BigDecimal sieb13) {
+        this.sieb13 = sieb13;
+    }
+
+
+    @Column(name="SIEB14", precision=7, scale=4)
+    public BigDecimal getSieb14() {
+        return this.sieb14;
+    }
+
+    public void setSieb14(BigDecimal sieb14) {
+        this.sieb14 = sieb14;
+    }
+
+
+    @Column(name="SIEB15", precision=7, scale=4)
+    public BigDecimal getSieb15() {
+        return this.sieb15;
+    }
+
+    public void setSieb15(BigDecimal sieb15) {
+        this.sieb15 = sieb15;
+    }
+
+
+    @Column(name="SIEB16", precision=7, scale=4)
+    public BigDecimal getSieb16() {
+        return this.sieb16;
+    }
+
+    public void setSieb16(BigDecimal sieb16) {
+        this.sieb16 = sieb16;
+    }
+
+
+    @Column(name="SIEB17", precision=7, scale=4)
+    public BigDecimal getSieb17() {
+        return this.sieb17;
+    }
+
+    public void setSieb17(BigDecimal sieb17) {
+        this.sieb17 = sieb17;
+    }
+
+
+    @Column(name="SIEB18", precision=7, scale=4)
+    public BigDecimal getSieb18() {
+        return this.sieb18;
+    }
+
+    public void setSieb18(BigDecimal sieb18) {
+        this.sieb18 = sieb18;
+    }
+
+
+    @Column(name="SIEB19", precision=7, scale=4)
+    public BigDecimal getSieb19() {
+        return this.sieb19;
+    }
+
+    public void setSieb19(BigDecimal sieb19) {
+        this.sieb19 = sieb19;
+    }
+
+
+    @Column(name="SIEB20", precision=7, scale=4)
+    public BigDecimal getSieb20() {
+        return this.sieb20;
+    }
+
+    public void setSieb20(BigDecimal sieb20) {
+        this.sieb20 = sieb20;
+    }
+
+
+    @Column(name="SIEB21", precision=7, scale=4)
+    public BigDecimal getSieb21() {
+        return this.sieb21;
+    }
+
+    public void setSieb21(BigDecimal sieb21) {
+        this.sieb21 = sieb21;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="gsiebsatz")
+    public Set<Ssiebung> getSsiebungs() {
+        return this.ssiebungs;
+    }
+
+    public void setSsiebungs(Set<Ssiebung> ssiebungs) {
+        this.ssiebungs = ssiebungs;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="gsiebsatz")
+    public Set<Messung> getMessungs() {
+        return this.messungs;
+    }
+
+    public void setMessungs(Set<Messung> messungs) {
+        this.messungs = messungs;
+    }
+}
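Note (editorial, not part of the changeset): the mapping above puts a unique constraint on NAME, so a sieve set can be fetched by name with a plain HQL query. A minimal sketch, assuming an open Session; the class and method names are made up:

import org.hibernate.Query;
import org.hibernate.Session;

import de.intevation.seddb.model.Gsiebsatz;

public final class GsiebsatzQueryExample {

    // NAME is declared unique in the mapping above, so at most one row matches.
    public static Gsiebsatz findByName(Session session, String name) {
        Query query = session.createQuery(
            "from Gsiebsatz gs where gs.name = :name");
        query.setString("name", name);
        return (Gsiebsatz) query.uniqueResult();
    }
}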
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gsiebung.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,348 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToOne;
+import javax.persistence.PrimaryKeyJoinColumn;
+import javax.persistence.Table;
+import org.hibernate.annotations.GenericGenerator;
+import org.hibernate.annotations.Parameter;
+
+/**
+ * Gsiebung generated by hbm2java
+ */
+@Entity
+@Table(name="GSIEBUNG"
+    ,schema="SEDDB"
+)
+public class Gsiebung  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+     private Glotrechte glotrechte;
+     private BigDecimal gmasse;
+     private BigDecimal rsieb01;
+     private BigDecimal rsieb02;
+     private BigDecimal rsieb03;
+     private BigDecimal rsieb04;
+     private BigDecimal rsieb05;
+     private BigDecimal rsieb06;
+     private BigDecimal rsieb07;
+     private BigDecimal rsieb08;
+     private BigDecimal rsieb09;
+     private BigDecimal rsieb10;
+     private BigDecimal rsieb11;
+     private BigDecimal rsieb12;
+     private BigDecimal rsieb13;
+     private BigDecimal rsieb14;
+     private BigDecimal rsieb15;
+     private BigDecimal rsieb16;
+     private BigDecimal rsieb17;
+     private BigDecimal rsieb18;
+     private BigDecimal rsieb19;
+     private BigDecimal rsieb20;
+     private BigDecimal rsieb21;
+     private BigDecimal rest;
+     private String bemerkung;
+
+    public Gsiebung() {
+    }
+
+    public Gsiebung(Glotrechte glotrechte) {
+        this.glotrechte = glotrechte;
+    }
+    public Gsiebung(Glotrechte glotrechte, BigDecimal gmasse, BigDecimal rsieb01, BigDecimal rsieb02, BigDecimal rsieb03, BigDecimal rsieb04, BigDecimal rsieb05, BigDecimal rsieb06, BigDecimal rsieb07, BigDecimal rsieb08, BigDecimal rsieb09, BigDecimal rsieb10, BigDecimal rsieb11, BigDecimal rsieb12, BigDecimal rsieb13, BigDecimal rsieb14, BigDecimal rsieb15, BigDecimal rsieb16, BigDecimal rsieb17, BigDecimal rsieb18, BigDecimal rsieb19, BigDecimal rsieb20, BigDecimal rsieb21, BigDecimal rest, String bemerkung) {
+       this.glotrechte = glotrechte;
+       this.gmasse = gmasse;
+       this.rsieb01 = rsieb01;
+       this.rsieb02 = rsieb02;
+       this.rsieb03 = rsieb03;
+       this.rsieb04 = rsieb04;
+       this.rsieb05 = rsieb05;
+       this.rsieb06 = rsieb06;
+       this.rsieb07 = rsieb07;
+       this.rsieb08 = rsieb08;
+       this.rsieb09 = rsieb09;
+       this.rsieb10 = rsieb10;
+       this.rsieb11 = rsieb11;
+       this.rsieb12 = rsieb12;
+       this.rsieb13 = rsieb13;
+       this.rsieb14 = rsieb14;
+       this.rsieb15 = rsieb15;
+       this.rsieb16 = rsieb16;
+       this.rsieb17 = rsieb17;
+       this.rsieb18 = rsieb18;
+       this.rsieb19 = rsieb19;
+       this.rsieb20 = rsieb20;
+       this.rsieb21 = rsieb21;
+       this.rest = rest;
+       this.bemerkung = bemerkung;
+    }
+
+    @Id
+    @GeneratedValue(generator="generator")
+    @GenericGenerator(name="generator", strategy="foreign", parameters=@Parameter(name="property", value="glotrechte"))
+    @Column(name="GLOTRECHTEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+
+    @OneToOne(fetch=FetchType.LAZY)
+    @PrimaryKeyJoinColumn
+    public Glotrechte getGlotrechte() {
+        return this.glotrechte;
+    }
+
+    public void setGlotrechte(Glotrechte glotrechte) {
+        this.glotrechte = glotrechte;
+    }
+
+
+    @Column(name="GMASSE", precision=9, scale=3)
+    public BigDecimal getGmasse() {
+        return this.gmasse;
+    }
+
+    public void setGmasse(BigDecimal gmasse) {
+        this.gmasse = gmasse;
+    }
+
+
+    @Column(name="RSIEB01", precision=9, scale=3)
+    public BigDecimal getRsieb01() {
+        return this.rsieb01;
+    }
+
+    public void setRsieb01(BigDecimal rsieb01) {
+        this.rsieb01 = rsieb01;
+    }
+
+
+    @Column(name="RSIEB02", precision=9, scale=3)
+    public BigDecimal getRsieb02() {
+        return this.rsieb02;
+    }
+
+    public void setRsieb02(BigDecimal rsieb02) {
+        this.rsieb02 = rsieb02;
+    }
+
+
+    @Column(name="RSIEB03", precision=9, scale=3)
+    public BigDecimal getRsieb03() {
+        return this.rsieb03;
+    }
+
+    public void setRsieb03(BigDecimal rsieb03) {
+        this.rsieb03 = rsieb03;
+    }
+
+
+    @Column(name="RSIEB04", precision=9, scale=3)
+    public BigDecimal getRsieb04() {
+        return this.rsieb04;
+    }
+
+    public void setRsieb04(BigDecimal rsieb04) {
+        this.rsieb04 = rsieb04;
+    }
+
+
+    @Column(name="RSIEB05", precision=9, scale=3)
+    public BigDecimal getRsieb05() {
+        return this.rsieb05;
+    }
+
+    public void setRsieb05(BigDecimal rsieb05) {
+        this.rsieb05 = rsieb05;
+    }
+
+
+    @Column(name="RSIEB06", precision=9, scale=3)
+    public BigDecimal getRsieb06() {
+        return this.rsieb06;
+    }
+
+    public void setRsieb06(BigDecimal rsieb06) {
+        this.rsieb06 = rsieb06;
+    }
+
+
+    @Column(name="RSIEB07", precision=9, scale=3)
+    public BigDecimal getRsieb07() {
+        return this.rsieb07;
+    }
+
+    public void setRsieb07(BigDecimal rsieb07) {
+        this.rsieb07 = rsieb07;
+    }
+
+
+    @Column(name="RSIEB08", precision=9, scale=3)
+    public BigDecimal getRsieb08() {
+        return this.rsieb08;
+    }
+
+    public void setRsieb08(BigDecimal rsieb08) {
+        this.rsieb08 = rsieb08;
+    }
+
+
+    @Column(name="RSIEB09", precision=9, scale=3)
+    public BigDecimal getRsieb09() {
+        return this.rsieb09;
+    }
+
+    public void setRsieb09(BigDecimal rsieb09) {
+        this.rsieb09 = rsieb09;
+    }
+
+
+    @Column(name="RSIEB10", precision=9, scale=3)
+    public BigDecimal getRsieb10() {
+        return this.rsieb10;
+    }
+
+    public void setRsieb10(BigDecimal rsieb10) {
+        this.rsieb10 = rsieb10;
+    }
+
+
+    @Column(name="RSIEB11", precision=9, scale=3)
+    public BigDecimal getRsieb11() {
+        return this.rsieb11;
+    }
+
+    public void setRsieb11(BigDecimal rsieb11) {
+        this.rsieb11 = rsieb11;
+    }
+
+
+    @Column(name="RSIEB12", precision=9, scale=3)
+    public BigDecimal getRsieb12() {
+        return this.rsieb12;
+    }
+
+    public void setRsieb12(BigDecimal rsieb12) {
+        this.rsieb12 = rsieb12;
+    }
+
+
+    @Column(name="RSIEB13", precision=9, scale=3)
+    public BigDecimal getRsieb13() {
+        return this.rsieb13;
+    }
+
+    public void setRsieb13(BigDecimal rsieb13) {
+        this.rsieb13 = rsieb13;
+    }
+
+
+    @Column(name="RSIEB14", precision=9, scale=3)
+    public BigDecimal getRsieb14() {
+        return this.rsieb14;
+    }
+
+    public void setRsieb14(BigDecimal rsieb14) {
+        this.rsieb14 = rsieb14;
+    }
+
+
+    @Column(name="RSIEB15", precision=9, scale=3)
+    public BigDecimal getRsieb15() {
+        return this.rsieb15;
+    }
+
+    public void setRsieb15(BigDecimal rsieb15) {
+        this.rsieb15 = rsieb15;
+    }
+
+
+    @Column(name="RSIEB16", precision=9, scale=3)
+    public BigDecimal getRsieb16() {
+        return this.rsieb16;
+    }
+
+    public void setRsieb16(BigDecimal rsieb16) {
+        this.rsieb16 = rsieb16;
+    }
+
+
+    @Column(name="RSIEB17", precision=9, scale=3)
+    public BigDecimal getRsieb17() {
+        return this.rsieb17;
+    }
+
+    public void setRsieb17(BigDecimal rsieb17) {
+        this.rsieb17 = rsieb17;
+    }
+
+
+    @Column(name="RSIEB18", precision=9, scale=3)
+    public BigDecimal getRsieb18() {
+        return this.rsieb18;
+    }
+
+    public void setRsieb18(BigDecimal rsieb18) {
+        this.rsieb18 = rsieb18;
+    }
+
+
+    @Column(name="RSIEB19", precision=9, scale=3)
+    public BigDecimal getRsieb19() {
+        return this.rsieb19;
+    }
+
+    public void setRsieb19(BigDecimal rsieb19) {
+        this.rsieb19 = rsieb19;
+    }
+
+
+    @Column(name="RSIEB20", precision=9, scale=3)
+    public BigDecimal getRsieb20() {
+        return this.rsieb20;
+    }
+
+    public void setRsieb20(BigDecimal rsieb20) {
+        this.rsieb20 = rsieb20;
+    }
+
+
+    @Column(name="RSIEB21", precision=9, scale=3)
+    public BigDecimal getRsieb21() {
+        return this.rsieb21;
+    }
+
+    public void setRsieb21(BigDecimal rsieb21) {
+        this.rsieb21 = rsieb21;
+    }
+
+
+    @Column(name="REST", precision=9, scale=3)
+    public BigDecimal getRest() {
+        return this.rest;
+    }
+
+    public void setRest(BigDecimal rest) {
+        this.rest = rest;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+}
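Note (editorial, not part of the changeset): Gsiebung shares its primary key with Glotrechte via the 'foreign' id generator, so the association has to be set before the entity is saved. A minimal sketch, assuming an open Session and an already persisted Glotrechte; the class and method names are made up:

import java.math.BigDecimal;

import org.hibernate.Session;

import de.intevation.seddb.model.Glotrechte;
import de.intevation.seddb.model.Gsiebung;

public final class GsiebungSaveExample {

    // The 'foreign' id generator copies GLOTRECHTEID from the associated
    // Glotrechte, so the association must be in place before save().
    public static Gsiebung attachSieving(Session session, Glotrechte lotrechte, BigDecimal gmasse) {
        Gsiebung siebung = new Gsiebung(lotrechte);
        siebung.setGmasse(gmasse);
        lotrechte.setGsiebung(siebung); // keep both sides of the one-to-one consistent
        session.save(siebung);          // no sequence is used for this table
        return siebung;
    }
}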
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Gsiebungsieb.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,88 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Gsiebungsieb generated by hbm2java
+ */
+@Entity
+@Table(name="GSIEBUNGSIEB"
+    ,schema="SEDDB"
+)
+public class Gsiebungsieb  implements java.io.Serializable {
+
+
+     private GsiebungsiebId id;
+
+    public Gsiebungsieb() {
+    }
+
+    public Gsiebungsieb(GsiebungsiebId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="glotrechteid", column=@Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gsiebsatzid", column=@Column(name="GSIEBSATZID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gmasse", column=@Column(name="GMASSE", precision=9, scale=3) ),
+        @AttributeOverride(name="masche01", column=@Column(name="MASCHE01", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck01", column=@Column(name="RUECK01", precision=9, scale=3) ),
+        @AttributeOverride(name="masche02", column=@Column(name="MASCHE02", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck02", column=@Column(name="RUECK02", precision=9, scale=3) ),
+        @AttributeOverride(name="masche03", column=@Column(name="MASCHE03", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck03", column=@Column(name="RUECK03", precision=9, scale=3) ),
+        @AttributeOverride(name="masche04", column=@Column(name="MASCHE04", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck04", column=@Column(name="RUECK04", precision=9, scale=3) ),
+        @AttributeOverride(name="masche05", column=@Column(name="MASCHE05", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck05", column=@Column(name="RUECK05", precision=9, scale=3) ),
+        @AttributeOverride(name="masche06", column=@Column(name="MASCHE06", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck06", column=@Column(name="RUECK06", precision=9, scale=3) ),
+        @AttributeOverride(name="masche07", column=@Column(name="MASCHE07", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck07", column=@Column(name="RUECK07", precision=9, scale=3) ),
+        @AttributeOverride(name="masche08", column=@Column(name="MASCHE08", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck08", column=@Column(name="RUECK08", precision=9, scale=3) ),
+        @AttributeOverride(name="masche09", column=@Column(name="MASCHE09", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck09", column=@Column(name="RUECK09", precision=9, scale=3) ),
+        @AttributeOverride(name="masche10", column=@Column(name="MASCHE10", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck10", column=@Column(name="RUECK10", precision=9, scale=3) ),
+        @AttributeOverride(name="masche11", column=@Column(name="MASCHE11", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck11", column=@Column(name="RUECK11", precision=9, scale=3) ),
+        @AttributeOverride(name="masche12", column=@Column(name="MASCHE12", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck12", column=@Column(name="RUECK12", precision=9, scale=3) ),
+        @AttributeOverride(name="masche13", column=@Column(name="MASCHE13", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck13", column=@Column(name="RUECK13", precision=9, scale=3) ),
+        @AttributeOverride(name="masche14", column=@Column(name="MASCHE14", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck14", column=@Column(name="RUECK14", precision=9, scale=3) ),
+        @AttributeOverride(name="masche15", column=@Column(name="MASCHE15", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck15", column=@Column(name="RUECK15", precision=9, scale=3) ),
+        @AttributeOverride(name="masche16", column=@Column(name="MASCHE16", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck16", column=@Column(name="RUECK16", precision=9, scale=3) ),
+        @AttributeOverride(name="masche17", column=@Column(name="MASCHE17", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck17", column=@Column(name="RUECK17", precision=9, scale=3) ),
+        @AttributeOverride(name="masche18", column=@Column(name="MASCHE18", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck18", column=@Column(name="RUECK18", precision=9, scale=3) ),
+        @AttributeOverride(name="masche19", column=@Column(name="MASCHE19", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck19", column=@Column(name="RUECK19", precision=9, scale=3) ),
+        @AttributeOverride(name="masche20", column=@Column(name="MASCHE20", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck20", column=@Column(name="RUECK20", precision=9, scale=3) ),
+        @AttributeOverride(name="masche21", column=@Column(name="MASCHE21", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck21", column=@Column(name="RUECK21", precision=9, scale=3) ),
+        @AttributeOverride(name="rest", column=@Column(name="REST", precision=9, scale=3) ) } )
+    public GsiebungsiebId getId() {
+        return this.id;
+    }
+
+    public void setId(GsiebungsiebId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/GsiebungsiebId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,687 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * GsiebungsiebId generated by hbm2java
+ */
+@Embeddable
+public class GsiebungsiebId  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+     private long gsiebsatzid;
+     private BigDecimal gmasse;
+     private BigDecimal masche01;
+     private BigDecimal rueck01;
+     private BigDecimal masche02;
+     private BigDecimal rueck02;
+     private BigDecimal masche03;
+     private BigDecimal rueck03;
+     private BigDecimal masche04;
+     private BigDecimal rueck04;
+     private BigDecimal masche05;
+     private BigDecimal rueck05;
+     private BigDecimal masche06;
+     private BigDecimal rueck06;
+     private BigDecimal masche07;
+     private BigDecimal rueck07;
+     private BigDecimal masche08;
+     private BigDecimal rueck08;
+     private BigDecimal masche09;
+     private BigDecimal rueck09;
+     private BigDecimal masche10;
+     private BigDecimal rueck10;
+     private BigDecimal masche11;
+     private BigDecimal rueck11;
+     private BigDecimal masche12;
+     private BigDecimal rueck12;
+     private BigDecimal masche13;
+     private BigDecimal rueck13;
+     private BigDecimal masche14;
+     private BigDecimal rueck14;
+     private BigDecimal masche15;
+     private BigDecimal rueck15;
+     private BigDecimal masche16;
+     private BigDecimal rueck16;
+     private BigDecimal masche17;
+     private BigDecimal rueck17;
+     private BigDecimal masche18;
+     private BigDecimal rueck18;
+     private BigDecimal masche19;
+     private BigDecimal rueck19;
+     private BigDecimal masche20;
+     private BigDecimal rueck20;
+     private BigDecimal masche21;
+     private BigDecimal rueck21;
+     private BigDecimal rest;
+
+    public GsiebungsiebId() {
+    }
+
+
+    public GsiebungsiebId(long glotrechteid, long gsiebsatzid) {
+        this.glotrechteid = glotrechteid;
+        this.gsiebsatzid = gsiebsatzid;
+    }
+    public GsiebungsiebId(long glotrechteid, long gsiebsatzid, BigDecimal gmasse, BigDecimal masche01, BigDecimal rueck01, BigDecimal masche02, BigDecimal rueck02, BigDecimal masche03, BigDecimal rueck03, BigDecimal masche04, BigDecimal rueck04, BigDecimal masche05, BigDecimal rueck05, BigDecimal masche06, BigDecimal rueck06, BigDecimal masche07, BigDecimal rueck07, BigDecimal masche08, BigDecimal rueck08, BigDecimal masche09, BigDecimal rueck09, BigDecimal masche10, BigDecimal rueck10, BigDecimal masche11, BigDecimal rueck11, BigDecimal masche12, BigDecimal rueck12, BigDecimal masche13, BigDecimal rueck13, BigDecimal masche14, BigDecimal rueck14, BigDecimal masche15, BigDecimal rueck15, BigDecimal masche16, BigDecimal rueck16, BigDecimal masche17, BigDecimal rueck17, BigDecimal masche18, BigDecimal rueck18, BigDecimal masche19, BigDecimal rueck19, BigDecimal masche20, BigDecimal rueck20, BigDecimal masche21, BigDecimal rueck21, BigDecimal rest) {
+       this.glotrechteid = glotrechteid;
+       this.gsiebsatzid = gsiebsatzid;
+       this.gmasse = gmasse;
+       this.masche01 = masche01;
+       this.rueck01 = rueck01;
+       this.masche02 = masche02;
+       this.rueck02 = rueck02;
+       this.masche03 = masche03;
+       this.rueck03 = rueck03;
+       this.masche04 = masche04;
+       this.rueck04 = rueck04;
+       this.masche05 = masche05;
+       this.rueck05 = rueck05;
+       this.masche06 = masche06;
+       this.rueck06 = rueck06;
+       this.masche07 = masche07;
+       this.rueck07 = rueck07;
+       this.masche08 = masche08;
+       this.rueck08 = rueck08;
+       this.masche09 = masche09;
+       this.rueck09 = rueck09;
+       this.masche10 = masche10;
+       this.rueck10 = rueck10;
+       this.masche11 = masche11;
+       this.rueck11 = rueck11;
+       this.masche12 = masche12;
+       this.rueck12 = rueck12;
+       this.masche13 = masche13;
+       this.rueck13 = rueck13;
+       this.masche14 = masche14;
+       this.rueck14 = rueck14;
+       this.masche15 = masche15;
+       this.rueck15 = rueck15;
+       this.masche16 = masche16;
+       this.rueck16 = rueck16;
+       this.masche17 = masche17;
+       this.rueck17 = rueck17;
+       this.masche18 = masche18;
+       this.rueck18 = rueck18;
+       this.masche19 = masche19;
+       this.rueck19 = rueck19;
+       this.masche20 = masche20;
+       this.rueck20 = rueck20;
+       this.masche21 = masche21;
+       this.rueck21 = rueck21;
+       this.rest = rest;
+    }
+
+
+
+    @Column(name="GLOTRECHTEID", nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+
+
+    @Column(name="GSIEBSATZID", nullable=false, precision=11, scale=0)
+    public long getGsiebsatzid() {
+        return this.gsiebsatzid;
+    }
+
+    public void setGsiebsatzid(long gsiebsatzid) {
+        this.gsiebsatzid = gsiebsatzid;
+    }
+
+
+    @Column(name="GMASSE", precision=9, scale=3)
+    public BigDecimal getGmasse() {
+        return this.gmasse;
+    }
+
+    public void setGmasse(BigDecimal gmasse) {
+        this.gmasse = gmasse;
+    }
+
+
+    @Column(name="MASCHE01", precision=7, scale=4)
+    public BigDecimal getMasche01() {
+        return this.masche01;
+    }
+
+    public void setMasche01(BigDecimal masche01) {
+        this.masche01 = masche01;
+    }
+
+
+    @Column(name="RUECK01", precision=9, scale=3)
+    public BigDecimal getRueck01() {
+        return this.rueck01;
+    }
+
+    public void setRueck01(BigDecimal rueck01) {
+        this.rueck01 = rueck01;
+    }
+
+
+    @Column(name="MASCHE02", precision=7, scale=4)
+    public BigDecimal getMasche02() {
+        return this.masche02;
+    }
+
+    public void setMasche02(BigDecimal masche02) {
+        this.masche02 = masche02;
+    }
+
+
+    @Column(name="RUECK02", precision=9, scale=3)
+    public BigDecimal getRueck02() {
+        return this.rueck02;
+    }
+
+    public void setRueck02(BigDecimal rueck02) {
+        this.rueck02 = rueck02;
+    }
+
+
+    @Column(name="MASCHE03", precision=7, scale=4)
+    public BigDecimal getMasche03() {
+        return this.masche03;
+    }
+
+    public void setMasche03(BigDecimal masche03) {
+        this.masche03 = masche03;
+    }
+
+
+    @Column(name="RUECK03", precision=9, scale=3)
+    public BigDecimal getRueck03() {
+        return this.rueck03;
+    }
+
+    public void setRueck03(BigDecimal rueck03) {
+        this.rueck03 = rueck03;
+    }
+
+
+    @Column(name="MASCHE04", precision=7, scale=4)
+    public BigDecimal getMasche04() {
+        return this.masche04;
+    }
+
+    public void setMasche04(BigDecimal masche04) {
+        this.masche04 = masche04;
+    }
+
+
+    @Column(name="RUECK04", precision=9, scale=3)
+    public BigDecimal getRueck04() {
+        return this.rueck04;
+    }
+
+    public void setRueck04(BigDecimal rueck04) {
+        this.rueck04 = rueck04;
+    }
+
+
+    @Column(name="MASCHE05", precision=7, scale=4)
+    public BigDecimal getMasche05() {
+        return this.masche05;
+    }
+
+    public void setMasche05(BigDecimal masche05) {
+        this.masche05 = masche05;
+    }
+
+
+    @Column(name="RUECK05", precision=9, scale=3)
+    public BigDecimal getRueck05() {
+        return this.rueck05;
+    }
+
+    public void setRueck05(BigDecimal rueck05) {
+        this.rueck05 = rueck05;
+    }
+
+
+    @Column(name="MASCHE06", precision=7, scale=4)
+    public BigDecimal getMasche06() {
+        return this.masche06;
+    }
+
+    public void setMasche06(BigDecimal masche06) {
+        this.masche06 = masche06;
+    }
+
+
+    @Column(name="RUECK06", precision=9, scale=3)
+    public BigDecimal getRueck06() {
+        return this.rueck06;
+    }
+
+    public void setRueck06(BigDecimal rueck06) {
+        this.rueck06 = rueck06;
+    }
+
+
+    @Column(name="MASCHE07", precision=7, scale=4)
+    public BigDecimal getMasche07() {
+        return this.masche07;
+    }
+
+    public void setMasche07(BigDecimal masche07) {
+        this.masche07 = masche07;
+    }
+
+
+    @Column(name="RUECK07", precision=9, scale=3)
+    public BigDecimal getRueck07() {
+        return this.rueck07;
+    }
+
+    public void setRueck07(BigDecimal rueck07) {
+        this.rueck07 = rueck07;
+    }
+
+
+    @Column(name="MASCHE08", precision=7, scale=4)
+    public BigDecimal getMasche08() {
+        return this.masche08;
+    }
+
+    public void setMasche08(BigDecimal masche08) {
+        this.masche08 = masche08;
+    }
+
+
+    @Column(name="RUECK08", precision=9, scale=3)
+    public BigDecimal getRueck08() {
+        return this.rueck08;
+    }
+
+    public void setRueck08(BigDecimal rueck08) {
+        this.rueck08 = rueck08;
+    }
+
+
+    @Column(name="MASCHE09", precision=7, scale=4)
+    public BigDecimal getMasche09() {
+        return this.masche09;
+    }
+
+    public void setMasche09(BigDecimal masche09) {
+        this.masche09 = masche09;
+    }
+
+
+    @Column(name="RUECK09", precision=9, scale=3)
+    public BigDecimal getRueck09() {
+        return this.rueck09;
+    }
+
+    public void setRueck09(BigDecimal rueck09) {
+        this.rueck09 = rueck09;
+    }
+
+
+    @Column(name="MASCHE10", precision=7, scale=4)
+    public BigDecimal getMasche10() {
+        return this.masche10;
+    }
+
+    public void setMasche10(BigDecimal masche10) {
+        this.masche10 = masche10;
+    }
+
+
+    @Column(name="RUECK10", precision=9, scale=3)
+    public BigDecimal getRueck10() {
+        return this.rueck10;
+    }
+
+    public void setRueck10(BigDecimal rueck10) {
+        this.rueck10 = rueck10;
+    }
+
+
+    @Column(name="MASCHE11", precision=7, scale=4)
+    public BigDecimal getMasche11() {
+        return this.masche11;
+    }
+
+    public void setMasche11(BigDecimal masche11) {
+        this.masche11 = masche11;
+    }
+
+
+    @Column(name="RUECK11", precision=9, scale=3)
+    public BigDecimal getRueck11() {
+        return this.rueck11;
+    }
+
+    public void setRueck11(BigDecimal rueck11) {
+        this.rueck11 = rueck11;
+    }
+
+
+    @Column(name="MASCHE12", precision=7, scale=4)
+    public BigDecimal getMasche12() {
+        return this.masche12;
+    }
+
+    public void setMasche12(BigDecimal masche12) {
+        this.masche12 = masche12;
+    }
+
+
+    @Column(name="RUECK12", precision=9, scale=3)
+    public BigDecimal getRueck12() {
+        return this.rueck12;
+    }
+
+    public void setRueck12(BigDecimal rueck12) {
+        this.rueck12 = rueck12;
+    }
+
+
+    @Column(name="MASCHE13", precision=7, scale=4)
+    public BigDecimal getMasche13() {
+        return this.masche13;
+    }
+
+    public void setMasche13(BigDecimal masche13) {
+        this.masche13 = masche13;
+    }
+
+
+    @Column(name="RUECK13", precision=9, scale=3)
+    public BigDecimal getRueck13() {
+        return this.rueck13;
+    }
+
+    public void setRueck13(BigDecimal rueck13) {
+        this.rueck13 = rueck13;
+    }
+
+
+    @Column(name="MASCHE14", precision=7, scale=4)
+    public BigDecimal getMasche14() {
+        return this.masche14;
+    }
+
+    public void setMasche14(BigDecimal masche14) {
+        this.masche14 = masche14;
+    }
+
+
+    @Column(name="RUECK14", precision=9, scale=3)
+    public BigDecimal getRueck14() {
+        return this.rueck14;
+    }
+
+    public void setRueck14(BigDecimal rueck14) {
+        this.rueck14 = rueck14;
+    }
+
+
+    @Column(name="MASCHE15", precision=7, scale=4)
+    public BigDecimal getMasche15() {
+        return this.masche15;
+    }
+
+    public void setMasche15(BigDecimal masche15) {
+        this.masche15 = masche15;
+    }
+
+
+    @Column(name="RUECK15", precision=9, scale=3)
+    public BigDecimal getRueck15() {
+        return this.rueck15;
+    }
+
+    public void setRueck15(BigDecimal rueck15) {
+        this.rueck15 = rueck15;
+    }
+
+
+    @Column(name="MASCHE16", precision=7, scale=4)
+    public BigDecimal getMasche16() {
+        return this.masche16;
+    }
+
+    public void setMasche16(BigDecimal masche16) {
+        this.masche16 = masche16;
+    }
+
+
+    @Column(name="RUECK16", precision=9, scale=3)
+    public BigDecimal getRueck16() {
+        return this.rueck16;
+    }
+
+    public void setRueck16(BigDecimal rueck16) {
+        this.rueck16 = rueck16;
+    }
+
+
+    @Column(name="MASCHE17", precision=7, scale=4)
+    public BigDecimal getMasche17() {
+        return this.masche17;
+    }
+
+    public void setMasche17(BigDecimal masche17) {
+        this.masche17 = masche17;
+    }
+
+
+    @Column(name="RUECK17", precision=9, scale=3)
+    public BigDecimal getRueck17() {
+        return this.rueck17;
+    }
+
+    public void setRueck17(BigDecimal rueck17) {
+        this.rueck17 = rueck17;
+    }
+
+
+    @Column(name="MASCHE18", precision=7, scale=4)
+    public BigDecimal getMasche18() {
+        return this.masche18;
+    }
+
+    public void setMasche18(BigDecimal masche18) {
+        this.masche18 = masche18;
+    }
+
+
+    @Column(name="RUECK18", precision=9, scale=3)
+    public BigDecimal getRueck18() {
+        return this.rueck18;
+    }
+
+    public void setRueck18(BigDecimal rueck18) {
+        this.rueck18 = rueck18;
+    }
+
+
+    @Column(name="MASCHE19", precision=7, scale=4)
+    public BigDecimal getMasche19() {
+        return this.masche19;
+    }
+
+    public void setMasche19(BigDecimal masche19) {
+        this.masche19 = masche19;
+    }
+
+
+    @Column(name="RUECK19", precision=9, scale=3)
+    public BigDecimal getRueck19() {
+        return this.rueck19;
+    }
+
+    public void setRueck19(BigDecimal rueck19) {
+        this.rueck19 = rueck19;
+    }
+
+
+    @Column(name="MASCHE20", precision=7, scale=4)
+    public BigDecimal getMasche20() {
+        return this.masche20;
+    }
+
+    public void setMasche20(BigDecimal masche20) {
+        this.masche20 = masche20;
+    }
+
+
+    @Column(name="RUECK20", precision=9, scale=3)
+    public BigDecimal getRueck20() {
+        return this.rueck20;
+    }
+
+    public void setRueck20(BigDecimal rueck20) {
+        this.rueck20 = rueck20;
+    }
+
+
+    @Column(name="MASCHE21", precision=7, scale=4)
+    public BigDecimal getMasche21() {
+        return this.masche21;
+    }
+
+    public void setMasche21(BigDecimal masche21) {
+        this.masche21 = masche21;
+    }
+
+
+    @Column(name="RUECK21", precision=9, scale=3)
+    public BigDecimal getRueck21() {
+        return this.rueck21;
+    }
+
+    public void setRueck21(BigDecimal rueck21) {
+        this.rueck21 = rueck21;
+    }
+
+
+    @Column(name="REST", precision=9, scale=3)
+    public BigDecimal getRest() {
+        return this.rest;
+    }
+
+    public void setRest(BigDecimal rest) {
+        this.rest = rest;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof GsiebungsiebId) ) return false;
+         GsiebungsiebId castOther = ( GsiebungsiebId ) other;
+
+         return (this.getGlotrechteid()==castOther.getGlotrechteid())
+ && (this.getGsiebsatzid()==castOther.getGsiebsatzid())
+ && ( (this.getGmasse()==castOther.getGmasse()) || ( this.getGmasse()!=null && castOther.getGmasse()!=null && this.getGmasse().equals(castOther.getGmasse()) ) )
+ && ( (this.getMasche01()==castOther.getMasche01()) || ( this.getMasche01()!=null && castOther.getMasche01()!=null && this.getMasche01().equals(castOther.getMasche01()) ) )
+ && ( (this.getRueck01()==castOther.getRueck01()) || ( this.getRueck01()!=null && castOther.getRueck01()!=null && this.getRueck01().equals(castOther.getRueck01()) ) )
+ && ( (this.getMasche02()==castOther.getMasche02()) || ( this.getMasche02()!=null && castOther.getMasche02()!=null && this.getMasche02().equals(castOther.getMasche02()) ) )
+ && ( (this.getRueck02()==castOther.getRueck02()) || ( this.getRueck02()!=null && castOther.getRueck02()!=null && this.getRueck02().equals(castOther.getRueck02()) ) )
+ && ( (this.getMasche03()==castOther.getMasche03()) || ( this.getMasche03()!=null && castOther.getMasche03()!=null && this.getMasche03().equals(castOther.getMasche03()) ) )
+ && ( (this.getRueck03()==castOther.getRueck03()) || ( this.getRueck03()!=null && castOther.getRueck03()!=null && this.getRueck03().equals(castOther.getRueck03()) ) )
+ && ( (this.getMasche04()==castOther.getMasche04()) || ( this.getMasche04()!=null && castOther.getMasche04()!=null && this.getMasche04().equals(castOther.getMasche04()) ) )
+ && ( (this.getRueck04()==castOther.getRueck04()) || ( this.getRueck04()!=null && castOther.getRueck04()!=null && this.getRueck04().equals(castOther.getRueck04()) ) )
+ && ( (this.getMasche05()==castOther.getMasche05()) || ( this.getMasche05()!=null && castOther.getMasche05()!=null && this.getMasche05().equals(castOther.getMasche05()) ) )
+ && ( (this.getRueck05()==castOther.getRueck05()) || ( this.getRueck05()!=null && castOther.getRueck05()!=null && this.getRueck05().equals(castOther.getRueck05()) ) )
+ && ( (this.getMasche06()==castOther.getMasche06()) || ( this.getMasche06()!=null && castOther.getMasche06()!=null && this.getMasche06().equals(castOther.getMasche06()) ) )
+ && ( (this.getRueck06()==castOther.getRueck06()) || ( this.getRueck06()!=null && castOther.getRueck06()!=null && this.getRueck06().equals(castOther.getRueck06()) ) )
+ && ( (this.getMasche07()==castOther.getMasche07()) || ( this.getMasche07()!=null && castOther.getMasche07()!=null && this.getMasche07().equals(castOther.getMasche07()) ) )
+ && ( (this.getRueck07()==castOther.getRueck07()) || ( this.getRueck07()!=null && castOther.getRueck07()!=null && this.getRueck07().equals(castOther.getRueck07()) ) )
+ && ( (this.getMasche08()==castOther.getMasche08()) || ( this.getMasche08()!=null && castOther.getMasche08()!=null && this.getMasche08().equals(castOther.getMasche08()) ) )
+ && ( (this.getRueck08()==castOther.getRueck08()) || ( this.getRueck08()!=null && castOther.getRueck08()!=null && this.getRueck08().equals(castOther.getRueck08()) ) )
+ && ( (this.getMasche09()==castOther.getMasche09()) || ( this.getMasche09()!=null && castOther.getMasche09()!=null && this.getMasche09().equals(castOther.getMasche09()) ) )
+ && ( (this.getRueck09()==castOther.getRueck09()) || ( this.getRueck09()!=null && castOther.getRueck09()!=null && this.getRueck09().equals(castOther.getRueck09()) ) )
+ && ( (this.getMasche10()==castOther.getMasche10()) || ( this.getMasche10()!=null && castOther.getMasche10()!=null && this.getMasche10().equals(castOther.getMasche10()) ) )
+ && ( (this.getRueck10()==castOther.getRueck10()) || ( this.getRueck10()!=null && castOther.getRueck10()!=null && this.getRueck10().equals(castOther.getRueck10()) ) )
+ && ( (this.getMasche11()==castOther.getMasche11()) || ( this.getMasche11()!=null && castOther.getMasche11()!=null && this.getMasche11().equals(castOther.getMasche11()) ) )
+ && ( (this.getRueck11()==castOther.getRueck11()) || ( this.getRueck11()!=null && castOther.getRueck11()!=null && this.getRueck11().equals(castOther.getRueck11()) ) )
+ && ( (this.getMasche12()==castOther.getMasche12()) || ( this.getMasche12()!=null && castOther.getMasche12()!=null && this.getMasche12().equals(castOther.getMasche12()) ) )
+ && ( (this.getRueck12()==castOther.getRueck12()) || ( this.getRueck12()!=null && castOther.getRueck12()!=null && this.getRueck12().equals(castOther.getRueck12()) ) )
+ && ( (this.getMasche13()==castOther.getMasche13()) || ( this.getMasche13()!=null && castOther.getMasche13()!=null && this.getMasche13().equals(castOther.getMasche13()) ) )
+ && ( (this.getRueck13()==castOther.getRueck13()) || ( this.getRueck13()!=null && castOther.getRueck13()!=null && this.getRueck13().equals(castOther.getRueck13()) ) )
+ && ( (this.getMasche14()==castOther.getMasche14()) || ( this.getMasche14()!=null && castOther.getMasche14()!=null && this.getMasche14().equals(castOther.getMasche14()) ) )
+ && ( (this.getRueck14()==castOther.getRueck14()) || ( this.getRueck14()!=null && castOther.getRueck14()!=null && this.getRueck14().equals(castOther.getRueck14()) ) )
+ && ( (this.getMasche15()==castOther.getMasche15()) || ( this.getMasche15()!=null && castOther.getMasche15()!=null && this.getMasche15().equals(castOther.getMasche15()) ) )
+ && ( (this.getRueck15()==castOther.getRueck15()) || ( this.getRueck15()!=null && castOther.getRueck15()!=null && this.getRueck15().equals(castOther.getRueck15()) ) )
+ && ( (this.getMasche16()==castOther.getMasche16()) || ( this.getMasche16()!=null && castOther.getMasche16()!=null && this.getMasche16().equals(castOther.getMasche16()) ) )
+ && ( (this.getRueck16()==castOther.getRueck16()) || ( this.getRueck16()!=null && castOther.getRueck16()!=null && this.getRueck16().equals(castOther.getRueck16()) ) )
+ && ( (this.getMasche17()==castOther.getMasche17()) || ( this.getMasche17()!=null && castOther.getMasche17()!=null && this.getMasche17().equals(castOther.getMasche17()) ) )
+ && ( (this.getRueck17()==castOther.getRueck17()) || ( this.getRueck17()!=null && castOther.getRueck17()!=null && this.getRueck17().equals(castOther.getRueck17()) ) )
+ && ( (this.getMasche18()==castOther.getMasche18()) || ( this.getMasche18()!=null && castOther.getMasche18()!=null && this.getMasche18().equals(castOther.getMasche18()) ) )
+ && ( (this.getRueck18()==castOther.getRueck18()) || ( this.getRueck18()!=null && castOther.getRueck18()!=null && this.getRueck18().equals(castOther.getRueck18()) ) )
+ && ( (this.getMasche19()==castOther.getMasche19()) || ( this.getMasche19()!=null && castOther.getMasche19()!=null && this.getMasche19().equals(castOther.getMasche19()) ) )
+ && ( (this.getRueck19()==castOther.getRueck19()) || ( this.getRueck19()!=null && castOther.getRueck19()!=null && this.getRueck19().equals(castOther.getRueck19()) ) )
+ && ( (this.getMasche20()==castOther.getMasche20()) || ( this.getMasche20()!=null && castOther.getMasche20()!=null && this.getMasche20().equals(castOther.getMasche20()) ) )
+ && ( (this.getRueck20()==castOther.getRueck20()) || ( this.getRueck20()!=null && castOther.getRueck20()!=null && this.getRueck20().equals(castOther.getRueck20()) ) )
+ && ( (this.getMasche21()==castOther.getMasche21()) || ( this.getMasche21()!=null && castOther.getMasche21()!=null && this.getMasche21().equals(castOther.getMasche21()) ) )
+ && ( (this.getRueck21()==castOther.getRueck21()) || ( this.getRueck21()!=null && castOther.getRueck21()!=null && this.getRueck21().equals(castOther.getRueck21()) ) )
+ && ( (this.getRest()==castOther.getRest()) || ( this.getRest()!=null && castOther.getRest()!=null && this.getRest().equals(castOther.getRest()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getGlotrechteid();
+         result = 37 * result + (int) this.getGsiebsatzid();
+         result = 37 * result + ( getGmasse() == null ? 0 : this.getGmasse().hashCode() );
+         result = 37 * result + ( getMasche01() == null ? 0 : this.getMasche01().hashCode() );
+         result = 37 * result + ( getRueck01() == null ? 0 : this.getRueck01().hashCode() );
+         result = 37 * result + ( getMasche02() == null ? 0 : this.getMasche02().hashCode() );
+         result = 37 * result + ( getRueck02() == null ? 0 : this.getRueck02().hashCode() );
+         result = 37 * result + ( getMasche03() == null ? 0 : this.getMasche03().hashCode() );
+         result = 37 * result + ( getRueck03() == null ? 0 : this.getRueck03().hashCode() );
+         result = 37 * result + ( getMasche04() == null ? 0 : this.getMasche04().hashCode() );
+         result = 37 * result + ( getRueck04() == null ? 0 : this.getRueck04().hashCode() );
+         result = 37 * result + ( getMasche05() == null ? 0 : this.getMasche05().hashCode() );
+         result = 37 * result + ( getRueck05() == null ? 0 : this.getRueck05().hashCode() );
+         result = 37 * result + ( getMasche06() == null ? 0 : this.getMasche06().hashCode() );
+         result = 37 * result + ( getRueck06() == null ? 0 : this.getRueck06().hashCode() );
+         result = 37 * result + ( getMasche07() == null ? 0 : this.getMasche07().hashCode() );
+         result = 37 * result + ( getRueck07() == null ? 0 : this.getRueck07().hashCode() );
+         result = 37 * result + ( getMasche08() == null ? 0 : this.getMasche08().hashCode() );
+         result = 37 * result + ( getRueck08() == null ? 0 : this.getRueck08().hashCode() );
+         result = 37 * result + ( getMasche09() == null ? 0 : this.getMasche09().hashCode() );
+         result = 37 * result + ( getRueck09() == null ? 0 : this.getRueck09().hashCode() );
+         result = 37 * result + ( getMasche10() == null ? 0 : this.getMasche10().hashCode() );
+         result = 37 * result + ( getRueck10() == null ? 0 : this.getRueck10().hashCode() );
+         result = 37 * result + ( getMasche11() == null ? 0 : this.getMasche11().hashCode() );
+         result = 37 * result + ( getRueck11() == null ? 0 : this.getRueck11().hashCode() );
+         result = 37 * result + ( getMasche12() == null ? 0 : this.getMasche12().hashCode() );
+         result = 37 * result + ( getRueck12() == null ? 0 : this.getRueck12().hashCode() );
+         result = 37 * result + ( getMasche13() == null ? 0 : this.getMasche13().hashCode() );
+         result = 37 * result + ( getRueck13() == null ? 0 : this.getRueck13().hashCode() );
+         result = 37 * result + ( getMasche14() == null ? 0 : this.getMasche14().hashCode() );
+         result = 37 * result + ( getRueck14() == null ? 0 : this.getRueck14().hashCode() );
+         result = 37 * result + ( getMasche15() == null ? 0 : this.getMasche15().hashCode() );
+         result = 37 * result + ( getRueck15() == null ? 0 : this.getRueck15().hashCode() );
+         result = 37 * result + ( getMasche16() == null ? 0 : this.getMasche16().hashCode() );
+         result = 37 * result + ( getRueck16() == null ? 0 : this.getRueck16().hashCode() );
+         result = 37 * result + ( getMasche17() == null ? 0 : this.getMasche17().hashCode() );
+         result = 37 * result + ( getRueck17() == null ? 0 : this.getRueck17().hashCode() );
+         result = 37 * result + ( getMasche18() == null ? 0 : this.getMasche18().hashCode() );
+         result = 37 * result + ( getRueck18() == null ? 0 : this.getRueck18().hashCode() );
+         result = 37 * result + ( getMasche19() == null ? 0 : this.getMasche19().hashCode() );
+         result = 37 * result + ( getRueck19() == null ? 0 : this.getRueck19().hashCode() );
+         result = 37 * result + ( getMasche20() == null ? 0 : this.getMasche20().hashCode() );
+         result = 37 * result + ( getRueck20() == null ? 0 : this.getRueck20().hashCode() );
+         result = 37 * result + ( getMasche21() == null ? 0 : this.getMasche21().hashCode() );
+         result = 37 * result + ( getRueck21() == null ? 0 : this.getRueck21().hashCode() );
+         result = 37 * result + ( getRest() == null ? 0 : this.getRest().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Hpeilpunkt.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Hpeilpunkt generated by hbm2java
+ */
+@Entity
+@Table(name="HPEILPUNKT"
+    ,schema="SEDDB"
+)
+public class Hpeilpunkt  implements java.io.Serializable {
+
+
+     private HpeilpunktId id;
+     private Hpeilung hpeilung;
+     private BigDecimal y;
+     private BigDecimal z;
+
+    public Hpeilpunkt() {
+    }
+
+    public Hpeilpunkt(HpeilpunktId id, Hpeilung hpeilung, BigDecimal y, BigDecimal z) {
+       this.id = id;
+       this.hpeilung = hpeilung;
+       this.y = y;
+       this.z = z;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="hpeilungid", column=@Column(name="HPEILUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="punktnr", column=@Column(name="PUNKTNR", nullable=false, precision=5, scale=0) ) } )
+    public HpeilpunktId getId() {
+        return this.id;
+    }
+
+    public void setId(HpeilpunktId id) {
+        this.id = id;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="HPEILUNGID", nullable=false, insertable=false, updatable=false)
+    public Hpeilung getHpeilung() {
+        return this.hpeilung;
+    }
+
+    public void setHpeilung(Hpeilung hpeilung) {
+        this.hpeilung = hpeilung;
+    }
+
+
+    @Column(name="Y", nullable=false, precision=8, scale=3)
+    public BigDecimal getY() {
+        return this.y;
+    }
+
+    public void setY(BigDecimal y) {
+        this.y = y;
+    }
+
+
+    @Column(name="Z", nullable=false, precision=8, scale=3)
+    public BigDecimal getZ() {
+        return this.z;
+    }
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/HpeilpunktId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * HpeilpunktId generated by hbm2java
+ */
+@Embeddable
+public class HpeilpunktId  implements java.io.Serializable {
+
+
+     private long hpeilungid;
+     private int punktnr;
+
+    public HpeilpunktId() {
+    }
+
+    public HpeilpunktId(long hpeilungid, int punktnr) {
+       this.hpeilungid = hpeilungid;
+       this.punktnr = punktnr;
+    }
+
+
+
+    @Column(name="HPEILUNGID", nullable=false, precision=11, scale=0)
+    public long getHpeilungid() {
+        return this.hpeilungid;
+    }
+
+    public void setHpeilungid(long hpeilungid) {
+        this.hpeilungid = hpeilungid;
+    }
+
+
+    @Column(name="PUNKTNR", nullable=false, precision=5, scale=0)
+    public int getPunktnr() {
+        return this.punktnr;
+    }
+
+    public void setPunktnr(int punktnr) {
+        this.punktnr = punktnr;
+    }
+
+
+   @Override public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof HpeilpunktId) ) return false;
+         HpeilpunktId castOther = ( HpeilpunktId ) other;
+
+         return (this.getHpeilungid()==castOther.getHpeilungid())
+ && (this.getPunktnr()==castOther.getPunktnr());
+   }
+
+   @Override public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getHpeilungid();
+         result = 37 * result + this.getPunktnr();
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Hpeilung.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,463 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+/**
+ * Hpeilung generated by hbm2java
+ */
+@Entity
+@Table(name="HPEILUNG"
+    ,schema="SEDDB"
+)
+public class Hpeilung  implements java.io.Serializable {
+
+
+     private long hpeilungid;
+     private Station station;
+     private BigDecimal km;
+     private Date datum;
+     private String glwname;
+     private BigDecimal glwhoehe;
+     private BigDecimal buliabst;
+     private BigDecimal bulihoehe;
+     private BigDecimal bulifuss;
+     private BigDecimal buliruecken;
+     private BigDecimal bureabst;
+     private BigDecimal burehoehe;
+     private BigDecimal burefuss;
+     private BigDecimal bureruecken;
+     private BigDecimal fahrliabst;
+     private BigDecimal fahrreabst;
+     private BigDecimal fahrtief;
+     private BigDecimal ausbliabst;
+     private BigDecimal ausbreabst;
+     private BigDecimal achseabst;
+     private BigDecimal teilvonabst;
+     private BigDecimal teilbisabst;
+     private BigDecimal hmabst;
+     private BigDecimal lgkrechts;
+     private BigDecimal lgkhoch;
+     private BigDecimal lgkhoehe;
+     private BigDecimal rgkrechts;
+     private BigDecimal rgkhoch;
+     private BigDecimal rgkhoehe;
+     private String bemerkung;
+     private Date lastupdated;
+     private Long oldprfid;
+     private BigDecimal mitteabst;
+     private Set<Hpeilpunkt> hpeilpunkts = new HashSet<Hpeilpunkt>(0);
+     private Set<Messung> messungs = new HashSet<Messung>(0);
+
+    public Hpeilung() {
+    }
+
+    public Hpeilung(long hpeilungid, Station station, BigDecimal km, Date datum, Date lastupdated) {
+        this.hpeilungid = hpeilungid;
+        this.station = station;
+        this.km = km;
+        this.datum = datum;
+        this.lastupdated = lastupdated;
+    }
+    public Hpeilung(long hpeilungid, Station station, BigDecimal km, Date datum, String glwname, BigDecimal glwhoehe, BigDecimal buliabst, BigDecimal bulihoehe, BigDecimal bulifuss, BigDecimal buliruecken, BigDecimal bureabst, BigDecimal burehoehe, BigDecimal burefuss, BigDecimal bureruecken, BigDecimal fahrliabst, BigDecimal fahrreabst, BigDecimal fahrtief, BigDecimal ausbliabst, BigDecimal ausbreabst, BigDecimal achseabst, BigDecimal teilvonabst, BigDecimal teilbisabst, BigDecimal hmabst, BigDecimal lgkrechts, BigDecimal lgkhoch, BigDecimal lgkhoehe, BigDecimal rgkrechts, BigDecimal rgkhoch, BigDecimal rgkhoehe, String bemerkung, Date lastupdated, Long oldprfid, BigDecimal mitteabst, Set<Hpeilpunkt> hpeilpunkts, Set<Messung> messungs) {
+       this.hpeilungid = hpeilungid;
+       this.station = station;
+       this.km = km;
+       this.datum = datum;
+       this.glwname = glwname;
+       this.glwhoehe = glwhoehe;
+       this.buliabst = buliabst;
+       this.bulihoehe = bulihoehe;
+       this.bulifuss = bulifuss;
+       this.buliruecken = buliruecken;
+       this.bureabst = bureabst;
+       this.burehoehe = burehoehe;
+       this.burefuss = burefuss;
+       this.bureruecken = bureruecken;
+       this.fahrliabst = fahrliabst;
+       this.fahrreabst = fahrreabst;
+       this.fahrtief = fahrtief;
+       this.ausbliabst = ausbliabst;
+       this.ausbreabst = ausbreabst;
+       this.achseabst = achseabst;
+       this.teilvonabst = teilvonabst;
+       this.teilbisabst = teilbisabst;
+       this.hmabst = hmabst;
+       this.lgkrechts = lgkrechts;
+       this.lgkhoch = lgkhoch;
+       this.lgkhoehe = lgkhoehe;
+       this.rgkrechts = rgkrechts;
+       this.rgkhoch = rgkhoch;
+       this.rgkhoehe = rgkhoehe;
+       this.bemerkung = bemerkung;
+       this.lastupdated = lastupdated;
+       this.oldprfid = oldprfid;
+       this.mitteabst = mitteabst;
+       this.hpeilpunkts = hpeilpunkts;
+       this.messungs = messungs;
+    }
+
+     @Id
+
+
+    @Column(name="HPEILUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getHpeilungid() {
+        return this.hpeilungid;
+    }
+
+    public void setHpeilungid(long hpeilungid) {
+        this.hpeilungid = hpeilungid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="STATIONID", nullable=false)
+    public Station getStation() {
+        return this.station;
+    }
+
+    public void setStation(Station station) {
+        this.station = station;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="DATUM", nullable=false, length=7)
+    public Date getDatum() {
+        return this.datum;
+    }
+
+    public void setDatum(Date datum) {
+        this.datum = datum;
+    }
+
+
+    @Column(name="GLWNAME", length=8)
+    public String getGlwname() {
+        return this.glwname;
+    }
+
+    public void setGlwname(String glwname) {
+        this.glwname = glwname;
+    }
+
+
+    @Column(name="GLWHOEHE", precision=8, scale=3)
+    public BigDecimal getGlwhoehe() {
+        return this.glwhoehe;
+    }
+
+    public void setGlwhoehe(BigDecimal glwhoehe) {
+        this.glwhoehe = glwhoehe;
+    }
+
+
+    @Column(name="BULIABST", precision=8, scale=3)
+    public BigDecimal getBuliabst() {
+        return this.buliabst;
+    }
+
+    public void setBuliabst(BigDecimal buliabst) {
+        this.buliabst = buliabst;
+    }
+
+
+    @Column(name="BULIHOEHE", precision=8, scale=3)
+    public BigDecimal getBulihoehe() {
+        return this.bulihoehe;
+    }
+
+    public void setBulihoehe(BigDecimal bulihoehe) {
+        this.bulihoehe = bulihoehe;
+    }
+
+
+    @Column(name="BULIFUSS", precision=8, scale=3)
+    public BigDecimal getBulifuss() {
+        return this.bulifuss;
+    }
+
+    public void setBulifuss(BigDecimal bulifuss) {
+        this.bulifuss = bulifuss;
+    }
+
+
+    @Column(name="BULIRUECKEN", precision=8, scale=3)
+    public BigDecimal getBuliruecken() {
+        return this.buliruecken;
+    }
+
+    public void setBuliruecken(BigDecimal buliruecken) {
+        this.buliruecken = buliruecken;
+    }
+
+
+    @Column(name="BUREABST", precision=8, scale=3)
+    public BigDecimal getBureabst() {
+        return this.bureabst;
+    }
+
+    public void setBureabst(BigDecimal bureabst) {
+        this.bureabst = bureabst;
+    }
+
+
+    @Column(name="BUREHOEHE", precision=8, scale=3)
+    public BigDecimal getBurehoehe() {
+        return this.burehoehe;
+    }
+
+    public void setBurehoehe(BigDecimal burehoehe) {
+        this.burehoehe = burehoehe;
+    }
+
+
+    @Column(name="BUREFUSS", precision=8, scale=3)
+    public BigDecimal getBurefuss() {
+        return this.burefuss;
+    }
+
+    public void setBurefuss(BigDecimal burefuss) {
+        this.burefuss = burefuss;
+    }
+
+
+    @Column(name="BURERUECKEN", precision=8, scale=3)
+    public BigDecimal getBureruecken() {
+        return this.bureruecken;
+    }
+
+    public void setBureruecken(BigDecimal bureruecken) {
+        this.bureruecken = bureruecken;
+    }
+
+
+    @Column(name="FAHRLIABST", precision=8, scale=3)
+    public BigDecimal getFahrliabst() {
+        return this.fahrliabst;
+    }
+
+    public void setFahrliabst(BigDecimal fahrliabst) {
+        this.fahrliabst = fahrliabst;
+    }
+
+
+    @Column(name="FAHRREABST", precision=8, scale=3)
+    public BigDecimal getFahrreabst() {
+        return this.fahrreabst;
+    }
+
+    public void setFahrreabst(BigDecimal fahrreabst) {
+        this.fahrreabst = fahrreabst;
+    }
+
+
+    @Column(name="FAHRTIEF", precision=8, scale=3)
+    public BigDecimal getFahrtief() {
+        return this.fahrtief;
+    }
+
+    public void setFahrtief(BigDecimal fahrtief) {
+        this.fahrtief = fahrtief;
+    }
+
+
+    @Column(name="AUSBLIABST", precision=8, scale=3)
+    public BigDecimal getAusbliabst() {
+        return this.ausbliabst;
+    }
+
+    public void setAusbliabst(BigDecimal ausbliabst) {
+        this.ausbliabst = ausbliabst;
+    }
+
+
+    @Column(name="AUSBREABST", precision=8, scale=3)
+    public BigDecimal getAusbreabst() {
+        return this.ausbreabst;
+    }
+
+    public void setAusbreabst(BigDecimal ausbreabst) {
+        this.ausbreabst = ausbreabst;
+    }
+
+
+    @Column(name="ACHSEABST", precision=8, scale=3)
+    public BigDecimal getAchseabst() {
+        return this.achseabst;
+    }
+
+    public void setAchseabst(BigDecimal achseabst) {
+        this.achseabst = achseabst;
+    }
+
+
+    @Column(name="TEILVONABST", precision=8, scale=3)
+    public BigDecimal getTeilvonabst() {
+        return this.teilvonabst;
+    }
+
+    public void setTeilvonabst(BigDecimal teilvonabst) {
+        this.teilvonabst = teilvonabst;
+    }
+
+
+    @Column(name="TEILBISABST", precision=8, scale=3)
+    public BigDecimal getTeilbisabst() {
+        return this.teilbisabst;
+    }
+
+    public void setTeilbisabst(BigDecimal teilbisabst) {
+        this.teilbisabst = teilbisabst;
+    }
+
+
+    @Column(name="HMABST", precision=8, scale=3)
+    public BigDecimal getHmabst() {
+        return this.hmabst;
+    }
+
+    public void setHmabst(BigDecimal hmabst) {
+        this.hmabst = hmabst;
+    }
+
+
+    @Column(name="LGKRECHTS", precision=11, scale=3)
+    public BigDecimal getLgkrechts() {
+        return this.lgkrechts;
+    }
+
+    public void setLgkrechts(BigDecimal lgkrechts) {
+        this.lgkrechts = lgkrechts;
+    }
+
+
+    @Column(name="LGKHOCH", precision=11, scale=3)
+    public BigDecimal getLgkhoch() {
+        return this.lgkhoch;
+    }
+
+    public void setLgkhoch(BigDecimal lgkhoch) {
+        this.lgkhoch = lgkhoch;
+    }
+
+
+    @Column(name="LGKHOEHE", precision=8, scale=3)
+    public BigDecimal getLgkhoehe() {
+        return this.lgkhoehe;
+    }
+
+    public void setLgkhoehe(BigDecimal lgkhoehe) {
+        this.lgkhoehe = lgkhoehe;
+    }
+
+
+    @Column(name="RGKRECHTS", precision=11, scale=3)
+    public BigDecimal getRgkrechts() {
+        return this.rgkrechts;
+    }
+
+    public void setRgkrechts(BigDecimal rgkrechts) {
+        this.rgkrechts = rgkrechts;
+    }
+
+
+    @Column(name="RGKHOCH", precision=11, scale=3)
+    public BigDecimal getRgkhoch() {
+        return this.rgkhoch;
+    }
+
+    public void setRgkhoch(BigDecimal rgkhoch) {
+        this.rgkhoch = rgkhoch;
+    }
+
+
+    @Column(name="RGKHOEHE", precision=8, scale=3)
+    public BigDecimal getRgkhoehe() {
+        return this.rgkhoehe;
+    }
+
+    public void setRgkhoehe(BigDecimal rgkhoehe) {
+        this.rgkhoehe = rgkhoehe;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="LASTUPDATED", nullable=false, length=7)
+    public Date getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Date lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+
+    @Column(name="OLDPRFID", precision=11, scale=0)
+    public Long getOldprfid() {
+        return this.oldprfid;
+    }
+
+    public void setOldprfid(Long oldprfid) {
+        this.oldprfid = oldprfid;
+    }
+
+
+    @Column(name="MITTEABST", precision=8, scale=3)
+    public BigDecimal getMitteabst() {
+        return this.mitteabst;
+    }
+
+    public void setMitteabst(BigDecimal mitteabst) {
+        this.mitteabst = mitteabst;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="hpeilung")
+    public Set<Hpeilpunkt> getHpeilpunkts() {
+        return this.hpeilpunkts;
+    }
+
+    public void setHpeilpunkts(Set<Hpeilpunkt> hpeilpunkts) {
+        this.hpeilpunkts = hpeilpunkts;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="hpeilung")
+    public Set<Messung> getMessungs() {
+        return this.messungs;
+    }
+
+    public void setMessungs(Set<Messung> messungs) {
+        this.messungs = messungs;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Messung.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,995 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+/**
+ * Messung generated by hbm2java
+ */
+@Entity
+@Table(name="MESSUNG"
+    ,schema="SEDDB"
+)
+public class Messung  implements java.io.Serializable {
+
+     private long messungid;
+     private Station station;
+     private Gsiebsatz gsiebsatz;
+     private Gfaenger gfaenger;
+     private Hpeilung hpeilung;
+     private Date datum;
+     private Long mpeilungid;
+     private int messnr;
+     private String fgue;
+     private Date uhrvon;
+     private Date uhrbis;
+     private BigDecimal wspcm;
+     private BigDecimal QBpegel;
+     private BigDecimal wspnn;
+     private BigDecimal ie;
+     private BigDecimal wtemp;
+     private BigDecimal gbreite;
+     private BigDecimal gbreitevon;
+     private BigDecimal gbreitebis;
+     private BigDecimal tgeschiebe;
+     private BigDecimal tsand;
+     private BigDecimal tschweb;
+     private BigDecimal cschweb;
+     private BigDecimal uferliabst;
+     private BigDecimal uferreabst;
+     private BigDecimal q;
+     private BigDecimal AHpeil;
+     private BigDecimal AMpeil;
+     private BigDecimal b;
+     private BigDecimal hm;
+     private BigDecimal vm;
+     private BigDecimal vsohle;
+     private BigDecimal tau;
+     private BigDecimal tauv;
+     private BigDecimal teilQ;
+     private BigDecimal teilAHpeil;
+     private BigDecimal teilAMpeil;
+     private BigDecimal teilB;
+     private BigDecimal teilHm;
+     private BigDecimal teilVm;
+     private BigDecimal teilVsohle;
+     private BigDecimal teilTau;
+     private BigDecimal teilTauv;
+     private Boolean mitteltyp;
+     private BigDecimal dm;
+     private BigDecimal sk;
+     private BigDecimal so;
+     private BigDecimal u;
+     private BigDecimal d90;
+     private BigDecimal d84;
+     private BigDecimal d80;
+     private BigDecimal d75;
+     private BigDecimal d70;
+     private BigDecimal d60;
+     private BigDecimal d50;
+     private BigDecimal d40;
+     private BigDecimal d30;
+     private BigDecimal d25;
+     private BigDecimal d20;
+     private BigDecimal d16;
+     private BigDecimal d10;
+     private BigDecimal dmin;
+     private BigDecimal durchdmin;
+     private BigDecimal dmax;
+     private BigDecimal durchdmax;
+     private int NGeschieb;
+     private int NVielpkt;
+     private Date sysDate;
+     private String bemerkung;
+     private Date lastupdated;
+     private BigDecimal km;
+     private BigDecimal glotabstoffset;
+     private BigDecimal slotabstoffset;
+     private BigDecimal gbreitevonlinks;
+     private BigDecimal gbreitebislinks;
+     private Boolean glotuferabstistvonlinks;
+     private Boolean slotuferabstistvonlinks;
+     private Set<Glotrechte> glotrechtes = new HashSet<Glotrechte>(0);
+     private Set<Slotrechte> slotrechtes = new HashSet<Slotrechte>(0);
+
+    public Messung() {
+    }
+
+
+    public Messung(long messungid, Station station, Date datum, int messnr, int NGeschieb, int NVielpkt, Date lastupdated, BigDecimal glotabstoffset, BigDecimal slotabstoffset) {
+        this.messungid = messungid;
+        this.station = station;
+        this.datum = datum;
+        this.messnr = messnr;
+        this.NGeschieb = NGeschieb;
+        this.NVielpkt = NVielpkt;
+        this.lastupdated = lastupdated;
+        this.glotabstoffset = glotabstoffset;
+        this.slotabstoffset = slotabstoffset;
+    }
+    public Messung(long messungid, Station station, Gsiebsatz gsiebsatz, Gfaenger gfaenger, Hpeilung hpeilung, Date datum, Long mpeilungid, int messnr, String fgue, Date uhrvon, Date uhrbis, BigDecimal wspcm, BigDecimal QBpegel, BigDecimal wspnn, BigDecimal ie, BigDecimal wtemp, BigDecimal gbreite, BigDecimal gbreitevon, BigDecimal gbreitebis, BigDecimal tgeschiebe, BigDecimal tsand, BigDecimal tschweb, BigDecimal cschweb, BigDecimal uferliabst, BigDecimal uferreabst, BigDecimal q, BigDecimal AHpeil, BigDecimal AMpeil, BigDecimal b, BigDecimal hm, BigDecimal vm, BigDecimal vsohle, BigDecimal tau, BigDecimal tauv, BigDecimal teilQ, BigDecimal teilAHpeil, BigDecimal teilAMpeil, BigDecimal teilB, BigDecimal teilHm, BigDecimal teilVm, BigDecimal teilVsohle, BigDecimal teilTau, BigDecimal teilTauv, Boolean mitteltyp, BigDecimal dm, BigDecimal sk, BigDecimal so, BigDecimal u, BigDecimal d90, BigDecimal d84, BigDecimal d80, BigDecimal d75, BigDecimal d70, BigDecimal d60, BigDecimal d50, BigDecimal d40, BigDecimal d30, BigDecimal d25, BigDecimal d20, BigDecimal d16, BigDecimal d10, BigDecimal dmin, BigDecimal durchdmin, BigDecimal dmax, BigDecimal durchdmax, int NGeschieb, int NVielpkt, Date sysDate, String bemerkung, Date lastupdated, BigDecimal km, BigDecimal glotabstoffset, BigDecimal slotabstoffset, BigDecimal gbreitevonlinks, BigDecimal gbreitebislinks, Boolean glotuferabstistvonlinks, Boolean slotuferabstistvonlinks, Set<Glotrechte> glotrechtes, Set<Slotrechte> slotrechtes) {
+       this.messungid = messungid;
+       this.station = station;
+       this.gsiebsatz = gsiebsatz;
+       this.gfaenger = gfaenger;
+       this.hpeilung = hpeilung;
+       this.datum = datum;
+       this.mpeilungid = mpeilungid;
+       this.messnr = messnr;
+       this.fgue = fgue;
+       this.uhrvon = uhrvon;
+       this.uhrbis = uhrbis;
+       this.wspcm = wspcm;
+       this.QBpegel = QBpegel;
+       this.wspnn = wspnn;
+       this.ie = ie;
+       this.wtemp = wtemp;
+       this.gbreite = gbreite;
+       this.gbreitevon = gbreitevon;
+       this.gbreitebis = gbreitebis;
+       this.tgeschiebe = tgeschiebe;
+       this.tsand = tsand;
+       this.tschweb = tschweb;
+       this.cschweb = cschweb;
+       this.uferliabst = uferliabst;
+       this.uferreabst = uferreabst;
+       this.q = q;
+       this.AHpeil = AHpeil;
+       this.AMpeil = AMpeil;
+       this.b = b;
+       this.hm = hm;
+       this.vm = vm;
+       this.vsohle = vsohle;
+       this.tau = tau;
+       this.tauv = tauv;
+       this.teilQ = teilQ;
+       this.teilAHpeil = teilAHpeil;
+       this.teilAMpeil = teilAMpeil;
+       this.teilB = teilB;
+       this.teilHm = teilHm;
+       this.teilVm = teilVm;
+       this.teilVsohle = teilVsohle;
+       this.teilTau = teilTau;
+       this.teilTauv = teilTauv;
+       this.mitteltyp = mitteltyp;
+       this.dm = dm;
+       this.sk = sk;
+       this.so = so;
+       this.u = u;
+       this.d90 = d90;
+       this.d84 = d84;
+       this.d80 = d80;
+       this.d75 = d75;
+       this.d70 = d70;
+       this.d60 = d60;
+       this.d50 = d50;
+       this.d40 = d40;
+       this.d30 = d30;
+       this.d25 = d25;
+       this.d20 = d20;
+       this.d16 = d16;
+       this.d10 = d10;
+       this.dmin = dmin;
+       this.durchdmin = durchdmin;
+       this.dmax = dmax;
+       this.durchdmax = durchdmax;
+       this.NGeschieb = NGeschieb;
+       this.NVielpkt = NVielpkt;
+       this.sysDate = sysDate;
+       this.bemerkung = bemerkung;
+       this.lastupdated = lastupdated;
+       this.km = km;
+       this.glotabstoffset = glotabstoffset;
+       this.slotabstoffset = slotabstoffset;
+       this.gbreitevonlinks = gbreitevonlinks;
+       this.gbreitebislinks = gbreitebislinks;
+       this.glotuferabstistvonlinks = glotuferabstistvonlinks;
+       this.slotuferabstistvonlinks = slotuferabstistvonlinks;
+       this.glotrechtes = glotrechtes;
+       this.slotrechtes = slotrechtes;
+    }
+
+     @Id
+
+
+    @Column(name="MESSUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="STATIONID", nullable=false)
+    public Station getStation() {
+        return this.station;
+    }
+
+    public void setStation(Station station) {
+        this.station = station;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GSIEBSATZID")
+    public Gsiebsatz getGsiebsatz() {
+        return this.gsiebsatz;
+    }
+
+    public void setGsiebsatz(Gsiebsatz gsiebsatz) {
+        this.gsiebsatz = gsiebsatz;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GFAENGERID")
+    public Gfaenger getGfaenger() {
+        return this.gfaenger;
+    }
+
+    public void setGfaenger(Gfaenger gfaenger) {
+        this.gfaenger = gfaenger;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="HPEILUNGID")
+    public Hpeilung getHpeilung() {
+        return this.hpeilung;
+    }
+
+    public void setHpeilung(Hpeilung hpeilung) {
+        this.hpeilung = hpeilung;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="DATUM", nullable=false, length=7)
+    public Date getDatum() {
+        return this.datum;
+    }
+
+    public void setDatum(Date datum) {
+        this.datum = datum;
+    }
+
+
+    @Column(name="MPEILUNGID", precision=11, scale=0)
+    public Long getMpeilungid() {
+        return this.mpeilungid;
+    }
+
+    public void setMpeilungid(Long mpeilungid) {
+        this.mpeilungid = mpeilungid;
+    }
+
+
+    @Column(name="MESSNR", nullable=false, precision=5, scale=0)
+    public int getMessnr() {
+        return this.messnr;
+    }
+
+    public void setMessnr(int messnr) {
+        this.messnr = messnr;
+    }
+
+
+    @Column(name="FGUE", length=10)
+    public String getFgue() {
+        return this.fgue;
+    }
+
+    public void setFgue(String fgue) {
+        this.fgue = fgue;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="UHRVON", length=7)
+    public Date getUhrvon() {
+        return this.uhrvon;
+    }
+
+    public void setUhrvon(Date uhrvon) {
+        this.uhrvon = uhrvon;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="UHRBIS", length=7)
+    public Date getUhrbis() {
+        return this.uhrbis;
+    }
+
+    public void setUhrbis(Date uhrbis) {
+        this.uhrbis = uhrbis;
+    }
+
+
+    @Column(name="WSPCM", precision=8, scale=1)
+    public BigDecimal getWspcm() {
+        return this.wspcm;
+    }
+
+    public void setWspcm(BigDecimal wspcm) {
+        this.wspcm = wspcm;
+    }
+
+
+    @Column(name="Q_BPEGEL", precision=8, scale=3)
+    public BigDecimal getQBpegel() {
+        return this.QBpegel;
+    }
+
+    public void setQBpegel(BigDecimal QBpegel) {
+        this.QBpegel = QBpegel;
+    }
+
+
+    @Column(name="WSPNN", precision=8, scale=3)
+    public BigDecimal getWspnn() {
+        return this.wspnn;
+    }
+
+    public void setWspnn(BigDecimal wspnn) {
+        this.wspnn = wspnn;
+    }
+
+
+    @Column(name="IE", precision=6, scale=3)
+    public BigDecimal getIe() {
+        return this.ie;
+    }
+
+    public void setIe(BigDecimal ie) {
+        this.ie = ie;
+    }
+
+
+    @Column(name="WTEMP", precision=4, scale=1)
+    public BigDecimal getWtemp() {
+        return this.wtemp;
+    }
+
+    public void setWtemp(BigDecimal wtemp) {
+        this.wtemp = wtemp;
+    }
+
+
+    @Column(name="GBREITE", precision=8, scale=3)
+    public BigDecimal getGbreite() {
+        return this.gbreite;
+    }
+
+    public void setGbreite(BigDecimal gbreite) {
+        this.gbreite = gbreite;
+    }
+
+
+    @Column(name="GBREITEVON", precision=8, scale=3)
+    public BigDecimal getGbreitevon() {
+        return this.gbreitevon;
+    }
+
+    public void setGbreitevon(BigDecimal gbreitevon) {
+        this.gbreitevon = gbreitevon;
+    }
+
+
+    @Column(name="GBREITEBIS", precision=8, scale=3)
+    public BigDecimal getGbreitebis() {
+        return this.gbreitebis;
+    }
+
+    public void setGbreitebis(BigDecimal gbreitebis) {
+        this.gbreitebis = gbreitebis;
+    }
+
+
+    @Column(name="TGESCHIEBE", precision=9, scale=3)
+    public BigDecimal getTgeschiebe() {
+        return this.tgeschiebe;
+    }
+
+    public void setTgeschiebe(BigDecimal tgeschiebe) {
+        this.tgeschiebe = tgeschiebe;
+    }
+
+
+    @Column(name="TSAND", precision=9, scale=3)
+    public BigDecimal getTsand() {
+        return this.tsand;
+    }
+
+    public void setTsand(BigDecimal tsand) {
+        this.tsand = tsand;
+    }
+
+
+    @Column(name="TSCHWEB", precision=9, scale=3)
+    public BigDecimal getTschweb() {
+        return this.tschweb;
+    }
+
+    public void setTschweb(BigDecimal tschweb) {
+        this.tschweb = tschweb;
+    }
+
+
+    @Column(name="CSCHWEB", precision=8, scale=3)
+    public BigDecimal getCschweb() {
+        return this.cschweb;
+    }
+
+    public void setCschweb(BigDecimal cschweb) {
+        this.cschweb = cschweb;
+    }
+
+
+    @Column(name="UFERLIABST", precision=8, scale=3)
+    public BigDecimal getUferliabst() {
+        return this.uferliabst;
+    }
+
+    public void setUferliabst(BigDecimal uferliabst) {
+        this.uferliabst = uferliabst;
+    }
+
+
+    @Column(name="UFERREABST", precision=8, scale=3)
+    public BigDecimal getUferreabst() {
+        return this.uferreabst;
+    }
+
+    public void setUferreabst(BigDecimal uferreabst) {
+        this.uferreabst = uferreabst;
+    }
+
+
+    @Column(name="Q", precision=8, scale=3)
+    public BigDecimal getQ() {
+        return this.q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+
+    @Column(name="A_HPEIL", precision=6, scale=1)
+    public BigDecimal getAHpeil() {
+        return this.AHpeil;
+    }
+
+    public void setAHpeil(BigDecimal AHpeil) {
+        this.AHpeil = AHpeil;
+    }
+
+
+    @Column(name="A_MPEIL", precision=6, scale=1)
+    public BigDecimal getAMpeil() {
+        return this.AMpeil;
+    }
+
+    public void setAMpeil(BigDecimal AMpeil) {
+        this.AMpeil = AMpeil;
+    }
+
+
+    @Column(name="B", precision=8, scale=3)
+    public BigDecimal getB() {
+        return this.b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+
+
+    @Column(name="HM", precision=8, scale=3)
+    public BigDecimal getHm() {
+        return this.hm;
+    }
+
+    public void setHm(BigDecimal hm) {
+        this.hm = hm;
+    }
+
+
+    @Column(name="VM", precision=6, scale=4)
+    public BigDecimal getVm() {
+        return this.vm;
+    }
+
+    public void setVm(BigDecimal vm) {
+        this.vm = vm;
+    }
+
+
+    @Column(name="VSOHLE", precision=6, scale=4)
+    public BigDecimal getVsohle() {
+        return this.vsohle;
+    }
+
+    public void setVsohle(BigDecimal vsohle) {
+        this.vsohle = vsohle;
+    }
+
+
+    @Column(name="TAU", precision=9, scale=4)
+    public BigDecimal getTau() {
+        return this.tau;
+    }
+
+    public void setTau(BigDecimal tau) {
+        this.tau = tau;
+    }
+
+
+    @Column(name="TAUV", precision=9, scale=6)
+    public BigDecimal getTauv() {
+        return this.tauv;
+    }
+
+    public void setTauv(BigDecimal tauv) {
+        this.tauv = tauv;
+    }
+
+
+    @Column(name="TEIL_Q", precision=8, scale=3)
+    public BigDecimal getTeilQ() {
+        return this.teilQ;
+    }
+
+    public void setTeilQ(BigDecimal teilQ) {
+        this.teilQ = teilQ;
+    }
+
+
+    @Column(name="TEIL_A_HPEIL", precision=6, scale=1)
+    public BigDecimal getTeilAHpeil() {
+        return this.teilAHpeil;
+    }
+
+    public void setTeilAHpeil(BigDecimal teilAHpeil) {
+        this.teilAHpeil = teilAHpeil;
+    }
+
+
+    @Column(name="TEIL_A_MPEIL", precision=6, scale=1)
+    public BigDecimal getTeilAMpeil() {
+        return this.teilAMpeil;
+    }
+
+    public void setTeilAMpeil(BigDecimal teilAMpeil) {
+        this.teilAMpeil = teilAMpeil;
+    }
+
+
+    @Column(name="TEIL_B", precision=8, scale=3)
+    public BigDecimal getTeilB() {
+        return this.teilB;
+    }
+
+    public void setTeilB(BigDecimal teilB) {
+        this.teilB = teilB;
+    }
+
+
+    @Column(name="TEIL_HM", precision=8, scale=3)
+    public BigDecimal getTeilHm() {
+        return this.teilHm;
+    }
+
+    public void setTeilHm(BigDecimal teilHm) {
+        this.teilHm = teilHm;
+    }
+
+
+    @Column(name="TEIL_VM", precision=6, scale=4)
+    public BigDecimal getTeilVm() {
+        return this.teilVm;
+    }
+
+    public void setTeilVm(BigDecimal teilVm) {
+        this.teilVm = teilVm;
+    }
+
+
+    @Column(name="TEIL_VSOHLE", precision=6, scale=4)
+    public BigDecimal getTeilVsohle() {
+        return this.teilVsohle;
+    }
+
+    public void setTeilVsohle(BigDecimal teilVsohle) {
+        this.teilVsohle = teilVsohle;
+    }
+
+
+    @Column(name="TEIL_TAU", precision=9, scale=4)
+    public BigDecimal getTeilTau() {
+        return this.teilTau;
+    }
+
+    public void setTeilTau(BigDecimal teilTau) {
+        this.teilTau = teilTau;
+    }
+
+
+    @Column(name="TEIL_TAUV", precision=9, scale=6)
+    public BigDecimal getTeilTauv() {
+        return this.teilTauv;
+    }
+
+    public void setTeilTauv(BigDecimal teilTauv) {
+        this.teilTauv = teilTauv;
+    }
+
+
+    @Column(name="MITTELTYP", precision=1, scale=0)
+    public Boolean getMitteltyp() {
+        return this.mitteltyp;
+    }
+
+    public void setMitteltyp(Boolean mitteltyp) {
+        this.mitteltyp = mitteltyp;
+    }
+
+
+    @Column(name="DM", precision=7, scale=3)
+    public BigDecimal getDm() {
+        return this.dm;
+    }
+
+    public void setDm(BigDecimal dm) {
+        this.dm = dm;
+    }
+
+
+    @Column(name="SK", precision=8, scale=3)
+    public BigDecimal getSk() {
+        return this.sk;
+    }
+
+    public void setSk(BigDecimal sk) {
+        this.sk = sk;
+    }
+
+
+    @Column(name="SO", precision=8, scale=3)
+    public BigDecimal getSo() {
+        return this.so;
+    }
+
+    public void setSo(BigDecimal so) {
+        this.so = so;
+    }
+
+
+    @Column(name="U", precision=8, scale=3)
+    public BigDecimal getU() {
+        return this.u;
+    }
+
+    public void setU(BigDecimal u) {
+        this.u = u;
+    }
+
+
+    @Column(name="D90", precision=7, scale=4)
+    public BigDecimal getD90() {
+        return this.d90;
+    }
+
+    public void setD90(BigDecimal d90) {
+        this.d90 = d90;
+    }
+
+
+    @Column(name="D84", precision=7, scale=4)
+    public BigDecimal getD84() {
+        return this.d84;
+    }
+
+    public void setD84(BigDecimal d84) {
+        this.d84 = d84;
+    }
+
+
+    @Column(name="D80", precision=7, scale=4)
+    public BigDecimal getD80() {
+        return this.d80;
+    }
+
+    public void setD80(BigDecimal d80) {
+        this.d80 = d80;
+    }
+
+
+    @Column(name="D75", precision=7, scale=4)
+    public BigDecimal getD75() {
+        return this.d75;
+    }
+
+    public void setD75(BigDecimal d75) {
+        this.d75 = d75;
+    }
+
+
+    @Column(name="D70", precision=7, scale=4)
+    public BigDecimal getD70() {
+        return this.d70;
+    }
+
+    public void setD70(BigDecimal d70) {
+        this.d70 = d70;
+    }
+
+
+    @Column(name="D60", precision=7, scale=4)
+    public BigDecimal getD60() {
+        return this.d60;
+    }
+
+    public void setD60(BigDecimal d60) {
+        this.d60 = d60;
+    }
+
+
+    @Column(name="D50", precision=7, scale=4)
+    public BigDecimal getD50() {
+        return this.d50;
+    }
+
+    public void setD50(BigDecimal d50) {
+        this.d50 = d50;
+    }
+
+
+    @Column(name="D40", precision=7, scale=4)
+    public BigDecimal getD40() {
+        return this.d40;
+    }
+
+    public void setD40(BigDecimal d40) {
+        this.d40 = d40;
+    }
+
+
+    @Column(name="D30", precision=7, scale=4)
+    public BigDecimal getD30() {
+        return this.d30;
+    }
+
+    public void setD30(BigDecimal d30) {
+        this.d30 = d30;
+    }
+
+
+    @Column(name="D25", precision=7, scale=4)
+    public BigDecimal getD25() {
+        return this.d25;
+    }
+
+    public void setD25(BigDecimal d25) {
+        this.d25 = d25;
+    }
+
+
+    @Column(name="D20", precision=7, scale=4)
+    public BigDecimal getD20() {
+        return this.d20;
+    }
+
+    public void setD20(BigDecimal d20) {
+        this.d20 = d20;
+    }
+
+
+    @Column(name="D16", precision=7, scale=4)
+    public BigDecimal getD16() {
+        return this.d16;
+    }
+
+    public void setD16(BigDecimal d16) {
+        this.d16 = d16;
+    }
+
+
+    @Column(name="D10", precision=7, scale=4)
+    public BigDecimal getD10() {
+        return this.d10;
+    }
+
+    public void setD10(BigDecimal d10) {
+        this.d10 = d10;
+    }
+
+
+    @Column(name="DMIN", precision=7, scale=4)
+    public BigDecimal getDmin() {
+        return this.dmin;
+    }
+
+    public void setDmin(BigDecimal dmin) {
+        this.dmin = dmin;
+    }
+
+
+    @Column(name="DURCHDMIN", precision=6, scale=3)
+    public BigDecimal getDurchdmin() {
+        return this.durchdmin;
+    }
+
+    public void setDurchdmin(BigDecimal durchdmin) {
+        this.durchdmin = durchdmin;
+    }
+
+
+    @Column(name="DMAX", precision=7, scale=3)
+    public BigDecimal getDmax() {
+        return this.dmax;
+    }
+
+    public void setDmax(BigDecimal dmax) {
+        this.dmax = dmax;
+    }
+
+
+    @Column(name="DURCHDMAX", precision=6, scale=3)
+    public BigDecimal getDurchdmax() {
+        return this.durchdmax;
+    }
+
+    public void setDurchdmax(BigDecimal durchdmax) {
+        this.durchdmax = durchdmax;
+    }
+
+
+    @Column(name="N_GESCHIEB", nullable=false, precision=5, scale=0)
+    public int getNGeschieb() {
+        return this.NGeschieb;
+    }
+
+    public void setNGeschieb(int NGeschieb) {
+        this.NGeschieb = NGeschieb;
+    }
+
+
+    @Column(name="N_VIELPKT", nullable=false, precision=5, scale=0)
+    public int getNVielpkt() {
+        return this.NVielpkt;
+    }
+
+    public void setNVielpkt(int NVielpkt) {
+        this.NVielpkt = NVielpkt;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="SYS_DATE", length=7)
+    public Date getSysDate() {
+        return this.sysDate;
+    }
+
+    public void setSysDate(Date sysDate) {
+        this.sysDate = sysDate;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="LASTUPDATED", nullable=false, length=7)
+    public Date getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Date lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+
+    @Column(name="KM", precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="GLOTABSTOFFSET", nullable=false, precision=8, scale=3)
+    public BigDecimal getGlotabstoffset() {
+        return this.glotabstoffset;
+    }
+
+    public void setGlotabstoffset(BigDecimal glotabstoffset) {
+        this.glotabstoffset = glotabstoffset;
+    }
+
+
+    @Column(name="SLOTABSTOFFSET", nullable=false, precision=8, scale=3)
+    public BigDecimal getSlotabstoffset() {
+        return this.slotabstoffset;
+    }
+
+    public void setSlotabstoffset(BigDecimal slotabstoffset) {
+        this.slotabstoffset = slotabstoffset;
+    }
+
+
+    @Column(name="GBREITEVONLINKS", precision=8, scale=3)
+    public BigDecimal getGbreitevonlinks() {
+        return this.gbreitevonlinks;
+    }
+
+    public void setGbreitevonlinks(BigDecimal gbreitevonlinks) {
+        this.gbreitevonlinks = gbreitevonlinks;
+    }
+
+
+    @Column(name="GBREITEBISLINKS", precision=8, scale=3)
+    public BigDecimal getGbreitebislinks() {
+        return this.gbreitebislinks;
+    }
+
+    public void setGbreitebislinks(BigDecimal gbreitebislinks) {
+        this.gbreitebislinks = gbreitebislinks;
+    }
+
+
+    @Column(name="GLOTUFERABSTISTVONLINKS", precision=1, scale=0)
+    public Boolean getGlotuferabstistvonlinks() {
+        return this.glotuferabstistvonlinks;
+    }
+
+    public void setGlotuferabstistvonlinks(Boolean glotuferabstistvonlinks) {
+        this.glotuferabstistvonlinks = glotuferabstistvonlinks;
+    }
+
+
+    @Column(name="SLOTUFERABSTISTVONLINKS", precision=1, scale=0)
+    public Boolean getSlotuferabstistvonlinks() {
+        return this.slotuferabstistvonlinks;
+    }
+
+    public void setSlotuferabstistvonlinks(Boolean slotuferabstistvonlinks) {
+        this.slotuferabstistvonlinks = slotuferabstistvonlinks;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="messung")
+    public Set<Glotrechte> getGlotrechtes() {
+        return this.glotrechtes;
+    }
+
+    public void setGlotrechtes(Set<Glotrechte> glotrechtes) {
+        this.glotrechtes = glotrechtes;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="messung")
+    public Set<Slotrechte> getSlotrechtes() {
+        return this.slotrechtes;
+    }
+
+    public void setSlotrechtes(Set<Slotrechte> slotrechtes) {
+        this.slotrechtes = slotrechtes;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Messunglotcount.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,45 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Messunglotcount generated by hbm2java
+ */
+@Entity
+@Table(name="MESSUNGLOTCOUNT"
+    ,schema="SEDDB"
+)
+public class Messunglotcount  implements java.io.Serializable {
+
+
+     private MessunglotcountId id;
+
+    public Messunglotcount() {
+    }
+
+    public Messunglotcount(MessunglotcountId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="messungid", column=@Column(name="MESSUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="glotcount", column=@Column(name="GLOTCOUNT", precision=22, scale=0) ),
+        @AttributeOverride(name="slotcount", column=@Column(name="SLOTCOUNT", precision=22, scale=0) ) } )
+    public MessunglotcountId getId() {
+        return this.id;
+    }
+
+    public void setId(MessunglotcountId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/MessunglotcountId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * MessunglotcountId generated by hbm2java
+ */
+@Embeddable
+public class MessunglotcountId  implements java.io.Serializable {
+
+
+     private long messungid;
+     private BigDecimal glotcount;
+     private BigDecimal slotcount;
+
+    public MessunglotcountId() {
+    }
+
+
+    public MessunglotcountId(long messungid) {
+        this.messungid = messungid;
+    }
+    public MessunglotcountId(long messungid, BigDecimal glotcount, BigDecimal slotcount) {
+       this.messungid = messungid;
+       this.glotcount = glotcount;
+       this.slotcount = slotcount;
+    }
+
+
+
+    @Column(name="MESSUNGID", nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+
+
+    @Column(name="GLOTCOUNT", precision=22, scale=0)
+    public BigDecimal getGlotcount() {
+        return this.glotcount;
+    }
+
+    public void setGlotcount(BigDecimal glotcount) {
+        this.glotcount = glotcount;
+    }
+
+
+    @Column(name="SLOTCOUNT", precision=22, scale=0)
+    public BigDecimal getSlotcount() {
+        return this.slotcount;
+    }
+
+    public void setSlotcount(BigDecimal slotcount) {
+        this.slotcount = slotcount;
+    }
+
+
+   @Override public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof MessunglotcountId) ) return false;
+         MessunglotcountId castOther = ( MessunglotcountId ) other;
+
+         return (this.getMessungid()==castOther.getMessungid())
+ && ( (this.getGlotcount()==castOther.getGlotcount()) || ( this.getGlotcount()!=null && castOther.getGlotcount()!=null && this.getGlotcount().equals(castOther.getGlotcount()) ) )
+ && ( (this.getSlotcount()==castOther.getSlotcount()) || ( this.getSlotcount()!=null && castOther.getSlotcount()!=null && this.getSlotcount().equals(castOther.getSlotcount()) ) );
+   }
+
+   @Override public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getMessungid();
+         result = 37 * result + ( getGlotcount() == null ? 0 : this.getGlotcount().hashCode() );
+         result = 37 * result + ( getSlotcount() == null ? 0 : this.getSlotcount().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Messungsta.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,122 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Messungsta generated by hbm2java
+ */
+@Entity
+@Table(name="MESSUNGSTA"
+    ,schema="SEDDB"
+)
+public class Messungsta  implements java.io.Serializable {
+
+
+     private MessungstaId id;
+
+    public Messungsta() {
+    }
+
+    public Messungsta(MessungstaId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="messungid", column=@Column(name="MESSUNGID") ),
+        @AttributeOverride(name="stationid", column=@Column(name="STATIONID") ),
+        @AttributeOverride(name="datum", column=@Column(name="DATUM") ),
+        @AttributeOverride(name="hpeilungid", column=@Column(name="HPEILUNGID") ),
+        @AttributeOverride(name="mpeilungid", column=@Column(name="MPEILUNGID") ),
+        @AttributeOverride(name="gfaengerid", column=@Column(name="GFAENGERID") ),
+        @AttributeOverride(name="gsiebsatzid", column=@Column(name="GSIEBSATZID") ),
+        @AttributeOverride(name="messnr", column=@Column(name="MESSNR") ),
+        @AttributeOverride(name="fgue", column=@Column(name="FGUE") ),
+        @AttributeOverride(name="uhrvon", column=@Column(name="UHRVON") ),
+        @AttributeOverride(name="uhrbis", column=@Column(name="UHRBIS") ),
+        @AttributeOverride(name="wspcm", column=@Column(name="WSPCM") ),
+        @AttributeOverride(name="QBpegel", column=@Column(name="Q_BPEGEL") ),
+        @AttributeOverride(name="wspnn", column=@Column(name="WSPNN") ),
+        @AttributeOverride(name="ie", column=@Column(name="IE") ),
+        @AttributeOverride(name="wtemp", column=@Column(name="WTEMP") ),
+        @AttributeOverride(name="gbreite", column=@Column(name="GBREITE") ),
+        @AttributeOverride(name="gbreitevon", column=@Column(name="GBREITEVON") ),
+        @AttributeOverride(name="gbreitebis", column=@Column(name="GBREITEBIS") ),
+        @AttributeOverride(name="tgeschiebe", column=@Column(name="TGESCHIEBE") ),
+        @AttributeOverride(name="tsand", column=@Column(name="TSAND") ),
+        @AttributeOverride(name="tschweb", column=@Column(name="TSCHWEB") ),
+        @AttributeOverride(name="cschweb", column=@Column(name="CSCHWEB") ),
+        @AttributeOverride(name="uferliabst", column=@Column(name="UFERLIABST") ),
+        @AttributeOverride(name="uferreabst", column=@Column(name="UFERREABST") ),
+        @AttributeOverride(name="q", column=@Column(name="Q") ),
+        @AttributeOverride(name="AHpeil", column=@Column(name="A_HPEIL") ),
+        @AttributeOverride(name="AMpeil", column=@Column(name="A_MPEIL") ),
+        @AttributeOverride(name="b", column=@Column(name="B") ),
+        @AttributeOverride(name="hm", column=@Column(name="HM") ),
+        @AttributeOverride(name="vm", column=@Column(name="VM") ),
+        @AttributeOverride(name="vsohle", column=@Column(name="VSOHLE") ),
+        @AttributeOverride(name="tau", column=@Column(name="TAU") ),
+        @AttributeOverride(name="tauv", column=@Column(name="TAUV") ),
+        @AttributeOverride(name="teilQ", column=@Column(name="TEIL_Q") ),
+        @AttributeOverride(name="teilAHpeil", column=@Column(name="TEIL_A_HPEIL") ),
+        @AttributeOverride(name="teilAMpeil", column=@Column(name="TEIL_A_MPEIL") ),
+        @AttributeOverride(name="teilB", column=@Column(name="TEIL_B") ),
+        @AttributeOverride(name="teilHm", column=@Column(name="TEIL_HM") ),
+        @AttributeOverride(name="teilVm", column=@Column(name="TEIL_VM") ),
+        @AttributeOverride(name="teilVsohle", column=@Column(name="TEIL_VSOHLE") ),
+        @AttributeOverride(name="teilTau", column=@Column(name="TEIL_TAU") ),
+        @AttributeOverride(name="teilTauv", column=@Column(name="TEIL_TAUV") ),
+        @AttributeOverride(name="mitteltyp", column=@Column(name="MITTELTYP") ),
+        @AttributeOverride(name="dm", column=@Column(name="DM") ),
+        @AttributeOverride(name="sk", column=@Column(name="SK") ),
+        @AttributeOverride(name="so", column=@Column(name="SO") ),
+        @AttributeOverride(name="u", column=@Column(name="U") ),
+        @AttributeOverride(name="d90", column=@Column(name="D90") ),
+        @AttributeOverride(name="d84", column=@Column(name="D84") ),
+        @AttributeOverride(name="d80", column=@Column(name="D80") ),
+        @AttributeOverride(name="d75", column=@Column(name="D75") ),
+        @AttributeOverride(name="d70", column=@Column(name="D70") ),
+        @AttributeOverride(name="d60", column=@Column(name="D60") ),
+        @AttributeOverride(name="d50", column=@Column(name="D50") ),
+        @AttributeOverride(name="d40", column=@Column(name="D40") ),
+        @AttributeOverride(name="d30", column=@Column(name="D30") ),
+        @AttributeOverride(name="d25", column=@Column(name="D25") ),
+        @AttributeOverride(name="d20", column=@Column(name="D20") ),
+        @AttributeOverride(name="d16", column=@Column(name="D16") ),
+        @AttributeOverride(name="d10", column=@Column(name="D10") ),
+        @AttributeOverride(name="dmin", column=@Column(name="DMIN") ),
+        @AttributeOverride(name="durchdmin", column=@Column(name="DURCHDMIN") ),
+        @AttributeOverride(name="dmax", column=@Column(name="DMAX") ),
+        @AttributeOverride(name="durchdmax", column=@Column(name="DURCHDMAX") ),
+        @AttributeOverride(name="NGeschieb", column=@Column(name="N_GESCHIEB") ),
+        @AttributeOverride(name="NVielpkt", column=@Column(name="N_VIELPKT") ),
+        @AttributeOverride(name="sysDate", column=@Column(name="SYS_DATE") ),
+        @AttributeOverride(name="bemerkung", column=@Column(name="BEMERKUNG") ),
+        @AttributeOverride(name="lastupdated", column=@Column(name="LASTUPDATED") ),
+        @AttributeOverride(name="km", column=@Column(name="KM") ),
+        @AttributeOverride(name="glotabstoffset", column=@Column(name="GLOTABSTOFFSET") ),
+        @AttributeOverride(name="slotabstoffset", column=@Column(name="SLOTABSTOFFSET") ),
+        @AttributeOverride(name="gbreitevonlinks", column=@Column(name="GBREITEVONLINKS") ),
+        @AttributeOverride(name="gbreitebislinks", column=@Column(name="GBREITEBISLINKS") ),
+        @AttributeOverride(name="staname", column=@Column(name="STANAME") ),
+        @AttributeOverride(name="stakm", column=@Column(name="STAKM") ),
+        @AttributeOverride(name="gewaesserid", column=@Column(name="GEWAESSERID") ),
+        @AttributeOverride(name="gewname", column=@Column(name="GEWNAME") ),
+        @AttributeOverride(name="bezugspegelid", column=@Column(name="BEZUGSPEGELID") ) } )
+    public MessungstaId getId() {
+        return this.id;
+    }
+
+    public void setId(MessungstaId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/MessungstaId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,1158 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.io.Serializable;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * MessungstaId generated by hbm2java
+ */
+@Embeddable
+public class MessungstaId  implements java.io.Serializable {
+
+
+     private Serializable messungid;
+     private Serializable stationid;
+     private Serializable datum;
+     private Serializable hpeilungid;
+     private Serializable mpeilungid;
+     private Serializable gfaengerid;
+     private Serializable gsiebsatzid;
+     private Serializable messnr;
+     private Serializable fgue;
+     private Serializable uhrvon;
+     private Serializable uhrbis;
+     private Serializable wspcm;
+     private Serializable QBpegel;
+     private Serializable wspnn;
+     private Serializable ie;
+     private Serializable wtemp;
+     private Serializable gbreite;
+     private Serializable gbreitevon;
+     private Serializable gbreitebis;
+     private Serializable tgeschiebe;
+     private Serializable tsand;
+     private Serializable tschweb;
+     private Serializable cschweb;
+     private Serializable uferliabst;
+     private Serializable uferreabst;
+     private Serializable q;
+     private Serializable AHpeil;
+     private Serializable AMpeil;
+     private Serializable b;
+     private Serializable hm;
+     private Serializable vm;
+     private Serializable vsohle;
+     private Serializable tau;
+     private Serializable tauv;
+     private Serializable teilQ;
+     private Serializable teilAHpeil;
+     private Serializable teilAMpeil;
+     private Serializable teilB;
+     private Serializable teilHm;
+     private Serializable teilVm;
+     private Serializable teilVsohle;
+     private Serializable teilTau;
+     private Serializable teilTauv;
+     private Serializable mitteltyp;
+     private Serializable dm;
+     private Serializable sk;
+     private Serializable so;
+     private Serializable u;
+     private Serializable d90;
+     private Serializable d84;
+     private Serializable d80;
+     private Serializable d75;
+     private Serializable d70;
+     private Serializable d60;
+     private Serializable d50;
+     private Serializable d40;
+     private Serializable d30;
+     private Serializable d25;
+     private Serializable d20;
+     private Serializable d16;
+     private Serializable d10;
+     private Serializable dmin;
+     private Serializable durchdmin;
+     private Serializable dmax;
+     private Serializable durchdmax;
+     private Serializable NGeschieb;
+     private Serializable NVielpkt;
+     private Serializable sysDate;
+     private Serializable bemerkung;
+     private Serializable lastupdated;
+     private Serializable km;
+     private Serializable glotabstoffset;
+     private Serializable slotabstoffset;
+     private Serializable gbreitevonlinks;
+     private Serializable gbreitebislinks;
+     private Serializable staname;
+     private Serializable stakm;
+     private Serializable gewaesserid;
+     private Serializable gewname;
+     private Serializable bezugspegelid;
+
+    public MessungstaId() {
+    }
+
+    public MessungstaId(Serializable messungid, Serializable stationid, Serializable datum, Serializable hpeilungid, Serializable mpeilungid, Serializable gfaengerid, Serializable gsiebsatzid, Serializable messnr, Serializable fgue, Serializable uhrvon, Serializable uhrbis, Serializable wspcm, Serializable QBpegel, Serializable wspnn, Serializable ie, Serializable wtemp, Serializable gbreite, Serializable gbreitevon, Serializable gbreitebis, Serializable tgeschiebe, Serializable tsand, Serializable tschweb, Serializable cschweb, Serializable uferliabst, Serializable uferreabst, Serializable q, Serializable AHpeil, Serializable AMpeil, Serializable b, Serializable hm, Serializable vm, Serializable vsohle, Serializable tau, Serializable tauv, Serializable teilQ, Serializable teilAHpeil, Serializable teilAMpeil, Serializable teilB, Serializable teilHm, Serializable teilVm, Serializable teilVsohle, Serializable teilTau, Serializable teilTauv, Serializable mitteltyp, Serializable dm, Serializable sk, Serializable so, Serializable u, Serializable d90, Serializable d84, Serializable d80, Serializable d75, Serializable d70, Serializable d60, Serializable d50, Serializable d40, Serializable d30, Serializable d25, Serializable d20, Serializable d16, Serializable d10, Serializable dmin, Serializable durchdmin, Serializable dmax, Serializable durchdmax, Serializable NGeschieb, Serializable NVielpkt, Serializable sysDate, Serializable bemerkung, Serializable lastupdated, Serializable km, Serializable glotabstoffset, Serializable slotabstoffset, Serializable gbreitevonlinks, Serializable gbreitebislinks, Serializable staname, Serializable stakm, Serializable gewaesserid, Serializable gewname, Serializable bezugspegelid) {
+       this.messungid = messungid;
+       this.stationid = stationid;
+       this.datum = datum;
+       this.hpeilungid = hpeilungid;
+       this.mpeilungid = mpeilungid;
+       this.gfaengerid = gfaengerid;
+       this.gsiebsatzid = gsiebsatzid;
+       this.messnr = messnr;
+       this.fgue = fgue;
+       this.uhrvon = uhrvon;
+       this.uhrbis = uhrbis;
+       this.wspcm = wspcm;
+       this.QBpegel = QBpegel;
+       this.wspnn = wspnn;
+       this.ie = ie;
+       this.wtemp = wtemp;
+       this.gbreite = gbreite;
+       this.gbreitevon = gbreitevon;
+       this.gbreitebis = gbreitebis;
+       this.tgeschiebe = tgeschiebe;
+       this.tsand = tsand;
+       this.tschweb = tschweb;
+       this.cschweb = cschweb;
+       this.uferliabst = uferliabst;
+       this.uferreabst = uferreabst;
+       this.q = q;
+       this.AHpeil = AHpeil;
+       this.AMpeil = AMpeil;
+       this.b = b;
+       this.hm = hm;
+       this.vm = vm;
+       this.vsohle = vsohle;
+       this.tau = tau;
+       this.tauv = tauv;
+       this.teilQ = teilQ;
+       this.teilAHpeil = teilAHpeil;
+       this.teilAMpeil = teilAMpeil;
+       this.teilB = teilB;
+       this.teilHm = teilHm;
+       this.teilVm = teilVm;
+       this.teilVsohle = teilVsohle;
+       this.teilTau = teilTau;
+       this.teilTauv = teilTauv;
+       this.mitteltyp = mitteltyp;
+       this.dm = dm;
+       this.sk = sk;
+       this.so = so;
+       this.u = u;
+       this.d90 = d90;
+       this.d84 = d84;
+       this.d80 = d80;
+       this.d75 = d75;
+       this.d70 = d70;
+       this.d60 = d60;
+       this.d50 = d50;
+       this.d40 = d40;
+       this.d30 = d30;
+       this.d25 = d25;
+       this.d20 = d20;
+       this.d16 = d16;
+       this.d10 = d10;
+       this.dmin = dmin;
+       this.durchdmin = durchdmin;
+       this.dmax = dmax;
+       this.durchdmax = durchdmax;
+       this.NGeschieb = NGeschieb;
+       this.NVielpkt = NVielpkt;
+       this.sysDate = sysDate;
+       this.bemerkung = bemerkung;
+       this.lastupdated = lastupdated;
+       this.km = km;
+       this.glotabstoffset = glotabstoffset;
+       this.slotabstoffset = slotabstoffset;
+       this.gbreitevonlinks = gbreitevonlinks;
+       this.gbreitebislinks = gbreitebislinks;
+       this.staname = staname;
+       this.stakm = stakm;
+       this.gewaesserid = gewaesserid;
+       this.gewname = gewname;
+       this.bezugspegelid = bezugspegelid;
+    }
+
+
+
+    @Column(name="MESSUNGID")
+    public Serializable getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(Serializable messungid) {
+        this.messungid = messungid;
+    }
+
+
+    @Column(name="STATIONID")
+    public Serializable getStationid() {
+        return this.stationid;
+    }
+
+    public void setStationid(Serializable stationid) {
+        this.stationid = stationid;
+    }
+
+
+    @Column(name="DATUM")
+    public Serializable getDatum() {
+        return this.datum;
+    }
+
+    public void setDatum(Serializable datum) {
+        this.datum = datum;
+    }
+
+
+    @Column(name="HPEILUNGID")
+    public Serializable getHpeilungid() {
+        return this.hpeilungid;
+    }
+
+    public void setHpeilungid(Serializable hpeilungid) {
+        this.hpeilungid = hpeilungid;
+    }
+
+
+    @Column(name="MPEILUNGID")
+    public Serializable getMpeilungid() {
+        return this.mpeilungid;
+    }
+
+    public void setMpeilungid(Serializable mpeilungid) {
+        this.mpeilungid = mpeilungid;
+    }
+
+
+    @Column(name="GFAENGERID")
+    public Serializable getGfaengerid() {
+        return this.gfaengerid;
+    }
+
+    public void setGfaengerid(Serializable gfaengerid) {
+        this.gfaengerid = gfaengerid;
+    }
+
+
+    @Column(name="GSIEBSATZID")
+    public Serializable getGsiebsatzid() {
+        return this.gsiebsatzid;
+    }
+
+    public void setGsiebsatzid(Serializable gsiebsatzid) {
+        this.gsiebsatzid = gsiebsatzid;
+    }
+
+
+    @Column(name="MESSNR")
+    public Serializable getMessnr() {
+        return this.messnr;
+    }
+
+    public void setMessnr(Serializable messnr) {
+        this.messnr = messnr;
+    }
+
+
+    @Column(name="FGUE")
+    public Serializable getFgue() {
+        return this.fgue;
+    }
+
+    public void setFgue(Serializable fgue) {
+        this.fgue = fgue;
+    }
+
+
+    @Column(name="UHRVON")
+    public Serializable getUhrvon() {
+        return this.uhrvon;
+    }
+
+    public void setUhrvon(Serializable uhrvon) {
+        this.uhrvon = uhrvon;
+    }
+
+
+    @Column(name="UHRBIS")
+    public Serializable getUhrbis() {
+        return this.uhrbis;
+    }
+
+    public void setUhrbis(Serializable uhrbis) {
+        this.uhrbis = uhrbis;
+    }
+
+
+    @Column(name="WSPCM")
+    public Serializable getWspcm() {
+        return this.wspcm;
+    }
+
+    public void setWspcm(Serializable wspcm) {
+        this.wspcm = wspcm;
+    }
+
+
+    @Column(name="Q_BPEGEL")
+    public Serializable getQBpegel() {
+        return this.QBpegel;
+    }
+
+    public void setQBpegel(Serializable QBpegel) {
+        this.QBpegel = QBpegel;
+    }
+
+
+    @Column(name="WSPNN")
+    public Serializable getWspnn() {
+        return this.wspnn;
+    }
+
+    public void setWspnn(Serializable wspnn) {
+        this.wspnn = wspnn;
+    }
+
+
+    @Column(name="IE")
+    public Serializable getIe() {
+        return this.ie;
+    }
+
+    public void setIe(Serializable ie) {
+        this.ie = ie;
+    }
+
+
+    @Column(name="WTEMP")
+    public Serializable getWtemp() {
+        return this.wtemp;
+    }
+
+    public void setWtemp(Serializable wtemp) {
+        this.wtemp = wtemp;
+    }
+
+
+    @Column(name="GBREITE")
+    public Serializable getGbreite() {
+        return this.gbreite;
+    }
+
+    public void setGbreite(Serializable gbreite) {
+        this.gbreite = gbreite;
+    }
+
+
+    @Column(name="GBREITEVON")
+    public Serializable getGbreitevon() {
+        return this.gbreitevon;
+    }
+
+    public void setGbreitevon(Serializable gbreitevon) {
+        this.gbreitevon = gbreitevon;
+    }
+
+
+    @Column(name="GBREITEBIS")
+    public Serializable getGbreitebis() {
+        return this.gbreitebis;
+    }
+
+    public void setGbreitebis(Serializable gbreitebis) {
+        this.gbreitebis = gbreitebis;
+    }
+
+
+    @Column(name="TGESCHIEBE")
+    public Serializable getTgeschiebe() {
+        return this.tgeschiebe;
+    }
+
+    public void setTgeschiebe(Serializable tgeschiebe) {
+        this.tgeschiebe = tgeschiebe;
+    }
+
+
+    @Column(name="TSAND")
+    public Serializable getTsand() {
+        return this.tsand;
+    }
+
+    public void setTsand(Serializable tsand) {
+        this.tsand = tsand;
+    }
+
+
+    @Column(name="TSCHWEB")
+    public Serializable getTschweb() {
+        return this.tschweb;
+    }
+
+    public void setTschweb(Serializable tschweb) {
+        this.tschweb = tschweb;
+    }
+
+
+    @Column(name="CSCHWEB")
+    public Serializable getCschweb() {
+        return this.cschweb;
+    }
+
+    public void setCschweb(Serializable cschweb) {
+        this.cschweb = cschweb;
+    }
+
+
+    @Column(name="UFERLIABST")
+    public Serializable getUferliabst() {
+        return this.uferliabst;
+    }
+
+    public void setUferliabst(Serializable uferliabst) {
+        this.uferliabst = uferliabst;
+    }
+
+
+    @Column(name="UFERREABST")
+    public Serializable getUferreabst() {
+        return this.uferreabst;
+    }
+
+    public void setUferreabst(Serializable uferreabst) {
+        this.uferreabst = uferreabst;
+    }
+
+
+    @Column(name="Q")
+    public Serializable getQ() {
+        return this.q;
+    }
+
+    public void setQ(Serializable q) {
+        this.q = q;
+    }
+
+
+    @Column(name="A_HPEIL")
+    public Serializable getAHpeil() {
+        return this.AHpeil;
+    }
+
+    public void setAHpeil(Serializable AHpeil) {
+        this.AHpeil = AHpeil;
+    }
+
+
+    @Column(name="A_MPEIL")
+    public Serializable getAMpeil() {
+        return this.AMpeil;
+    }
+
+    public void setAMpeil(Serializable AMpeil) {
+        this.AMpeil = AMpeil;
+    }
+
+
+    @Column(name="B")
+    public Serializable getB() {
+        return this.b;
+    }
+
+    public void setB(Serializable b) {
+        this.b = b;
+    }
+
+
+    @Column(name="HM")
+    public Serializable getHm() {
+        return this.hm;
+    }
+
+    public void setHm(Serializable hm) {
+        this.hm = hm;
+    }
+
+
+    @Column(name="VM")
+    public Serializable getVm() {
+        return this.vm;
+    }
+
+    public void setVm(Serializable vm) {
+        this.vm = vm;
+    }
+
+
+    @Column(name="VSOHLE")
+    public Serializable getVsohle() {
+        return this.vsohle;
+    }
+
+    public void setVsohle(Serializable vsohle) {
+        this.vsohle = vsohle;
+    }
+
+
+    @Column(name="TAU")
+    public Serializable getTau() {
+        return this.tau;
+    }
+
+    public void setTau(Serializable tau) {
+        this.tau = tau;
+    }
+
+
+    @Column(name="TAUV")
+    public Serializable getTauv() {
+        return this.tauv;
+    }
+
+    public void setTauv(Serializable tauv) {
+        this.tauv = tauv;
+    }
+
+
+    @Column(name="TEIL_Q")
+    public Serializable getTeilQ() {
+        return this.teilQ;
+    }
+
+    public void setTeilQ(Serializable teilQ) {
+        this.teilQ = teilQ;
+    }
+
+
+    @Column(name="TEIL_A_HPEIL")
+    public Serializable getTeilAHpeil() {
+        return this.teilAHpeil;
+    }
+
+    public void setTeilAHpeil(Serializable teilAHpeil) {
+        this.teilAHpeil = teilAHpeil;
+    }
+
+
+    @Column(name="TEIL_A_MPEIL")
+    public Serializable getTeilAMpeil() {
+        return this.teilAMpeil;
+    }
+
+    public void setTeilAMpeil(Serializable teilAMpeil) {
+        this.teilAMpeil = teilAMpeil;
+    }
+
+
+    @Column(name="TEIL_B")
+    public Serializable getTeilB() {
+        return this.teilB;
+    }
+
+    public void setTeilB(Serializable teilB) {
+        this.teilB = teilB;
+    }
+
+
+    @Column(name="TEIL_HM")
+    public Serializable getTeilHm() {
+        return this.teilHm;
+    }
+
+    public void setTeilHm(Serializable teilHm) {
+        this.teilHm = teilHm;
+    }
+
+
+    @Column(name="TEIL_VM")
+    public Serializable getTeilVm() {
+        return this.teilVm;
+    }
+
+    public void setTeilVm(Serializable teilVm) {
+        this.teilVm = teilVm;
+    }
+
+
+    @Column(name="TEIL_VSOHLE")
+    public Serializable getTeilVsohle() {
+        return this.teilVsohle;
+    }
+
+    public void setTeilVsohle(Serializable teilVsohle) {
+        this.teilVsohle = teilVsohle;
+    }
+
+
+    @Column(name="TEIL_TAU")
+    public Serializable getTeilTau() {
+        return this.teilTau;
+    }
+
+    public void setTeilTau(Serializable teilTau) {
+        this.teilTau = teilTau;
+    }
+
+
+    @Column(name="TEIL_TAUV")
+    public Serializable getTeilTauv() {
+        return this.teilTauv;
+    }
+
+    public void setTeilTauv(Serializable teilTauv) {
+        this.teilTauv = teilTauv;
+    }
+
+
+    @Column(name="MITTELTYP")
+    public Serializable getMitteltyp() {
+        return this.mitteltyp;
+    }
+
+    public void setMitteltyp(Serializable mitteltyp) {
+        this.mitteltyp = mitteltyp;
+    }
+
+
+    @Column(name="DM")
+    public Serializable getDm() {
+        return this.dm;
+    }
+
+    public void setDm(Serializable dm) {
+        this.dm = dm;
+    }
+
+
+    @Column(name="SK")
+    public Serializable getSk() {
+        return this.sk;
+    }
+
+    public void setSk(Serializable sk) {
+        this.sk = sk;
+    }
+
+
+    @Column(name="SO")
+    public Serializable getSo() {
+        return this.so;
+    }
+
+    public void setSo(Serializable so) {
+        this.so = so;
+    }
+
+
+    @Column(name="U")
+    public Serializable getU() {
+        return this.u;
+    }
+
+    public void setU(Serializable u) {
+        this.u = u;
+    }
+
+
+    @Column(name="D90")
+    public Serializable getD90() {
+        return this.d90;
+    }
+
+    public void setD90(Serializable d90) {
+        this.d90 = d90;
+    }
+
+
+    @Column(name="D84")
+    public Serializable getD84() {
+        return this.d84;
+    }
+
+    public void setD84(Serializable d84) {
+        this.d84 = d84;
+    }
+
+
+    @Column(name="D80")
+    public Serializable getD80() {
+        return this.d80;
+    }
+
+    public void setD80(Serializable d80) {
+        this.d80 = d80;
+    }
+
+
+    @Column(name="D75")
+    public Serializable getD75() {
+        return this.d75;
+    }
+
+    public void setD75(Serializable d75) {
+        this.d75 = d75;
+    }
+
+
+    @Column(name="D70")
+    public Serializable getD70() {
+        return this.d70;
+    }
+
+    public void setD70(Serializable d70) {
+        this.d70 = d70;
+    }
+
+
+    @Column(name="D60")
+    public Serializable getD60() {
+        return this.d60;
+    }
+
+    public void setD60(Serializable d60) {
+        this.d60 = d60;
+    }
+
+
+    @Column(name="D50")
+    public Serializable getD50() {
+        return this.d50;
+    }
+
+    public void setD50(Serializable d50) {
+        this.d50 = d50;
+    }
+
+
+    @Column(name="D40")
+    public Serializable getD40() {
+        return this.d40;
+    }
+
+    public void setD40(Serializable d40) {
+        this.d40 = d40;
+    }
+
+
+    @Column(name="D30")
+    public Serializable getD30() {
+        return this.d30;
+    }
+
+    public void setD30(Serializable d30) {
+        this.d30 = d30;
+    }
+
+
+    @Column(name="D25")
+    public Serializable getD25() {
+        return this.d25;
+    }
+
+    public void setD25(Serializable d25) {
+        this.d25 = d25;
+    }
+
+
+    @Column(name="D20")
+    public Serializable getD20() {
+        return this.d20;
+    }
+
+    public void setD20(Serializable d20) {
+        this.d20 = d20;
+    }
+
+
+    @Column(name="D16")
+    public Serializable getD16() {
+        return this.d16;
+    }
+
+    public void setD16(Serializable d16) {
+        this.d16 = d16;
+    }
+
+
+    @Column(name="D10")
+    public Serializable getD10() {
+        return this.d10;
+    }
+
+    public void setD10(Serializable d10) {
+        this.d10 = d10;
+    }
+
+
+    @Column(name="DMIN")
+    public Serializable getDmin() {
+        return this.dmin;
+    }
+
+    public void setDmin(Serializable dmin) {
+        this.dmin = dmin;
+    }
+
+
+    @Column(name="DURCHDMIN")
+    public Serializable getDurchdmin() {
+        return this.durchdmin;
+    }
+
+    public void setDurchdmin(Serializable durchdmin) {
+        this.durchdmin = durchdmin;
+    }
+
+
+    @Column(name="DMAX")
+    public Serializable getDmax() {
+        return this.dmax;
+    }
+
+    public void setDmax(Serializable dmax) {
+        this.dmax = dmax;
+    }
+
+
+    @Column(name="DURCHDMAX")
+    public Serializable getDurchdmax() {
+        return this.durchdmax;
+    }
+
+    public void setDurchdmax(Serializable durchdmax) {
+        this.durchdmax = durchdmax;
+    }
+
+
+    @Column(name="N_GESCHIEB")
+    public Serializable getNGeschieb() {
+        return this.NGeschieb;
+    }
+
+    public void setNGeschieb(Serializable NGeschieb) {
+        this.NGeschieb = NGeschieb;
+    }
+
+
+    @Column(name="N_VIELPKT")
+    public Serializable getNVielpkt() {
+        return this.NVielpkt;
+    }
+
+    public void setNVielpkt(Serializable NVielpkt) {
+        this.NVielpkt = NVielpkt;
+    }
+
+
+    @Column(name="SYS_DATE")
+    public Serializable getSysDate() {
+        return this.sysDate;
+    }
+
+    public void setSysDate(Serializable sysDate) {
+        this.sysDate = sysDate;
+    }
+
+
+    @Column(name="BEMERKUNG")
+    public Serializable getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(Serializable bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="LASTUPDATED")
+    public Serializable getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Serializable lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+
+    @Column(name="KM")
+    public Serializable getKm() {
+        return this.km;
+    }
+
+    public void setKm(Serializable km) {
+        this.km = km;
+    }
+
+
+    @Column(name="GLOTABSTOFFSET")
+    public Serializable getGlotabstoffset() {
+        return this.glotabstoffset;
+    }
+
+    public void setGlotabstoffset(Serializable glotabstoffset) {
+        this.glotabstoffset = glotabstoffset;
+    }
+
+
+    @Column(name="SLOTABSTOFFSET")
+    public Serializable getSlotabstoffset() {
+        return this.slotabstoffset;
+    }
+
+    public void setSlotabstoffset(Serializable slotabstoffset) {
+        this.slotabstoffset = slotabstoffset;
+    }
+
+
+    @Column(name="GBREITEVONLINKS")
+    public Serializable getGbreitevonlinks() {
+        return this.gbreitevonlinks;
+    }
+
+    public void setGbreitevonlinks(Serializable gbreitevonlinks) {
+        this.gbreitevonlinks = gbreitevonlinks;
+    }
+
+
+    @Column(name="GBREITEBISLINKS")
+    public Serializable getGbreitebislinks() {
+        return this.gbreitebislinks;
+    }
+
+    public void setGbreitebislinks(Serializable gbreitebislinks) {
+        this.gbreitebislinks = gbreitebislinks;
+    }
+
+
+    @Column(name="STANAME")
+    public Serializable getStaname() {
+        return this.staname;
+    }
+
+    public void setStaname(Serializable staname) {
+        this.staname = staname;
+    }
+
+
+    @Column(name="STAKM")
+    public Serializable getStakm() {
+        return this.stakm;
+    }
+
+    public void setStakm(Serializable stakm) {
+        this.stakm = stakm;
+    }
+
+
+    @Column(name="GEWAESSERID")
+    public Serializable getGewaesserid() {
+        return this.gewaesserid;
+    }
+
+    public void setGewaesserid(Serializable gewaesserid) {
+        this.gewaesserid = gewaesserid;
+    }
+
+
+    @Column(name="GEWNAME")
+    public Serializable getGewname() {
+        return this.gewname;
+    }
+
+    public void setGewname(Serializable gewname) {
+        this.gewname = gewname;
+    }
+
+
+    @Column(name="BEZUGSPEGELID")
+    public Serializable getBezugspegelid() {
+        return this.bezugspegelid;
+    }
+
+    public void setBezugspegelid(Serializable bezugspegelid) {
+        this.bezugspegelid = bezugspegelid;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof MessungstaId) ) return false;
+         MessungstaId castOther = ( MessungstaId ) other;
+
+         return ( (this.getMessungid()==castOther.getMessungid()) || ( this.getMessungid()!=null && castOther.getMessungid()!=null && this.getMessungid().equals(castOther.getMessungid()) ) )
+ && ( (this.getStationid()==castOther.getStationid()) || ( this.getStationid()!=null && castOther.getStationid()!=null && this.getStationid().equals(castOther.getStationid()) ) )
+ && ( (this.getDatum()==castOther.getDatum()) || ( this.getDatum()!=null && castOther.getDatum()!=null && this.getDatum().equals(castOther.getDatum()) ) )
+ && ( (this.getHpeilungid()==castOther.getHpeilungid()) || ( this.getHpeilungid()!=null && castOther.getHpeilungid()!=null && this.getHpeilungid().equals(castOther.getHpeilungid()) ) )
+ && ( (this.getMpeilungid()==castOther.getMpeilungid()) || ( this.getMpeilungid()!=null && castOther.getMpeilungid()!=null && this.getMpeilungid().equals(castOther.getMpeilungid()) ) )
+ && ( (this.getGfaengerid()==castOther.getGfaengerid()) || ( this.getGfaengerid()!=null && castOther.getGfaengerid()!=null && this.getGfaengerid().equals(castOther.getGfaengerid()) ) )
+ && ( (this.getGsiebsatzid()==castOther.getGsiebsatzid()) || ( this.getGsiebsatzid()!=null && castOther.getGsiebsatzid()!=null && this.getGsiebsatzid().equals(castOther.getGsiebsatzid()) ) )
+ && ( (this.getMessnr()==castOther.getMessnr()) || ( this.getMessnr()!=null && castOther.getMessnr()!=null && this.getMessnr().equals(castOther.getMessnr()) ) )
+ && ( (this.getFgue()==castOther.getFgue()) || ( this.getFgue()!=null && castOther.getFgue()!=null && this.getFgue().equals(castOther.getFgue()) ) )
+ && ( (this.getUhrvon()==castOther.getUhrvon()) || ( this.getUhrvon()!=null && castOther.getUhrvon()!=null && this.getUhrvon().equals(castOther.getUhrvon()) ) )
+ && ( (this.getUhrbis()==castOther.getUhrbis()) || ( this.getUhrbis()!=null && castOther.getUhrbis()!=null && this.getUhrbis().equals(castOther.getUhrbis()) ) )
+ && ( (this.getWspcm()==castOther.getWspcm()) || ( this.getWspcm()!=null && castOther.getWspcm()!=null && this.getWspcm().equals(castOther.getWspcm()) ) )
+ && ( (this.getQBpegel()==castOther.getQBpegel()) || ( this.getQBpegel()!=null && castOther.getQBpegel()!=null && this.getQBpegel().equals(castOther.getQBpegel()) ) )
+ && ( (this.getWspnn()==castOther.getWspnn()) || ( this.getWspnn()!=null && castOther.getWspnn()!=null && this.getWspnn().equals(castOther.getWspnn()) ) )
+ && ( (this.getIe()==castOther.getIe()) || ( this.getIe()!=null && castOther.getIe()!=null && this.getIe().equals(castOther.getIe()) ) )
+ && ( (this.getWtemp()==castOther.getWtemp()) || ( this.getWtemp()!=null && castOther.getWtemp()!=null && this.getWtemp().equals(castOther.getWtemp()) ) )
+ && ( (this.getGbreite()==castOther.getGbreite()) || ( this.getGbreite()!=null && castOther.getGbreite()!=null && this.getGbreite().equals(castOther.getGbreite()) ) )
+ && ( (this.getGbreitevon()==castOther.getGbreitevon()) || ( this.getGbreitevon()!=null && castOther.getGbreitevon()!=null && this.getGbreitevon().equals(castOther.getGbreitevon()) ) )
+ && ( (this.getGbreitebis()==castOther.getGbreitebis()) || ( this.getGbreitebis()!=null && castOther.getGbreitebis()!=null && this.getGbreitebis().equals(castOther.getGbreitebis()) ) )
+ && ( (this.getTgeschiebe()==castOther.getTgeschiebe()) || ( this.getTgeschiebe()!=null && castOther.getTgeschiebe()!=null && this.getTgeschiebe().equals(castOther.getTgeschiebe()) ) )
+ && ( (this.getTsand()==castOther.getTsand()) || ( this.getTsand()!=null && castOther.getTsand()!=null && this.getTsand().equals(castOther.getTsand()) ) )
+ && ( (this.getTschweb()==castOther.getTschweb()) || ( this.getTschweb()!=null && castOther.getTschweb()!=null && this.getTschweb().equals(castOther.getTschweb()) ) )
+ && ( (this.getCschweb()==castOther.getCschweb()) || ( this.getCschweb()!=null && castOther.getCschweb()!=null && this.getCschweb().equals(castOther.getCschweb()) ) )
+ && ( (this.getUferliabst()==castOther.getUferliabst()) || ( this.getUferliabst()!=null && castOther.getUferliabst()!=null && this.getUferliabst().equals(castOther.getUferliabst()) ) )
+ && ( (this.getUferreabst()==castOther.getUferreabst()) || ( this.getUferreabst()!=null && castOther.getUferreabst()!=null && this.getUferreabst().equals(castOther.getUferreabst()) ) )
+ && ( (this.getQ()==castOther.getQ()) || ( this.getQ()!=null && castOther.getQ()!=null && this.getQ().equals(castOther.getQ()) ) )
+ && ( (this.getAHpeil()==castOther.getAHpeil()) || ( this.getAHpeil()!=null && castOther.getAHpeil()!=null && this.getAHpeil().equals(castOther.getAHpeil()) ) )
+ && ( (this.getAMpeil()==castOther.getAMpeil()) || ( this.getAMpeil()!=null && castOther.getAMpeil()!=null && this.getAMpeil().equals(castOther.getAMpeil()) ) )
+ && ( (this.getB()==castOther.getB()) || ( this.getB()!=null && castOther.getB()!=null && this.getB().equals(castOther.getB()) ) )
+ && ( (this.getHm()==castOther.getHm()) || ( this.getHm()!=null && castOther.getHm()!=null && this.getHm().equals(castOther.getHm()) ) )
+ && ( (this.getVm()==castOther.getVm()) || ( this.getVm()!=null && castOther.getVm()!=null && this.getVm().equals(castOther.getVm()) ) )
+ && ( (this.getVsohle()==castOther.getVsohle()) || ( this.getVsohle()!=null && castOther.getVsohle()!=null && this.getVsohle().equals(castOther.getVsohle()) ) )
+ && ( (this.getTau()==castOther.getTau()) || ( this.getTau()!=null && castOther.getTau()!=null && this.getTau().equals(castOther.getTau()) ) )
+ && ( (this.getTauv()==castOther.getTauv()) || ( this.getTauv()!=null && castOther.getTauv()!=null && this.getTauv().equals(castOther.getTauv()) ) )
+ && ( (this.getTeilQ()==castOther.getTeilQ()) || ( this.getTeilQ()!=null && castOther.getTeilQ()!=null && this.getTeilQ().equals(castOther.getTeilQ()) ) )
+ && ( (this.getTeilAHpeil()==castOther.getTeilAHpeil()) || ( this.getTeilAHpeil()!=null && castOther.getTeilAHpeil()!=null && this.getTeilAHpeil().equals(castOther.getTeilAHpeil()) ) )
+ && ( (this.getTeilAMpeil()==castOther.getTeilAMpeil()) || ( this.getTeilAMpeil()!=null && castOther.getTeilAMpeil()!=null && this.getTeilAMpeil().equals(castOther.getTeilAMpeil()) ) )
+ && ( (this.getTeilB()==castOther.getTeilB()) || ( this.getTeilB()!=null && castOther.getTeilB()!=null && this.getTeilB().equals(castOther.getTeilB()) ) )
+ && ( (this.getTeilHm()==castOther.getTeilHm()) || ( this.getTeilHm()!=null && castOther.getTeilHm()!=null && this.getTeilHm().equals(castOther.getTeilHm()) ) )
+ && ( (this.getTeilVm()==castOther.getTeilVm()) || ( this.getTeilVm()!=null && castOther.getTeilVm()!=null && this.getTeilVm().equals(castOther.getTeilVm()) ) )
+ && ( (this.getTeilVsohle()==castOther.getTeilVsohle()) || ( this.getTeilVsohle()!=null && castOther.getTeilVsohle()!=null && this.getTeilVsohle().equals(castOther.getTeilVsohle()) ) )
+ && ( (this.getTeilTau()==castOther.getTeilTau()) || ( this.getTeilTau()!=null && castOther.getTeilTau()!=null && this.getTeilTau().equals(castOther.getTeilTau()) ) )
+ && ( (this.getTeilTauv()==castOther.getTeilTauv()) || ( this.getTeilTauv()!=null && castOther.getTeilTauv()!=null && this.getTeilTauv().equals(castOther.getTeilTauv()) ) )
+ && ( (this.getMitteltyp()==castOther.getMitteltyp()) || ( this.getMitteltyp()!=null && castOther.getMitteltyp()!=null && this.getMitteltyp().equals(castOther.getMitteltyp()) ) )
+ && ( (this.getDm()==castOther.getDm()) || ( this.getDm()!=null && castOther.getDm()!=null && this.getDm().equals(castOther.getDm()) ) )
+ && ( (this.getSk()==castOther.getSk()) || ( this.getSk()!=null && castOther.getSk()!=null && this.getSk().equals(castOther.getSk()) ) )
+ && ( (this.getSo()==castOther.getSo()) || ( this.getSo()!=null && castOther.getSo()!=null && this.getSo().equals(castOther.getSo()) ) )
+ && ( (this.getU()==castOther.getU()) || ( this.getU()!=null && castOther.getU()!=null && this.getU().equals(castOther.getU()) ) )
+ && ( (this.getD90()==castOther.getD90()) || ( this.getD90()!=null && castOther.getD90()!=null && this.getD90().equals(castOther.getD90()) ) )
+ && ( (this.getD84()==castOther.getD84()) || ( this.getD84()!=null && castOther.getD84()!=null && this.getD84().equals(castOther.getD84()) ) )
+ && ( (this.getD80()==castOther.getD80()) || ( this.getD80()!=null && castOther.getD80()!=null && this.getD80().equals(castOther.getD80()) ) )
+ && ( (this.getD75()==castOther.getD75()) || ( this.getD75()!=null && castOther.getD75()!=null && this.getD75().equals(castOther.getD75()) ) )
+ && ( (this.getD70()==castOther.getD70()) || ( this.getD70()!=null && castOther.getD70()!=null && this.getD70().equals(castOther.getD70()) ) )
+ && ( (this.getD60()==castOther.getD60()) || ( this.getD60()!=null && castOther.getD60()!=null && this.getD60().equals(castOther.getD60()) ) )
+ && ( (this.getD50()==castOther.getD50()) || ( this.getD50()!=null && castOther.getD50()!=null && this.getD50().equals(castOther.getD50()) ) )
+ && ( (this.getD40()==castOther.getD40()) || ( this.getD40()!=null && castOther.getD40()!=null && this.getD40().equals(castOther.getD40()) ) )
+ && ( (this.getD30()==castOther.getD30()) || ( this.getD30()!=null && castOther.getD30()!=null && this.getD30().equals(castOther.getD30()) ) )
+ && ( (this.getD25()==castOther.getD25()) || ( this.getD25()!=null && castOther.getD25()!=null && this.getD25().equals(castOther.getD25()) ) )
+ && ( (this.getD20()==castOther.getD20()) || ( this.getD20()!=null && castOther.getD20()!=null && this.getD20().equals(castOther.getD20()) ) )
+ && ( (this.getD16()==castOther.getD16()) || ( this.getD16()!=null && castOther.getD16()!=null && this.getD16().equals(castOther.getD16()) ) )
+ && ( (this.getD10()==castOther.getD10()) || ( this.getD10()!=null && castOther.getD10()!=null && this.getD10().equals(castOther.getD10()) ) )
+ && ( (this.getDmin()==castOther.getDmin()) || ( this.getDmin()!=null && castOther.getDmin()!=null && this.getDmin().equals(castOther.getDmin()) ) )
+ && ( (this.getDurchdmin()==castOther.getDurchdmin()) || ( this.getDurchdmin()!=null && castOther.getDurchdmin()!=null && this.getDurchdmin().equals(castOther.getDurchdmin()) ) )
+ && ( (this.getDmax()==castOther.getDmax()) || ( this.getDmax()!=null && castOther.getDmax()!=null && this.getDmax().equals(castOther.getDmax()) ) )
+ && ( (this.getDurchdmax()==castOther.getDurchdmax()) || ( this.getDurchdmax()!=null && castOther.getDurchdmax()!=null && this.getDurchdmax().equals(castOther.getDurchdmax()) ) )
+ && ( (this.getNGeschieb()==castOther.getNGeschieb()) || ( this.getNGeschieb()!=null && castOther.getNGeschieb()!=null && this.getNGeschieb().equals(castOther.getNGeschieb()) ) )
+ && ( (this.getNVielpkt()==castOther.getNVielpkt()) || ( this.getNVielpkt()!=null && castOther.getNVielpkt()!=null && this.getNVielpkt().equals(castOther.getNVielpkt()) ) )
+ && ( (this.getSysDate()==castOther.getSysDate()) || ( this.getSysDate()!=null && castOther.getSysDate()!=null && this.getSysDate().equals(castOther.getSysDate()) ) )
+ && ( (this.getBemerkung()==castOther.getBemerkung()) || ( this.getBemerkung()!=null && castOther.getBemerkung()!=null && this.getBemerkung().equals(castOther.getBemerkung()) ) )
+ && ( (this.getLastupdated()==castOther.getLastupdated()) || ( this.getLastupdated()!=null && castOther.getLastupdated()!=null && this.getLastupdated().equals(castOther.getLastupdated()) ) )
+ && ( (this.getKm()==castOther.getKm()) || ( this.getKm()!=null && castOther.getKm()!=null && this.getKm().equals(castOther.getKm()) ) )
+ && ( (this.getGlotabstoffset()==castOther.getGlotabstoffset()) || ( this.getGlotabstoffset()!=null && castOther.getGlotabstoffset()!=null && this.getGlotabstoffset().equals(castOther.getGlotabstoffset()) ) )
+ && ( (this.getSlotabstoffset()==castOther.getSlotabstoffset()) || ( this.getSlotabstoffset()!=null && castOther.getSlotabstoffset()!=null && this.getSlotabstoffset().equals(castOther.getSlotabstoffset()) ) )
+ && ( (this.getGbreitevonlinks()==castOther.getGbreitevonlinks()) || ( this.getGbreitevonlinks()!=null && castOther.getGbreitevonlinks()!=null && this.getGbreitevonlinks().equals(castOther.getGbreitevonlinks()) ) )
+ && ( (this.getGbreitebislinks()==castOther.getGbreitebislinks()) || ( this.getGbreitebislinks()!=null && castOther.getGbreitebislinks()!=null && this.getGbreitebislinks().equals(castOther.getGbreitebislinks()) ) )
+ && ( (this.getStaname()==castOther.getStaname()) || ( this.getStaname()!=null && castOther.getStaname()!=null && this.getStaname().equals(castOther.getStaname()) ) )
+ && ( (this.getStakm()==castOther.getStakm()) || ( this.getStakm()!=null && castOther.getStakm()!=null && this.getStakm().equals(castOther.getStakm()) ) )
+ && ( (this.getGewaesserid()==castOther.getGewaesserid()) || ( this.getGewaesserid()!=null && castOther.getGewaesserid()!=null && this.getGewaesserid().equals(castOther.getGewaesserid()) ) )
+ && ( (this.getGewname()==castOther.getGewname()) || ( this.getGewname()!=null && castOther.getGewname()!=null && this.getGewname().equals(castOther.getGewname()) ) )
+ && ( (this.getBezugspegelid()==castOther.getBezugspegelid()) || ( this.getBezugspegelid()!=null && castOther.getBezugspegelid()!=null && this.getBezugspegelid().equals(castOther.getBezugspegelid()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + ( getMessungid() == null ? 0 : this.getMessungid().hashCode() );
+         result = 37 * result + ( getStationid() == null ? 0 : this.getStationid().hashCode() );
+         result = 37 * result + ( getDatum() == null ? 0 : this.getDatum().hashCode() );
+         result = 37 * result + ( getHpeilungid() == null ? 0 : this.getHpeilungid().hashCode() );
+         result = 37 * result + ( getMpeilungid() == null ? 0 : this.getMpeilungid().hashCode() );
+         result = 37 * result + ( getGfaengerid() == null ? 0 : this.getGfaengerid().hashCode() );
+         result = 37 * result + ( getGsiebsatzid() == null ? 0 : this.getGsiebsatzid().hashCode() );
+         result = 37 * result + ( getMessnr() == null ? 0 : this.getMessnr().hashCode() );
+         result = 37 * result + ( getFgue() == null ? 0 : this.getFgue().hashCode() );
+         result = 37 * result + ( getUhrvon() == null ? 0 : this.getUhrvon().hashCode() );
+         result = 37 * result + ( getUhrbis() == null ? 0 : this.getUhrbis().hashCode() );
+         result = 37 * result + ( getWspcm() == null ? 0 : this.getWspcm().hashCode() );
+         result = 37 * result + ( getQBpegel() == null ? 0 : this.getQBpegel().hashCode() );
+         result = 37 * result + ( getWspnn() == null ? 0 : this.getWspnn().hashCode() );
+         result = 37 * result + ( getIe() == null ? 0 : this.getIe().hashCode() );
+         result = 37 * result + ( getWtemp() == null ? 0 : this.getWtemp().hashCode() );
+         result = 37 * result + ( getGbreite() == null ? 0 : this.getGbreite().hashCode() );
+         result = 37 * result + ( getGbreitevon() == null ? 0 : this.getGbreitevon().hashCode() );
+         result = 37 * result + ( getGbreitebis() == null ? 0 : this.getGbreitebis().hashCode() );
+         result = 37 * result + ( getTgeschiebe() == null ? 0 : this.getTgeschiebe().hashCode() );
+         result = 37 * result + ( getTsand() == null ? 0 : this.getTsand().hashCode() );
+         result = 37 * result + ( getTschweb() == null ? 0 : this.getTschweb().hashCode() );
+         result = 37 * result + ( getCschweb() == null ? 0 : this.getCschweb().hashCode() );
+         result = 37 * result + ( getUferliabst() == null ? 0 : this.getUferliabst().hashCode() );
+         result = 37 * result + ( getUferreabst() == null ? 0 : this.getUferreabst().hashCode() );
+         result = 37 * result + ( getQ() == null ? 0 : this.getQ().hashCode() );
+         result = 37 * result + ( getAHpeil() == null ? 0 : this.getAHpeil().hashCode() );
+         result = 37 * result + ( getAMpeil() == null ? 0 : this.getAMpeil().hashCode() );
+         result = 37 * result + ( getB() == null ? 0 : this.getB().hashCode() );
+         result = 37 * result + ( getHm() == null ? 0 : this.getHm().hashCode() );
+         result = 37 * result + ( getVm() == null ? 0 : this.getVm().hashCode() );
+         result = 37 * result + ( getVsohle() == null ? 0 : this.getVsohle().hashCode() );
+         result = 37 * result + ( getTau() == null ? 0 : this.getTau().hashCode() );
+         result = 37 * result + ( getTauv() == null ? 0 : this.getTauv().hashCode() );
+         result = 37 * result + ( getTeilQ() == null ? 0 : this.getTeilQ().hashCode() );
+         result = 37 * result + ( getTeilAHpeil() == null ? 0 : this.getTeilAHpeil().hashCode() );
+         result = 37 * result + ( getTeilAMpeil() == null ? 0 : this.getTeilAMpeil().hashCode() );
+         result = 37 * result + ( getTeilB() == null ? 0 : this.getTeilB().hashCode() );
+         result = 37 * result + ( getTeilHm() == null ? 0 : this.getTeilHm().hashCode() );
+         result = 37 * result + ( getTeilVm() == null ? 0 : this.getTeilVm().hashCode() );
+         result = 37 * result + ( getTeilVsohle() == null ? 0 : this.getTeilVsohle().hashCode() );
+         result = 37 * result + ( getTeilTau() == null ? 0 : this.getTeilTau().hashCode() );
+         result = 37 * result + ( getTeilTauv() == null ? 0 : this.getTeilTauv().hashCode() );
+         result = 37 * result + ( getMitteltyp() == null ? 0 : this.getMitteltyp().hashCode() );
+         result = 37 * result + ( getDm() == null ? 0 : this.getDm().hashCode() );
+         result = 37 * result + ( getSk() == null ? 0 : this.getSk().hashCode() );
+         result = 37 * result + ( getSo() == null ? 0 : this.getSo().hashCode() );
+         result = 37 * result + ( getU() == null ? 0 : this.getU().hashCode() );
+         result = 37 * result + ( getD90() == null ? 0 : this.getD90().hashCode() );
+         result = 37 * result + ( getD84() == null ? 0 : this.getD84().hashCode() );
+         result = 37 * result + ( getD80() == null ? 0 : this.getD80().hashCode() );
+         result = 37 * result + ( getD75() == null ? 0 : this.getD75().hashCode() );
+         result = 37 * result + ( getD70() == null ? 0 : this.getD70().hashCode() );
+         result = 37 * result + ( getD60() == null ? 0 : this.getD60().hashCode() );
+         result = 37 * result + ( getD50() == null ? 0 : this.getD50().hashCode() );
+         result = 37 * result + ( getD40() == null ? 0 : this.getD40().hashCode() );
+         result = 37 * result + ( getD30() == null ? 0 : this.getD30().hashCode() );
+         result = 37 * result + ( getD25() == null ? 0 : this.getD25().hashCode() );
+         result = 37 * result + ( getD20() == null ? 0 : this.getD20().hashCode() );
+         result = 37 * result + ( getD16() == null ? 0 : this.getD16().hashCode() );
+         result = 37 * result + ( getD10() == null ? 0 : this.getD10().hashCode() );
+         result = 37 * result + ( getDmin() == null ? 0 : this.getDmin().hashCode() );
+         result = 37 * result + ( getDurchdmin() == null ? 0 : this.getDurchdmin().hashCode() );
+         result = 37 * result + ( getDmax() == null ? 0 : this.getDmax().hashCode() );
+         result = 37 * result + ( getDurchdmax() == null ? 0 : this.getDurchdmax().hashCode() );
+         result = 37 * result + ( getNGeschieb() == null ? 0 : this.getNGeschieb().hashCode() );
+         result = 37 * result + ( getNVielpkt() == null ? 0 : this.getNVielpkt().hashCode() );
+         result = 37 * result + ( getSysDate() == null ? 0 : this.getSysDate().hashCode() );
+         result = 37 * result + ( getBemerkung() == null ? 0 : this.getBemerkung().hashCode() );
+         result = 37 * result + ( getLastupdated() == null ? 0 : this.getLastupdated().hashCode() );
+         result = 37 * result + ( getKm() == null ? 0 : this.getKm().hashCode() );
+         result = 37 * result + ( getGlotabstoffset() == null ? 0 : this.getGlotabstoffset().hashCode() );
+         result = 37 * result + ( getSlotabstoffset() == null ? 0 : this.getSlotabstoffset().hashCode() );
+         result = 37 * result + ( getGbreitevonlinks() == null ? 0 : this.getGbreitevonlinks().hashCode() );
+         result = 37 * result + ( getGbreitebislinks() == null ? 0 : this.getGbreitebislinks().hashCode() );
+         result = 37 * result + ( getStaname() == null ? 0 : this.getStaname().hashCode() );
+         result = 37 * result + ( getStakm() == null ? 0 : this.getStakm().hashCode() );
+         result = 37 * result + ( getGewaesserid() == null ? 0 : this.getGewaesserid().hashCode() );
+         result = 37 * result + ( getGewname() == null ? 0 : this.getGewname().hashCode() );
+         result = 37 * result + ( getBezugspegelid() == null ? 0 : this.getBezugspegelid().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Messunguferbezug.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,45 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Messunguferbezug generated by hbm2java
+ */
+@Entity
+@Table(name="MESSUNGUFERBEZUG"
+    ,schema="SEDDB"
+)
+public class Messunguferbezug  implements java.io.Serializable {
+
+
+     private MessunguferbezugId id;
+
+    public Messunguferbezug() {
+    }
+
+    public Messunguferbezug(MessunguferbezugId id) {
+       this.id = id;
+    }
+
+    @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="messungid", column=@Column(name="MESSUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="uferistlinks", column=@Column(name="UFERISTLINKS", nullable=false, precision=1, scale=0) ),
+        @AttributeOverride(name="hmabst", column=@Column(name="HMABST", precision=8, scale=3) ) } )
+    public MessunguferbezugId getId() {
+        return this.id;
+    }
+
+    public void setId(MessunguferbezugId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/MessunguferbezugId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * MessunguferbezugId generated by hbm2java
+ */
+@Embeddable
+public class MessunguferbezugId  implements java.io.Serializable {
+
+
+     private long messungid;
+     private boolean uferistlinks;
+     private BigDecimal hmabst;
+
+    public MessunguferbezugId() {
+    }
+
+    public MessunguferbezugId(long messungid, boolean uferistlinks) {
+        this.messungid = messungid;
+        this.uferistlinks = uferistlinks;
+    }
+    public MessunguferbezugId(long messungid, boolean uferistlinks, BigDecimal hmabst) {
+       this.messungid = messungid;
+       this.uferistlinks = uferistlinks;
+       this.hmabst = hmabst;
+    }
+
+
+
+    @Column(name="MESSUNGID", nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+
+
+    @Column(name="UFERISTLINKS", nullable=false, precision=1, scale=0)
+    public boolean isUferistlinks() {
+        return this.uferistlinks;
+    }
+
+    public void setUferistlinks(boolean uferistlinks) {
+        this.uferistlinks = uferistlinks;
+    }
+
+
+    @Column(name="HMABST", precision=8, scale=3)
+    public BigDecimal getHmabst() {
+        return this.hmabst;
+    }
+
+    public void setHmabst(BigDecimal hmabst) {
+        this.hmabst = hmabst;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof MessunguferbezugId) ) return false;
+         MessunguferbezugId castOther = ( MessunguferbezugId ) other;
+
+         return (this.getMessungid()==castOther.getMessungid())
+ && (this.isUferistlinks()==castOther.isUferistlinks())
+ && ( (this.getHmabst()==castOther.getHmabst()) || ( this.getHmabst()!=null && castOther.getHmabst()!=null && this.getHmabst().equals(castOther.getHmabst()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getMessungid();
+         result = 37 * result + (this.isUferistlinks()?1:0);
+         result = 37 * result + ( getHmabst() == null ? 0 : this.getHmabst().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Mpeilpunkt.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Mpeilpunkt generated by hbm2java
+ */
+@Entity
+@Table(name="MPEILPUNKT"
+    ,schema="SEDDB"
+)
+public class Mpeilpunkt  implements java.io.Serializable {
+
+
+     private MpeilpunktId id;
+     private Mpeilung mpeilung;
+     private BigDecimal y;
+     private BigDecimal z;
+
+    public Mpeilpunkt() {
+    }
+
+    public Mpeilpunkt(MpeilpunktId id, Mpeilung mpeilung, BigDecimal y, BigDecimal z) {
+       this.id = id;
+       this.mpeilung = mpeilung;
+       this.y = y;
+       this.z = z;
+    }
+
+    @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="mpeilungid", column=@Column(name="MPEILUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="punktnr", column=@Column(name="PUNKTNR", nullable=false, precision=5, scale=0) ) } )
+    public MpeilpunktId getId() {
+        return this.id;
+    }
+
+    public void setId(MpeilpunktId id) {
+        this.id = id;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="MPEILUNGID", nullable=false, insertable=false, updatable=false)
+    public Mpeilung getMpeilung() {
+        return this.mpeilung;
+    }
+
+    public void setMpeilung(Mpeilung mpeilung) {
+        this.mpeilung = mpeilung;
+    }
+
+
+    @Column(name="Y", nullable=false, precision=8, scale=3)
+    public BigDecimal getY() {
+        return this.y;
+    }
+
+    public void setY(BigDecimal y) {
+        this.y = y;
+    }
+
+
+    @Column(name="Z", nullable=false, precision=8, scale=3)
+    public BigDecimal getZ() {
+        return this.z;
+    }
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/MpeilpunktId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * MpeilpunktId generated by hbm2java
+ */
+@Embeddable
+public class MpeilpunktId  implements java.io.Serializable {
+
+
+     private long mpeilungid;
+     private int punktnr;
+
+    public MpeilpunktId() {
+    }
+
+    public MpeilpunktId(long mpeilungid, int punktnr) {
+       this.mpeilungid = mpeilungid;
+       this.punktnr = punktnr;
+    }
+
+
+
+    @Column(name="MPEILUNGID", nullable=false, precision=11, scale=0)
+    public long getMpeilungid() {
+        return this.mpeilungid;
+    }
+
+    public void setMpeilungid(long mpeilungid) {
+        this.mpeilungid = mpeilungid;
+    }
+
+
+    @Column(name="PUNKTNR", nullable=false, precision=5, scale=0)
+    public int getPunktnr() {
+        return this.punktnr;
+    }
+
+    public void setPunktnr(int punktnr) {
+        this.punktnr = punktnr;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof MpeilpunktId) ) return false;
+         MpeilpunktId castOther = ( MpeilpunktId ) other;
+
+         return (this.getMpeilungid()==castOther.getMpeilungid())
+ && (this.getPunktnr()==castOther.getPunktnr());
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getMpeilungid();
+         result = 37 * result + this.getPunktnr();
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Mpeilung.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,140 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+/**
+ * Mpeilung generated by hbm2java
+ */
+@Entity
+@Table(name="MPEILUNG"
+    ,schema="SEDDB"
+)
+public class Mpeilung  implements java.io.Serializable {
+
+
+     private long mpeilungid;
+     private Station station;
+     private Date datum;
+     private BigDecimal km;
+     private String bemerkung;
+     private Date lastupdated;
+     private Long oldapeilid;
+     private Set<Mpeilpunkt> mpeilpunkts = new HashSet<Mpeilpunkt>(0);
+
+    public Mpeilung() {
+    }
+
+    public Mpeilung(long mpeilungid, Station station, Date datum, BigDecimal km, Date lastupdated) {
+        this.mpeilungid = mpeilungid;
+        this.station = station;
+        this.datum = datum;
+        this.km = km;
+        this.lastupdated = lastupdated;
+    }
+    public Mpeilung(long mpeilungid, Station station, Date datum, BigDecimal km, String bemerkung, Date lastupdated, Long oldapeilid, Set<Mpeilpunkt> mpeilpunkts) {
+       this.mpeilungid = mpeilungid;
+       this.station = station;
+       this.datum = datum;
+       this.km = km;
+       this.bemerkung = bemerkung;
+       this.lastupdated = lastupdated;
+       this.oldapeilid = oldapeilid;
+       this.mpeilpunkts = mpeilpunkts;
+    }
+
+    @Id
+
+
+    @Column(name="MPEILUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMpeilungid() {
+        return this.mpeilungid;
+    }
+
+    public void setMpeilungid(long mpeilungid) {
+        this.mpeilungid = mpeilungid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="STATIONID", nullable=false)
+    public Station getStation() {
+        return this.station;
+    }
+
+    public void setStation(Station station) {
+        this.station = station;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="DATUM", nullable=false, length=7)
+    public Date getDatum() {
+        return this.datum;
+    }
+
+    public void setDatum(Date datum) {
+        this.datum = datum;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="LASTUPDATED", nullable=false, length=7)
+    public Date getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Date lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+
+    @Column(name="OLDAPEILID", precision=11, scale=0)
+    public Long getOldapeilid() {
+        return this.oldapeilid;
+    }
+
+    public void setOldapeilid(Long oldapeilid) {
+        this.oldapeilid = oldapeilid;
+    }
+
+@OneToMany(fetch=FetchType.LAZY, mappedBy="mpeilung")
+    public Set<Mpeilpunkt> getMpeilpunkts() {
+        return this.mpeilpunkts;
+    }
+
+    public void setMpeilpunkts(Set<Mpeilpunkt> mpeilpunkts) {
+        this.mpeilpunkts = mpeilpunkts;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Probebild.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,194 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Probebild generated by hbm2java
+ */
+@Entity
+@Table(name="PROBEBILD"
+    ,schema="SEDDB"
+)
+public class Probebild  implements java.io.Serializable {
+
+
+     private long probebildid;
+     private Zzthema zzthema;
+     private Sohlprobe sohlprobe;
+     private int lfdnr;
+     private boolean istdigital;
+     private String pfad;
+     private String standort;
+     private String medium;
+     private String medpfad;
+     private String bemerkung;
+     private String typklein;
+     private String typmittel;
+     private String typgross;
+
+    public Probebild() {
+    }
+
+    public Probebild(long probebildid, Zzthema zzthema, Sohlprobe sohlprobe, int lfdnr, boolean istdigital) {
+        this.probebildid = probebildid;
+        this.zzthema = zzthema;
+        this.sohlprobe = sohlprobe;
+        this.lfdnr = lfdnr;
+        this.istdigital = istdigital;
+    }
+    public Probebild(long probebildid, Zzthema zzthema, Sohlprobe sohlprobe, int lfdnr, boolean istdigital, String pfad, String standort, String medium, String medpfad, String bemerkung, String typklein, String typmittel, String typgross) {
+       this.probebildid = probebildid;
+       this.zzthema = zzthema;
+       this.sohlprobe = sohlprobe;
+       this.lfdnr = lfdnr;
+       this.istdigital = istdigital;
+       this.pfad = pfad;
+       this.standort = standort;
+       this.medium = medium;
+       this.medpfad = medpfad;
+       this.bemerkung = bemerkung;
+       this.typklein = typklein;
+       this.typmittel = typmittel;
+       this.typgross = typgross;
+    }
+
+     @Id
+
+
+    @Column(name="PROBEBILDID", unique=true, nullable=false, precision=11, scale=0)
+    public long getProbebildid() {
+        return this.probebildid;
+    }
+
+    public void setProbebildid(long probebildid) {
+        this.probebildid = probebildid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="THEMAID", nullable=false)
+    public Zzthema getZzthema() {
+        return this.zzthema;
+    }
+
+    public void setZzthema(Zzthema zzthema) {
+        this.zzthema = zzthema;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SOHLPROBEID", nullable=false)
+    public Sohlprobe getSohlprobe() {
+        return this.sohlprobe;
+    }
+
+    public void setSohlprobe(Sohlprobe sohlprobe) {
+        this.sohlprobe = sohlprobe;
+    }
+
+
+    @Column(name="LFDNR", nullable=false, precision=5, scale=0)
+    public int getLfdnr() {
+        return this.lfdnr;
+    }
+
+    public void setLfdnr(int lfdnr) {
+        this.lfdnr = lfdnr;
+    }
+
+
+    @Column(name="ISTDIGITAL", nullable=false, precision=1, scale=0)
+    public boolean isIstdigital() {
+        return this.istdigital;
+    }
+
+    public void setIstdigital(boolean istdigital) {
+        this.istdigital = istdigital;
+    }
+
+
+    @Column(name="PFAD", length=512)
+    public String getPfad() {
+        return this.pfad;
+    }
+
+    public void setPfad(String pfad) {
+        this.pfad = pfad;
+    }
+
+
+    @Column(name="STANDORT", length=50)
+    public String getStandort() {
+        return this.standort;
+    }
+
+    public void setStandort(String standort) {
+        this.standort = standort;
+    }
+
+
+    @Column(name="MEDIUM", length=50)
+    public String getMedium() {
+        return this.medium;
+    }
+
+    public void setMedium(String medium) {
+        this.medium = medium;
+    }
+
+
+    @Column(name="MEDPFAD", length=50)
+    public String getMedpfad() {
+        return this.medpfad;
+    }
+
+    public void setMedpfad(String medpfad) {
+        this.medpfad = medpfad;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="TYPKLEIN", length=8)
+    public String getTypklein() {
+        return this.typklein;
+    }
+
+    public void setTypklein(String typklein) {
+        this.typklein = typklein;
+    }
+
+
+    @Column(name="TYPMITTEL", length=8)
+    public String getTypmittel() {
+        return this.typmittel;
+    }
+
+    public void setTypmittel(String typmittel) {
+        this.typmittel = typmittel;
+    }
+
+
+    @Column(name="TYPGROSS", length=8)
+    public String getTypgross() {
+        return this.typgross;
+    }
+
+    public void setTypgross(String typgross) {
+        this.typgross = typgross;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Siebanalyse.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,361 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+/**
+ * Siebanalyse generated by hbm2java
+ */
+@Entity
+@Table(name="SIEBANALYSE"
+    ,schema="SEDDB"
+)
+public class Siebanalyse  implements java.io.Serializable {
+
+
+     private long siebanalyseid;
+     private Sohlprobe sohlprobe;
+     private boolean istdigital;
+     private String standort;
+     private BigDecimal dm;
+     private BigDecimal so;
+     private BigDecimal sk;
+     private BigDecimal u;
+     private BigDecimal d90;
+     private BigDecimal d84;
+     private BigDecimal d80;
+     private BigDecimal d75;
+     private BigDecimal d70;
+     private BigDecimal d60;
+     private BigDecimal d50;
+     private BigDecimal d40;
+     private BigDecimal d30;
+     private BigDecimal d25;
+     private BigDecimal d20;
+     private BigDecimal d16;
+     private BigDecimal d10;
+     private BigDecimal dmin;
+     private BigDecimal durchdmin;
+     private BigDecimal dmax;
+     private BigDecimal durchdmax;
+     private String bemerkung;
+     private Ssiebung ssiebung;
+
+    public Siebanalyse() {
+    }
+
+    public Siebanalyse(long siebanalyseid, Sohlprobe sohlprobe, boolean istdigital) {
+        this.siebanalyseid = siebanalyseid;
+        this.sohlprobe = sohlprobe;
+        this.istdigital = istdigital;
+    }
+    public Siebanalyse(long siebanalyseid, Sohlprobe sohlprobe, boolean istdigital, String standort, BigDecimal dm, BigDecimal so, BigDecimal sk, BigDecimal u, BigDecimal d90, BigDecimal d84, BigDecimal d80, BigDecimal d75, BigDecimal d70, BigDecimal d60, BigDecimal d50, BigDecimal d40, BigDecimal d30, BigDecimal d25, BigDecimal d20, BigDecimal d16, BigDecimal d10, BigDecimal dmin, BigDecimal durchdmin, BigDecimal dmax, BigDecimal durchdmax, String bemerkung, Ssiebung ssiebung) {
+       this.siebanalyseid = siebanalyseid;
+       this.sohlprobe = sohlprobe;
+       this.istdigital = istdigital;
+       this.standort = standort;
+       this.dm = dm;
+       this.so = so;
+       this.sk = sk;
+       this.u = u;
+       this.d90 = d90;
+       this.d84 = d84;
+       this.d80 = d80;
+       this.d75 = d75;
+       this.d70 = d70;
+       this.d60 = d60;
+       this.d50 = d50;
+       this.d40 = d40;
+       this.d30 = d30;
+       this.d25 = d25;
+       this.d20 = d20;
+       this.d16 = d16;
+       this.d10 = d10;
+       this.dmin = dmin;
+       this.durchdmin = durchdmin;
+       this.dmax = dmax;
+       this.durchdmax = durchdmax;
+       this.bemerkung = bemerkung;
+       this.ssiebung = ssiebung;
+    }
+
+     @Id
+
+
+    @Column(name="SIEBANALYSEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSiebanalyseid() {
+        return this.siebanalyseid;
+    }
+
+    public void setSiebanalyseid(long siebanalyseid) {
+        this.siebanalyseid = siebanalyseid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SOHLPROBEID", nullable=false)
+    public Sohlprobe getSohlprobe() {
+        return this.sohlprobe;
+    }
+
+    public void setSohlprobe(Sohlprobe sohlprobe) {
+        this.sohlprobe = sohlprobe;
+    }
+
+
+    @Column(name="ISTDIGITAL", nullable=false, precision=1, scale=0)
+    public boolean isIstdigital() {
+        return this.istdigital;
+    }
+
+    public void setIstdigital(boolean istdigital) {
+        this.istdigital = istdigital;
+    }
+
+
+    @Column(name="STANDORT", length=50)
+    public String getStandort() {
+        return this.standort;
+    }
+
+    public void setStandort(String standort) {
+        this.standort = standort;
+    }
+
+
+    @Column(name="DM", precision=7, scale=4)
+    public BigDecimal getDm() {
+        return this.dm;
+    }
+
+    public void setDm(BigDecimal dm) {
+        this.dm = dm;
+    }
+
+
+    @Column(name="SO", precision=8, scale=3)
+    public BigDecimal getSo() {
+        return this.so;
+    }
+
+    public void setSo(BigDecimal so) {
+        this.so = so;
+    }
+
+
+    @Column(name="SK", precision=8, scale=3)
+    public BigDecimal getSk() {
+        return this.sk;
+    }
+
+    public void setSk(BigDecimal sk) {
+        this.sk = sk;
+    }
+
+
+    @Column(name="U", precision=8, scale=3)
+    public BigDecimal getU() {
+        return this.u;
+    }
+
+    public void setU(BigDecimal u) {
+        this.u = u;
+    }
+
+
+    @Column(name="D90", precision=7, scale=4)
+    public BigDecimal getD90() {
+        return this.d90;
+    }
+
+    public void setD90(BigDecimal d90) {
+        this.d90 = d90;
+    }
+
+
+    @Column(name="D84", precision=7, scale=4)
+    public BigDecimal getD84() {
+        return this.d84;
+    }
+
+    public void setD84(BigDecimal d84) {
+        this.d84 = d84;
+    }
+
+
+    @Column(name="D80", precision=7, scale=4)
+    public BigDecimal getD80() {
+        return this.d80;
+    }
+
+    public void setD80(BigDecimal d80) {
+        this.d80 = d80;
+    }
+
+
+    @Column(name="D75", precision=7, scale=4)
+    public BigDecimal getD75() {
+        return this.d75;
+    }
+
+    public void setD75(BigDecimal d75) {
+        this.d75 = d75;
+    }
+
+
+    @Column(name="D70", precision=7, scale=4)
+    public BigDecimal getD70() {
+        return this.d70;
+    }
+
+    public void setD70(BigDecimal d70) {
+        this.d70 = d70;
+    }
+
+
+    @Column(name="D60", precision=7, scale=4)
+    public BigDecimal getD60() {
+        return this.d60;
+    }
+
+    public void setD60(BigDecimal d60) {
+        this.d60 = d60;
+    }
+
+
+    @Column(name="D50", precision=7, scale=4)
+    public BigDecimal getD50() {
+        return this.d50;
+    }
+
+    public void setD50(BigDecimal d50) {
+        this.d50 = d50;
+    }
+
+
+    @Column(name="D40", precision=7, scale=4)
+    public BigDecimal getD40() {
+        return this.d40;
+    }
+
+    public void setD40(BigDecimal d40) {
+        this.d40 = d40;
+    }
+
+
+    @Column(name="D30", precision=7, scale=4)
+    public BigDecimal getD30() {
+        return this.d30;
+    }
+
+    public void setD30(BigDecimal d30) {
+        this.d30 = d30;
+    }
+
+
+    @Column(name="D25", precision=7, scale=4)
+    public BigDecimal getD25() {
+        return this.d25;
+    }
+
+    public void setD25(BigDecimal d25) {
+        this.d25 = d25;
+    }
+
+
+    @Column(name="D20", precision=7, scale=4)
+    public BigDecimal getD20() {
+        return this.d20;
+    }
+
+    public void setD20(BigDecimal d20) {
+        this.d20 = d20;
+    }
+
+
+    @Column(name="D16", precision=7, scale=4)
+    public BigDecimal getD16() {
+        return this.d16;
+    }
+
+    public void setD16(BigDecimal d16) {
+        this.d16 = d16;
+    }
+
+
+    @Column(name="D10", precision=7, scale=4)
+    public BigDecimal getD10() {
+        return this.d10;
+    }
+
+    public void setD10(BigDecimal d10) {
+        this.d10 = d10;
+    }
+
+
+    @Column(name="DMIN", precision=7, scale=4)
+    public BigDecimal getDmin() {
+        return this.dmin;
+    }
+
+    public void setDmin(BigDecimal dmin) {
+        this.dmin = dmin;
+    }
+
+
+    @Column(name="DURCHDMIN", precision=6, scale=3)
+    public BigDecimal getDurchdmin() {
+        return this.durchdmin;
+    }
+
+    public void setDurchdmin(BigDecimal durchdmin) {
+        this.durchdmin = durchdmin;
+    }
+
+
+    @Column(name="DMAX", precision=7, scale=4)
+    public BigDecimal getDmax() {
+        return this.dmax;
+    }
+
+    public void setDmax(BigDecimal dmax) {
+        this.dmax = dmax;
+    }
+
+
+    @Column(name="DURCHDMAX", precision=6, scale=3)
+    public BigDecimal getDurchdmax() {
+        return this.durchdmax;
+    }
+
+    public void setDurchdmax(BigDecimal durchdmax) {
+        this.durchdmax = durchdmax;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @OneToOne(fetch=FetchType.LAZY, mappedBy="siebanalyse")
+    public Ssiebung getSsiebung() {
+        return this.ssiebung;
+    }
+
+    public void setSsiebung(Ssiebung ssiebung) {
+        this.ssiebung = ssiebung;
+    }
+}
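
Note (not part of the changeset above): Siebanalyse exposes the characteristic grain diameters of a sieve analysis (DM, D10 through D90, DMIN/DMAX) as nullable BigDecimal properties. The following is a minimal sketch of reading one of them through Hibernate; the helper class and the externally managed Session are illustrative assumptions only.

    import java.math.BigDecimal;

    import org.hibernate.Session;

    import de.intevation.seddb.model.Siebanalyse;

    public class SiebanalyseExample {

        /** Returns the median grain diameter (D50) of one sieve analysis, or null. */
        public static BigDecimal medianGrainSize(Session session, long siebanalyseid) {
            Siebanalyse analyse = (Siebanalyse) session.get(Siebanalyse.class, siebanalyseid);
            // D50 is optional in the schema, so callers must expect null here as well.
            return analyse != null ? analyse.getD50() : null;
        }
    }
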
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Slotlinks.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,53 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Slotlinks generated by hbm2java
+ */
+@Entity
+@Table(name="SLOTLINKS"
+    ,schema="SEDDB"
+)
+public class Slotlinks  implements java.io.Serializable {
+
+
+     private SlotlinksId id;
+
+    public Slotlinks() {
+    }
+
+    public Slotlinks(SlotlinksId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="slotrechteid", column=@Column(name="SLOTRECHTEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="messungid", column=@Column(name="MESSUNGID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="uferabst", column=@Column(name="UFERABST", nullable=false, precision=8, scale=3) ),
+        @AttributeOverride(name="tsand", column=@Column(name="TSAND", precision=8, scale=3) ),
+        @AttributeOverride(name="tschweb", column=@Column(name="TSCHWEB", precision=8, scale=3) ),
+        @AttributeOverride(name="fv", column=@Column(name="FV", precision=6, scale=3) ),
+        @AttributeOverride(name="vm", column=@Column(name="VM", precision=6, scale=4) ),
+        @AttributeOverride(name="tiefe", column=@Column(name="TIEFE", precision=8, scale=3) ),
+        @AttributeOverride(name="vsohle", column=@Column(name="VSOHLE", precision=6, scale=4) ),
+        @AttributeOverride(name="bemerkung", column=@Column(name="BEMERKUNG", length=240) ),
+        @AttributeOverride(name="uferablinks", column=@Column(name="UFERABLINKS", precision=8, scale=3) ),
+        @AttributeOverride(name="linksabst", column=@Column(name="LINKSABST", precision=22, scale=0) ) } )
+    public SlotlinksId getId() {
+        return this.id;
+    }
+
+    public void setId(SlotlinksId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/SlotlinksId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,211 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * SlotlinksId generated by hbm2java
+ */
+@Embeddable
+public class SlotlinksId  implements java.io.Serializable {
+
+
+     private long slotrechteid;
+     private long messungid;
+     private BigDecimal uferabst;
+     private BigDecimal tsand;
+     private BigDecimal tschweb;
+     private BigDecimal fv;
+     private BigDecimal vm;
+     private BigDecimal tiefe;
+     private BigDecimal vsohle;
+     private String bemerkung;
+     private BigDecimal uferablinks;
+     private BigDecimal linksabst;
+
+    public SlotlinksId() {
+    }
+
+    public SlotlinksId(long slotrechteid, long messungid, BigDecimal uferabst) {
+        this.slotrechteid = slotrechteid;
+        this.messungid = messungid;
+        this.uferabst = uferabst;
+    }
+    public SlotlinksId(long slotrechteid, long messungid, BigDecimal uferabst, BigDecimal tsand, BigDecimal tschweb, BigDecimal fv, BigDecimal vm, BigDecimal tiefe, BigDecimal vsohle, String bemerkung, BigDecimal uferablinks, BigDecimal linksabst) {
+       this.slotrechteid = slotrechteid;
+       this.messungid = messungid;
+       this.uferabst = uferabst;
+       this.tsand = tsand;
+       this.tschweb = tschweb;
+       this.fv = fv;
+       this.vm = vm;
+       this.tiefe = tiefe;
+       this.vsohle = vsohle;
+       this.bemerkung = bemerkung;
+       this.uferablinks = uferablinks;
+       this.linksabst = linksabst;
+    }
+
+
+
+    @Column(name="SLOTRECHTEID", nullable=false, precision=11, scale=0)
+    public long getSlotrechteid() {
+        return this.slotrechteid;
+    }
+
+    public void setSlotrechteid(long slotrechteid) {
+        this.slotrechteid = slotrechteid;
+    }
+
+
+    @Column(name="MESSUNGID", nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+
+
+    @Column(name="UFERABST", nullable=false, precision=8, scale=3)
+    public BigDecimal getUferabst() {
+        return this.uferabst;
+    }
+
+    public void setUferabst(BigDecimal uferabst) {
+        this.uferabst = uferabst;
+    }
+
+
+    @Column(name="TSAND", precision=8, scale=3)
+    public BigDecimal getTsand() {
+        return this.tsand;
+    }
+
+    public void setTsand(BigDecimal tsand) {
+        this.tsand = tsand;
+    }
+
+
+    @Column(name="TSCHWEB", precision=8, scale=3)
+    public BigDecimal getTschweb() {
+        return this.tschweb;
+    }
+
+    public void setTschweb(BigDecimal tschweb) {
+        this.tschweb = tschweb;
+    }
+
+
+    @Column(name="FV", precision=6, scale=3)
+    public BigDecimal getFv() {
+        return this.fv;
+    }
+
+    public void setFv(BigDecimal fv) {
+        this.fv = fv;
+    }
+
+
+    @Column(name="VM", precision=6, scale=4)
+    public BigDecimal getVm() {
+        return this.vm;
+    }
+
+    public void setVm(BigDecimal vm) {
+        this.vm = vm;
+    }
+
+
+    @Column(name="TIEFE", precision=8, scale=3)
+    public BigDecimal getTiefe() {
+        return this.tiefe;
+    }
+
+    public void setTiefe(BigDecimal tiefe) {
+        this.tiefe = tiefe;
+    }
+
+
+    @Column(name="VSOHLE", precision=6, scale=4)
+    public BigDecimal getVsohle() {
+        return this.vsohle;
+    }
+
+    public void setVsohle(BigDecimal vsohle) {
+        this.vsohle = vsohle;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="UFERABLINKS", precision=8, scale=3)
+    public BigDecimal getUferablinks() {
+        return this.uferablinks;
+    }
+
+    public void setUferablinks(BigDecimal uferablinks) {
+        this.uferablinks = uferablinks;
+    }
+
+
+    @Column(name="LINKSABST", precision=22, scale=0)
+    public BigDecimal getLinksabst() {
+        return this.linksabst;
+    }
+
+    public void setLinksabst(BigDecimal linksabst) {
+        this.linksabst = linksabst;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof SlotlinksId) ) return false;
+         SlotlinksId castOther = ( SlotlinksId ) other;
+
+         return (this.getSlotrechteid()==castOther.getSlotrechteid())
+             && (this.getMessungid()==castOther.getMessungid())
+             && ( (this.getUferabst()==castOther.getUferabst()) || ( this.getUferabst()!=null && castOther.getUferabst()!=null && this.getUferabst().equals(castOther.getUferabst()) ) )
+             && ( (this.getTsand()==castOther.getTsand()) || ( this.getTsand()!=null && castOther.getTsand()!=null && this.getTsand().equals(castOther.getTsand()) ) )
+             && ( (this.getTschweb()==castOther.getTschweb()) || ( this.getTschweb()!=null && castOther.getTschweb()!=null && this.getTschweb().equals(castOther.getTschweb()) ) )
+             && ( (this.getFv()==castOther.getFv()) || ( this.getFv()!=null && castOther.getFv()!=null && this.getFv().equals(castOther.getFv()) ) )
+             && ( (this.getVm()==castOther.getVm()) || ( this.getVm()!=null && castOther.getVm()!=null && this.getVm().equals(castOther.getVm()) ) )
+             && ( (this.getTiefe()==castOther.getTiefe()) || ( this.getTiefe()!=null && castOther.getTiefe()!=null && this.getTiefe().equals(castOther.getTiefe()) ) )
+             && ( (this.getVsohle()==castOther.getVsohle()) || ( this.getVsohle()!=null && castOther.getVsohle()!=null && this.getVsohle().equals(castOther.getVsohle()) ) )
+             && ( (this.getBemerkung()==castOther.getBemerkung()) || ( this.getBemerkung()!=null && castOther.getBemerkung()!=null && this.getBemerkung().equals(castOther.getBemerkung()) ) )
+             && ( (this.getUferablinks()==castOther.getUferablinks()) || ( this.getUferablinks()!=null && castOther.getUferablinks()!=null && this.getUferablinks().equals(castOther.getUferablinks()) ) )
+             && ( (this.getLinksabst()==castOther.getLinksabst()) || ( this.getLinksabst()!=null && castOther.getLinksabst()!=null && this.getLinksabst().equals(castOther.getLinksabst()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getSlotrechteid();
+         result = 37 * result + (int) this.getMessungid();
+         result = 37 * result + ( getUferabst() == null ? 0 : this.getUferabst().hashCode() );
+         result = 37 * result + ( getTsand() == null ? 0 : this.getTsand().hashCode() );
+         result = 37 * result + ( getTschweb() == null ? 0 : this.getTschweb().hashCode() );
+         result = 37 * result + ( getFv() == null ? 0 : this.getFv().hashCode() );
+         result = 37 * result + ( getVm() == null ? 0 : this.getVm().hashCode() );
+         result = 37 * result + ( getTiefe() == null ? 0 : this.getTiefe().hashCode() );
+         result = 37 * result + ( getVsohle() == null ? 0 : this.getVsohle().hashCode() );
+         result = 37 * result + ( getBemerkung() == null ? 0 : this.getBemerkung().hashCode() );
+         result = 37 * result + ( getUferablinks() == null ? 0 : this.getUferablinks().hashCode() );
+         result = 37 * result + ( getLinksabst() == null ? 0 : this.getLinksabst().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Slotrechte.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,183 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+/**
+ * Slotrechte generated by hbm2java
+ */
+@Entity
+@Table(name="SLOTRECHTE"
+    ,schema="SEDDB"
+)
+public class Slotrechte  implements java.io.Serializable {
+
+
+     private long slotrechteid;
+     private Messung messung;
+     private BigDecimal uferabst;
+     private BigDecimal tsand;
+     private BigDecimal tschweb;
+     private BigDecimal fv;
+     private BigDecimal vm;
+     private BigDecimal tiefe;
+     private BigDecimal vsohle;
+     private String bemerkung;
+     private BigDecimal uferablinks;
+     private Set<Sprobe> sprobes = new HashSet<Sprobe>(0);
+
+    public Slotrechte() {
+    }
+
+    public Slotrechte(long slotrechteid, Messung messung, BigDecimal uferabst) {
+        this.slotrechteid = slotrechteid;
+        this.messung = messung;
+        this.uferabst = uferabst;
+    }
+    public Slotrechte(long slotrechteid, Messung messung, BigDecimal uferabst, BigDecimal tsand, BigDecimal tschweb, BigDecimal fv, BigDecimal vm, BigDecimal tiefe, BigDecimal vsohle, String bemerkung, BigDecimal uferablinks, Set<Sprobe> sprobes) {
+       this.slotrechteid = slotrechteid;
+       this.messung = messung;
+       this.uferabst = uferabst;
+       this.tsand = tsand;
+       this.tschweb = tschweb;
+       this.fv = fv;
+       this.vm = vm;
+       this.tiefe = tiefe;
+       this.vsohle = vsohle;
+       this.bemerkung = bemerkung;
+       this.uferablinks = uferablinks;
+       this.sprobes = sprobes;
+    }
+
+     @Id
+
+
+    @Column(name="SLOTRECHTEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSlotrechteid() {
+        return this.slotrechteid;
+    }
+
+    public void setSlotrechteid(long slotrechteid) {
+        this.slotrechteid = slotrechteid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="MESSUNGID", nullable=false)
+    public Messung getMessung() {
+        return this.messung;
+    }
+
+    public void setMessung(Messung messung) {
+        this.messung = messung;
+    }
+
+
+    @Column(name="UFERABST", nullable=false, precision=8, scale=3)
+    public BigDecimal getUferabst() {
+        return this.uferabst;
+    }
+
+    public void setUferabst(BigDecimal uferabst) {
+        this.uferabst = uferabst;
+    }
+
+
+    @Column(name="TSAND", precision=8, scale=3)
+    public BigDecimal getTsand() {
+        return this.tsand;
+    }
+
+    public void setTsand(BigDecimal tsand) {
+        this.tsand = tsand;
+    }
+
+
+    @Column(name="TSCHWEB", precision=8, scale=3)
+    public BigDecimal getTschweb() {
+        return this.tschweb;
+    }
+
+    public void setTschweb(BigDecimal tschweb) {
+        this.tschweb = tschweb;
+    }
+
+
+    @Column(name="FV", precision=6, scale=3)
+    public BigDecimal getFv() {
+        return this.fv;
+    }
+
+    public void setFv(BigDecimal fv) {
+        this.fv = fv;
+    }
+
+
+    @Column(name="VM", precision=6, scale=4)
+    public BigDecimal getVm() {
+        return this.vm;
+    }
+
+    public void setVm(BigDecimal vm) {
+        this.vm = vm;
+    }
+
+
+    @Column(name="TIEFE", precision=8, scale=3)
+    public BigDecimal getTiefe() {
+        return this.tiefe;
+    }
+
+    public void setTiefe(BigDecimal tiefe) {
+        this.tiefe = tiefe;
+    }
+
+
+    @Column(name="VSOHLE", precision=6, scale=4)
+    public BigDecimal getVsohle() {
+        return this.vsohle;
+    }
+
+    public void setVsohle(BigDecimal vsohle) {
+        this.vsohle = vsohle;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="UFERABLINKS", precision=8, scale=3)
+    public BigDecimal getUferablinks() {
+        return this.uferablinks;
+    }
+
+    public void setUferablinks(BigDecimal uferablinks) {
+        this.uferablinks = uferablinks;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="slotrechte")
+    public Set<Sprobe> getSprobes() {
+        return this.sprobes;
+    }
+
+    public void setSprobes(Set<Sprobe> sprobes) {
+        this.sprobes = sprobes;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Sohlprobe.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,156 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+/**
+ * Sohlprobe generated by hbm2java
+ */
+@Entity
+@Table(name="SOHLPROBE"
+    ,schema="SEDDB"
+)
+public class Sohlprobe  implements java.io.Serializable {
+
+
+     private long sohlprobeid;
+     private Zzprobenahmeart zzprobenahmeart;
+     private Sohltest sohltest;
+     private BigDecimal tiefevon;
+     private BigDecimal tiefebis;
+     private String beschreibung;
+     private Date lastupdated;
+     private Set<Probebild> probebilds = new HashSet<Probebild>(0);
+     private Set<Siebanalyse> siebanalyses = new HashSet<Siebanalyse>(0);
+
+    public Sohlprobe() {
+    }
+
+    public Sohlprobe(long sohlprobeid, Zzprobenahmeart zzprobenahmeart, Sohltest sohltest, Date lastupdated) {
+        this.sohlprobeid = sohlprobeid;
+        this.zzprobenahmeart = zzprobenahmeart;
+        this.sohltest = sohltest;
+        this.lastupdated = lastupdated;
+    }
+    public Sohlprobe(long sohlprobeid, Zzprobenahmeart zzprobenahmeart, Sohltest sohltest, BigDecimal tiefevon, BigDecimal tiefebis, String beschreibung, Date lastupdated, Set<Probebild> probebilds, Set<Siebanalyse> siebanalyses) {
+       this.sohlprobeid = sohlprobeid;
+       this.zzprobenahmeart = zzprobenahmeart;
+       this.sohltest = sohltest;
+       this.tiefevon = tiefevon;
+       this.tiefebis = tiefebis;
+       this.beschreibung = beschreibung;
+       this.lastupdated = lastupdated;
+       this.probebilds = probebilds;
+       this.siebanalyses = siebanalyses;
+    }
+
+     @Id
+
+
+    @Column(name="SOHLPROBEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSohlprobeid() {
+        return this.sohlprobeid;
+    }
+
+    public void setSohlprobeid(long sohlprobeid) {
+        this.sohlprobeid = sohlprobeid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="PROBENAHMEARTID", nullable=false)
+    public Zzprobenahmeart getZzprobenahmeart() {
+        return this.zzprobenahmeart;
+    }
+
+    public void setZzprobenahmeart(Zzprobenahmeart zzprobenahmeart) {
+        this.zzprobenahmeart = zzprobenahmeart;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SOHLTESTID", nullable=false)
+    public Sohltest getSohltest() {
+        return this.sohltest;
+    }
+
+    public void setSohltest(Sohltest sohltest) {
+        this.sohltest = sohltest;
+    }
+
+
+    @Column(name="TIEFEVON", precision=8, scale=3)
+    public BigDecimal getTiefevon() {
+        return this.tiefevon;
+    }
+
+    public void setTiefevon(BigDecimal tiefevon) {
+        this.tiefevon = tiefevon;
+    }
+
+
+    @Column(name="TIEFEBIS", precision=8, scale=3)
+    public BigDecimal getTiefebis() {
+        return this.tiefebis;
+    }
+
+    public void setTiefebis(BigDecimal tiefebis) {
+        this.tiefebis = tiefebis;
+    }
+
+
+    @Column(name="BESCHREIBUNG", length=1024)
+    public String getBeschreibung() {
+        return this.beschreibung;
+    }
+
+    public void setBeschreibung(String beschreibung) {
+        this.beschreibung = beschreibung;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="LASTUPDATED", nullable=false, length=7)
+    public Date getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Date lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="sohlprobe")
+    public Set<Probebild> getProbebilds() {
+        return this.probebilds;
+    }
+
+    public void setProbebilds(Set<Probebild> probebilds) {
+        this.probebilds = probebilds;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="sohlprobe")
+    public Set<Siebanalyse> getSiebanalyses() {
+        return this.siebanalyses;
+    }
+
+    public void setSiebanalyses(Set<Siebanalyse> siebanalyses) {
+        this.siebanalyses = siebanalyses;
+    }
+
+
+
+
+}
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Sohltest.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,299 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+/**
+ * Sohltest generated by hbm2java
+ */
+@Entity
+@Table(name="SOHLTEST"
+    ,schema="SEDDB"
+)
+public class Sohltest  implements java.io.Serializable {
+
+     private long sohltestid;
+     private Station station;
+     private Zzsondierungart zzsondierungart;
+     private Zzarchiv zzarchiv;
+     private Date datum;
+     private boolean istnurjahr;
+     private BigDecimal km;
+     private BigDecimal abstand;
+     private boolean abstistvonlinks;
+     private boolean abstbezug;
+     private BigDecimal abstoffset;
+     private BigDecimal abstlinks;
+     private BigDecimal rechts;
+     private BigDecimal hoch;
+     private BigDecimal hoehe;
+     private String bemerkung;
+     private String sohlebeschreib;
+     private String sondierungbeschreib;
+     private Date lastupdated;
+     private Set<Bild> bilds = new HashSet<Bild>(0);
+     private Set<Sohlprobe> sohlprobes = new HashSet<Sohlprobe>(0);
+
+    public Sohltest() {
+    }
+
+
+    public Sohltest(long sohltestid, Station station, Date datum, boolean istnurjahr, BigDecimal abstand, boolean abstistvonlinks, boolean abstbezug, BigDecimal abstoffset, Date lastupdated) {
+        this.sohltestid = sohltestid;
+        this.station = station;
+        this.datum = datum;
+        this.istnurjahr = istnurjahr;
+        this.abstand = abstand;
+        this.abstistvonlinks = abstistvonlinks;
+        this.abstbezug = abstbezug;
+        this.abstoffset = abstoffset;
+        this.lastupdated = lastupdated;
+    }
+    public Sohltest(long sohltestid, Station station, Zzsondierungart zzsondierungart, Zzarchiv zzarchiv, Date datum, boolean istnurjahr, BigDecimal km, BigDecimal abstand, boolean abstistvonlinks, boolean abstbezug, BigDecimal abstoffset, BigDecimal abstlinks, BigDecimal rechts, BigDecimal hoch, BigDecimal hoehe, String bemerkung, String sohlebeschreib, String sondierungbeschreib, Date lastupdated, Set<Bild> bilds, Set<Sohlprobe> sohlprobes) {
+       this.sohltestid = sohltestid;
+       this.station = station;
+       this.zzsondierungart = zzsondierungart;
+       this.zzarchiv = zzarchiv;
+       this.datum = datum;
+       this.istnurjahr = istnurjahr;
+       this.km = km;
+       this.abstand = abstand;
+       this.abstistvonlinks = abstistvonlinks;
+       this.abstbezug = abstbezug;
+       this.abstoffset = abstoffset;
+       this.abstlinks = abstlinks;
+       this.rechts = rechts;
+       this.hoch = hoch;
+       this.hoehe = hoehe;
+       this.bemerkung = bemerkung;
+       this.sohlebeschreib = sohlebeschreib;
+       this.sondierungbeschreib = sondierungbeschreib;
+       this.lastupdated = lastupdated;
+       this.bilds = bilds;
+       this.sohlprobes = sohlprobes;
+    }
+
+     @Id
+
+
+    @Column(name="SOHLTESTID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSohltestid() {
+        return this.sohltestid;
+    }
+
+    public void setSohltestid(long sohltestid) {
+        this.sohltestid = sohltestid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="STATIONID", nullable=false)
+    public Station getStation() {
+        return this.station;
+    }
+
+    public void setStation(Station station) {
+        this.station = station;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SONDIERUNGARTID")
+    public Zzsondierungart getZzsondierungart() {
+        return this.zzsondierungart;
+    }
+
+    public void setZzsondierungart(Zzsondierungart zzsondierungart) {
+        this.zzsondierungart = zzsondierungart;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="ARCHIVID")
+    public Zzarchiv getZzarchiv() {
+        return this.zzarchiv;
+    }
+
+    public void setZzarchiv(Zzarchiv zzarchiv) {
+        this.zzarchiv = zzarchiv;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="DATUM", nullable=false, length=7)
+    public Date getDatum() {
+        return this.datum;
+    }
+
+    public void setDatum(Date datum) {
+        this.datum = datum;
+    }
+
+
+    @Column(name="ISTNURJAHR", nullable=false, precision=1, scale=0)
+    public boolean isIstnurjahr() {
+        return this.istnurjahr;
+    }
+
+    public void setIstnurjahr(boolean istnurjahr) {
+        this.istnurjahr = istnurjahr;
+    }
+
+
+    @Column(name="KM", precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="ABSTAND", nullable=false, precision=8, scale=3)
+    public BigDecimal getAbstand() {
+        return this.abstand;
+    }
+
+    public void setAbstand(BigDecimal abstand) {
+        this.abstand = abstand;
+    }
+
+
+    @Column(name="ABSTISTVONLINKS", nullable=false, precision=1, scale=0)
+    public boolean isAbstistvonlinks() {
+        return this.abstistvonlinks;
+    }
+
+    public void setAbstistvonlinks(boolean abstistvonlinks) {
+        this.abstistvonlinks = abstistvonlinks;
+    }
+
+
+    @Column(name="ABSTBEZUG", nullable=false, precision=1, scale=0)
+    public boolean isAbstbezug() {
+        return this.abstbezug;
+    }
+
+    public void setAbstbezug(boolean abstbezug) {
+        this.abstbezug = abstbezug;
+    }
+
+
+    @Column(name="ABSTOFFSET", nullable=false, precision=8, scale=3)
+    public BigDecimal getAbstoffset() {
+        return this.abstoffset;
+    }
+
+    public void setAbstoffset(BigDecimal abstoffset) {
+        this.abstoffset = abstoffset;
+    }
+
+
+    @Column(name="ABSTLINKS", precision=8, scale=3)
+    public BigDecimal getAbstlinks() {
+        return this.abstlinks;
+    }
+
+    public void setAbstlinks(BigDecimal abstlinks) {
+        this.abstlinks = abstlinks;
+    }
+
+
+    @Column(name="RECHTS", precision=11, scale=3)
+    public BigDecimal getRechts() {
+        return this.rechts;
+    }
+
+    public void setRechts(BigDecimal rechts) {
+        this.rechts = rechts;
+    }
+
+
+    @Column(name="HOCH", precision=11, scale=3)
+    public BigDecimal getHoch() {
+        return this.hoch;
+    }
+
+    public void setHoch(BigDecimal hoch) {
+        this.hoch = hoch;
+    }
+
+
+    @Column(name="HOEHE", precision=8, scale=3)
+    public BigDecimal getHoehe() {
+        return this.hoehe;
+    }
+
+    public void setHoehe(BigDecimal hoehe) {
+        this.hoehe = hoehe;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="SOHLEBESCHREIB", length=1024)
+    public String getSohlebeschreib() {
+        return this.sohlebeschreib;
+    }
+
+    public void setSohlebeschreib(String sohlebeschreib) {
+        this.sohlebeschreib = sohlebeschreib;
+    }
+
+
+    @Column(name="SONDIERUNGBESCHREIB", length=1024)
+    public String getSondierungbeschreib() {
+        return this.sondierungbeschreib;
+    }
+
+    public void setSondierungbeschreib(String sondierungbeschreib) {
+        this.sondierungbeschreib = sondierungbeschreib;
+    }
+
+    @Temporal(TemporalType.DATE)
+    @Column(name="LASTUPDATED", nullable=false, length=7)
+    public Date getLastupdated() {
+        return this.lastupdated;
+    }
+
+    public void setLastupdated(Date lastupdated) {
+        this.lastupdated = lastupdated;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="sohltest")
+    public Set<Bild> getBilds() {
+        return this.bilds;
+    }
+
+    public void setBilds(Set<Bild> bilds) {
+        this.bilds = bilds;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="sohltest")
+    public Set<Sohlprobe> getSohlprobes() {
+        return this.sohlprobes;
+    }
+
+    public void setSohlprobes(Set<Sohlprobe> sohlprobes) {
+        this.sohlprobes = sohlprobes;
+    }
+}
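
Note (not part of the changeset above): Sohltest links a bed probing to its Station and carries the associated Sohlprobe records through the lazy one-to-many mapping shown above. The sketch below illustrates navigating that association; the helper class and HQL string are assumptions, and it requires the loading Session to still be open when the collection is touched.

    import org.hibernate.Query;
    import org.hibernate.Session;

    import de.intevation.seddb.model.Sohlprobe;
    import de.intevation.seddb.model.Sohltest;

    public class SohltestExample {

        /** Prints the depth range of every probe taken for one Sohltest. */
        public static void dumpProbes(Session session, long sohltestid) {
            Query query = session.createQuery(
                "from Sohltest where sohltestid = :id");
            query.setLong("id", sohltestid);

            Sohltest test = (Sohltest) query.uniqueResult();
            if (test == null) {
                return;
            }
            // The collection is fetched lazily and must be touched
            // before the session is closed.
            for (Sohlprobe probe : test.getSohlprobes()) {
                System.out.println(probe.getSohlprobeid() + ": "
                    + probe.getTiefevon() + " - " + probe.getTiefebis());
            }
        }
    }
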
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Sprobe.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,154 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * Sprobe generated by hbm2java
+ */
+@Entity
+@Table(name="SPROBE"
+    ,schema="SEDDB"
+)
+public class Sprobe  implements java.io.Serializable {
+
+
+     private SprobeId id;
+     private Slotrechte slotrechte;
+     private short wprozpkt;
+     private BigDecimal wpkt;
+     private BigDecimal vpkt;
+     private BigDecimal csandpkt;
+     private BigDecimal cschwebpkt;
+     private BigDecimal sandtrieb;
+     private BigDecimal schwebtrieb;
+
+    public Sprobe() {
+    }
+
+
+    public Sprobe(SprobeId id, Slotrechte slotrechte, short wprozpkt, BigDecimal wpkt, BigDecimal vpkt, BigDecimal csandpkt, BigDecimal cschwebpkt) {
+        this.id = id;
+        this.slotrechte = slotrechte;
+        this.wprozpkt = wprozpkt;
+        this.wpkt = wpkt;
+        this.vpkt = vpkt;
+        this.csandpkt = csandpkt;
+        this.cschwebpkt = cschwebpkt;
+    }
+    public Sprobe(SprobeId id, Slotrechte slotrechte, short wprozpkt, BigDecimal wpkt, BigDecimal vpkt, BigDecimal csandpkt, BigDecimal cschwebpkt, BigDecimal sandtrieb, BigDecimal schwebtrieb) {
+       this.id = id;
+       this.slotrechte = slotrechte;
+       this.wprozpkt = wprozpkt;
+       this.wpkt = wpkt;
+       this.vpkt = vpkt;
+       this.csandpkt = csandpkt;
+       this.cschwebpkt = cschwebpkt;
+       this.sandtrieb = sandtrieb;
+       this.schwebtrieb = schwebtrieb;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="slotrechteid", column=@Column(name="SLOTRECHTEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="lfdnr", column=@Column(name="LFDNR", nullable=false, precision=5, scale=0) ) } )
+    public SprobeId getId() {
+        return this.id;
+    }
+
+    public void setId(SprobeId id) {
+        this.id = id;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="SLOTRECHTEID", nullable=false, insertable=false, updatable=false)
+    public Slotrechte getSlotrechte() {
+        return this.slotrechte;
+    }
+
+    public void setSlotrechte(Slotrechte slotrechte) {
+        this.slotrechte = slotrechte;
+    }
+
+
+    @Column(name="WPROZPKT", nullable=false, precision=3, scale=0)
+    public short getWprozpkt() {
+        return this.wprozpkt;
+    }
+
+    public void setWprozpkt(short wprozpkt) {
+        this.wprozpkt = wprozpkt;
+    }
+
+
+    @Column(name="WPKT", nullable=false, precision=8, scale=3)
+    public BigDecimal getWpkt() {
+        return this.wpkt;
+    }
+
+    public void setWpkt(BigDecimal wpkt) {
+        this.wpkt = wpkt;
+    }
+
+
+    @Column(name="VPKT", nullable=false, precision=6, scale=4)
+    public BigDecimal getVpkt() {
+        return this.vpkt;
+    }
+
+    public void setVpkt(BigDecimal vpkt) {
+        this.vpkt = vpkt;
+    }
+
+
+    @Column(name="CSANDPKT", nullable=false, precision=8, scale=3)
+    public BigDecimal getCsandpkt() {
+        return this.csandpkt;
+    }
+
+    public void setCsandpkt(BigDecimal csandpkt) {
+        this.csandpkt = csandpkt;
+    }
+
+
+    @Column(name="CSCHWEBPKT", nullable=false, precision=8, scale=3)
+    public BigDecimal getCschwebpkt() {
+        return this.cschwebpkt;
+    }
+
+    public void setCschwebpkt(BigDecimal cschwebpkt) {
+        this.cschwebpkt = cschwebpkt;
+    }
+
+
+    @Column(name="SANDTRIEB", precision=8, scale=3)
+    public BigDecimal getSandtrieb() {
+        return this.sandtrieb;
+    }
+
+    public void setSandtrieb(BigDecimal sandtrieb) {
+        this.sandtrieb = sandtrieb;
+    }
+
+
+    @Column(name="SCHWEBTRIEB", precision=8, scale=3)
+    public BigDecimal getSchwebtrieb() {
+        return this.schwebtrieb;
+    }
+
+    public void setSchwebtrieb(BigDecimal schwebtrieb) {
+        this.schwebtrieb = schwebtrieb;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/SprobeId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * SprobeId generated by hbm2java
+ */
+@Embeddable
+public class SprobeId  implements java.io.Serializable {
+
+
+     private long slotrechteid;
+     private int lfdnr;
+
+    public SprobeId() {
+    }
+
+    public SprobeId(long slotrechteid, int lfdnr) {
+       this.slotrechteid = slotrechteid;
+       this.lfdnr = lfdnr;
+    }
+
+
+
+    @Column(name="SLOTRECHTEID", nullable=false, precision=11, scale=0)
+    public long getSlotrechteid() {
+        return this.slotrechteid;
+    }
+
+    public void setSlotrechteid(long slotrechteid) {
+        this.slotrechteid = slotrechteid;
+    }
+
+
+    @Column(name="LFDNR", nullable=false, precision=5, scale=0)
+    public int getLfdnr() {
+        return this.lfdnr;
+    }
+
+    public void setLfdnr(int lfdnr) {
+        this.lfdnr = lfdnr;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof SprobeId) ) return false;
+         SprobeId castOther = ( SprobeId ) other;
+
+         return (this.getSlotrechteid()==castOther.getSlotrechteid())
+             && (this.getLfdnr()==castOther.getLfdnr());
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getSlotrechteid();
+         result = 37 * result + this.getLfdnr();
+         return result;
+   }
+}
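
Note (not part of the changeset above): Sprobe is keyed by the embeddable SprobeId class (@EmbeddedId), so a lookup passes an id object rather than a single column value. A minimal sketch under that assumption, with an externally managed Session; the helper class is illustrative only.

    import org.hibernate.Session;

    import de.intevation.seddb.model.Sprobe;
    import de.intevation.seddb.model.SprobeId;

    public class SprobeLookup {

        /** Loads one point sample of a vertical by (SLOTRECHTEID, LFDNR). */
        public static Sprobe find(Session session, long slotrechteid, int lfdnr) {
            // Composite keys are looked up via the embeddable id class,
            // which is why SprobeId implements equals() and hashCode().
            return (Sprobe) session.get(Sprobe.class, new SprobeId(slotrechteid, lfdnr));
        }
    }
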
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Ssiebung.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,351 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToOne;
+import javax.persistence.PrimaryKeyJoinColumn;
+import javax.persistence.Table;
+import org.hibernate.annotations.GenericGenerator;
+import org.hibernate.annotations.Parameter;
+
+/**
+ * Ssiebung generated by hbm2java
+ */
+@Entity
+@Table(name="SSIEBUNG"
+    ,schema="SEDDB"
+)
+public class Ssiebung  implements java.io.Serializable {
+
+
+     private long siebanalyseid;
+     private Siebanalyse siebanalyse;
+     private Gsiebsatz gsiebsatz;
+     private BigDecimal gmasse;
+     private BigDecimal rsieb01;
+     private BigDecimal rsieb02;
+     private BigDecimal rsieb03;
+     private BigDecimal rsieb04;
+     private BigDecimal rsieb05;
+     private BigDecimal rsieb06;
+     private BigDecimal rsieb07;
+     private BigDecimal rsieb08;
+     private BigDecimal rsieb09;
+     private BigDecimal rsieb10;
+     private BigDecimal rsieb11;
+     private BigDecimal rsieb12;
+     private BigDecimal rsieb13;
+     private BigDecimal rsieb14;
+     private BigDecimal rsieb15;
+     private BigDecimal rsieb16;
+     private BigDecimal rsieb17;
+     private BigDecimal rsieb18;
+     private BigDecimal rsieb19;
+     private BigDecimal rsieb20;
+     private BigDecimal rsieb21;
+     private BigDecimal rest;
+
+    public Ssiebung() {
+    }
+
+    public Ssiebung(Siebanalyse siebanalyse, Gsiebsatz gsiebsatz) {
+        this.siebanalyse = siebanalyse;
+        this.gsiebsatz = gsiebsatz;
+    }
+    public Ssiebung(Siebanalyse siebanalyse, Gsiebsatz gsiebsatz, BigDecimal gmasse, BigDecimal rsieb01, BigDecimal rsieb02, BigDecimal rsieb03, BigDecimal rsieb04, BigDecimal rsieb05, BigDecimal rsieb06, BigDecimal rsieb07, BigDecimal rsieb08, BigDecimal rsieb09, BigDecimal rsieb10, BigDecimal rsieb11, BigDecimal rsieb12, BigDecimal rsieb13, BigDecimal rsieb14, BigDecimal rsieb15, BigDecimal rsieb16, BigDecimal rsieb17, BigDecimal rsieb18, BigDecimal rsieb19, BigDecimal rsieb20, BigDecimal rsieb21, BigDecimal rest) {
+       this.siebanalyse = siebanalyse;
+       this.gsiebsatz = gsiebsatz;
+       this.gmasse = gmasse;
+       this.rsieb01 = rsieb01;
+       this.rsieb02 = rsieb02;
+       this.rsieb03 = rsieb03;
+       this.rsieb04 = rsieb04;
+       this.rsieb05 = rsieb05;
+       this.rsieb06 = rsieb06;
+       this.rsieb07 = rsieb07;
+       this.rsieb08 = rsieb08;
+       this.rsieb09 = rsieb09;
+       this.rsieb10 = rsieb10;
+       this.rsieb11 = rsieb11;
+       this.rsieb12 = rsieb12;
+       this.rsieb13 = rsieb13;
+       this.rsieb14 = rsieb14;
+       this.rsieb15 = rsieb15;
+       this.rsieb16 = rsieb16;
+       this.rsieb17 = rsieb17;
+       this.rsieb18 = rsieb18;
+       this.rsieb19 = rsieb19;
+       this.rsieb20 = rsieb20;
+       this.rsieb21 = rsieb21;
+       this.rest = rest;
+    }
+
+    @GenericGenerator(name="generator", strategy="foreign", parameters=@Parameter(name="property", value="siebanalyse")) @Id @GeneratedValue(generator="generator")
+
+
+    @Column(name="SIEBANALYSEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSiebanalyseid() {
+        return this.siebanalyseid;
+    }
+
+    public void setSiebanalyseid(long siebanalyseid) {
+        this.siebanalyseid = siebanalyseid;
+    }
+
+    @OneToOne(fetch=FetchType.LAZY) @PrimaryKeyJoinColumn
+    public Siebanalyse getSiebanalyse() {
+        return this.siebanalyse;
+    }
+
+    public void setSiebanalyse(Siebanalyse siebanalyse) {
+        this.siebanalyse = siebanalyse;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GSIEBSATZID", nullable=false)
+    public Gsiebsatz getGsiebsatz() {
+        return this.gsiebsatz;
+    }
+
+    public void setGsiebsatz(Gsiebsatz gsiebsatz) {
+        this.gsiebsatz = gsiebsatz;
+    }
+
+
+    @Column(name="GMASSE", precision=9, scale=3)
+    public BigDecimal getGmasse() {
+        return this.gmasse;
+    }
+
+    public void setGmasse(BigDecimal gmasse) {
+        this.gmasse = gmasse;
+    }
+
+
+    @Column(name="RSIEB01", precision=9, scale=3)
+    public BigDecimal getRsieb01() {
+        return this.rsieb01;
+    }
+
+    public void setRsieb01(BigDecimal rsieb01) {
+        this.rsieb01 = rsieb01;
+    }
+
+
+    @Column(name="RSIEB02", precision=9, scale=3)
+    public BigDecimal getRsieb02() {
+        return this.rsieb02;
+    }
+
+    public void setRsieb02(BigDecimal rsieb02) {
+        this.rsieb02 = rsieb02;
+    }
+
+
+    @Column(name="RSIEB03", precision=9, scale=3)
+    public BigDecimal getRsieb03() {
+        return this.rsieb03;
+    }
+
+    public void setRsieb03(BigDecimal rsieb03) {
+        this.rsieb03 = rsieb03;
+    }
+
+
+    @Column(name="RSIEB04", precision=9, scale=3)
+    public BigDecimal getRsieb04() {
+        return this.rsieb04;
+    }
+
+    public void setRsieb04(BigDecimal rsieb04) {
+        this.rsieb04 = rsieb04;
+    }
+
+
+    @Column(name="RSIEB05", precision=9, scale=3)
+    public BigDecimal getRsieb05() {
+        return this.rsieb05;
+    }
+
+    public void setRsieb05(BigDecimal rsieb05) {
+        this.rsieb05 = rsieb05;
+    }
+
+
+    @Column(name="RSIEB06", precision=9, scale=3)
+    public BigDecimal getRsieb06() {
+        return this.rsieb06;
+    }
+
+    public void setRsieb06(BigDecimal rsieb06) {
+        this.rsieb06 = rsieb06;
+    }
+
+
+    @Column(name="RSIEB07", precision=9, scale=3)
+    public BigDecimal getRsieb07() {
+        return this.rsieb07;
+    }
+
+    public void setRsieb07(BigDecimal rsieb07) {
+        this.rsieb07 = rsieb07;
+    }
+
+
+    @Column(name="RSIEB08", precision=9, scale=3)
+    public BigDecimal getRsieb08() {
+        return this.rsieb08;
+    }
+
+    public void setRsieb08(BigDecimal rsieb08) {
+        this.rsieb08 = rsieb08;
+    }
+
+
+    @Column(name="RSIEB09", precision=9, scale=3)
+    public BigDecimal getRsieb09() {
+        return this.rsieb09;
+    }
+
+    public void setRsieb09(BigDecimal rsieb09) {
+        this.rsieb09 = rsieb09;
+    }
+
+
+    @Column(name="RSIEB10", precision=9, scale=3)
+    public BigDecimal getRsieb10() {
+        return this.rsieb10;
+    }
+
+    public void setRsieb10(BigDecimal rsieb10) {
+        this.rsieb10 = rsieb10;
+    }
+
+
+    @Column(name="RSIEB11", precision=9, scale=3)
+    public BigDecimal getRsieb11() {
+        return this.rsieb11;
+    }
+
+    public void setRsieb11(BigDecimal rsieb11) {
+        this.rsieb11 = rsieb11;
+    }
+
+
+    @Column(name="RSIEB12", precision=9, scale=3)
+    public BigDecimal getRsieb12() {
+        return this.rsieb12;
+    }
+
+    public void setRsieb12(BigDecimal rsieb12) {
+        this.rsieb12 = rsieb12;
+    }
+
+
+    @Column(name="RSIEB13", precision=9, scale=3)
+    public BigDecimal getRsieb13() {
+        return this.rsieb13;
+    }
+
+    public void setRsieb13(BigDecimal rsieb13) {
+        this.rsieb13 = rsieb13;
+    }
+
+
+    @Column(name="RSIEB14", precision=9, scale=3)
+    public BigDecimal getRsieb14() {
+        return this.rsieb14;
+    }
+
+    public void setRsieb14(BigDecimal rsieb14) {
+        this.rsieb14 = rsieb14;
+    }
+
+
+    @Column(name="RSIEB15", precision=9, scale=3)
+    public BigDecimal getRsieb15() {
+        return this.rsieb15;
+    }
+
+    public void setRsieb15(BigDecimal rsieb15) {
+        this.rsieb15 = rsieb15;
+    }
+
+
+    @Column(name="RSIEB16", precision=9, scale=3)
+    public BigDecimal getRsieb16() {
+        return this.rsieb16;
+    }
+
+    public void setRsieb16(BigDecimal rsieb16) {
+        this.rsieb16 = rsieb16;
+    }
+
+
+    @Column(name="RSIEB17", precision=9, scale=3)
+    public BigDecimal getRsieb17() {
+        return this.rsieb17;
+    }
+
+    public void setRsieb17(BigDecimal rsieb17) {
+        this.rsieb17 = rsieb17;
+    }
+
+
+    @Column(name="RSIEB18", precision=9, scale=3)
+    public BigDecimal getRsieb18() {
+        return this.rsieb18;
+    }
+
+    public void setRsieb18(BigDecimal rsieb18) {
+        this.rsieb18 = rsieb18;
+    }
+
+
+    @Column(name="RSIEB19", precision=9, scale=3)
+    public BigDecimal getRsieb19() {
+        return this.rsieb19;
+    }
+
+    public void setRsieb19(BigDecimal rsieb19) {
+        this.rsieb19 = rsieb19;
+    }
+
+
+    @Column(name="RSIEB20", precision=9, scale=3)
+    public BigDecimal getRsieb20() {
+        return this.rsieb20;
+    }
+
+    public void setRsieb20(BigDecimal rsieb20) {
+        this.rsieb20 = rsieb20;
+    }
+
+
+    @Column(name="RSIEB21", precision=9, scale=3)
+    public BigDecimal getRsieb21() {
+        return this.rsieb21;
+    }
+
+    public void setRsieb21(BigDecimal rsieb21) {
+        this.rsieb21 = rsieb21;
+    }
+
+
+    @Column(name="REST", precision=9, scale=3)
+    public BigDecimal getRest() {
+        return this.rest;
+    }
+
+    public void setRest(BigDecimal rest) {
+        this.rest = rest;
+    }
+}
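
Note (not part of the changeset above): Ssiebung shares its primary key with Siebanalyse: the "foreign" generator copies SIEBANALYSEID from the siebanalyse property, and the @OneToOne/@PrimaryKeyJoinColumn pair maps the association without an extra join column. The sketch below shows how such a row would typically be persisted; it assumes the referenced Siebanalyse and Gsiebsatz are already loaded and that the caller manages the transaction.

    import org.hibernate.Session;

    import de.intevation.seddb.model.Gsiebsatz;
    import de.intevation.seddb.model.Siebanalyse;
    import de.intevation.seddb.model.Ssiebung;

    public class SsiebungExample {

        /** Attaches sieve-retention data to an existing Siebanalyse. */
        public static Ssiebung createFor(Session session, Siebanalyse analyse, Gsiebsatz siebsatz) {
            Ssiebung siebung = new Ssiebung(analyse, siebsatz);
            // No id is set explicitly: the "foreign" generator copies
            // SIEBANALYSEID from the associated Siebanalyse on save.
            session.save(siebung);
            return siebung;
        }
    }
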
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Ssiebungsieb.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,88 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Ssiebungsieb generated by hbm2java
+ */
+@Entity
+@Table(name="SSIEBUNGSIEB"
+    ,schema="SEDDB"
+)
+public class Ssiebungsieb  implements java.io.Serializable {
+
+
+     private SsiebungsiebId id;
+
+    public Ssiebungsieb() {
+    }
+
+    public Ssiebungsieb(SsiebungsiebId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="siebanalyseid", column=@Column(name="SIEBANALYSEID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gsiebsatzid", column=@Column(name="GSIEBSATZID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gmasse", column=@Column(name="GMASSE", precision=9, scale=3) ),
+        @AttributeOverride(name="masche01", column=@Column(name="MASCHE01", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck01", column=@Column(name="RUECK01", precision=9, scale=3) ),
+        @AttributeOverride(name="masche02", column=@Column(name="MASCHE02", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck02", column=@Column(name="RUECK02", precision=9, scale=3) ),
+        @AttributeOverride(name="masche03", column=@Column(name="MASCHE03", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck03", column=@Column(name="RUECK03", precision=9, scale=3) ),
+        @AttributeOverride(name="masche04", column=@Column(name="MASCHE04", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck04", column=@Column(name="RUECK04", precision=9, scale=3) ),
+        @AttributeOverride(name="masche05", column=@Column(name="MASCHE05", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck05", column=@Column(name="RUECK05", precision=9, scale=3) ),
+        @AttributeOverride(name="masche06", column=@Column(name="MASCHE06", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck06", column=@Column(name="RUECK06", precision=9, scale=3) ),
+        @AttributeOverride(name="masche07", column=@Column(name="MASCHE07", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck07", column=@Column(name="RUECK07", precision=9, scale=3) ),
+        @AttributeOverride(name="masche08", column=@Column(name="MASCHE08", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck08", column=@Column(name="RUECK08", precision=9, scale=3) ),
+        @AttributeOverride(name="masche09", column=@Column(name="MASCHE09", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck09", column=@Column(name="RUECK09", precision=9, scale=3) ),
+        @AttributeOverride(name="masche10", column=@Column(name="MASCHE10", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck10", column=@Column(name="RUECK10", precision=9, scale=3) ),
+        @AttributeOverride(name="masche11", column=@Column(name="MASCHE11", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck11", column=@Column(name="RUECK11", precision=9, scale=3) ),
+        @AttributeOverride(name="masche12", column=@Column(name="MASCHE12", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck12", column=@Column(name="RUECK12", precision=9, scale=3) ),
+        @AttributeOverride(name="masche13", column=@Column(name="MASCHE13", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck13", column=@Column(name="RUECK13", precision=9, scale=3) ),
+        @AttributeOverride(name="masche14", column=@Column(name="MASCHE14", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck14", column=@Column(name="RUECK14", precision=9, scale=3) ),
+        @AttributeOverride(name="masche15", column=@Column(name="MASCHE15", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck15", column=@Column(name="RUECK15", precision=9, scale=3) ),
+        @AttributeOverride(name="masche16", column=@Column(name="MASCHE16", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck16", column=@Column(name="RUECK16", precision=9, scale=3) ),
+        @AttributeOverride(name="masche17", column=@Column(name="MASCHE17", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck17", column=@Column(name="RUECK17", precision=9, scale=3) ),
+        @AttributeOverride(name="masche18", column=@Column(name="MASCHE18", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck18", column=@Column(name="RUECK18", precision=9, scale=3) ),
+        @AttributeOverride(name="masche19", column=@Column(name="MASCHE19", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck19", column=@Column(name="RUECK19", precision=9, scale=3) ),
+        @AttributeOverride(name="masche20", column=@Column(name="MASCHE20", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck20", column=@Column(name="RUECK20", precision=9, scale=3) ),
+        @AttributeOverride(name="masche21", column=@Column(name="MASCHE21", precision=7, scale=4) ),
+        @AttributeOverride(name="rueck21", column=@Column(name="RUECK21", precision=9, scale=3) ),
+        @AttributeOverride(name="rest", column=@Column(name="REST", precision=9, scale=3) ) } )
+    public SsiebungsiebId getId() {
+        return this.id;
+    }
+
+    public void setId(SsiebungsiebId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/SsiebungsiebId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,686 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * SsiebungsiebId generated by hbm2java
+ */
+@Embeddable
+public class SsiebungsiebId  implements java.io.Serializable {
+
+
+     private long siebanalyseid;
+     private long gsiebsatzid;
+     private BigDecimal gmasse;
+     private BigDecimal masche01;
+     private BigDecimal rueck01;
+     private BigDecimal masche02;
+     private BigDecimal rueck02;
+     private BigDecimal masche03;
+     private BigDecimal rueck03;
+     private BigDecimal masche04;
+     private BigDecimal rueck04;
+     private BigDecimal masche05;
+     private BigDecimal rueck05;
+     private BigDecimal masche06;
+     private BigDecimal rueck06;
+     private BigDecimal masche07;
+     private BigDecimal rueck07;
+     private BigDecimal masche08;
+     private BigDecimal rueck08;
+     private BigDecimal masche09;
+     private BigDecimal rueck09;
+     private BigDecimal masche10;
+     private BigDecimal rueck10;
+     private BigDecimal masche11;
+     private BigDecimal rueck11;
+     private BigDecimal masche12;
+     private BigDecimal rueck12;
+     private BigDecimal masche13;
+     private BigDecimal rueck13;
+     private BigDecimal masche14;
+     private BigDecimal rueck14;
+     private BigDecimal masche15;
+     private BigDecimal rueck15;
+     private BigDecimal masche16;
+     private BigDecimal rueck16;
+     private BigDecimal masche17;
+     private BigDecimal rueck17;
+     private BigDecimal masche18;
+     private BigDecimal rueck18;
+     private BigDecimal masche19;
+     private BigDecimal rueck19;
+     private BigDecimal masche20;
+     private BigDecimal rueck20;
+     private BigDecimal masche21;
+     private BigDecimal rueck21;
+     private BigDecimal rest;
+
+    public SsiebungsiebId() {
+    }
+
+    public SsiebungsiebId(long siebanalyseid, long gsiebsatzid) {
+        this.siebanalyseid = siebanalyseid;
+        this.gsiebsatzid = gsiebsatzid;
+    }
+    public SsiebungsiebId(long siebanalyseid, long gsiebsatzid, BigDecimal gmasse, BigDecimal masche01, BigDecimal rueck01, BigDecimal masche02, BigDecimal rueck02, BigDecimal masche03, BigDecimal rueck03, BigDecimal masche04, BigDecimal rueck04, BigDecimal masche05, BigDecimal rueck05, BigDecimal masche06, BigDecimal rueck06, BigDecimal masche07, BigDecimal rueck07, BigDecimal masche08, BigDecimal rueck08, BigDecimal masche09, BigDecimal rueck09, BigDecimal masche10, BigDecimal rueck10, BigDecimal masche11, BigDecimal rueck11, BigDecimal masche12, BigDecimal rueck12, BigDecimal masche13, BigDecimal rueck13, BigDecimal masche14, BigDecimal rueck14, BigDecimal masche15, BigDecimal rueck15, BigDecimal masche16, BigDecimal rueck16, BigDecimal masche17, BigDecimal rueck17, BigDecimal masche18, BigDecimal rueck18, BigDecimal masche19, BigDecimal rueck19, BigDecimal masche20, BigDecimal rueck20, BigDecimal masche21, BigDecimal rueck21, BigDecimal rest) {
+       this.siebanalyseid = siebanalyseid;
+       this.gsiebsatzid = gsiebsatzid;
+       this.gmasse = gmasse;
+       this.masche01 = masche01;
+       this.rueck01 = rueck01;
+       this.masche02 = masche02;
+       this.rueck02 = rueck02;
+       this.masche03 = masche03;
+       this.rueck03 = rueck03;
+       this.masche04 = masche04;
+       this.rueck04 = rueck04;
+       this.masche05 = masche05;
+       this.rueck05 = rueck05;
+       this.masche06 = masche06;
+       this.rueck06 = rueck06;
+       this.masche07 = masche07;
+       this.rueck07 = rueck07;
+       this.masche08 = masche08;
+       this.rueck08 = rueck08;
+       this.masche09 = masche09;
+       this.rueck09 = rueck09;
+       this.masche10 = masche10;
+       this.rueck10 = rueck10;
+       this.masche11 = masche11;
+       this.rueck11 = rueck11;
+       this.masche12 = masche12;
+       this.rueck12 = rueck12;
+       this.masche13 = masche13;
+       this.rueck13 = rueck13;
+       this.masche14 = masche14;
+       this.rueck14 = rueck14;
+       this.masche15 = masche15;
+       this.rueck15 = rueck15;
+       this.masche16 = masche16;
+       this.rueck16 = rueck16;
+       this.masche17 = masche17;
+       this.rueck17 = rueck17;
+       this.masche18 = masche18;
+       this.rueck18 = rueck18;
+       this.masche19 = masche19;
+       this.rueck19 = rueck19;
+       this.masche20 = masche20;
+       this.rueck20 = rueck20;
+       this.masche21 = masche21;
+       this.rueck21 = rueck21;
+       this.rest = rest;
+    }
+
+
+
+    @Column(name="SIEBANALYSEID", nullable=false, precision=11, scale=0)
+    public long getSiebanalyseid() {
+        return this.siebanalyseid;
+    }
+
+    public void setSiebanalyseid(long siebanalyseid) {
+        this.siebanalyseid = siebanalyseid;
+    }
+
+
+    @Column(name="GSIEBSATZID", nullable=false, precision=11, scale=0)
+    public long getGsiebsatzid() {
+        return this.gsiebsatzid;
+    }
+
+    public void setGsiebsatzid(long gsiebsatzid) {
+        this.gsiebsatzid = gsiebsatzid;
+    }
+
+
+    @Column(name="GMASSE", precision=9, scale=3)
+    public BigDecimal getGmasse() {
+        return this.gmasse;
+    }
+
+    public void setGmasse(BigDecimal gmasse) {
+        this.gmasse = gmasse;
+    }
+
+
+    @Column(name="MASCHE01", precision=7, scale=4)
+    public BigDecimal getMasche01() {
+        return this.masche01;
+    }
+
+    public void setMasche01(BigDecimal masche01) {
+        this.masche01 = masche01;
+    }
+
+
+    @Column(name="RUECK01", precision=9, scale=3)
+    public BigDecimal getRueck01() {
+        return this.rueck01;
+    }
+
+    public void setRueck01(BigDecimal rueck01) {
+        this.rueck01 = rueck01;
+    }
+
+
+    @Column(name="MASCHE02", precision=7, scale=4)
+    public BigDecimal getMasche02() {
+        return this.masche02;
+    }
+
+    public void setMasche02(BigDecimal masche02) {
+        this.masche02 = masche02;
+    }
+
+
+    @Column(name="RUECK02", precision=9, scale=3)
+    public BigDecimal getRueck02() {
+        return this.rueck02;
+    }
+
+    public void setRueck02(BigDecimal rueck02) {
+        this.rueck02 = rueck02;
+    }
+
+
+    @Column(name="MASCHE03", precision=7, scale=4)
+    public BigDecimal getMasche03() {
+        return this.masche03;
+    }
+
+    public void setMasche03(BigDecimal masche03) {
+        this.masche03 = masche03;
+    }
+
+
+    @Column(name="RUECK03", precision=9, scale=3)
+    public BigDecimal getRueck03() {
+        return this.rueck03;
+    }
+
+    public void setRueck03(BigDecimal rueck03) {
+        this.rueck03 = rueck03;
+    }
+
+
+    @Column(name="MASCHE04", precision=7, scale=4)
+    public BigDecimal getMasche04() {
+        return this.masche04;
+    }
+
+    public void setMasche04(BigDecimal masche04) {
+        this.masche04 = masche04;
+    }
+
+
+    @Column(name="RUECK04", precision=9, scale=3)
+    public BigDecimal getRueck04() {
+        return this.rueck04;
+    }
+
+    public void setRueck04(BigDecimal rueck04) {
+        this.rueck04 = rueck04;
+    }
+
+
+    @Column(name="MASCHE05", precision=7, scale=4)
+    public BigDecimal getMasche05() {
+        return this.masche05;
+    }
+
+    public void setMasche05(BigDecimal masche05) {
+        this.masche05 = masche05;
+    }
+
+
+    @Column(name="RUECK05", precision=9, scale=3)
+    public BigDecimal getRueck05() {
+        return this.rueck05;
+    }
+
+    public void setRueck05(BigDecimal rueck05) {
+        this.rueck05 = rueck05;
+    }
+
+
+    @Column(name="MASCHE06", precision=7, scale=4)
+    public BigDecimal getMasche06() {
+        return this.masche06;
+    }
+
+    public void setMasche06(BigDecimal masche06) {
+        this.masche06 = masche06;
+    }
+
+
+    @Column(name="RUECK06", precision=9, scale=3)
+    public BigDecimal getRueck06() {
+        return this.rueck06;
+    }
+
+    public void setRueck06(BigDecimal rueck06) {
+        this.rueck06 = rueck06;
+    }
+
+
+    @Column(name="MASCHE07", precision=7, scale=4)
+    public BigDecimal getMasche07() {
+        return this.masche07;
+    }
+
+    public void setMasche07(BigDecimal masche07) {
+        this.masche07 = masche07;
+    }
+
+
+    @Column(name="RUECK07", precision=9, scale=3)
+    public BigDecimal getRueck07() {
+        return this.rueck07;
+    }
+
+    public void setRueck07(BigDecimal rueck07) {
+        this.rueck07 = rueck07;
+    }
+
+
+    @Column(name="MASCHE08", precision=7, scale=4)
+    public BigDecimal getMasche08() {
+        return this.masche08;
+    }
+
+    public void setMasche08(BigDecimal masche08) {
+        this.masche08 = masche08;
+    }
+
+
+    @Column(name="RUECK08", precision=9, scale=3)
+    public BigDecimal getRueck08() {
+        return this.rueck08;
+    }
+
+    public void setRueck08(BigDecimal rueck08) {
+        this.rueck08 = rueck08;
+    }
+
+
+    @Column(name="MASCHE09", precision=7, scale=4)
+    public BigDecimal getMasche09() {
+        return this.masche09;
+    }
+
+    public void setMasche09(BigDecimal masche09) {
+        this.masche09 = masche09;
+    }
+
+
+    @Column(name="RUECK09", precision=9, scale=3)
+    public BigDecimal getRueck09() {
+        return this.rueck09;
+    }
+
+    public void setRueck09(BigDecimal rueck09) {
+        this.rueck09 = rueck09;
+    }
+
+
+    @Column(name="MASCHE10", precision=7, scale=4)
+    public BigDecimal getMasche10() {
+        return this.masche10;
+    }
+
+    public void setMasche10(BigDecimal masche10) {
+        this.masche10 = masche10;
+    }
+
+
+    @Column(name="RUECK10", precision=9, scale=3)
+    public BigDecimal getRueck10() {
+        return this.rueck10;
+    }
+
+    public void setRueck10(BigDecimal rueck10) {
+        this.rueck10 = rueck10;
+    }
+
+
+    @Column(name="MASCHE11", precision=7, scale=4)
+    public BigDecimal getMasche11() {
+        return this.masche11;
+    }
+
+    public void setMasche11(BigDecimal masche11) {
+        this.masche11 = masche11;
+    }
+
+
+    @Column(name="RUECK11", precision=9, scale=3)
+    public BigDecimal getRueck11() {
+        return this.rueck11;
+    }
+
+    public void setRueck11(BigDecimal rueck11) {
+        this.rueck11 = rueck11;
+    }
+
+
+    @Column(name="MASCHE12", precision=7, scale=4)
+    public BigDecimal getMasche12() {
+        return this.masche12;
+    }
+
+    public void setMasche12(BigDecimal masche12) {
+        this.masche12 = masche12;
+    }
+
+
+    @Column(name="RUECK12", precision=9, scale=3)
+    public BigDecimal getRueck12() {
+        return this.rueck12;
+    }
+
+    public void setRueck12(BigDecimal rueck12) {
+        this.rueck12 = rueck12;
+    }
+
+
+    @Column(name="MASCHE13", precision=7, scale=4)
+    public BigDecimal getMasche13() {
+        return this.masche13;
+    }
+
+    public void setMasche13(BigDecimal masche13) {
+        this.masche13 = masche13;
+    }
+
+
+    @Column(name="RUECK13", precision=9, scale=3)
+    public BigDecimal getRueck13() {
+        return this.rueck13;
+    }
+
+    public void setRueck13(BigDecimal rueck13) {
+        this.rueck13 = rueck13;
+    }
+
+
+    @Column(name="MASCHE14", precision=7, scale=4)
+    public BigDecimal getMasche14() {
+        return this.masche14;
+    }
+
+    public void setMasche14(BigDecimal masche14) {
+        this.masche14 = masche14;
+    }
+
+
+    @Column(name="RUECK14", precision=9, scale=3)
+    public BigDecimal getRueck14() {
+        return this.rueck14;
+    }
+
+    public void setRueck14(BigDecimal rueck14) {
+        this.rueck14 = rueck14;
+    }
+
+
+    @Column(name="MASCHE15", precision=7, scale=4)
+    public BigDecimal getMasche15() {
+        return this.masche15;
+    }
+
+    public void setMasche15(BigDecimal masche15) {
+        this.masche15 = masche15;
+    }
+
+
+    @Column(name="RUECK15", precision=9, scale=3)
+    public BigDecimal getRueck15() {
+        return this.rueck15;
+    }
+
+    public void setRueck15(BigDecimal rueck15) {
+        this.rueck15 = rueck15;
+    }
+
+
+    @Column(name="MASCHE16", precision=7, scale=4)
+    public BigDecimal getMasche16() {
+        return this.masche16;
+    }
+
+    public void setMasche16(BigDecimal masche16) {
+        this.masche16 = masche16;
+    }
+
+
+    @Column(name="RUECK16", precision=9, scale=3)
+    public BigDecimal getRueck16() {
+        return this.rueck16;
+    }
+
+    public void setRueck16(BigDecimal rueck16) {
+        this.rueck16 = rueck16;
+    }
+
+
+    @Column(name="MASCHE17", precision=7, scale=4)
+    public BigDecimal getMasche17() {
+        return this.masche17;
+    }
+
+    public void setMasche17(BigDecimal masche17) {
+        this.masche17 = masche17;
+    }
+
+
+    @Column(name="RUECK17", precision=9, scale=3)
+    public BigDecimal getRueck17() {
+        return this.rueck17;
+    }
+
+    public void setRueck17(BigDecimal rueck17) {
+        this.rueck17 = rueck17;
+    }
+
+
+    @Column(name="MASCHE18", precision=7, scale=4)
+    public BigDecimal getMasche18() {
+        return this.masche18;
+    }
+
+    public void setMasche18(BigDecimal masche18) {
+        this.masche18 = masche18;
+    }
+
+
+    @Column(name="RUECK18", precision=9, scale=3)
+    public BigDecimal getRueck18() {
+        return this.rueck18;
+    }
+
+    public void setRueck18(BigDecimal rueck18) {
+        this.rueck18 = rueck18;
+    }
+
+
+    @Column(name="MASCHE19", precision=7, scale=4)
+    public BigDecimal getMasche19() {
+        return this.masche19;
+    }
+
+    public void setMasche19(BigDecimal masche19) {
+        this.masche19 = masche19;
+    }
+
+
+    @Column(name="RUECK19", precision=9, scale=3)
+    public BigDecimal getRueck19() {
+        return this.rueck19;
+    }
+
+    public void setRueck19(BigDecimal rueck19) {
+        this.rueck19 = rueck19;
+    }
+
+
+    @Column(name="MASCHE20", precision=7, scale=4)
+    public BigDecimal getMasche20() {
+        return this.masche20;
+    }
+
+    public void setMasche20(BigDecimal masche20) {
+        this.masche20 = masche20;
+    }
+
+
+    @Column(name="RUECK20", precision=9, scale=3)
+    public BigDecimal getRueck20() {
+        return this.rueck20;
+    }
+
+    public void setRueck20(BigDecimal rueck20) {
+        this.rueck20 = rueck20;
+    }
+
+
+    @Column(name="MASCHE21", precision=7, scale=4)
+    public BigDecimal getMasche21() {
+        return this.masche21;
+    }
+
+    public void setMasche21(BigDecimal masche21) {
+        this.masche21 = masche21;
+    }
+
+
+    @Column(name="RUECK21", precision=9, scale=3)
+    public BigDecimal getRueck21() {
+        return this.rueck21;
+    }
+
+    public void setRueck21(BigDecimal rueck21) {
+        this.rueck21 = rueck21;
+    }
+
+
+    @Column(name="REST", precision=9, scale=3)
+    public BigDecimal getRest() {
+        return this.rest;
+    }
+
+    public void setRest(BigDecimal rest) {
+        this.rest = rest;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof SsiebungsiebId) ) return false;
+         SsiebungsiebId castOther = ( SsiebungsiebId ) other;
+
+		 return (this.getSiebanalyseid()==castOther.getSiebanalyseid())
+ && (this.getGsiebsatzid()==castOther.getGsiebsatzid())
+ && ( (this.getGmasse()==castOther.getGmasse()) || ( this.getGmasse()!=null && castOther.getGmasse()!=null && this.getGmasse().equals(castOther.getGmasse()) ) )
+ && ( (this.getMasche01()==castOther.getMasche01()) || ( this.getMasche01()!=null && castOther.getMasche01()!=null && this.getMasche01().equals(castOther.getMasche01()) ) )
+ && ( (this.getRueck01()==castOther.getRueck01()) || ( this.getRueck01()!=null && castOther.getRueck01()!=null && this.getRueck01().equals(castOther.getRueck01()) ) )
+ && ( (this.getMasche02()==castOther.getMasche02()) || ( this.getMasche02()!=null && castOther.getMasche02()!=null && this.getMasche02().equals(castOther.getMasche02()) ) )
+ && ( (this.getRueck02()==castOther.getRueck02()) || ( this.getRueck02()!=null && castOther.getRueck02()!=null && this.getRueck02().equals(castOther.getRueck02()) ) )
+ && ( (this.getMasche03()==castOther.getMasche03()) || ( this.getMasche03()!=null && castOther.getMasche03()!=null && this.getMasche03().equals(castOther.getMasche03()) ) )
+ && ( (this.getRueck03()==castOther.getRueck03()) || ( this.getRueck03()!=null && castOther.getRueck03()!=null && this.getRueck03().equals(castOther.getRueck03()) ) )
+ && ( (this.getMasche04()==castOther.getMasche04()) || ( this.getMasche04()!=null && castOther.getMasche04()!=null && this.getMasche04().equals(castOther.getMasche04()) ) )
+ && ( (this.getRueck04()==castOther.getRueck04()) || ( this.getRueck04()!=null && castOther.getRueck04()!=null && this.getRueck04().equals(castOther.getRueck04()) ) )
+ && ( (this.getMasche05()==castOther.getMasche05()) || ( this.getMasche05()!=null && castOther.getMasche05()!=null && this.getMasche05().equals(castOther.getMasche05()) ) )
+ && ( (this.getRueck05()==castOther.getRueck05()) || ( this.getRueck05()!=null && castOther.getRueck05()!=null && this.getRueck05().equals(castOther.getRueck05()) ) )
+ && ( (this.getMasche06()==castOther.getMasche06()) || ( this.getMasche06()!=null && castOther.getMasche06()!=null && this.getMasche06().equals(castOther.getMasche06()) ) )
+ && ( (this.getRueck06()==castOther.getRueck06()) || ( this.getRueck06()!=null && castOther.getRueck06()!=null && this.getRueck06().equals(castOther.getRueck06()) ) )
+ && ( (this.getMasche07()==castOther.getMasche07()) || ( this.getMasche07()!=null && castOther.getMasche07()!=null && this.getMasche07().equals(castOther.getMasche07()) ) )
+ && ( (this.getRueck07()==castOther.getRueck07()) || ( this.getRueck07()!=null && castOther.getRueck07()!=null && this.getRueck07().equals(castOther.getRueck07()) ) )
+ && ( (this.getMasche08()==castOther.getMasche08()) || ( this.getMasche08()!=null && castOther.getMasche08()!=null && this.getMasche08().equals(castOther.getMasche08()) ) )
+ && ( (this.getRueck08()==castOther.getRueck08()) || ( this.getRueck08()!=null && castOther.getRueck08()!=null && this.getRueck08().equals(castOther.getRueck08()) ) )
+ && ( (this.getMasche09()==castOther.getMasche09()) || ( this.getMasche09()!=null && castOther.getMasche09()!=null && this.getMasche09().equals(castOther.getMasche09()) ) )
+ && ( (this.getRueck09()==castOther.getRueck09()) || ( this.getRueck09()!=null && castOther.getRueck09()!=null && this.getRueck09().equals(castOther.getRueck09()) ) )
+ && ( (this.getMasche10()==castOther.getMasche10()) || ( this.getMasche10()!=null && castOther.getMasche10()!=null && this.getMasche10().equals(castOther.getMasche10()) ) )
+ && ( (this.getRueck10()==castOther.getRueck10()) || ( this.getRueck10()!=null && castOther.getRueck10()!=null && this.getRueck10().equals(castOther.getRueck10()) ) )
+ && ( (this.getMasche11()==castOther.getMasche11()) || ( this.getMasche11()!=null && castOther.getMasche11()!=null && this.getMasche11().equals(castOther.getMasche11()) ) )
+ && ( (this.getRueck11()==castOther.getRueck11()) || ( this.getRueck11()!=null && castOther.getRueck11()!=null && this.getRueck11().equals(castOther.getRueck11()) ) )
+ && ( (this.getMasche12()==castOther.getMasche12()) || ( this.getMasche12()!=null && castOther.getMasche12()!=null && this.getMasche12().equals(castOther.getMasche12()) ) )
+ && ( (this.getRueck12()==castOther.getRueck12()) || ( this.getRueck12()!=null && castOther.getRueck12()!=null && this.getRueck12().equals(castOther.getRueck12()) ) )
+ && ( (this.getMasche13()==castOther.getMasche13()) || ( this.getMasche13()!=null && castOther.getMasche13()!=null && this.getMasche13().equals(castOther.getMasche13()) ) )
+ && ( (this.getRueck13()==castOther.getRueck13()) || ( this.getRueck13()!=null && castOther.getRueck13()!=null && this.getRueck13().equals(castOther.getRueck13()) ) )
+ && ( (this.getMasche14()==castOther.getMasche14()) || ( this.getMasche14()!=null && castOther.getMasche14()!=null && this.getMasche14().equals(castOther.getMasche14()) ) )
+ && ( (this.getRueck14()==castOther.getRueck14()) || ( this.getRueck14()!=null && castOther.getRueck14()!=null && this.getRueck14().equals(castOther.getRueck14()) ) )
+ && ( (this.getMasche15()==castOther.getMasche15()) || ( this.getMasche15()!=null && castOther.getMasche15()!=null && this.getMasche15().equals(castOther.getMasche15()) ) )
+ && ( (this.getRueck15()==castOther.getRueck15()) || ( this.getRueck15()!=null && castOther.getRueck15()!=null && this.getRueck15().equals(castOther.getRueck15()) ) )
+ && ( (this.getMasche16()==castOther.getMasche16()) || ( this.getMasche16()!=null && castOther.getMasche16()!=null && this.getMasche16().equals(castOther.getMasche16()) ) )
+ && ( (this.getRueck16()==castOther.getRueck16()) || ( this.getRueck16()!=null && castOther.getRueck16()!=null && this.getRueck16().equals(castOther.getRueck16()) ) )
+ && ( (this.getMasche17()==castOther.getMasche17()) || ( this.getMasche17()!=null && castOther.getMasche17()!=null && this.getMasche17().equals(castOther.getMasche17()) ) )
+ && ( (this.getRueck17()==castOther.getRueck17()) || ( this.getRueck17()!=null && castOther.getRueck17()!=null && this.getRueck17().equals(castOther.getRueck17()) ) )
+ && ( (this.getMasche18()==castOther.getMasche18()) || ( this.getMasche18()!=null && castOther.getMasche18()!=null && this.getMasche18().equals(castOther.getMasche18()) ) )
+ && ( (this.getRueck18()==castOther.getRueck18()) || ( this.getRueck18()!=null && castOther.getRueck18()!=null && this.getRueck18().equals(castOther.getRueck18()) ) )
+ && ( (this.getMasche19()==castOther.getMasche19()) || ( this.getMasche19()!=null && castOther.getMasche19()!=null && this.getMasche19().equals(castOther.getMasche19()) ) )
+ && ( (this.getRueck19()==castOther.getRueck19()) || ( this.getRueck19()!=null && castOther.getRueck19()!=null && this.getRueck19().equals(castOther.getRueck19()) ) )
+ && ( (this.getMasche20()==castOther.getMasche20()) || ( this.getMasche20()!=null && castOther.getMasche20()!=null && this.getMasche20().equals(castOther.getMasche20()) ) )
+ && ( (this.getRueck20()==castOther.getRueck20()) || ( this.getRueck20()!=null && castOther.getRueck20()!=null && this.getRueck20().equals(castOther.getRueck20()) ) )
+ && ( (this.getMasche21()==castOther.getMasche21()) || ( this.getMasche21()!=null && castOther.getMasche21()!=null && this.getMasche21().equals(castOther.getMasche21()) ) )
+ && ( (this.getRueck21()==castOther.getRueck21()) || ( this.getRueck21()!=null && castOther.getRueck21()!=null && this.getRueck21().equals(castOther.getRueck21()) ) )
+ && ( (this.getRest()==castOther.getRest()) || ( this.getRest()!=null && castOther.getRest()!=null && this.getRest().equals(castOther.getRest()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getSiebanalyseid();
+         result = 37 * result + (int) this.getGsiebsatzid();
+         result = 37 * result + ( getGmasse() == null ? 0 : this.getGmasse().hashCode() );
+         result = 37 * result + ( getMasche01() == null ? 0 : this.getMasche01().hashCode() );
+         result = 37 * result + ( getRueck01() == null ? 0 : this.getRueck01().hashCode() );
+         result = 37 * result + ( getMasche02() == null ? 0 : this.getMasche02().hashCode() );
+         result = 37 * result + ( getRueck02() == null ? 0 : this.getRueck02().hashCode() );
+         result = 37 * result + ( getMasche03() == null ? 0 : this.getMasche03().hashCode() );
+         result = 37 * result + ( getRueck03() == null ? 0 : this.getRueck03().hashCode() );
+         result = 37 * result + ( getMasche04() == null ? 0 : this.getMasche04().hashCode() );
+         result = 37 * result + ( getRueck04() == null ? 0 : this.getRueck04().hashCode() );
+         result = 37 * result + ( getMasche05() == null ? 0 : this.getMasche05().hashCode() );
+         result = 37 * result + ( getRueck05() == null ? 0 : this.getRueck05().hashCode() );
+         result = 37 * result + ( getMasche06() == null ? 0 : this.getMasche06().hashCode() );
+         result = 37 * result + ( getRueck06() == null ? 0 : this.getRueck06().hashCode() );
+         result = 37 * result + ( getMasche07() == null ? 0 : this.getMasche07().hashCode() );
+         result = 37 * result + ( getRueck07() == null ? 0 : this.getRueck07().hashCode() );
+         result = 37 * result + ( getMasche08() == null ? 0 : this.getMasche08().hashCode() );
+         result = 37 * result + ( getRueck08() == null ? 0 : this.getRueck08().hashCode() );
+         result = 37 * result + ( getMasche09() == null ? 0 : this.getMasche09().hashCode() );
+         result = 37 * result + ( getRueck09() == null ? 0 : this.getRueck09().hashCode() );
+         result = 37 * result + ( getMasche10() == null ? 0 : this.getMasche10().hashCode() );
+         result = 37 * result + ( getRueck10() == null ? 0 : this.getRueck10().hashCode() );
+         result = 37 * result + ( getMasche11() == null ? 0 : this.getMasche11().hashCode() );
+         result = 37 * result + ( getRueck11() == null ? 0 : this.getRueck11().hashCode() );
+         result = 37 * result + ( getMasche12() == null ? 0 : this.getMasche12().hashCode() );
+         result = 37 * result + ( getRueck12() == null ? 0 : this.getRueck12().hashCode() );
+         result = 37 * result + ( getMasche13() == null ? 0 : this.getMasche13().hashCode() );
+         result = 37 * result + ( getRueck13() == null ? 0 : this.getRueck13().hashCode() );
+         result = 37 * result + ( getMasche14() == null ? 0 : this.getMasche14().hashCode() );
+         result = 37 * result + ( getRueck14() == null ? 0 : this.getRueck14().hashCode() );
+         result = 37 * result + ( getMasche15() == null ? 0 : this.getMasche15().hashCode() );
+         result = 37 * result + ( getRueck15() == null ? 0 : this.getRueck15().hashCode() );
+         result = 37 * result + ( getMasche16() == null ? 0 : this.getMasche16().hashCode() );
+         result = 37 * result + ( getRueck16() == null ? 0 : this.getRueck16().hashCode() );
+         result = 37 * result + ( getMasche17() == null ? 0 : this.getMasche17().hashCode() );
+         result = 37 * result + ( getRueck17() == null ? 0 : this.getRueck17().hashCode() );
+         result = 37 * result + ( getMasche18() == null ? 0 : this.getMasche18().hashCode() );
+         result = 37 * result + ( getRueck18() == null ? 0 : this.getRueck18().hashCode() );
+         result = 37 * result + ( getMasche19() == null ? 0 : this.getMasche19().hashCode() );
+         result = 37 * result + ( getRueck19() == null ? 0 : this.getRueck19().hashCode() );
+         result = 37 * result + ( getMasche20() == null ? 0 : this.getMasche20().hashCode() );
+         result = 37 * result + ( getRueck20() == null ? 0 : this.getRueck20().hashCode() );
+         result = 37 * result + ( getMasche21() == null ? 0 : this.getMasche21().hashCode() );
+         result = 37 * result + ( getRueck21() == null ? 0 : this.getRueck21().hashCode() );
+         result = 37 * result + ( getRest() == null ? 0 : this.getRest().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Station.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,305 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * Station generated by hbm2java
+ */
+@Entity
+@Table(name="STATION"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames={"GEWAESSERID", "NAME"})
+)
+public class Station  implements java.io.Serializable {
+
+
+     private long stationid;
+     private Bezugspegel bezugspegel;
+     private Gewaesser gewaesser;
+     private BigDecimal km;
+     private String name;
+     private boolean uferistlinks;
+     private BigDecimal teilVon;
+     private BigDecimal teilBis;
+     private BigDecimal abstHmst;
+     private BigDecimal LHochwert;
+     private BigDecimal LRechwert;
+     private BigDecimal LNn;
+     private BigDecimal RHochwert;
+     private BigDecimal RRechwert;
+     private BigDecimal RNn;
+     private String bemerkung;
+     private Long oldmstid;
+     private boolean istfeststoff;
+     private Set<Sohltest> sohltests = new HashSet<Sohltest>(0);
+     private Set<Hpeilung> hpeilungs = new HashSet<Hpeilung>(0);
+     private Set<Mpeilung> mpeilungs = new HashSet<Mpeilung>(0);
+     private Set<Messung> messungs = new HashSet<Messung>(0);
+
+    public Station() {
+    }
+
+    public Station(long stationid, Gewaesser gewaesser, BigDecimal km, String name, boolean uferistlinks, boolean istfeststoff) {
+        this.stationid = stationid;
+        this.gewaesser = gewaesser;
+        this.km = km;
+        this.name = name;
+        this.uferistlinks = uferistlinks;
+        this.istfeststoff = istfeststoff;
+    }
+    public Station(long stationid, Bezugspegel bezugspegel, Gewaesser gewaesser, BigDecimal km, String name, boolean uferistlinks, BigDecimal teilVon, BigDecimal teilBis, BigDecimal abstHmst, BigDecimal LHochwert, BigDecimal LRechwert, BigDecimal LNn, BigDecimal RHochwert, BigDecimal RRechwert, BigDecimal RNn, String bemerkung, Long oldmstid, boolean istfeststoff, Set<Sohltest> sohltests, Set<Hpeilung> hpeilungs, Set<Mpeilung> mpeilungs, Set<Messung> messungs) {
+       this.stationid = stationid;
+       this.bezugspegel = bezugspegel;
+       this.gewaesser = gewaesser;
+       this.km = km;
+       this.name = name;
+       this.uferistlinks = uferistlinks;
+       this.teilVon = teilVon;
+       this.teilBis = teilBis;
+       this.abstHmst = abstHmst;
+       this.LHochwert = LHochwert;
+       this.LRechwert = LRechwert;
+       this.LNn = LNn;
+       this.RHochwert = RHochwert;
+       this.RRechwert = RRechwert;
+       this.RNn = RNn;
+       this.bemerkung = bemerkung;
+       this.oldmstid = oldmstid;
+       this.istfeststoff = istfeststoff;
+       this.sohltests = sohltests;
+       this.hpeilungs = hpeilungs;
+       this.mpeilungs = mpeilungs;
+       this.messungs = messungs;
+    }
+
+     @Id
+
+
+    @Column(name="STATIONID", unique=true, nullable=false, precision=11, scale=0)
+    public long getStationid() {
+        return this.stationid;
+    }
+
+    public void setStationid(long stationid) {
+        this.stationid = stationid;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="BEZUGSPEGELID")
+    public Bezugspegel getBezugspegel() {
+        return this.bezugspegel;
+    }
+
+    public void setBezugspegel(Bezugspegel bezugspegel) {
+        this.bezugspegel = bezugspegel;
+    }
+
+    @ManyToOne(fetch=FetchType.LAZY)
+    @JoinColumn(name="GEWAESSERID", nullable=false)
+    public Gewaesser getGewaesser() {
+        return this.gewaesser;
+    }
+
+    public void setGewaesser(Gewaesser gewaesser) {
+        this.gewaesser = gewaesser;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="UFERISTLINKS", nullable=false, precision=1, scale=0)
+    public boolean isUferistlinks() {
+        return this.uferistlinks;
+    }
+
+    public void setUferistlinks(boolean uferistlinks) {
+        this.uferistlinks = uferistlinks;
+    }
+
+
+    @Column(name="TEIL_VON", precision=8, scale=3)
+    public BigDecimal getTeilVon() {
+        return this.teilVon;
+    }
+
+    public void setTeilVon(BigDecimal teilVon) {
+        this.teilVon = teilVon;
+    }
+
+
+    @Column(name="TEIL_BIS", precision=8, scale=3)
+    public BigDecimal getTeilBis() {
+        return this.teilBis;
+    }
+
+    public void setTeilBis(BigDecimal teilBis) {
+        this.teilBis = teilBis;
+    }
+
+
+    @Column(name="ABST_HMST", precision=8, scale=3)
+    public BigDecimal getAbstHmst() {
+        return this.abstHmst;
+    }
+
+    public void setAbstHmst(BigDecimal abstHmst) {
+        this.abstHmst = abstHmst;
+    }
+
+
+    @Column(name="L_HOCHWERT", precision=11, scale=3)
+    public BigDecimal getLHochwert() {
+        return this.LHochwert;
+    }
+
+    public void setLHochwert(BigDecimal LHochwert) {
+        this.LHochwert = LHochwert;
+    }
+
+
+    @Column(name="L_RECHWERT", precision=11, scale=3)
+    public BigDecimal getLRechwert() {
+        return this.LRechwert;
+    }
+
+    public void setLRechwert(BigDecimal LRechwert) {
+        this.LRechwert = LRechwert;
+    }
+
+
+    @Column(name="L_NN", precision=8, scale=3)
+    public BigDecimal getLNn() {
+        return this.LNn;
+    }
+
+    public void setLNn(BigDecimal LNn) {
+        this.LNn = LNn;
+    }
+
+
+    @Column(name="R_HOCHWERT", precision=11, scale=3)
+    public BigDecimal getRHochwert() {
+        return this.RHochwert;
+    }
+
+    public void setRHochwert(BigDecimal RHochwert) {
+        this.RHochwert = RHochwert;
+    }
+
+
+    @Column(name="R_RECHWERT", precision=11, scale=3)
+    public BigDecimal getRRechwert() {
+        return this.RRechwert;
+    }
+
+    public void setRRechwert(BigDecimal RRechwert) {
+        this.RRechwert = RRechwert;
+    }
+
+
+    @Column(name="R_NN", precision=8, scale=3)
+    public BigDecimal getRNn() {
+        return this.RNn;
+    }
+
+    public void setRNn(BigDecimal RNn) {
+        this.RNn = RNn;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="OLDMSTID", precision=11, scale=0)
+    public Long getOldmstid() {
+        return this.oldmstid;
+    }
+
+    public void setOldmstid(Long oldmstid) {
+        this.oldmstid = oldmstid;
+    }
+
+
+    @Column(name="ISTFESTSTOFF", nullable=false, precision=1, scale=0)
+    public boolean isIstfeststoff() {
+        return this.istfeststoff;
+    }
+
+    public void setIstfeststoff(boolean istfeststoff) {
+        this.istfeststoff = istfeststoff;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="station")
+    public Set<Sohltest> getSohltests() {
+        return this.sohltests;
+    }
+
+    public void setSohltests(Set<Sohltest> sohltests) {
+        this.sohltests = sohltests;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="station")
+    public Set<Hpeilung> getHpeilungs() {
+        return this.hpeilungs;
+    }
+
+    public void setHpeilungs(Set<Hpeilung> hpeilungs) {
+        this.hpeilungs = hpeilungs;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="station")
+    public Set<Mpeilung> getMpeilungs() {
+        return this.mpeilungs;
+    }
+
+    public void setMpeilungs(Set<Mpeilung> mpeilungs) {
+        this.mpeilungs = mpeilungs;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="station")
+    public Set<Messung> getMessungs() {
+        return this.messungs;
+    }
+
+    public void setMessungs(Set<Messung> messungs) {
+        this.messungs = messungs;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Stationgew.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,61 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.AttributeOverride;
+import javax.persistence.AttributeOverrides;
+import javax.persistence.Column;
+import javax.persistence.EmbeddedId;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+/**
+ * Stationgew generated by hbm2java
+ */
+@Entity
+@Table(name="STATIONGEW"
+    ,schema="SEDDB"
+)
+public class Stationgew  implements java.io.Serializable {
+
+
+     private StationgewId id;
+
+    public Stationgew() {
+    }
+
+    public Stationgew(StationgewId id) {
+       this.id = id;
+    }
+
+     @EmbeddedId
+
+
+    @AttributeOverrides( {
+        @AttributeOverride(name="stationid", column=@Column(name="STATIONID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="gewaesserid", column=@Column(name="GEWAESSERID", nullable=false, precision=11, scale=0) ),
+        @AttributeOverride(name="km", column=@Column(name="KM", nullable=false, precision=8, scale=3) ),
+        @AttributeOverride(name="bezugspegelid", column=@Column(name="BEZUGSPEGELID", precision=11, scale=0) ),
+        @AttributeOverride(name="name", column=@Column(name="NAME", nullable=false, length=50) ),
+        @AttributeOverride(name="uferistlinks", column=@Column(name="UFERISTLINKS", nullable=false, precision=1, scale=0) ),
+        @AttributeOverride(name="teilVon", column=@Column(name="TEIL_VON", precision=8, scale=3) ),
+        @AttributeOverride(name="teilBis", column=@Column(name="TEIL_BIS", precision=8, scale=3) ),
+        @AttributeOverride(name="abstHmst", column=@Column(name="ABST_HMST", precision=8, scale=3) ),
+        @AttributeOverride(name="LHochwert", column=@Column(name="L_HOCHWERT", precision=11, scale=3) ),
+        @AttributeOverride(name="LRechwert", column=@Column(name="L_RECHWERT", precision=11, scale=3) ),
+        @AttributeOverride(name="LNn", column=@Column(name="L_NN", precision=8, scale=3) ),
+        @AttributeOverride(name="RHochwert", column=@Column(name="R_HOCHWERT", precision=11, scale=3) ),
+        @AttributeOverride(name="RRechwert", column=@Column(name="R_RECHWERT", precision=11, scale=3) ),
+        @AttributeOverride(name="RNn", column=@Column(name="R_NN", precision=8, scale=3) ),
+        @AttributeOverride(name="bemerkung", column=@Column(name="BEMERKUNG", length=240) ),
+        @AttributeOverride(name="oldmstid", column=@Column(name="OLDMSTID", precision=11, scale=0) ),
+        @AttributeOverride(name="istfeststoff", column=@Column(name="ISTFESTSTOFF", nullable=false, precision=1, scale=0) ),
+        @AttributeOverride(name="gewname", column=@Column(name="GEWNAME", nullable=false, length=20) ) } )
+    public StationgewId getId() {
+        return this.id;
+    }
+
+    public void setId(StationgewId id) {
+        this.id = id;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/StationgewId.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,313 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.math.BigDecimal;
+import javax.persistence.Column;
+import javax.persistence.Embeddable;
+
+/**
+ * StationgewId generated by hbm2java
+ */
+@Embeddable
+public class StationgewId  implements java.io.Serializable {
+
+
+     private long stationid;
+     private long gewaesserid;
+     private BigDecimal km;
+     private Long bezugspegelid;
+     private String name;
+     private boolean uferistlinks;
+     private BigDecimal teilVon;
+     private BigDecimal teilBis;
+     private BigDecimal abstHmst;
+     private BigDecimal LHochwert;
+     private BigDecimal LRechwert;
+     private BigDecimal LNn;
+     private BigDecimal RHochwert;
+     private BigDecimal RRechwert;
+     private BigDecimal RNn;
+     private String bemerkung;
+     private Long oldmstid;
+     private boolean istfeststoff;
+     private String gewname;
+
+    public StationgewId() {
+    }
+
+    public StationgewId(long stationid, long gewaesserid, BigDecimal km, String name, boolean uferistlinks, boolean istfeststoff, String gewname) {
+        this.stationid = stationid;
+        this.gewaesserid = gewaesserid;
+        this.km = km;
+        this.name = name;
+        this.uferistlinks = uferistlinks;
+        this.istfeststoff = istfeststoff;
+        this.gewname = gewname;
+    }
+    public StationgewId(long stationid, long gewaesserid, BigDecimal km, Long bezugspegelid, String name, boolean uferistlinks, BigDecimal teilVon, BigDecimal teilBis, BigDecimal abstHmst, BigDecimal LHochwert, BigDecimal LRechwert, BigDecimal LNn, BigDecimal RHochwert, BigDecimal RRechwert, BigDecimal RNn, String bemerkung, Long oldmstid, boolean istfeststoff, String gewname) {
+       this.stationid = stationid;
+       this.gewaesserid = gewaesserid;
+       this.km = km;
+       this.bezugspegelid = bezugspegelid;
+       this.name = name;
+       this.uferistlinks = uferistlinks;
+       this.teilVon = teilVon;
+       this.teilBis = teilBis;
+       this.abstHmst = abstHmst;
+       this.LHochwert = LHochwert;
+       this.LRechwert = LRechwert;
+       this.LNn = LNn;
+       this.RHochwert = RHochwert;
+       this.RRechwert = RRechwert;
+       this.RNn = RNn;
+       this.bemerkung = bemerkung;
+       this.oldmstid = oldmstid;
+       this.istfeststoff = istfeststoff;
+       this.gewname = gewname;
+    }
+
+
+
+    @Column(name="STATIONID", nullable=false, precision=11, scale=0)
+    public long getStationid() {
+        return this.stationid;
+    }
+
+    public void setStationid(long stationid) {
+        this.stationid = stationid;
+    }
+
+
+    @Column(name="GEWAESSERID", nullable=false, precision=11, scale=0)
+    public long getGewaesserid() {
+        return this.gewaesserid;
+    }
+
+    public void setGewaesserid(long gewaesserid) {
+        this.gewaesserid = gewaesserid;
+    }
+
+
+    @Column(name="KM", nullable=false, precision=8, scale=3)
+    public BigDecimal getKm() {
+        return this.km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name="BEZUGSPEGELID", precision=11, scale=0)
+    public Long getBezugspegelid() {
+        return this.bezugspegelid;
+    }
+
+    public void setBezugspegelid(Long bezugspegelid) {
+        this.bezugspegelid = bezugspegelid;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="UFERISTLINKS", nullable=false, precision=1, scale=0)
+    public boolean isUferistlinks() {
+        return this.uferistlinks;
+    }
+
+    public void setUferistlinks(boolean uferistlinks) {
+        this.uferistlinks = uferistlinks;
+    }
+
+
+    @Column(name="TEIL_VON", precision=8, scale=3)
+    public BigDecimal getTeilVon() {
+        return this.teilVon;
+    }
+
+    public void setTeilVon(BigDecimal teilVon) {
+        this.teilVon = teilVon;
+    }
+
+
+    @Column(name="TEIL_BIS", precision=8, scale=3)
+    public BigDecimal getTeilBis() {
+        return this.teilBis;
+    }
+
+    public void setTeilBis(BigDecimal teilBis) {
+        this.teilBis = teilBis;
+    }
+
+
+    @Column(name="ABST_HMST", precision=8, scale=3)
+    public BigDecimal getAbstHmst() {
+        return this.abstHmst;
+    }
+
+    public void setAbstHmst(BigDecimal abstHmst) {
+        this.abstHmst = abstHmst;
+    }
+
+
+    @Column(name="L_HOCHWERT", precision=11, scale=3)
+    public BigDecimal getLHochwert() {
+        return this.LHochwert;
+    }
+
+    public void setLHochwert(BigDecimal LHochwert) {
+        this.LHochwert = LHochwert;
+    }
+
+
+    @Column(name="L_RECHWERT", precision=11, scale=3)
+    public BigDecimal getLRechwert() {
+        return this.LRechwert;
+    }
+
+    public void setLRechwert(BigDecimal LRechwert) {
+        this.LRechwert = LRechwert;
+    }
+
+
+    @Column(name="L_NN", precision=8, scale=3)
+    public BigDecimal getLNn() {
+        return this.LNn;
+    }
+
+    public void setLNn(BigDecimal LNn) {
+        this.LNn = LNn;
+    }
+
+
+    @Column(name="R_HOCHWERT", precision=11, scale=3)
+    public BigDecimal getRHochwert() {
+        return this.RHochwert;
+    }
+
+    public void setRHochwert(BigDecimal RHochwert) {
+        this.RHochwert = RHochwert;
+    }
+
+
+    @Column(name="R_RECHWERT", precision=11, scale=3)
+    public BigDecimal getRRechwert() {
+        return this.RRechwert;
+    }
+
+    public void setRRechwert(BigDecimal RRechwert) {
+        this.RRechwert = RRechwert;
+    }
+
+
+    @Column(name="R_NN", precision=8, scale=3)
+    public BigDecimal getRNn() {
+        return this.RNn;
+    }
+
+    public void setRNn(BigDecimal RNn) {
+        this.RNn = RNn;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+
+    @Column(name="OLDMSTID", precision=11, scale=0)
+    public Long getOldmstid() {
+        return this.oldmstid;
+    }
+
+    public void setOldmstid(Long oldmstid) {
+        this.oldmstid = oldmstid;
+    }
+
+
+    @Column(name="ISTFESTSTOFF", nullable=false, precision=1, scale=0)
+    public boolean isIstfeststoff() {
+        return this.istfeststoff;
+    }
+
+    public void setIstfeststoff(boolean istfeststoff) {
+        this.istfeststoff = istfeststoff;
+    }
+
+
+    @Column(name="GEWNAME", nullable=false, length=20)
+    public String getGewname() {
+        return this.gewname;
+    }
+
+    public void setGewname(String gewname) {
+        this.gewname = gewname;
+    }
+
+
+   public boolean equals(Object other) {
+         if ( (this == other ) ) return true;
+         if ( (other == null ) ) return false;
+         if ( !(other instanceof StationgewId) ) return false;
+         StationgewId castOther = ( StationgewId ) other;
+
+         return (this.getStationid()==castOther.getStationid())
+ && (this.getGewaesserid()==castOther.getGewaesserid())
+ && ( (this.getKm()==castOther.getKm()) || ( this.getKm()!=null && castOther.getKm()!=null && this.getKm().equals(castOther.getKm()) ) )
+ && ( (this.getBezugspegelid()==castOther.getBezugspegelid()) || ( this.getBezugspegelid()!=null && castOther.getBezugspegelid()!=null && this.getBezugspegelid().equals(castOther.getBezugspegelid()) ) )
+ && ( (this.getName()==castOther.getName()) || ( this.getName()!=null && castOther.getName()!=null && this.getName().equals(castOther.getName()) ) )
+ && (this.isUferistlinks()==castOther.isUferistlinks())
+ && ( (this.getTeilVon()==castOther.getTeilVon()) || ( this.getTeilVon()!=null && castOther.getTeilVon()!=null && this.getTeilVon().equals(castOther.getTeilVon()) ) )
+ && ( (this.getTeilBis()==castOther.getTeilBis()) || ( this.getTeilBis()!=null && castOther.getTeilBis()!=null && this.getTeilBis().equals(castOther.getTeilBis()) ) )
+ && ( (this.getAbstHmst()==castOther.getAbstHmst()) || ( this.getAbstHmst()!=null && castOther.getAbstHmst()!=null && this.getAbstHmst().equals(castOther.getAbstHmst()) ) )
+ && ( (this.getLHochwert()==castOther.getLHochwert()) || ( this.getLHochwert()!=null && castOther.getLHochwert()!=null && this.getLHochwert().equals(castOther.getLHochwert()) ) )
+ && ( (this.getLRechwert()==castOther.getLRechwert()) || ( this.getLRechwert()!=null && castOther.getLRechwert()!=null && this.getLRechwert().equals(castOther.getLRechwert()) ) )
+ && ( (this.getLNn()==castOther.getLNn()) || ( this.getLNn()!=null && castOther.getLNn()!=null && this.getLNn().equals(castOther.getLNn()) ) )
+ && ( (this.getRHochwert()==castOther.getRHochwert()) || ( this.getRHochwert()!=null && castOther.getRHochwert()!=null && this.getRHochwert().equals(castOther.getRHochwert()) ) )
+ && ( (this.getRRechwert()==castOther.getRRechwert()) || ( this.getRRechwert()!=null && castOther.getRRechwert()!=null && this.getRRechwert().equals(castOther.getRRechwert()) ) )
+ && ( (this.getRNn()==castOther.getRNn()) || ( this.getRNn()!=null && castOther.getRNn()!=null && this.getRNn().equals(castOther.getRNn()) ) )
+ && ( (this.getBemerkung()==castOther.getBemerkung()) || ( this.getBemerkung()!=null && castOther.getBemerkung()!=null && this.getBemerkung().equals(castOther.getBemerkung()) ) )
+ && ( (this.getOldmstid()==castOther.getOldmstid()) || ( this.getOldmstid()!=null && castOther.getOldmstid()!=null && this.getOldmstid().equals(castOther.getOldmstid()) ) )
+ && (this.isIstfeststoff()==castOther.isIstfeststoff())
+ && ( (this.getGewname()==castOther.getGewname()) || ( this.getGewname()!=null && castOther.getGewname()!=null && this.getGewname().equals(castOther.getGewname()) ) );
+   }
+
+   public int hashCode() {
+         int result = 17;
+
+         result = 37 * result + (int) this.getStationid();
+         result = 37 * result + (int) this.getGewaesserid();
+         result = 37 * result + ( getKm() == null ? 0 : this.getKm().hashCode() );
+         result = 37 * result + ( getBezugspegelid() == null ? 0 : this.getBezugspegelid().hashCode() );
+         result = 37 * result + ( getName() == null ? 0 : this.getName().hashCode() );
+         result = 37 * result + (this.isUferistlinks()?1:0);
+         result = 37 * result + ( getTeilVon() == null ? 0 : this.getTeilVon().hashCode() );
+         result = 37 * result + ( getTeilBis() == null ? 0 : this.getTeilBis().hashCode() );
+         result = 37 * result + ( getAbstHmst() == null ? 0 : this.getAbstHmst().hashCode() );
+         result = 37 * result + ( getLHochwert() == null ? 0 : this.getLHochwert().hashCode() );
+         result = 37 * result + ( getLRechwert() == null ? 0 : this.getLRechwert().hashCode() );
+         result = 37 * result + ( getLNn() == null ? 0 : this.getLNn().hashCode() );
+         result = 37 * result + ( getRHochwert() == null ? 0 : this.getRHochwert().hashCode() );
+         result = 37 * result + ( getRRechwert() == null ? 0 : this.getRRechwert().hashCode() );
+         result = 37 * result + ( getRNn() == null ? 0 : this.getRNn().hashCode() );
+         result = 37 * result + ( getBemerkung() == null ? 0 : this.getBemerkung().hashCode() );
+         result = 37 * result + ( getOldmstid() == null ? 0 : this.getOldmstid().hashCode() );
+         result = 37 * result + (this.isIstfeststoff()?1:0);
+         result = 37 * result + ( getGewname() == null ? 0 : this.getGewname().hashCode() );
+         return result;
+   }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/TmpGloChanged.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,42 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * TmpGloChanged generated by hbm2java
+ */
+@Entity
+@Table(name="TMP_GLO_CHANGED"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="GLOTRECHTEID")
+)
+public class TmpGloChanged  implements java.io.Serializable {
+
+
+     private long glotrechteid;
+
+    public TmpGloChanged() {
+    }
+
+    public TmpGloChanged(long glotrechteid) {
+       this.glotrechteid = glotrechteid;
+    }
+
+     @Id
+
+
+    @Column(name="GLOTRECHTEID", unique=true, nullable=false, precision=11, scale=0)
+    public long getGlotrechteid() {
+        return this.glotrechteid;
+    }
+
+    public void setGlotrechteid(long glotrechteid) {
+        this.glotrechteid = glotrechteid;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/TmpMesAchanged.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,42 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * TmpMesAchanged generated by hbm2java
+ */
+@Entity
+@Table(name="TMP_MES_ACHANGED"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="MESSUNGID")
+)
+public class TmpMesAchanged implements java.io.Serializable {
+
+
+    private long messungid;
+
+    public TmpMesAchanged() {
+    }
+
+    public TmpMesAchanged(long messungid) {
+        this.messungid = messungid;
+    }
+
+    @Id
+
+
+    @Column(name="MESSUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/TmpMesGchanged.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,42 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * TmpMesGchanged generated by hbm2java
+ */
+@Entity
+@Table(name="TMP_MES_GCHANGED"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="MESSUNGID")
+)
+public class TmpMesGchanged implements java.io.Serializable {
+
+
+    private long messungid;
+
+    public TmpMesGchanged() {
+    }
+
+    public TmpMesGchanged(long messungid) {
+        this.messungid = messungid;
+    }
+
+    @Id
+
+
+    @Column(name="MESSUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/TmpMesQchanged.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,42 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * TmpMesQchanged generated by hbm2java
+ */
+@Entity
+@Table(name="TMP_MES_QCHANGED"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="MESSUNGID")
+)
+public class TmpMesQchanged implements java.io.Serializable {
+
+
+    private long messungid;
+
+    public TmpMesQchanged() {
+    }
+
+    public TmpMesQchanged(long messungid) {
+        this.messungid = messungid;
+    }
+
+    @Id
+
+
+    @Column(name="MESSUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/TmpMesSchanged.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,42 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.UniqueConstraint;
+
+/**
+ * TmpMesSchanged generated by hbm2java
+ */
+@Entity
+@Table(name="TMP_MES_SCHANGED"
+    ,schema="SEDDB"
+    , uniqueConstraints = @UniqueConstraint(columnNames="MESSUNGID")
+)
+public class TmpMesSchanged implements java.io.Serializable {
+
+
+    private long messungid;
+
+    public TmpMesSchanged() {
+    }
+
+    public TmpMesSchanged(long messungid) {
+        this.messungid = messungid;
+    }
+
+    @Id
+
+
+    @Column(name="MESSUNGID", unique=true, nullable=false, precision=11, scale=0)
+    public long getMessungid() {
+        return this.messungid;
+    }
+
+    public void setMessungid(long messungid) {
+        this.messungid = messungid;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Zzarchiv.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,96 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+/**
+ * Zzarchiv generated by hbm2java
+ */
+@Entity
+@Table(name="ZZARCHIV"
+    ,schema="SEDDB"
+)
+public class Zzarchiv implements java.io.Serializable {
+
+
+    private long archivid;
+    private String name;
+    private boolean istaktiv;
+    private String bemerkung;
+    private Set<Sohltest> sohltests = new HashSet<Sohltest>(0);
+
+    public Zzarchiv() {
+    }
+
+    public Zzarchiv(long archivid, String name, boolean istaktiv) {
+        this.archivid = archivid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+    }
+    public Zzarchiv(long archivid, String name, boolean istaktiv, String bemerkung, Set<Sohltest> sohltests) {
+        this.archivid = archivid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+        this.bemerkung = bemerkung;
+        this.sohltests = sohltests;
+    }
+
+    @Id
+
+
+    @Column(name="ARCHIVID", unique=true, nullable=false, precision=11, scale=0)
+    public long getArchivid() {
+        return this.archivid;
+    }
+
+    public void setArchivid(long archivid) {
+        this.archivid = archivid;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
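+    // Inverse side of the Sohltest -> Zzarchiv association: Sohltest owns the
+    // foreign key, and the set is loaded lazily on first access.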
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="zzarchiv")
+    public Set<Sohltest> getSohltests() {
+        return this.sohltests;
+    }
+
+    public void setSohltests(Set<Sohltest> sohltests) {
+        this.sohltests = sohltests;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Zzprobenahmeart.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,110 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+/**
+ * Zzprobenahmeart generated by hbm2java
+ */
+@Entity
+@Table(name="ZZPROBENAHMEART"
+    ,schema="SEDDB"
+)
+public class Zzprobenahmeart implements java.io.Serializable {
+
+
+    private long probenahmeartid;
+    private boolean istkern;
+    private String name;
+    private boolean istaktiv;
+    private String bemerkung;
+    private Set<Sohlprobe> sohlprobes = new HashSet<Sohlprobe>(0);
+
+    public Zzprobenahmeart() {
+    }
+
+
+    public Zzprobenahmeart(long probenahmeartid, boolean istkern, String name, boolean istaktiv) {
+        this.probenahmeartid = probenahmeartid;
+        this.istkern = istkern;
+        this.name = name;
+        this.istaktiv = istaktiv;
+    }
+    public Zzprobenahmeart(long probenahmeartid, boolean istkern, String name, boolean istaktiv, String bemerkung, Set<Sohlprobe> sohlprobes) {
+        this.probenahmeartid = probenahmeartid;
+        this.istkern = istkern;
+        this.name = name;
+        this.istaktiv = istaktiv;
+        this.bemerkung = bemerkung;
+        this.sohlprobes = sohlprobes;
+    }
+
+    @Id
+
+
+    @Column(name="PROBENAHMEARTID", unique=true, nullable=false, precision=11, scale=0)
+    public long getProbenahmeartid() {
+        return this.probenahmeartid;
+    }
+
+    public void setProbenahmeartid(long probenahmeartid) {
+        this.probenahmeartid = probenahmeartid;
+    }
+
+
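+    // NUMBER(1,0) flag mapped to a Java boolean; "istkern" presumably marks
+    // core samples (an assumption based on the column name).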
+    @Column(name="ISTKERN", nullable=false, precision=1, scale=0)
+    public boolean isIstkern() {
+        return this.istkern;
+    }
+
+    public void setIstkern(boolean istkern) {
+        this.istkern = istkern;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="zzprobenahmeart")
+    public Set<Sohlprobe> getSohlprobes() {
+        return this.sohlprobes;
+    }
+
+    public void setSohlprobes(Set<Sohlprobe> sohlprobes) {
+        this.sohlprobes = sohlprobes;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Zzsondierungart.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,96 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+/**
+ * Zzsondierungart generated by hbm2java
+ */
+@Entity
+@Table(name="ZZSONDIERUNGART"
+    ,schema="SEDDB"
+)
+public class Zzsondierungart implements java.io.Serializable {
+
+
+    private long sondierungartid;
+    private String name;
+    private boolean istaktiv;
+    private String bemerkung;
+    private Set<Sohltest> sohltests = new HashSet<Sohltest>(0);
+
+    public Zzsondierungart() {
+    }
+
+    public Zzsondierungart(long sondierungartid, String name, boolean istaktiv) {
+        this.sondierungartid = sondierungartid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+    }
+    public Zzsondierungart(long sondierungartid, String name, boolean istaktiv, String bemerkung, Set<Sohltest> sohltests) {
+        this.sondierungartid = sondierungartid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+        this.bemerkung = bemerkung;
+        this.sohltests = sohltests;
+    }
+
+    @Id
+
+
+    @Column(name="SONDIERUNGARTID", unique=true, nullable=false, precision=11, scale=0)
+    public long getSondierungartid() {
+        return this.sondierungartid;
+    }
+
+    public void setSondierungartid(long sondierungartid) {
+        this.sondierungartid = sondierungartid;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="zzsondierungart")
+    public Set<Sohltest> getSohltests() {
+        return this.sohltests;
+    }
+
+    public void setSohltests(Set<Sohltest> sohltests) {
+        this.sohltests = sohltests;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/seddb/model/Zzthema.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,108 @@
+package de.intevation.seddb.model;
+// Generated 14.06.2012 11:30:57 by Hibernate Tools 3.4.0.CR1
+
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+/**
+ * Zzthema generated by hbm2java
+ */
+@Entity
+@Table(name="ZZTHEMA"
+    ,schema="SEDDB"
+)
+public class Zzthema implements java.io.Serializable {
+
+
+    private long themaid;
+    private String name;
+    private boolean istaktiv;
+    private String bemerkung;
+    private Set<Bild> bilds = new HashSet<Bild>(0);
+    private Set<Probebild> probebilds = new HashSet<Probebild>(0);
+
+    public Zzthema() {
+    }
+
+
+    public Zzthema(long themaid, String name, boolean istaktiv) {
+        this.themaid = themaid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+    }
+    public Zzthema(long themaid, String name, boolean istaktiv, String bemerkung, Set<Bild> bilds, Set<Probebild> probebilds) {
+        this.themaid = themaid;
+        this.name = name;
+        this.istaktiv = istaktiv;
+        this.bemerkung = bemerkung;
+        this.bilds = bilds;
+        this.probebilds = probebilds;
+    }
+
+    @Id
+
+
+    @Column(name="THEMAID", unique=true, nullable=false, precision=11, scale=0)
+    public long getThemaid() {
+        return this.themaid;
+    }
+
+    public void setThemaid(long themaid) {
+        this.themaid = themaid;
+    }
+
+
+    @Column(name="NAME", nullable=false, length=50)
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name="ISTAKTIV", nullable=false, precision=1, scale=0)
+    public boolean isIstaktiv() {
+        return this.istaktiv;
+    }
+
+    public void setIstaktiv(boolean istaktiv) {
+        this.istaktiv = istaktiv;
+    }
+
+
+    @Column(name="BEMERKUNG", length=240)
+    public String getBemerkung() {
+        return this.bemerkung;
+    }
+
+    public void setBemerkung(String bemerkung) {
+        this.bemerkung = bemerkung;
+    }
+
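+    // Zzthema is referenced from two tables: both Bild and Probebild rows point
+    // back to it, so there is one lazily loaded inverse-side collection per table.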
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="zzthema")
+    public Set<Bild> getBilds() {
+        return this.bilds;
+    }
+
+    public void setBilds(Set<Bild> bilds) {
+        this.bilds = bilds;
+    }
+
+    @OneToMany(fetch=FetchType.LAZY, mappedBy="zzthema")
+    public Set<Probebild> getProbebilds() {
+        return this.probebilds;
+    }
+
+    public void setProbebilds(Set<Probebild> probebilds) {
+        this.probebilds = probebilds;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/test/java/de/intevation/flys/AppTest.java	Fri Sep 28 12:14:48 2012 +0200
@@ -0,0 +1,38 @@
+package de.intevation.flys;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test for simple App.
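+ *
+ * This appears to be the stock Maven archetype placeholder test: the single
+ * test method only asserts {@code true} and exercises no backend code.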
+ */
+public class AppTest
+    extends TestCase
+{
+    /**
+     * Create the test case
+     *
+     * @param testName name of the test case
+     */
+    public AppTest( String testName )
+    {
+        super( testName );
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite()
+    {
+        return new TestSuite( AppTest.class );
+    }
+
+    /**
+     * Rigorous Test :-)
+     */
+    public void testApp()
+    {
+        assertTrue( true );
+    }
+}
