changeset 2877:f0a67bc0e777 2.7

merged flys-backend/2.7
author Thomas Arendsen Hein <thomas@intevation.de>
date Fri, 28 Sep 2012 12:14:31 +0200
parents 6310b1582f2d (current diff) d3438e271b3c (diff)
children 3cda41b5eb23
files
diffstat 194 files changed, 28131 insertions(+), 0 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/ChangeLog	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,3081 @@
+2012-05-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* Tagged module as '2.7'.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: Added lower
+	  and upper km parameters to the functions that return all singles and
+	  epochs for a given river.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java: Fixed
+	  a typo.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightEpochValue.java: Added
+	  functions to retrieve single and epoch values based on their owner and
+	  km range.
+
+2012-05-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: Added
+	  functions to get singles and epochs by river and by id.
+
+2012-05-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityModel.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeZone.java: Added static
+	  functions to retrieve data from database.
+
+2012-05-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeZone.java: Added a
+	  function getDischargeZones() that returns all DischargeZones for a given
+	  river.
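+
+	  A minimal sketch of how such a static lookup could look (illustrative
+	  only; it assumes the SessionHolder helper used elsewhere in this
+	  module):
+
+	    public static List<DischargeZone> getDischargeZones(River river) {
+	        Session session = SessionHolder.HOLDER.get();
+	        Query   query   = session.createQuery(
+	            "from DischargeZone where river = :river");
+	        query.setParameter("river", river);
+	        return query.list();
+	    }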
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/gauges.py: Search for field 'MPNAAM' and use its
+	  value as name for the geometry.
+
+	* src/main/java/de/intevation/flys/model/GaugeLocation.java: New model
+	  class for storing locations of gauges.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered GaugeLocation class.
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HydrBoundary.java,
+	  src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java: New.
+	  Hydrological boundaries.
+
+	* src/main/java/de/intevation/flys/model/Line.java: Modified signature of
+	  Line.getLines(). It now also takes the name of a line to retrieve more
+	  specific lines.
+
+	* src/main/java/de/intevation/flys/model/Building.java: Modified signature
+	  of Building.getBuildings(). It now also takes the name of a building to
+	  retrieve more specific buildings.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered HydrBoundary and HydrBoundaryPoly classes.
+
+2012-05-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/crosssectiontracks.py: Search for 'STATION' field in
+	  shapefile to extract the current km.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Hws.java: The getHws() function
+	  got an additional parameter 'name' to retrieve specific hws only.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: The
+	  getCatchments() function got an additional parameter 'name' to retrieve
+	  specific catchments only.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial_idx.sql: Set the geometry type of 'catchments'
+	  to 'multipolygon'.
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: The geometry
+	  attribute in such instances is now of type 'Geometry'.
+
+2012-05-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/import-dems.sql: New. INSERT statements to load DEMs into the
+	  database.
+
+2012-05-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql,
+	  doc/schema/oracle-spatial.sql: New relation 'gauge_location' and some
+	  bugfixes in the drop schema.
+
+	* contrib/shpimporter/catchments.py,
+	  contrib/shpimporter/hws.py,
+	  contrib/shpimporter/gauges.py: New importers.
+
+	* contrib/shpimporter/importer.py: Added a debug statement to improve the
+	  visibility of the log output.
+
+	* contrib/shpimporter/shpimporter.py: Make use of the new importers.
+
+2012-05-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql,
+	  doc/schema/oracle-spatial.sql: Added new relations for hydrological
+	  boundaries and appended a 'name' field to relations that had no such
+	  field yet.
+
+	* contrib/shpimporter/floodplains.py,
+	  contrib/shpimporter/boundaries.py: New importers for floodplain and
+	  hydrological boundaries.
+
+	* contrib/shpimporter/lines.py,
+	  contrib/shpimporter/buildings.py,
+	  contrib/shpimporter/uesg.py,
+	  contrib/shpimporter/fixpoints.py,
+	  contrib/shpimporter/axis.py,
+	  contrib/shpimporter/crosssectiontracks.py,
+	  contrib/shpimporter/km.py: Set the 'name' attribute for new features.
+
+	* contrib/shpimporter/importer.py: Some bugfixes and improvements:
+	  geometries are now transformed into the destination coordinate system.
+
+	* contrib/shpimporter/shpimporter.py: Use all importers and define the
+	  destination SRS.
+
+2012-05-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastCrossSectionLine.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionLine.java,
+	  src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Store measurement points of profiles as Doubles, not as BigDecimals.
+	  This should save a lot of memory during the import.
+
+2012-04-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java:
+	  Removed superfluous imports.
+
+2012-04-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-minfo.sql: Adjusted PostgreSQL port
+	  to match the Oracle schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/annotation-types.xml: Adapted annotation types based on BfG wishes.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop.sql: New SQL file to drop WINFO specific db
+	  schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle.sql: Made SQL instructions upper case.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-drop-spatial.sql: New statements to drop the whole
+	  spatial schema.
+
+2012-04-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/lines.py,
+	  contrib/shpimporter/buildings.py,
+	  contrib/shpimporter/importer.py,
+	  contrib/shpimporter/fixpoints.py,
+	  contrib/shpimporter/axis.py,
+	  contrib/shpimporter/crosssectiontracks.py,
+	  contrib/shpimporter/km.py: New classes for importing specific
+	  shapefiles. Each of these importers defines its target db table name
+	  and a directory path to the shapefiles it should import.
+
+	* contrib/shpimporter/uesg.py: Some modifications necessary to streamline
+	  the import process of shapefiles.
+
+	* contrib/shpimporter/shpimporter.py: Use all available importers for the
+	  import process.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Repaired broken schema.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing MINFO waterlevel differences:
+
+	    -Dflys.backend.importer.skip.waterlevel.differences=True
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Parse and
+	  store MINFO specific waterlevel differences.
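+
+	  As a rough sketch (not the actual Config code), such a skip flag can be
+	  read once from the system properties; the method name here is
+	  illustrative:
+
+	    public static final String SKIP_WATERLEVEL_DIFFERENCES =
+	        "flys.backend.importer.skip.waterlevel.differences";
+
+	    public boolean skipWaterlevelDifferences() {
+	        // Boolean.getBoolean() parses the value case-insensitively,
+	        // so both 'true' and 'True' enable the skip.
+	        return Boolean.getBoolean(SKIP_WATERLEVEL_DIFFERENCES);
+	    }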
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java:
+	  New parser for MINFO specific waterlevel differences.
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java:
+	  New importer classes used during the import process of MINFO specific
+	  waterlevel differences.
+
+2012-04-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added a missing constraint to
+	  'waterlevel_difference' relation. Remove that constraint in
+	  oracle-drop-minfo.sql.
+
+2012-04-25  Felix Wolfsteller <felix@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java:
+	  (setValue, setValues): Change setter name to allow Hibernate to
+	  recognize it.
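+
+	  A short sketch of the bean shape Hibernate expects here, with getter
+	  and setter sharing one property name; the field type and column name
+	  are assumptions, not the actual mapping:
+
+	    @Column(name = "value")
+	    public BigDecimal getValue() {
+	        return value;
+	    }
+
+	    public void setValue(BigDecimal value) {
+	        this.value = value;
+	    }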
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelDifferenceColumn.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelDifference.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java:
+	  New model classes for MINFO specific waterlevel differences.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Improved the schema to store MINFO
+	  specific waterlevel differences.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelValue.java: Renamed a
+	  property ('qRange' -> 'qrange') because there have been problems during
+	  the import.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java:
+	  Filled the stub with code.
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevel.java: Some
+	  adaptions and missing methods which are required during the import.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Use the
+	  correct directory to search for waterlevel files.
+
+2012-04-24  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Small type adaptions in the waterlevel_values
+	  relation.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java:
+	  First stub of a parser for MINFO specific waterlevel values.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a new config
+	  option to skip parsing MINFO specific waterlevel_values:
+
+	    -Dflys.backend.importer.skip.waterlevels=True
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Added code
+	  path to start parsing and storing MINFO specific waterlevel values.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWaterlevel.java: New
+	  importer classes used for MINFO specific waterlevel import.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/WaterlevelQRange.java,
+	  src/main/java/de/intevation/flys/model/WaterlevelValue.java,
+	  src/main/java/de/intevation/flys/model/Waterlevel.java: New model
+	  classes for MINFO specific waterlevel data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added relations for waterlevels
+	  specific to MINFO. Note: those waterlevel values are not stored in the
+	  WINFO specific relations!
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYield.java,
+	  src/main/java/de/intevation/flys/model/SedimentYield.java: Added a
+	  column 'description' to the sediment_yield relation.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java:
+	  New parser for sediment yield data.
+
+	* src/main/java/de/intevation/flys/model/GrainFraction.java: Added constants
+	  that represent the names of the grain fraction types.
+
+	* src/main/java/de/intevation/flys/importer/ImportGrainFraction.java: New
+	  constructor that takes a name only.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Improved the
+	  process of parsing sediment yield files.
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added new config
+	  option to skip parsing sediment yield data:
+
+	     -Dflys.backend.importer.skip.sediment.yield=true
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared the
+	  importer to parse sediment yield data.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentYield.java:
+	  storeDependencies() now throws SQLExceptions and
+	  ConstraintViolationExceptions.
+
+2012-04-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportGrainFraction.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYieldValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentYield.java: New
+	  importer classes for importing sediment yield data.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/SedimentYield.java,
+	  src/main/java/de/intevation/flys/model/SedimentYieldValue.java,
+	  src/main/java/de/intevation/flys/model/GrainFraction.java: New model
+	  classes for sediment yield data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added missing river_id column to
+	  sediment_yield relation.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java,
+	  src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeZone.java:
+	  Removed superfluous imports.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added relations for storing sediment
+	  yield values.
+
+2012-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: The discharge_zone relation no longer
+	  has a reference to a named main value but stores lower and upper
+	  discharge as strings.
+
+	* src/main/java/de/intevation/flys/model/DischargeZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeZone.java:
+	  Adapted the code to the changes in the db schema.
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java:
+	  This parser now reads the meta information properly.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java:
+	  Repaired broken HQL statement.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Log the number of flow velocity model values that have been written into
+	  database.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java:
+	  New parser for flow velocity measurements.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Fixed broken HQL statements.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Store flow
+	  velocity measurements into database after parsing them.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes for flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java:
+	  New temp classes used during the import process of flow velocity
+	  measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityMeasurementValue.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityMeasurement.java: New
+	  model classes for storing flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  flow velocity measurements.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java:
+	  New. Parser for model files of MINFO specific flow velocity data.
+
+	* doc/schema/oracle-minfo.sql: Added a missing q column to
+	  flow_velocity_model_values relation.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java:
+	  Added missing q column.
+
+	* src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Added setter methods for meta data and an addValue() for adding new
+	  ImportFlowVelocityModelValues.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Use
+	  FlowVelocityModelParser for parsing model data of flow velocity files.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java:
+	  Temp classes used during the import process of flow velocity data.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  the method that stores flow velocity model data.
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared for
+	  parsing flow velocity files.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing flow velocity files:
+
+	    -Dflys.backend.importer.skip.flow.velocity=true
+
+2012-04-17  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FlowVelocityModel.java,
+	  src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeZone.java: New model
+	  classes for MINFO specific database relations.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  flow velocity values.
+
+2012-02-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Fixed column unit_id in table depths
+	  to match type of column id in table units.
+
+	* doc/schema/postgresql-minfo.sql: oracle-minfo.sql for a better DBMS.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidth.java: Added
+	  annotation for values.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java:
+	  Removed debug output in getPeer() and storeDependencies().
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java:
+	  New parser for morphological widths files.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidth.java: Throw
+	  constraint violation exceptions.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Parse and
+	  store morphological widths.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option to skip parsing morphological widths:
+
+	    -Dflys.backend.importer.skip.morphological.width=true
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a description field to morphological
+	  width values.
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java:
+	  Added new instance variable for descriptions.
+
+	* src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMorphWidth.java: New
+	  temp classes used to store morphological width values during the import.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Added new relations for MINFO specific
+	  morphological width.
+
+	* src/main/java/de/intevation/flys/model/MorphologicalWidth.java,
+	  src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java: New
+	  model classes for morphological width.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model classes.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a description field to table
+	  sediment_density.
+
+	* src/main/java/de/intevation/flys/model/SedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/model/SedimentDensity.java: Some
+	  modifications based on the changes of the schema adaption in last commit.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  Override parse() of parent class to get the filename.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java,
+	  src/main/java/de/intevation/flys/importer/ImportDepth.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java:
+	  Implemented the methods storeDependencies() and getPeer() to save new
+	  instances into database.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Save all
+	  ImportSedimentDensity objects to database.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Modified the db schema specific to
+	  MINFO; replaced some columns.
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  Implemented the method stubs: parse meta data and data values.
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java:
+	  Added and replaced some instance variables because the db schema has
+	  changed.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/LineParser.java: New.
+	  An abstract parser that might be used to read a file and handle each
+	  line contained in the file separately (see the sketch after this
+	  entry).
+
+	* src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java:
+	  New. Subclasses LineParser. It is able to parse MINFO specific sediment
+	  density files. NOTE: currently just a stub.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Prepared to
+	  read MINFO specific sediment density files.
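+
+	  A minimal sketch of the line-by-line template such a parser can follow
+	  (method names are illustrative, not necessarily the actual API):
+
+	    public abstract class LineParser {
+
+	        public void parse(File file) throws IOException {
+	            BufferedReader in =
+	                new BufferedReader(new FileReader(file));
+	            try {
+	                String line;
+	                while ((line = in.readLine()) != null) {
+	                    handleLine(line.trim());
+	                }
+	            }
+	            finally {
+	                in.close();
+	            }
+	        }
+
+	        /** Called once for each line of the input file. */
+	        protected abstract void handleLine(String line);
+	    }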
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a new
+	  config option to skip parsing MINFO sediment density values:
+
+	    -Dflys.backend.importer.skip.sediment.density=true
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java,
+	  src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDepth.java: New importer
+	  classes used during MINFO sediment density import.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Improved the MINFO schema to store
+	  sediment density values specific to a river and depth.
+
+	* src/main/java/de/intevation/flys/model/SedimentDensityValue.java,
+	  src/main/java/de/intevation/flys/model/SedimentDensity.java,
+	  src/main/java/de/intevation/flys/model/Depth.java: New model classes
+	  used to store sediment density values specific to a river and depth.
+
+2012-04-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java:
+	  New. An abstract super class for BedHeightSingleParser and
+	  BedHeightEpochParser. It implements methods for parsing meta data.
+	  Concrete subclasses only need to implement the method for parsing data
+	  rows.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeight.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightValue.java: New.
+	  Interfaces that define the major methods that enable the BedHeightParser
+	  to parse both single and epoch bed heights.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Moved the code for parsing meta data to BedHeightParser which is now the
+	  parent class.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java:
+	  New. A further subclass of BedHeightParser for parsing MINFO bed heights
+	  for epochs.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java:
+	  Made them subclasses of ImportBedHeightValue.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java: Made
+	  them subclasses of ImportBedHeight.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Some
+	  adjustments to store ImportBedHeight and ImportBedHeightValue instances
+	  instead of concrete subclasses.
+
+2012-04-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql: Added a river reference to the
+	  bed_height_epoch table.
+
+	* src/main/java/de/intevation/flys/model/BedHeightEpochValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightEpoch.java: New model
+	  classes for MINFO bed height epochs.
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Split up the
+	  config option to skip parsing bed heights. Now, we are able to skip single
+	  and epoch bed heights using the following options:
+
+	    -Dflys.backend.importer.skip.bed.height.single=true  (skip singles)
+	    -Dflys.backend.importer.skip.bed.height.epoch=true   (skip epochs)
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  the code needed to parse these data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new model classes.
+
+2012-04-12  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Schema adaptions specific to MINFO bed
+	  heights.
+
+	* src/main/java/de/intevation/flys/model/BedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/model/BedHeightType.java,
+	  src/main/java/de/intevation/flys/model/ElevationModel.java,
+	  src/main/java/de/intevation/flys/model/LocationSystem.java,
+	  src/main/java/de/intevation/flys/model/BedHeightSingle.java: New model
+	  classes for MINFO bed heights.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Some logging adjustments and a little bugfix: add BedHeightSingle objects
+	  after they have been parsed; otherwise they are not saved to the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportElevationModel.java,
+	  src/main/java/de/intevation/flys/importer/ImportLocationSystem.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightType.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java: Implemented
+	  storeDependencies() and getPeer().
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered model classes.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Finished work on parsing meta information and data specific to single bed
+	  heights.
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportElevationModel.java,
+	  src/main/java/de/intevation/flys/importer/ImportLocationSystem.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightType.java: Some
+	  new and modified temp storages used during MINFO import.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: Some schema adaptions specific to bed
+	  heights in MINFO.
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added new command
+	  line option to skip parsing bed heights (German "Sohlhoehen").
+	  Set "-Dflys.backend.importer.skip.bed.height=true" to skip parsing this
+	  file type.
+
+	* src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java:
+	  Initial checkin of parsers for bed heights (single and epoch).
+
+	* src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java,
+	  src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java: Temp
+	  storage for bed heights data used during the import.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java: Added an INFO
+	  statement that signals the start of parsing rivers.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Collect and
+	  trigger parsing of bed heights files (placed in 'Morphologie/Sohlhoehen').
+
+2012-04-11  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-minfo.sql,
+	  doc/schema/oracle-drop-minfo.sql: MINFO specific DB schema and SQL statements
+	  to drop MINFO specific stuff.
+
+2012-03-29  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: Added a config
+	  option "flys.backend.importer.infogew.file" to set the path of an
+	  INFO.gew file.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java: Read the path
+	  to the INFO.gew from the new config option and try to parse it.
+
+2012-03-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Set the type of 'lower' and 'upper' column
+	  of relation 'dem' to NUMBER(19,5).
+
+2012-03-19  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java: Set geometry type
+	  to 'Geometry', because Oracle can save POLYGONS and MULTIPOLYGONS in the
+	  same relation; Hibernate seems unable to load both types.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered missing Floodmaps.
+
+2012-03-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* Tagged module as 'pre2.7-2012-03-16'.
+
+2012-03-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Repaired broken oracle db schema for
+	  relation 'floodmaps'.
+
+2012-03-08  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodmaps.java: New model class for
+	  'floodmaps'.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Floodmaps model class.
+
+2012-03-07  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/shpimporter/shpimporter.py,
+	  contrib/shpimporter/utils.py,
+	  contrib/shpimporter/uesg.py: A python based tool for importing
+	  shapefiles into a database. This tool is based on python because it
+	  makes use of GDAL OGR to read shapefiles and write features into
+	  database.
+
+2012-03-07  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Adapted the PostgreSQL schema for
+	  floodmaps.
+
+2012-03-06  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql,
+	  doc/schema/oracle-spatial.sql: Added new relations for existing
+	  floodmaps (currently tested for PostgreSQL only!).
+
+2012-03-06  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Adapted method call of
+	  Log4J logger 'warning()' -> 'warn()'.
+
+2012-03-02  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	Fix or work around flys/issue632.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Avoid NPE when
+	  trying to get min/max q values.
+
+2012-02-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeTable.java:
+	  The list of DischargeTableValue is now sorted by Q.
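+
+	  Roughly, such an ordering can be declared on the mapping itself; the
+	  annotation placement and join column below are assumptions, not the
+	  actual mapping:
+
+	    @OneToMany
+	    @JoinColumn(name = "table_id")
+	    @OrderBy("q")
+	    public List<DischargeTableValue> getDischargeTableValues() {
+	        return dischargeTableValues;
+	    }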
+
+2012-02-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Added a method
+	  getMasterDischargeTable() to retrieve the discharge table with kind 0.
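+
+	  A sketch of how the kind-0 lookup might look; the getDischargeTables()
+	  and getKind() accessors are assumed here:
+
+	    public DischargeTable getMasterDischargeTable() {
+	        for (DischargeTable dt: getDischargeTables()) {
+	            if (dt.getKind() == 0) {
+	                return dt;
+	            }
+	        }
+	        return null;
+	    }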
+
+2012-02-03  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java
+	  (getGaugeDatumsKMs,queryGaugeDatumsKMs): renamed to avoid Hibernate
+	  running into trouble finding a db-mapping for type Map for what looks
+	  like a 'getter' of GaugeDatumsKMs.
+
+2012-02-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java(getGaugeDatumsKMs):
+	  New method to build a map of gauge station kms to the datums (PNP) of
+	  the gauges. Useful to look up whether a km is a gauge station.
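+
+	  Roughly, the map could be built like this; the getStation() and
+	  getDatum() accessors on Gauge are assumptions:
+
+	    public Map<Double, BigDecimal> getGaugeDatumsKMs() {
+	        Map<Double, BigDecimal> result =
+	            new HashMap<Double, BigDecimal>();
+	        for (Gauge gauge: getGauges()) {
+	            BigDecimal station = gauge.getStation();
+	            BigDecimal datum   = gauge.getDatum();
+	            if (station != null && datum != null) {
+	                result.put(station.doubleValue(), datum);
+	            }
+	        }
+	        return result;
+	    }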
+
+2012-01-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastAnnotations.java:
+	  New. Fetches all information about the annotations of a river in one
+	  go. Useful to speed up annotation handling.
+
+2012-01-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Added a function that
+	  returns a Gauge based on its official number.
+
+2012-01-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/FastCrossSectionLine.java:
+	  New. Cacheable representation of the cross section line.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  Add a new isValid() method.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java:
+	  Added method getFastLines() to fetch the lines (FastCrossSectionLines)
+	  directly with a single SQL statement and without expensive
+	  intermediate representations.
+
+2012-01-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java(getLines):
+	  Added method to fetch the lines of a cross section in a given interval.
+	  Useful to have chunked access to the lines.
+
+2012-01-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java
+	  (fetchCrossSectionLinesPoints): Simplified and prevent reallocations.
+
+2012-01-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java:
+	  Introduced boolean system property 'flys.backend.sta.parse.gauge.numbers'
+	  default: false. If set, the official number is parsed out of
+	  the first line of the STA files. This leads to problems with the
+	  data of the Elbe river.
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Removed
+	  superfluous import.
+
+2012-01-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Fixed table hws.
+
+2012-01-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxis.java: The method
+	  getRiverAxis() now returns a list of RiverAxis objects. There is a
+	  modeling problem (see comment in the header of the class) which should
+	  be fixed!
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Adapted the
+	  code based on the modified signature in RiverAxis.
+
+2012-01-03  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Added a method
+	  determineMinMaxQFree() that determines the min/max Qs at a given
+	  kilometer.
+
+2012-01-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Remove 'NOT NULL' constraint from
+	  gauges.range_id because there are gauges which don't have
+	  a 'Gueltigkeitsbereich'.
+	
+	  To adjust existing PostgreSQL databases use:
+	
+	      ALTER TABLE gauges ALTER COLUMN range_id DROP NOT NULL;
+
+	* src/main/java/de/intevation/flys/model/River.java: Handle
+	  null references to 'Gueltigkeitsbereiche'.
+
+2011-12-28  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Added a method
+	  determineMinMaxQ(double, double) to be able to determine the Q range of a
+	  WST without having a Range object.
+
+2011-12-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java:
+	  Parse the official 'Pegelnummer' out of the STA files, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Adjusted import model.
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Make the
+	  official gauge number accessible via Hibernate.
+
+	* doc/schema/postgresql.sql, doc/schema/oracle.sql: Added
+	  an official_number column to the gauges table.
+
+2011-11-30	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle-spatial.sql: Adjust extent of Germany to EPSG:31467
+	  (GK3) coordinates.
+
+2011-11-30	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle-spatial.sql: Adjust extent to the extent of Germany
+	  and SRS to 31467 in USER_SDO_GEOM_METADATA.
+
+2011-11-29	Bjoern Schilberg <bjoern.schilberg@intevation.de>:
+
+	* doc/schema/oracle_create_user.sql: Fixed notation of the table in the
+	  ALTER statement.
+
+2011-11-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	Fixed flys/issue415
+
+	* doc/schema/oracle.sql: Increased precision of a and b in ranges.
+
+2011-11-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Building.java,
+	  src/main/java/de/intevation/flys/model/Fixpoint.java: Added functions
+	  that return a list of Buildings/Fixpoints for a given river.
+
+2011-11-10  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/oracle-spatial.sql,
+	  doc/schema/postgresql-spatial.sql: Adapted the "kind" field of "lines"
+	  relation (Int -> Varchar).
+
+	* doc/schema/oracle-spatial_idx.sql: Added missing spatial index for
+	  "lines" relation.
+
+	* src/main/java/de/intevation/flys/model/Line.java: Added a function that
+	  returns all lines of a given river.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Catchment.java: New. A model for
+	  the 'catchment' relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Catchment model.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Synced "hws" relation with oracle
+	  schema.
+
+	* src/main/java/de/intevation/flys/model/Hws.java: New. A model for the
+	  "hws" relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Hws model.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added methods that return information about the database connection used
+	  by a concrete SessionFactoryImpl.
+
+2011-11-09  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Floodplain.java: Changed the
+	  geometry type from MultiPolygon to Polygon.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added a function that returns the db driver of a given
+	  SessionFactoryImpl instance.
+
+2011-11-01  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxisKm.java: New. Model class
+	  for 'river_axes_km' relation.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added mapping for RiverAxisKm.
+
+2011-10-25  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle_create_user.sql:
+	  Added explicit TABLESPACE definition for the user.
+
+2011-10-25  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle_create_user.sql: 
+	  Adjusted oracle_create_user.sql script. Added USER SQL snippet to change
+	  tablespace to users.
+
+2011-10-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* src/main/java/de/intevation/flys/utils/DateGuesser.java:
+	  New. Date guesser from desktop FLYS.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WstParser.java:
+	  Try to parse the name of a WST column as a date and store
+	  the date into the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Added code to store the date of the column in the database.
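+
+	  A hedged sketch of the intended fallback in the parser; the
+	  DateGuesser.guess() signature and the setDate() setter are assumptions:
+
+	    Date date = null;
+	    try {
+	        date = DateGuesser.guess(columnName);
+	    }
+	    catch (IllegalArgumentException iae) {
+	        // Column name is not recognizable as a date; store null.
+	    }
+	    column.setDate(date);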
+
+2011-10-24  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: 
+	  Adjusted geometry type for floodplain and hws in AddGeometryColumn.
+
+2011-10-19  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Added missing views (wst_value_table,wst_w_values,wst_q_values).
+
+2011-10-18  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Removed all CREATE OR REPLACE TRIGGER statements.
+
+2011-10-17  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Adjusted NUMBER format for units, positions and cross_section_points tables.
+
+2011-10-10  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Fix errors.
+
+2011-10-10  Bjoern Schilberg <bjoern.schilberg@intevation.de>
+
+	* doc/schema/oracle.sql:
+	  Adjust oracle schema [I382].
+
+2011-10-10  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportUnit.java:
+	  Removed obsolete imports.
+
+2011-10-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>:
+
+	* ChangeLog: Added database update statements.
+
+	To update existing databases:
+
+	    BEGIN;
+	      CREATE SEQUENCE UNITS_ID_SEQ;
+	      CREATE TABLE units (
+	        id   int PRIMARY KEY NOT NULL,
+	        name VARCHAR(32)     NOT NULL UNIQUE
+	      );
+	      ALTER TABLE rivers ADD COLUMN wst_unit_id int REFERENCES units(id);
+	      INSERT INTO units (id, name) VALUES (nextval('UNITS_ID_SEQ'), 'NN + m');
+	      INSERT INTO units (id, name) VALUES (nextval('UNITS_ID_SEQ'), 'NHN + m');
+	      UPDATE rivers SET wst_unit_id = (SELECT id FROM units WHERE name = 'NHN + m') WHERE name = 'Elbe';
+	      UPDATE rivers SET wst_unit_id = (SELECT id FROM units WHERE name = 'NN + m') WHERE name <> 'Elbe';
+	      ALTER TABLE rivers ALTER COLUMN wst_unit_id SET NOT NULL;
+	    COMMIT;
+
+2011-10-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* contrib/dump-schema.sh: New. A small shell script that dumps the
+	  database schema of a given database to SQL.
+
+	* pom.xml: Added dependency for hibernate-spatial-oracle stuff.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql.sql: Removed unit column from wsts; added a
+	  wst_unit_id column to rivers. We decided to support a single elevation
+	  model for the whole river.
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a WstUnit
+	  column.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: Removed the Unit
+	  column.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: Provide a
+	  getUnit() method that allows querying the elevation unit for this wst.
+
+	* src/main/java/de/intevation/flys/importer/ImportUnit.java: Removed
+	  storeDependencies().
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java: Save the
+	  Unit of the wst file (or a default one) into the database before all
+	  other dependencies and the river itself are saved.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a "units" table.
+
+	* src/main/java/de/intevation/flys/model/Unit.java: New. Model class that
+	  represents a unit.
+
+	* src/main/java/de/intevation/flys/importer/ImportUnit.java: New. Model
+	  class that is used to import units.
+
+	* src/main/java/de/intevation/flys/model/Wst.java: A Wst stores a Unit
+	  which references "units" table now.
+
+	* src/main/java/de/intevation/flys/importer/parsers/WstParser.java: Set
+	  units which have already been parsed on Wsts.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: Added setter
+	  method for Units and call storeDependencies() for ImportUnits.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model class Unit.
+
+2011-10-04  Ingo Weinzierl <ingo@intevation.de>
+
+	flys/issue333 (W-INFO / Berechnung Wasserspiegellage, Zuordnung Bezugspegel)
+
+	* src/main/java/de/intevation/flys/model/River.java: Modified
+	  determination of gauge based on a km range. River.determineGauge(double,
+	  double) will now return the gauge which matches the start km.
+
+2011-09-28  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  (wWrap): New method (extracted from WaterlevelSelectState).
+
+2011-09-28  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  (unbracket): New method (extracted from WaterlevelSelectState).
+
+2011-09-27	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  Moved some logic from cross section demo app to this model.
+	
+2011-09-26  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java:
+	  Added a new function that returns the nearest CrossSectionTrack of a
+	  river to a given kilometer.
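+
+	  One way to express such a nearest-match lookup in HQL (a sketch only;
+	  the 'km' property and the SessionHolder helper are assumptions):
+
+	    public static CrossSectionTrack getCrossSectionTrack(
+	        River  river,
+	        double km
+	    ) {
+	        Session session = SessionHolder.HOLDER.get();
+	        Query   query   = session.createQuery(
+	            "from CrossSectionTrack where river = :river " +
+	            "order by abs(km - :km)");
+	        query.setParameter("river", river);
+	        query.setParameter("km", new BigDecimal(km));
+	        query.setMaxResults(1);
+	        List result = query.list();
+	        return result.isEmpty()
+	            ? null
+	            : (CrossSectionTrack) result.get(0);
+	    }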
+
+2011-09-23  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: 
+	  Added missing river_axes_km schema.
+	* doc/schema/oracle-spatial.sql:
+	  Added missing dem and lines schema.
+
+2011-09-22  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql, doc/schema/oracle-spatial.sql:
+	  Harmonized oracle and postgresql spatial schema.
+
+2011-09-22  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Harmonized Extent for Saar and Mosel. Some cleanups.
+
+2011-09-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added the possibility to enable JMX (/MBean) support for Hibernate. By
+	  default, this support is NOT enabled. To enable JMX support for
+	  Hibernate, set the system property "flys.backend.enablejmx=true".
+
+2011-09-20  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle_create_user.sql: Added right to create views.
+
+2011-09-20  Bjoern Schilberg <bjoern@intevation.de>
+
+	* doc/schema/oracle-spatial.sql: Generalized oracle spatial schema.
+
+2011-09-19  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.5
+
+	* Changes: Prepared changes for release.
+
+2011-09-16  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DGM.java: Added function to query
+	  a DGM by Id.
+
+2011-09-15  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  pom.xml: Removed oracle dependency.
+
+2011-09-15  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle.sql: Added initial Oracle schema.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial_idx.sql: Added initial oracle-spatial_idx.sql
+	   script.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Deactivated spatial indexes.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle_create_user.sql: Added initial oracle_create_user.sql
+	   script.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema floodplain.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema hws.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema cross_section_tracks.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema river_axes.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema fixpoints.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added schema buildings.
+
+2011-09-14  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Fixed table river_axes_km.
+
+2011-09-13  Bjoern Schilberg <bjoern@intevation.de>
+
+	*  doc/schema/oracle-spatial.sql: Added initial oracle schemas.
+
+2011-09-12  Bjoern Schilberg <bjoern@intevation.de>
+	
+	* doc/schema/postgresql-spatial.sql: Added initial schemas for
+	  Hydrologie/Einzugsgebiete, Hydrologie/HW-Schutzanlagen, Hydrologie/Hydr.
+	  Grenzen/Linien, BfG/hauptoeff_*.shp, BfG/MNQ-*.shp,
+	  BfG/modellgrenze*.shp,  BfG/uferlinie.shp, BfG/vorland_*.shp,
+	  Hydrologie/Streckendaten, Hydrologie/UeSG/Berechnung,
+	  Hydrologie/UeSG/Messung
+
+2011-09-02  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Removed explicit sequences from the
+	  schema; auto-increment is realized through SERIALs now.
+
+	* src/main/java/de/intevation/flys/model/DGM.java: The table for dem has
+	  been renamed to 'dem' (before 'dgm').
+
+2011-09-01  Hans Plum <hans@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql:
+	Hint for unifying table names: dgm -> dem
+
+2011-09-01  Hans Plum <hans@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql:
+	Added mappings to existing data in file system (based on
+	river Saar); added TODOs for missing tables/mappings.
+
+2011-08-31  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: New relation for floodplains (German
+	  "Talaue").
+
+	* src/main/java/de/intevation/flys/model/Floodplain.java: New. Model class
+	  that represents a floodplain.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the Floodplain mapping.
+
+2011-08-31  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: Added a relation for DGMs. Note that
+	  no data of the DGM is stored in this relation, only the file path to
+	  the DGM file.
+
+	* src/main/java/de/intevation/flys/model/DGM.java: New. This class provides
+	  information for a DGM (km range and file path).
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the DGM.
+
+2011-08-30  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java: New
+	  static function to retrieve all CrossSectionTracks of a specific river.
+
+2011-08-25  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Moved to
+	  artifacts-common module.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/PRFParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/HYKParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java: Adjusted
+	  imports of FileTools.
+
+2011-08-25  Felix Wolfsteller <felix.wolfsteller@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java (getMainValues)
+	  (setMainValues):
+	  New method and mapping to get/set MainValues of a Gauge. Essentially
+	  developed by Sascha L. Teichmann.
+
+2011-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Reverted the default db dialect to PostgreSQL; it had been changed to
+	  PostGIS by accident.
+	  Configure the dialect "org.hibernatespatial.postgis.PostgisDialect" to
+	  use the PostGIS support.
+
+2011-08-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/RiverAxis.java: Added a function
+	  that returns the RiverAxis for a given River.
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java: Use the new
+	  function of RiverAxis to retrieve the RiverAxis.
+
+2011-08-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* doc/schema/postgresql-spatial.sql: The PostGIS database schema for
+	  FLYS spatial data.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionTrack.java,
+	  src/main/java/de/intevation/flys/model/RiverAxis.java,
+	  src/main/java/de/intevation/flys/model/Line.java,
+	  src/main/java/de/intevation/flys/model/Building.java,
+	  src/main/java/de/intevation/flys/model/Fixpoint.java: Model classes that
+	  represent FLYS spatial data.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered model classes.
+
+	* pom.xml: Added dependencies to HibernateSpatial and PostGIS.
+
+	* src/main/java/de/intevation/flys/backend/SpatialInfo.java,
+	  contrib/spatial-info.sh: A small demo application that prints out some
+	  information about spatial data of specific rivers. Modify the system
+	  property -Dflys.backend.spatial.river to get information of a river of
+	  your choice.
+
+2011-07-31	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Re-enabled Hibernate schema dumps.
+
+	* src/main/java/de/intevation/flys/App.java: Removed old code
+	  and use new SessionFactoryProvider infrastructure.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Added methods to create db configurations without opening them.
+	  Useful for introspection-only purposes.
+
+2011-07-31	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Bumped Apache DBCP up to 1.4 to use the same version
+	  as artifact database.
+
+2011-07-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Make use of the LRU cache from artifacts common.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added missing foreign key constraint on hyks table.
+	  To update existing databases:
+
+	  ALTER TABLE hyks ADD CONSTRAINT hyks_river_id_fkey FOREIGN KEY (river_id) REFERENCES rivers(id);
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Check if zone coordinates in HYKs are swapped and warn the user.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYK.java,
+	  src/main/java/de/intevation/flys/importer/parsers/HYKParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java:
+	  Various small fixes and some extra logging.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Config.java: New.
+	  Central singleton to configure the Importer.
+	  Uses the following system properties for now:
+
+	  flys.backend.importer.dry.run: boolean
+	      default false. true: don't write to database.
+
+	  flys.backend.importer.annotation.types: String
+	      default unset. Filename of annotation type classifications.
+
+	  flys.backend.importer.skip.gauges: boolean
+	      default: false. true: don't parse/store *.glt, *.sta files
+
+	  flys.backend.importer.skip.annotations: boolean
+	      default: false. true: don't parse/store *.km files
+
+	  flys.backend.importer.skip.prfs: boolean
+	      default: false. true: don't parse/store *.prf files
+
+	  flys.backend.importer.skip.hyks: boolean
+	      default: false. true: don't parse/store *.hyk files
+
+	  flys.backend.importer.skip.wst: boolean
+	      default: false. true: don't parse/store river wst files
+
+	  flys.backend.importer.skip.extra.wsts: boolean
+	      default: false. true: don't parse/store extra *.zus, *.wst files
+
+	  flys.backend.importer.skip.fixations: boolean
+	      default: false. true: don't parse/store fixation *.wst files
+
+	  flys.backend.importer.skip.official.lines: boolean
+	      default: false. true: don't parse/store 'amtliche Linien' *.wst files
+
+	  flys.backend.importer.skip.flood.water: boolean
+	      default: false. true: don't parse/store 'HW-Marken' *.wst files
+
+	  flys.backend.importer.skip.flood.protection: boolean
+	      default: false. true: don't parse/store 'HW-Schutzanlagen' *.wst files
+
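+	  A minimal sketch of how these boolean flags can be read (class and
+	  method names below are illustrative only, not taken from Config.java):
+
+	    // Boolean.getBoolean() returns false when the property is unset,
+	    // matching the documented defaults of the skip flags.
+	    public final class ImporterFlags {
+
+	        public static boolean dryRun() {
+	            return Boolean.getBoolean("flys.backend.importer.dry.run");
+	        }
+
+	        public static boolean skipPRFs() {
+	            return Boolean.getBoolean("flys.backend.importer.skip.prfs");
+	        }
+
+	        // Returns null when the property is unset, i.e. no classification.
+	        public static String annotationTypes() {
+	            return System.getProperty(
+	                "flys.backend.importer.annotation.types");
+	        }
+	    }
+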
+2011-07-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYKEntry.java: Fixed OrderBy
+	  clause.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java:
+	  Store HYK data structures to database. Needs testing.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the HYKs from the importer. TODO: Store them in database.
+
+2011-07-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Create data structures while parsing.
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java: Added
+	  getters/setters to ease model wiring.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFormation.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYKEntry.java,
+	  src/main/java/de/intevation/flys/importer/ImportHYK.java: New.
+	  Importer models for HYKs.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Each entry in a HYK can have
+	  an optional 'Peilungsjahr' (measure) not only the whole HYK.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyks DROP COLUMN measure;
+	    ALTER TABLE hyk_entries ADD COLUMN measure TIMESTAMP;
+	  COMMIT;
+
+	  * src/main/java/de/intevation/flys/model/HYKEntry.java,
+	    src/main/java/de/intevation/flys/model/HYK.java:
+	    Adjusted Hibernate models.
+
+2011-07-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Added callback mechanism and recursive file search like in the PRF parser.
+	  All BfG-HYK files seem to parse correctly now. TODO: Build the data structures.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java:
+	  Added x.canRead() before accepting files for parsing.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/HYKParser.java:
+	  Initial version of the HYK parser. Not ready, yet.
+
+	* src/main/java/de/intevation/flys/importer/ImportHYKFlowZoneType.java:
+	  Importer model for HYK flow zone types.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Argh! Added distance_{vl|hf|vr} to
+	  the wrong table.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_vl;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_hf;
+	    ALTER TABLE hyk_entries DROP COLUMN distance_vr;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_vl NUMERIC NOT NULL;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_hf NUMERIC NOT NULL;
+	    ALTER TABLE hyk_formations ADD COLUMN distance_vr NUMERIC NOT NULL;
+	  COMMIT;
+
+	* src/main/java/de/intevation/flys/model/HYKFormation.java,
+	  src/main/java/de/intevation/flys/model/HYKEntry.java:
+	  Adjusted Hibernate models.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Added method
+	  walkTree() to traverse a directory tree. To be reused in HYK parser.
+
+	* src/main/java/de/intevation/flys/importer/parsers/PRFParser.java:
+	  Uses the FileTools.walkTree() method now to find all PRF files.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added missing columns.
+	  To update existing databases:
+	  BEGIN;
+	    ALTER TABLE hyks ADD COLUMN measure TIMESTAMP;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_vl NUMERIC NOT NULL;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_hf NUMERIC NOT NULL;
+	    ALTER TABLE hyk_entries ADD COLUMN distance_vr NUMERIC NOT NULL;
+	  COMMIT;
+
+	* src/main/java/de/intevation/flys/model/HYKEntry.java,
+	  src/main/java/de/intevation/flys/model/HYK.java:
+	  Adjusted Hibernate models.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/parsers/*.java:
+	  New package. Moved the file parsers (*.gew, *.sta, *.at, *.glt, *.prf, *.km, *.wst)
+	  into this package.
+
+	* src/main/java/de/intevation/flys/importer/*.java: Adjusted the imports.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/HYKFormation.java,
+	  src/main/java/de/intevation/flys/model/HYKEntry.java,
+	  src/main/java/de/intevation/flys/model/HYKFlowZone.java,
+	  src/main/java/de/intevation/flys/model/HYKFlowZoneType.java,
+	  src/main/java/de/intevation/flys/model/HYK.java: New. The hibernate models
+	  for the HYK structures.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new models.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java: Added
+	  'order by' annotation for fetching the cross section lines.
+
+2011-07-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added structures for HYKs "Hydraulische Kenngroessen"
+	  To update existing databases:
+
+	  BEGIN;
+	    CREATE SEQUENCE HYKS_ID_SEQ;
+	    CREATE TABLE hyks (
+	        id          int PRIMARY KEY NOT NULL,
+	        river_id    int             NOT NULL,
+	        description VARCHAR(256)    NOT NULL
+	    );
+	    
+	    CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+	    CREATE TABLE hyk_entries (
+	        id     int PRIMARY KEY NOT NULL,
+	        hyk_id int             NOT NULL REFERENCES hyks(id),
+	        km     NUMERIC         NOT NULL,
+	        UNIQUE (hyk_id, km)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+	    CREATE TABLE hyk_formations (
+	        id            int PRIMARY KEY NOT NULL,
+	        formation_num int             NOT NULL DEFAULT 0,
+	        hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id),
+	        top           NUMERIC         NOT NULL,
+	        bottom        NUMERIC         NOT NULL,
+	        UNIQUE (hyk_entry_id, formation_num)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+	    CREATE TABLE hyk_flow_zone_types (
+	        id          int PRIMARY KEY NOT NULL,
+	        name        VARCHAR(50)     NOT NULL UNIQUE,
+	        description VARCHAR(256)
+	    );
+	    
+	    CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+	    CREATE TABLE hyk_flow_zones (
+	        id           int PRIMARY KEY NOT NULL,
+	        formation_id int             NOT NULL REFERENCES hyk_formations(id),
+	        type_id      int             NOT NULL REFERENCES hyk_flow_zone_types(id),
+	        a            NUMERIC         NOT NULL,
+	        b            NUMERIC         NOT NULL,
+	        CHECK (a <= b)
+	    );
+	  COMMIT;
+
+2011-07-13	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Bumped Hibernate up to 3.6.5.
+
+2011-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: Argh!
+	  Forgot to call the file hashing so only the file lengths were
+	  compared.
+
+2011-07-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java:
+	  Added a class HashedFile to compare files by their length
+	  and a message digest. The digest can be set with the system property
+	  "flys.backend.file.cmp.digest" and defaults to MD5. Useful to
+	  detect file duplicates (see the sketch after this entry).
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java: Added
+	  method prfAccept(File) to the callback to check if a found PRF file
+	  should be parsed. Useful to prevent parsing file duplicates.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Use the HashedFile and the PRFParser.Callback to prevent
+	  parsing of PRF duplicates.
+
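+	  A rough sketch of such a length-then-digest comparison (only the
+	  property name and the MD5 default are taken from the entry above;
+	  the real HashedFile class may differ):
+
+	    import java.io.*;
+	    import java.security.MessageDigest;
+	    import java.util.Arrays;
+
+	    public class HashedFileSketch {
+
+	        private static final String DIGEST = System.getProperty(
+	            "flys.backend.file.cmp.digest", "MD5");
+
+	        private final File file;
+	        private byte[] hash;
+
+	        public HashedFileSketch(File file) {
+	            this.file = file;
+	        }
+
+	        // Lazily computed digest over the whole file content.
+	        private byte[] hash() throws Exception {
+	            if (hash == null) {
+	                MessageDigest md = MessageDigest.getInstance(DIGEST);
+	                InputStream in = new FileInputStream(file);
+	                try {
+	                    byte[] buf = new byte[8192];
+	                    for (int r; (r = in.read(buf)) >= 0; ) {
+	                        md.update(buf, 0, r);
+	                    }
+	                }
+	                finally {
+	                    in.close();
+	                }
+	                hash = md.digest();
+	            }
+	            return hash;
+	        }
+
+	        // Cheap length check first; hash only when the lengths match.
+	        public boolean sameContent(HashedFileSketch other)
+	        throws Exception {
+	            return file.length() == other.file.length()
+	                && Arrays.equals(hash(), other.hash());
+	        }
+	    }
+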
+2011-07-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Misspelled sequence.
+	  To update existing databases:
+
+	    DROP SEQUENCE CROSS_SECTION_LINES_SEQ;
+		CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSection.java:
+	  Added some logging because importing is slow.
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java:
+	  Prevent NPE if a cross section line from db has no points.
+
+2011-07-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Parse all PRFs in all subfolders of a river and store them
+	as cross sections into the database. Needs testing!
+
+	* src/main/java/de/intevation/flys/importer/ImportCrossSection.java,
+	  src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java:
+	  New. Importer models for cross sections.
+
+	* src/main/java/de/intevation/flys/importer/XY.java:
+	  New. Made top level class from inner PRFParser.XY.
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Moved out XY class. Renamed callback.
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionLine.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java,
+	  src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store cross sections into database.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Introduced a new table cross_section_line
+	  holding the km of a set of points.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionLine.java:
+	  New. Model for a single line of a "Querprofil".
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java: Removed
+	  'km' and 'points'; they are part of the line now.
+
+	* src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  They reference the containing line now.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered new model.
+
+	  To update existing databases:
+	  BEGIN;
+	      DROP SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	      DROP SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	      DROP TABLE cross_section_points;
+	      DROP TABLE cross_sections;
+	      CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	      CREATE TABLE cross_sections (
+	          id               int PRIMARY KEY NOT NULL,
+	          river_id         int             NOT NULL REFERENCES rivers(id),
+	          time_interval_id int                      REFERENCES time_intervals(id),
+	          description      VARCHAR(256)
+	      );
+	      CREATE SEQUENCE CROSS_SECTION_LINES_SEQ;
+	      CREATE TABLE cross_section_lines (
+	          id               int PRIMARY KEY NOT NULL,
+	          km               NUMERIC         NOT NULL,
+	          cross_section_id int             NOT NULL REFERENCES cross_sections(id),
+	          UNIQUE (km, cross_section_id)
+	      );
+	      CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	      CREATE TABLE cross_section_points (
+	          id                    int PRIMARY KEY NOT NULL,
+	          cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id),
+	          col_pos               int             NOT NULL,
+	          x                     NUMERIC         NOT NULL,
+	          y                     NUMERIC         NOT NULL,
+	          UNIQUE (cross_section_line_id, col_pos)
+	      );
+	  COMMIT;
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint that enforces the
+	  uniqueness of km and river. This is violated because there can be
+	  more than one sounding in different years at the same km of a river.
+	  Added column 'description' to the cross section table to make it
+	  human readable.
+
+	  To update existing databases:
+
+		ALTER TABLE cross_sections DROP CONSTRAINT cross_sections_km_key;
+		ALTER TABLE cross_sections ADD COLUMN description VARCHAR(256);
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java:
+	  Added the description column to the Hibernate model.
+	
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint that enforces the
+	  uniqueness of x in a "Querprofil-Spur". There are vertical lines
+	  in the soundings so this constraint is violated.
+
+	  To update existing databases:
+	
+		ALTER TABLE cross_section_points DROP CONSTRAINT cross_section_points_cross_section_id_key2;
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Added a callback to be called from parsePRFs() if
+	  a PRF was parsed successfully. Useful to scan whole
+	  sub directories for PRF files.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extract the year of sounding from file names; if not found there,
+	  from the name of the containing directory. The description is made
+	  of the file name and the parent directory name.
+
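+	  One way such a year extraction can look (the actual pattern used by
+	  PRFParser is not shown in this entry; the regex below is an
+	  assumption):
+
+	    import java.util.regex.Matcher;
+	    import java.util.regex.Pattern;
+
+	    public class YearFromName {
+
+	        private static final Pattern YEAR = Pattern.compile("(\\d{4})");
+
+	        // Returns the first four-digit group found in the name,
+	        // or null if there is none.
+	        public static Integer findYear(String name) {
+	            Matcher m = YEAR.matcher(name);
+	            return m.find() ? Integer.valueOf(m.group(1)) : null;
+	        }
+	    }
+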
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extracted the data. All BfG PRFs are parsed correctly, now.
+
+2011-07-07	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java:
+	  Extract km from lines. TODO: extract data.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PRFParser.java: New.
+	  Parser for PRF files. TODO: extract data and station from data lines.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Point3d.java: Deleted.
+	  Not needed (braindead).
+
+	* src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  Directly store the x/y values now.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Removed registration of Point3d.
+
+	To update existing databases:
+
+	BEGIN;
+	  ALTER TABLE cross_section_points DROP COLUMN point3d_id;
+	  DROP SEQUENCE POINTS3D_ID_SEQ;
+	  DROP TABLE points3d;
+	  ALTER TABLE cross_section_points ADD COLUMN x NUMERIC NOT NULL;
+	  ALTER TABLE cross_section_points ADD COLUMN y NUMERIC NOT NULL;
+	  ALTER TABLE cross_section_points ADD CONSTRAINT
+		cross_section_points_cross_section_id_key2 UNIQUE (cross_section_id, x);
+	COMMIT;
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/CrossSection.java,
+	  src/main/java/de/intevation/flys/model/CrossSectionPoint.java:
+	  New. Hibernate models for cross-sections and their forming points.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered the new models.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Point3d.java: New. point3d model
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Registered point3d model.
+
+2011-07-06	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Add relations for cross sections.
+	  To update existing databases:
+	     BEGIN;
+	     	
+	     CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+	     CREATE TABLE cross_sections (
+	         id               int PRIMARY KEY NOT NULL,
+	         km               NUMERIC         NOT NULL,
+	         river_id         int             NOT NULL REFERENCES rivers(id),
+	         time_interval_id int             REFERENCES time_intervals(id),
+	         UNIQUE (km, river_id)
+	     );
+	     
+	     CREATE SEQUENCE POINTS3D_ID_SEQ;
+	     CREATE TABLE points3d (
+	         id int     PRIMARY KEY NOT NULL,
+	         x  NUMERIC NOT NULL,
+	         y  NUMERIC NOT NULL,
+	         z  NUMERIC NOT NULL
+	     );
+	     
+	     CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+	     CREATE TABLE cross_section_points (
+	         id               int PRIMARY KEY NOT NULL,
+	         cross_section_id int NOT NULL REFERENCES cross_sections(id),
+	         point3d_id       int NOT NULL REFERENCES points3d(id),
+	         col_pos          int NOT NULL,
+	         UNIQUE (cross_section_id, point3d_id, col_pos),
+	         UNIQUE (cross_section_id, col_pos)
+	     );
+	     
+	     COMMIT;
+
+2011-06-28  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.4
+
+2011-06-27  Ingo Weinzierl <ingo@intevation.de>
+
+	* Changes: Prepared changes for the upcoming release.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/AnnotationClassifier.java,
+	  src/main/java/de/intevation/flys/importer/AtFileParser.java,
+	  src/main/java/de/intevation/flys/importer/ValueKey.java,
+	  src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed trailing whitespace.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  Removed superfluous imports.
+
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a method maxOverlap to determine the gauge which has
+	  the max common length to a given interval. This is for
+	  numerical stability in slightly overlapping gauge ranges.
+
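+	  The "max common length" can be computed as the length of the interval
+	  intersection; a small sketch (the real method signature in River.java
+	  may differ):
+
+	    public class OverlapSketch {
+
+	        // Length of the intersection of [aStart, aEnd] and
+	        // [bStart, bEnd], independent of the order of the bounds.
+	        public static double overlapLength(
+	            double aStart, double aEnd,
+	            double bStart, double bEnd
+	        ) {
+	            double lo = Math.max(
+	                Math.min(aStart, aEnd), Math.min(bStart, bEnd));
+	            double hi = Math.min(
+	                Math.max(aStart, aEnd), Math.max(bStart, bEnd));
+	            // A negative difference means the ranges do not overlap.
+	            return Math.max(0d, hi - lo);
+	        }
+	    }
+
+	  The gauge whose range yields the largest value is chosen, so a tiny
+	  overlap with a neighbouring gauge cannot win against the gauge that
+	  actually covers the interval.
+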
+2011-06-26	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: When
+	  looking up a gauge by km containment, use a precision of 1e-6.
+
+2011-06-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge by its name.
+
+2011-06-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/annotation-types.xml: Fixed some rules.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Call the right constructors now.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotationType.java:
+	  Added getter/setter for name property.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  Print duplicates at WARN level, not at DEBUG.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  Fixed XPath expression to build the internal lookup structures
+	  correctly.
+
+2011-06-19	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Added classification of annotation types. Needs testing!
+
+	* doc/annotation-types.xml: New. Rules to classify the different
+	  types of annotations. The classification works like this:
+
+	  There are unique types like 'Bruecke', 'Pegel' and so on.
+	  They are defined in the /annotation/type section and
+	  identified by their name. One of the types can be set 
+	  as the default type if no rule applies.
+
+	  In the /annotation/pattern section there are two kinds of patterns:
+
+	  1 - file patterns: When a KM file is opened, its filename is
+	      matched against the regular expressions of these
+	      patterns. If a match is found, the corresponding type
+	      is used as the default type in the open file.
+	      If no match is found, the global default type is used
+	      as the default type.
+
+	  2 - line patterns: For each line of an open KM file these
+	      patterns are applied to find a match. If a match is
+	      found, the corresponding type is used as the type of
+	      the annotation. If no match is found, the file default
+	      is assumed to be the right type. For the file default
+	      see 1. (A sketch of this lookup follows this entry.)
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  To activate the annotation type classification set
+	  the system property
+
+	      'flys.backend.importer.annotation.types'
+
+	  to the path of an XML file looking like the annotation-types.xml
+	  file. If the system property is not set no classification
+	  is done.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationClassifier.java:
+	  New. Implements the classification.
+	  
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Looped through the annotation type classification.
+
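+	  A minimal Java sketch of this two-stage lookup (class, method and
+	  field names are illustrative; the real AnnotationClassifier reads
+	  its rules from the XML file described above):
+
+	    import java.util.LinkedHashMap;
+	    import java.util.Map;
+	    import java.util.regex.Pattern;
+
+	    public class TypeLookupSketch {
+
+	        private final String globalDefault;
+	        private final Map<Pattern, String> filePatterns =
+	            new LinkedHashMap<Pattern, String>();
+	        private final Map<Pattern, String> linePatterns =
+	            new LinkedHashMap<Pattern, String>();
+
+	        public TypeLookupSketch(String globalDefault) {
+	            this.globalDefault = globalDefault;
+	        }
+
+	        public void addFilePattern(String regex, String type) {
+	            filePatterns.put(Pattern.compile(regex), type);
+	        }
+
+	        public void addLinePattern(String regex, String type) {
+	            linePatterns.put(Pattern.compile(regex), type);
+	        }
+
+	        // Stage 1: the filename selects the per-file default type.
+	        public String fileDefault(String filename) {
+	            for (Map.Entry<Pattern, String> e : filePatterns.entrySet()) {
+	                if (e.getKey().matcher(filename).matches()) {
+	                    return e.getValue();
+	                }
+	            }
+	            return globalDefault;
+	        }
+
+	        // Stage 2: each line is matched; otherwise the file default wins.
+	        public String classify(String line, String fileDefault) {
+	            for (Map.Entry<Pattern, String> e : linePatterns.entrySet()) {
+	                if (e.getKey().matcher(line).find()) {
+	                    return e.getValue();
+	                }
+	            }
+	            return fileDefault;
+	        }
+	    }
+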
+2011-06-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge by a position lying in its range.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	First part of flys/issue18
+
+	* doc/schema/postgresql.sql: Add new table 'annotation_types'.
+	  To update existing databases:
+	    BEGIN;
+	      CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+	      CREATE TABLE annotation_types (
+	          id    int PRIMARY KEY NOT NULL,
+	          name  VARCHAR(256)    NOT NULL UNIQUE
+	      );
+		  ALTER TABLE annotations ADD COLUMN type_id int REFERENCES annotation_types(id);
+	    COMMIT;
+
+	* doc/schema/postgresql-cleanup.sql: Removed. Hopelessly outdated.
+
+	* src/main/java/de/intevation/flys/model/AnnotationType.java:
+	  New. Hibernate model to access the type.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Register the new backend type.
+
+	* src/main/java/de/intevation/flys/model/Annotation.java:
+	  References the annotation type.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotationType.java:
+	  New. Model to help import the annotation type.
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  Uses the import type.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  Set the annotation type to 'null'. TODO: Do the classification!
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fix for flys/issue110. start index was shifted by one.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  Forgot to store reference to edge.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	 * src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	   Register backend model.
+
+	 * src/main/java/de/intevation/flys/importer/ImportEdge.java: New. Model
+	   for importing the edges.
+
+	 * src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	   Parses the edges of an annotation, too.
+
+2011-06-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added table edges to model 'Kanten' of an annotation.
+	  To update existing databases:
+	    BEGIN;
+	      CREATE SEQUENCE EDGES_ID_SEQ;
+	      CREATE TABLE edges (
+	        id     int PRIMARY KEY NOT NULL,
+	        top    NUMERIC,
+	        bottom NUMERIC);
+	      ALTER TABLE annotations ADD COLUMN edge_id int REFERENCES edges(id);
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/Edge.java: New. A 'Kanten'-Model.
+	* src/main/java/de/intevation/flys/model/Annotation.java: References the
+	  edges.
+	
+2011-06-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added method to find gauge only by station position.
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Make search for gauges independent of from/to order.
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a new view to select qs of a WST.
+	  To update existing databases:
+
+	    CREATE VIEW wst_q_values AS
+	        SELECT wc.position AS column_pos,
+	               wqr.q       AS q, 
+	               r.a         AS a, 
+	               r.b         AS b,
+	               wc.wst_id   AS wst_id
+	        FROM wst_column_q_ranges wcqr
+	        JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+	        JOIN ranges r         ON wqr.range_id        = r.id
+	        JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+	        ORDER BY wc.position, wcqr.wst_column_id, r.a;	  
+
+2011-05-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a new view to select ws of a WST.
+	  To update existing databases:
+
+	    CREATE VIEW wst_w_values AS
+	        SELECT wcv."position" AS km, 
+	               wcv.w          AS w,  
+	               wc."position"  AS column_pos, 
+	               w.id           AS wst_id
+	            FROM wst_column_values wcv
+	            JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+	            JOIN wsts w         ON wc.wst_id = w.id
+	        ORDER BY wcv."position", wc."position";
+
+2011-05-23	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	 flys/issue76
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Close gaps between q ranges.
+
+2011-05-20  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.3.1
+
+2011-05-13  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 2.3
+
+2011-05-13  Ingo Weinzierl <ingo@intevation.de>
+
+	* Changes: Changes for release 2.3 - see Changes file to get to know about
+	  the version numbers of this software.
+
+2011-05-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added a column which tells
+	  if a river counts its km upwards or downwards.
+	  To update existing databases:
+
+	    ALTER TABLE rivers ADD COLUMN km_up BOOLEAN NOT NULL DEFAULT true;
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Adjust Hibernate mapping of new column.
+
+2011-05-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fixed flys/issue11 and flys/issue51.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ValueKey.java:
+	  Add some unsharp comparison (eps = 1e-6) to avoid 
+	  numerical problems.
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Removed. Not needed any longer.
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Use unsharp lookup.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Flush more often. Hopefully this reduces hibernate sync
+	  problems?!
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Fixed silly bug. Set flush mode back to auto because
+	  manual flushing causes some undetermined problems.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Do not load _all_ values from discharge tables and
+	  wst columns. This is extremely slow and will lead
+	  to OOM if more rivers are imported. Now only the
+	  last 20 columns and discharge tables are cached.
+
+	* src/main/java/de/intevation/flys/importer/ValueKey.java:
+	  New. Key for caching discharge table values and wst
+	  column values.
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Fixed bug in equals().
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Removed too eloquent debug output.
+
+2011-05-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java:
+	  Use BigDecimals as representation for the numeric components
+	  which prevents running into unique constraint problems
+	  caused by imprecision.
+
+	* src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Ranges are now cached globally, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Improved logging.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java:
+	  Removed superfluous imports.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Only accept main value types 'Q', 'W', 'D' and 'T' by default.
+	  '-' is not handled any more because it causes too many constraint
+	  problems. You can override the imported types with the
+	  system property "flys.backend.main.value.types" (default "QWTD").
+	  
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Set session flush mode to manual. Hopefully this improves the
+	  performance a bit.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Improved logging.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstColumnValueKey.java: Deleted
+	* src/main/java/de/intevation/flys/importer/IdValueKey.java: Reinserted
+	  here in a more generalized form.
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  Cache the discharge table value, too.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java:
+	  Use the global cache.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImporterSession.java:
+	  New. Centralized caching in the thread local context. Importing
+	  the Elbe leads to OOM because the column values of the
+	  WST files were loaded separately for every file.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/Importer.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValueType.java,
+	  src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java,
+	  src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Adjusted to use the new global context.
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Dropped constraint which
+	  forces discharge tables to have a unique time interval
+	  for a given gauge and kind.
+
+	  There are AT files (historical Mosel/Perl/Perl/1967-1981.at
+	  and Mosel/Perl/1967-1981-1.at) which violate this
+	  constraint. Its a technical question to the customer
+	  how to handle these cases.
+
+	  To adjust existing databases:
+
+	    ALTER TABLE discharge_tables DROP CONSTRAINT discharge_tables_gauge_id_key;
+
+2011-05-08	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  There are wst files where column names are not unique.
+	  Make them unique by appending (1), (2) and so on.
+
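+	  The renaming can be done with a simple occurrence counter; a sketch
+	  (the helper name is illustrative, and the exact suffix format beyond
+	  "(1)", "(2)" is not specified in the entry above):
+
+	    import java.util.HashMap;
+	    import java.util.Map;
+
+	    public class UniqueNames {
+
+	        private final Map<String, Integer> seen =
+	            new HashMap<String, Integer>();
+
+	        // First occurrence keeps its name, later ones get (1), (2), ...
+	        public String unique(String name) {
+	            Integer count = seen.get(name);
+	            if (count == null) {
+	                seen.put(name, 0);
+	                return name;
+	            }
+	            count = count + 1;
+	            seen.put(name, count);
+	            return name + " (" + count + ")";
+	        }
+	    }
+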
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Import the "HW-Schutzanlagen", too.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Make import of historical discharge tables work.
+
+	* doc/schema/postgresql.sql: Added forgotten column 'description'.
+
+	  !!! You have to drop your database !!!
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java,
+	  src/main/java/de/intevation/flys/model/DischargeTable.java:
+	  Add the forgotten description property.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Fixed problems with date recognition.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Prefix the description of the historical discharge tables 
+	  with "Histor.Abflusstafeln".
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Fixed silly programming error.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Inject 'kind' attribute.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Traverse the "Histor.Abflusstafeln" for the historical
+	  discharge tables, too. TODO: Store them in the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Added convenience constructor to set the kind of the
+	  discharge table.
+	  
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Prefix "Zusätzliche Längsschnitte" with "Zus.Längsschnitte"
+	  in description.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Import 'Hochwasser-Marken', too.
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed superfluous import.
+
+2011-05-05	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Ignore lines that contain km positions which were found
+	  before in the same file.
+
+2011-05-05  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a method that
+	  returns all gauges of the river intersected by a given start and end
+	  point.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  Forgot to fetch peer from result set.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportTimeInterval.java:
+	  New. Importer model help fetching the database peer.
+
+	* src/main/java/de/intevation/flys/model/TimeInterval.java:
+	  Add convenience constructor with start and stop time.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Attach a time interval to a discharge table if we find one.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Store the reference to the importer model of the 
+	  time interval of the discharge table.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java:
+	  Try to extract time ranges from at files.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store "amtliche Linien" wst files.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse and store fixation wst files as well.
+
+2011-05-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the "zusaetzliche Laengsschnitte", too.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java:
+	  Add getter/setter for column 'kind'.
+
+2011-05-02	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Fixed flys/issue19: Do not take km column in wst file as a water level.
+
+2011-05-02  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: Introduced a 'scale'
+	  that is used to adjust the range of min/max W values.
+
+2011-05-01	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Removed superfluous import.
+
+2011-04-20  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstColumnValueKey.java:
+	  New. This class is used as distinct key of a WstColumnValue - e.g. as
+	  key in a map.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java: A
+	  WstColumnValue cache is built up during initialization. This cache
+	  contains all WstColumnValues that exist in the database.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java: New
+	  constructor that takes the WstColumnValues cache. It is used to
+	  initialize new ImportWstColumnValue objects.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java:
+	  Speedup: An ImportWstColumnValue has a WstColumnValues cache that
+	  contains all WstColumnValues existing in the database. This makes it
+	  unnecessary to call an SQL statement for each WstColumnValue to
+	  determine its existence in the database.
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Added a view 'wst_value_table' which aggregates the data
+	  to build w/q value tables.
+
+	  To update existing databases:
+
+	  BEGIN;
+	    CREATE VIEW wst_value_table AS
+	        SELECT wcv.position AS position,
+	               w,
+	               (SELECT q
+	                FROM   wst_column_q_ranges wcqr
+	                       JOIN wst_q_ranges wqr
+	                         ON wcqr.wst_q_range_id = wqr.id
+	                       JOIN ranges r
+	                         ON r.id = wqr.range_id
+	                WHERE  wcqr.wst_column_id = wc.id
+	                       AND wcv.position BETWEEN r.a AND r.b) AS q,
+	               wc.position                                   AS column_pos,
+	               w.id                                          AS wst_id
+	        FROM   wst_column_values wcv
+	               JOIN wst_columns wc
+	                 ON wcv.wst_column_id = wc.id
+	               JOIN wsts w
+	                 ON wc.wst_id = w.id
+	        ORDER  BY wcv.position ASC,
+	              wc.position DESC;
+	  COMMIT;
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java:
+	  Add forgotten one-to-many relation Wst -> WstColumn.
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Import of q ranges of wst files was totally broken. :-/
+	  You have to reimport all your data.
+
+2011-04-18	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Forgot kind parameter in peer fetching query.
+	  
+2011-04-18  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a new method to determine the gauge based on a given start and end
+	  point of the river.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added 'position' column to wst_columns
+	  to allow ordering them by their column position in the original
+	  wst file.
+	
+	  Update existing database with:
+
+	    BEGIN;
+	      ALTER TABLE wst_columns ADD COLUMN position int;
+	      UPDATE wst_columns w SET 
+	        position = id - (SELECT min(id) FROM wst_columns WHERE wst_id = w.wst_id);
+	      ALTER TABLE wst_columns ADD CONSTRAINT wst_columns_wst_id_position_key
+	        UNIQUE (wst_id, position);
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/model/WstColumn.java:
+	  Adjusted models.
+
+	* src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Adjusted importer.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Forgot ',' in schema.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java (determineMinMaxQ): 
+	  Fixed index problem when an empty list is returned.
+
+2011-04-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Wst.java: A Wst is now able to
+	  return its min and max Q values.
+
+	* src/main/java/de/intevation/flys/model/Gauge.java: A Gauge is now able
+	  to return its min and max W values.
+
+2011-04-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Added new column 'kind' in
+	  discharge tables and wst to distinguish between different
+	  kinds like 'Haupt-WST', 'zusaetzliche Laengsschnitte',
+	  'amtliche Daten' and so on.
+
+	  Update existing databases with:
+	    BEGIN;
+	      ALTER TABLE discharge_tables ADD COLUMN kind int NOT NULL DEFAULT 0;
+	      ALTER TABLE wsts             ADD COLUMN kind int NOT NULL DEFAULT 0;
+	    COMMIT;
+
+	* src/main/java/de/intevation/flys/model/DischargeTable.java
+	  src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java:
+	  Adjusted the models.
+
+2011-04-15  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionHolder.java: Moved to
+	  this module from flys-artifacts. This is necessary to get access to the
+	  current session in this module as well.
+
+2011-04-14  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/River.java: Added a method that
+	  returns the min and max distance of a river.
+
+2011-04-03	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/**/*.java: Removed trailing whitespace.
+
+2011-03-30  Ingo Weinzierl <ingo@intevation.de>
+
+	Tagged RELEASE 0.1
+
+2011-03-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Forgot to save the last change before commit.
+
+2011-03-28	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Added methods to find out if two ranges intersect.
+
+2011-03-24	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Added a one-to-many relation to the discharge tables of a gauge.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Finished import of WSTs.
+
+	TODO 1: Speed it up! It takes on a high end machine over 7(!)
+	        minutes only for the data of the Saar.
+	TODO 2: Double precision floating point representations produced
+	        by the parsers lead to unique constraint violations
+	        in the backend on a second run. So the import currently
+	        only works on freshly initialized databases.
+	        More consistent use of BigDecimal and some
+	        rounding may help here.
+
+	* src/main/java/de/intevation/flys/model/WstColumnValue.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java:
+	  Added getPeer() method.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Add a list of the ImportWstColumnValues produced by the WST parser.
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java: Add
+	  the (km, w) values to the ImportWstColumns.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Build models for wsts, wst columns and q ranges and
+	  store them in the backend. TODO: store the w values.
+
+	* src/main/java/de/intevation/flys/model/WstQRange.java
+	  src/main/java/de/intevation/flys/model/Wst.java,
+	  src/main/java/de/intevation/flys/model/Range.java,
+	  src/main/java/de/intevation/flys/model/WstColumnQRange.java,
+	  src/main/java/de/intevation/flys/model/WstColumn.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java:
+	  Added getPeer() methods.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  The unit extraction in the WST parser of desktop FLYS
+	  is broken! Add a hack here to repair this for our
+	  importer. Desktop FLYS needs a fix, too!
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java:
+	  Ported some stuff over from WSTSource.java of desktop FLYS to
+	  parse WST files. TODO: create instances of the import models.
+
+	* src/main/java/de/intevation/flys/utils/StringUtil.java:
+	  Copied from desktop flys. Used for some string operations
+	  in WST parser.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Added system property 'flys.backend.importer.dry.run'.
+	  If set to true, only the parsing is done and nothing is written
+	  to the backend. Default: false.
+
+	*  src/main/java/de/intevation/flys/App.java,
+	   src/main/java/de/intevation/flys/model/MainValueType.java:
+	   Removed needless imports.
+
+2011-03-22  Ingo Weinzierl <ingo@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/DischargeTableValue.java,
+	  src/main/java/de/intevation/flys/model/DischargeTable.java: Added new
+	  constructors.
+
+	* src/main/java/de/intevation/flys/importer/AtFileParser.java: New. This
+	  parser is used to parse '*.at' files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java: Added code to
+	  import discharge tables.
+
+	* src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportDischargeTable.java: New.
+	  Helper models for importing discharge tables.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/WstParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added stub for WST parser.
+
+2011-03-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportWstQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWst.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportWstColumn.java:
+	  Added importer helper model stubs for WST imports.
+	
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Second part of parsing/storing main values. Should be finished now.
+
+	* src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java,
+	  src/main/java/de/intevation/flys/importer/ImportMainValue.java:
+	  New. Helper models for importing main values.
+
+	* src/main/java/de/intevation/flys/model/MainValue.java,
+	  src/main/java/de/intevation/flys/model/NamedMainValue.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Write main values to backend, too.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Build importer models for main values.
+
+	* src/main/java/de/intevation/flys/importer/ImportMainValueType.java:
+	  Data was called 'value'. Now it is 'name' to fit the
+	  schema.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Fixed wrong unique constraint.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added some logging when storing gauges.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Add forgotten column river_id.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Small HQL fix.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Gauge.java:
+	  Added convenience constructor.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Fixed getPeer() method.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  Fixed parsing of STA files.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Propagate river into storing of gauges.
+
+2011-03-21	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Added code to store rivers not only annotations.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added stub code to write gauges.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	First part of parsing main values.
+
+	* src/main/java/de/intevation/flys/App.java: Commented out
+	  creation of dummy rivers.
+
+	* src/main/java/de/intevation/flys/model/NamedMainValues.java:
+	  Moved to NamedMainValue.
+
+	* src/main/java/de/intevation/flys/model/NamedMainValue.java:
+	  New. Formerly NamedMainValues.
+
+	* src/main/java/de/intevation/flys/model/MainValue.java:
+	  New. Forgotten part of the model.
+
+	* src/main/java/de/intevation/flys/model/MainValueType.java:
+	  Data is String, not BigDecimal.
+
+	* src/main/java/de/intevation/flys/model/Range.java: Removed
+	  constructor with double arguments. Using BigDecimal now.
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java:
+	  Propagate BigDecimal usage.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Removed needless import. Added TODO
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Parse the dependencies of the gauges, too.
+
+	* src/main/java/de/intevation/flys/importer/StaFileParser.java:
+	  New. Parser for STA files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Call STA file parser.
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java:
+	  Uses BigDecimal now.
+
+	* src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Fixed wrong type cast in equals.
+
+	* src/main/java/de/intevation/flys/importer/ImportMainValueType.java:
+	  New. Helper model for importing main value types.
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Register forgotten MainValue model.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	Store annotations in backend.
+
+	* src/main/java/de/intevation/flys/model/Annotation.java:
+	  New convenience constructor.
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added toString() method.
+
+	* src/main/java/de/intevation/flys/model/Range.java:
+	  Fixed nasty mistake in @OneToOne annotation.
+	  New convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Make storing to backend work. It's a bit too slow. :-/
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Fetch peer from backend. Added method to store annotations.
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Stored annotations into backend. More eloquent SQL exception
+	  handling.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/Attribute.java,
+	  src/main/java/de/intevation/flys/model/Position.java:
+	  Added convenience constructors.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Bound them to their backend peers.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/AnnotationsParser.java:
+	  New. Added parser to read *.KM files.
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportRiver.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/InfoGewParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java:
+	  Adjusted to load the annotations from *.KM files.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/ImportPosition.java,
+	  src/main/java/de/intevation/flys/importer/ImportRange.java,
+	  src/main/java/de/intevation/flys/importer/ImportAttribute.java,
+	  src/main/java/de/intevation/flys/importer/ImportAnnotation.java:
+	  New helper models for import.
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java,
+	  src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  Use new models.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/PegelGltParser.java:
+	  New. Parser for PEGEL.GLT files.
+
+	* src/main/java/de/intevation/flys/importer/ImportGauge.java:
+	  New. Import model for gauges.
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  Removed needless imports.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Added method to parse the gauges.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Trigger pegel glt file parsing.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Used thread local pattern to make sharing of session easier.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Fixed error in HQL statement.
+
+2011-03-17	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  Commented out a debug block because it leaks the db password.
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Expose imported rivers.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Store imported rivers into database. Needs testing!
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Default connection parameters can now be overridden with
+	  system properties (defaults in brackets):
+	  - user name:     flys.backend.user     (flys)
+	  - user password: flys.backend.password (flys)
+	  - db dialect:    flys.backend.dialect  (org.hibernate.dialect.PostgreSQLDialect)
+	  - db driver:     flys.backend.driver   (org.postgresql.Driver)
+	  - db url:        flys.backend.url      (jdbc:postgresql://localhost:5432/flys)
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  Expose createSessionFactory() as public to be usable without
+	  an artifact database running.
+
+2011-03-16	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/utils/FileTools.java: 
+	  Tools for handling filenames. Currently there is
+	  a repair(File) method that fixes letter case errors,
+	  which is useful when reading Windows filenames on an
+	  un*x platform.
+
+	* src/main/java/de/intevation/flys/importer/Importer.java:
+	  Standalone app to read data from the file system and
+	  store it in a database. Currently it does not store 
+	  anything. It only loads info gew files.
+
+	* src/main/java/de/intevation/flys/importer/InfoGewParser.java:
+	  Info gew parser.
+
+	* src/main/java/de/intevation/flys/importer/ImportRiver.java:
+	  Helper model of a river produced by parsing the
+	  info gew files.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java:
+	  New. SessionFactoryProvider.getSessionFactory() provides a
+	  SessionFactory to use the Hibernate O/R mapper for the FLYS backend.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Added a dependency on artifacts-commons to
+	  be able to use the global configuration of the artifact database.
+
+2011-03-15	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/App.java: Wire all POJOs
+	  to corresponding factory.
+
+	* src/main/java/de/intevation/flys/model/*.java: Generate
+	  all foreign key constraints. TODO: name them correctly
+	  because the machine-generated names are ugly and do
+	  not fit the PostgreSQL names.
+
+	* doc/schema/postgresql.sql: Small quantifier fix in descriptions
+	  of wst columns.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql: Fixed wrongly spelled
+	  column references in foreign keys introduced with
+	  the last change.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* src/main/java/de/intevation/flys/model/*.java: Added
+	  column annotations for simple fields. TODO: foreign keys.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Fixed inconsistent table names.
+
+	* src/main/java/de/intevation/flys/model/*.java: Added
+	  entity and id annotations.
+
+2011-03-14	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql.sql, doc/schema/postgresql-cleanup.sql:
+	  Added missing sequences.
+
+	* doc/schema/sqlite.sql: Deleted. No longer supported.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-cleanup.sql: Forgot to add.
+
+	* src/main/java/de/intevation/flys/App.java: Use
+	  Apache Commons DBCP as Hibernate connection provider.
+
+	* src/main/java/de/intevation/flys/model/River.java:
+	  Added a constructor with string argument. Set the
+	  sequence increment to 1 (it ate up 100 at a time before).
+
+	* pom.xml: Added PostgreSQL 8.4 driver as runtime dependency.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/postgresql-cleanup.sql: New. Tear-down schema
+	  for a PostgreSQL database.
+
+	* doc/schema/postgresql.sql: Added a sequence for
+	  auto-generating ids in the river table. Cleaned up the schema.
+
+	* src/main/java/de/intevation/flys/App.java: Simple
+	  test app to interact with Hibernate. Needs to be removed
+	  because it's only a toy.
+
+	* src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java:
+	  New. Binds Apache Commons DBCP to Hibernate.
+
+	* pom.xml: Added dependencies on log4j, Commons DBCP and
+	  Hibernate's JPA support.
+
+	* src/main/java/de/intevation/flys/model/River.java: Added
+	  JPA annotations.
+
+	* src/main/java/de/intevation/flys/model/*.java: Replaced
+	  Long with Integer because column ids are only four bytes wide.
+
+2011-03-11	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql, doc/schema/postgresql.sql: Fixed
+	  minor issues in the DDL.
+
+	* src/main/java/de/intevation/flys/model/*.java: Added POJOs
+	  to be mapped. TODO: Map them!
+
+	* pom.xml: Added plugin config for hibernate.
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml: Added a dependency (and the corresponding repository) on
+	  Hibernate Core 3.6.1 Final.
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* pom.xml, src/**: Created a new empty maven project:
+	  $ mvn archetype:create         \
+	    -DgroupId=de.intevation.flys \
+	    -DartifactId=flys-backend
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* README: New. Some setup instructions.
+
+	* doc/schema/postgresql.sql: New. Schema converted to PostgreSQL
+
+	* doc/schema/sqlite.sql: Fixed defective foreign key constraints.
+
+2011-03-09	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql: Factored time intervals out into
+	  a separate table.
+
+2011-01-22	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* contrib/import-kms.py, contrib/import-gew.py: Initial scripts
+	  to import data into the SQLite database. They still need some work.
+
+2011-02-10	Sascha L. Teichmann	<sascha.teichmann@intevation.de>
+
+	* doc/schema/sqlite.sql: Added initial schema for
+	  the FLYS database.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/Changes	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,109 @@
+2011-09-19      RELEASE 2.5
+
+    New:
+
+        * Improved the data model to support:
+          - cross sections
+          - hyks ("Hydraulische Kenngroessen")
+
+        * Improved the importer to read:
+          - PRF files
+          - HYK files
+
+        * Added a central Config to configure the Importer. There are several
+          system properties to adjust what to import (see ChangeLog for
+          details).
+
+        * Added PostGIS and Oracle models for spatial FLYS data:
+          - catchments
+          - river axes
+          - buildings
+          - fixpoints
+          - cross section tracks
+          - hws ("HW-Schutzanlagen")
+          - floodplains
+
+        * Bumped Hibernate up to 3.6.5.
+
+        * Bumped Apache DBCP up to 1.4.
+
+
+
+2011-06-27      RELEASE 2.4
+
+    New:
+
+        * Improved the data model to support:
+          - edges ("Oberkante", "Unterkante")
+          - annotations
+
+        * Improved the importer to read:
+          - edges
+          - annotations
+
+        * Made search for gauges in River independent of from/to
+          kilometer order.
+
+
+    Fixes:
+
+        * flys/issue76 Close gaps between q ranges
+
+        * flys/issue110 Fixed import of ATs that skipped the first value.
+
+
+
+2011-05-13      RELEASE 2.3
+
+    New:
+
+        * Import of:
+          - "zusätzliche Längsschnitte"
+          - "Fixierungen"
+          - "amtliche Linien"
+          - "Hochwassermarken"
+          - "Historische Abflusskurven"
+          - "HW-Schutzanlagen"
+
+        * Improvements in the SQL schema:
+          - Added a 'kind' column to WSTs
+          - Added a 'position' column to WST columns to enable sorting WST columns.
+          - Added a 'km_up' column to rivers to determine the flow direction of rivers.
+
+        * Rivers are now able to:
+          - determine their min/max kilometer range
+          - determine the selected gauge(s) based on a kilometer range
+
+        * WSTs are able to determine their min/max Q range.
+
+        * Gauges are able to determine their min/max W range.
+
+        * Added a view 'wst_value_table' that aggregates the data to build w/q
+          value tables.
+
+        * Added one-to-many relation Wst -> WstColumn
+
+        * Speedup of the importer by using an internal caching mechanism.
+
+        * The STA parser accepts only the following main value types: 'Q', 'W', 'D' and 'T'.
+
+
+    Fixes:
+
+        * Fixed import of Q ranges in wst files.
+
+        * Fixed flys/issue19: Do not take km column in wst file as a water level.
+
+        * Fixed flys/issue11 (Diagramm: Fehlerhafte Werte in Abflusskurven)
+
+        * Fixed flys/issue51 (WINFO: Fachdaten am Pegel Perl enthält Sprünge)
+
+
+    !!!
+
+    The version number of this release follows an existing desktop variant of
+    this software, which is at version 2.1.
+
+    !!!
+
+2011-03-30      RELEASE 0.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/README	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,10 @@
+For the artifact database
+# su - postgres
+$ createuser --no-createrole --no-superuser --pwprompt --no-createdb artifacts
+$ createdb --encoding=UTF-8 --owner artifacts artifactsdb
+
+For the flys database
+
+# su - postgres
+$ createuser --no-createrole --no-superuser --pwprompt --no-createdb flys
+$ createdb --encoding=UTF-8 --owner flys flystest1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/dump-schema.sh	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,16 @@
+#!/bin/bash
+
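+# Note: adjust the DB_* settings below to the target environment; the
+# Hibernate dialect in DB_DIALECT has to match the database behind DB_URL.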
+DB_URL=jdbc:postgresql://czech-republic.atlas.intevation.de:5432/flys3
+DB_USER=flys
+DB_PASS=flys
+DB_DIALECT=org.hibernatespatial.oracle.OracleSpatial10gDialect
+
+mvn compile package -e \
+    -Dlog4j.configuration=file://`readlink -f contrib/log4j.properties` \
+    -Dflys.backend.user=$DB_USER \
+    -Dflys.backend.password=$DB_PASS \
+    -Dflys.backend.dialect=$DB_DIALECT \
+    -Dflys.backend.url=$DB_URL \
+    -Dexec.mainClass=de.intevation.flys.App \
+    exec:java
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/import-gew.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+import codecs
+import re
+
+HAUPTWERT  = re.compile(r"\s*([^\s]+)\s+([^\s+]+)\s+([QWDT-])")
+WHITESPACE = re.compile(r"\s+")
+
+class KM(object):
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.load_values()
+
+    def load_values(self):
+        with codecs.open(self.filename, "r", "latin-1") as f:
+            for line in f:
+                line = line.strip()
+                if not line or line.startswith("*"):
+                    continue
+                parts = [s.strip() for s in line.split(";")]
+                # TODO: Use code from import-kms.py
+
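+# Parser for a gauge's discharge table (the AT file referenced in PEGEL.GLT):
+# a '#! name=...' line sets the table name, '#'/'*' lines are comments and
+# decimal commas are accepted. Each data row holds a W value followed by up
+# to 10 Q values; every Q is paired with a stepped W (row W plus column
+# offset) and stored as (W/100.0, Q/100.0) in self.values.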
+class AbflussTafel(object):
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.name     = ""
+        self.values = []
+        self.load_values()
+
+    def load_values(self):
+        with codecs.open(self.filename, "r", "latin-1") as f:
+            first = True
+            for line in f:
+                line = line.strip()
+                if not line: continue
+                if line.startswith("#! name="):
+                    self.name = line[8:]
+                    continue
+                if line.startswith("#") or line.startswith("*"):
+                    continue
+                line = line.replace(",", ".")
+                splits = WHITESPACE.split(line)
+
+                if len(splits) < 2 or len(splits) > 11:
+                    continue
+
+                w = float(splits[0])
+
+                shift = 0
+
+                if len(splits) != 11 and first:
+                    shift = 11 - len(splits)
+
+                for idx, q in enumerate(splits[1:]):
+                    i_w = w + shift + idx
+                    i_q = float(q)
+                    w_q = (i_w/100.0, i_q/100.0)
+                    self.values.append(w_q)
+
+                first = False
+
+
+class Hauptwert(object):
+    def __init__(self, name, value, kind):
+        self.name  = name
+        self.extra = value
+        self.kind  = kind
+
+class Pegel(object):
+    def __init__(self, name, start, stop, sta, at, html):
+        self.name       = name
+        self.start      = start
+        self.stop       = stop
+        self.sta        = sta
+        self.at         = at
+        self.html       = html
+        self.aeo        = 0.0
+        self.nullpunkt  = 0.0
+        self.km         = 0.0
+        self.hauptwerte = []
+        self.load_hauptwerte()
+        self.at_data = AbflussTafel(self.at)
+
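+    # Reads the gauge's STA file: the first line carries the gauge name
+    # (line[16:37]) followed by aeo and nullpunkt, the second line carries
+    # the station km (line[29:36]), and all following lines are main values
+    # matched by HAUPTWERT (name, value, kind 'Q'/'W'/'D'/'T').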
+    def load_hauptwerte(self):
+        with codecs.open(self.sta, "r", "latin-1") as f:
+            for line_no, line in enumerate(f):
+                line = line.rstrip()
+                if line_no == 0:
+                    first = False
+                    name = line[16:37].strip()
+                    line = [s.replace(",", ".") for s in line[37:].split()]
+                    self.aeo = float(line[0])
+                    self.nullpunkt = float(line[1])
+                    print >> sys.stderr, "pegel name: '%s'" % name
+                    print >> sys.stderr, "pegel aeo: '%f'" % self.aeo
+                    print >> sys.stderr, "pegel nullpunkt: '%f'" % self.nullpunkt
+                elif line_no == 1:
+                    self.km = float(line[29:36].strip().replace(",", "."))
+                    print >> sys.stderr, "km: '%f'" % self.km
+                else:
+                    if not line: continue
+                    line = line.replace(",", ".")
+                    m = HAUPTWERT.match(line)
+                    if not m: continue
+                    self.hauptwerte.append(Hauptwert(
+                        m.group(1), float(m.group(2)), m.group(3)))
+
+class Gewaesser(object):
+
+    def __init__(self, name=None, b_b=None, wst=None):
+        self.name = name
+        self.b_b = b_b
+        self.wst = wst
+        self.pegel = []
+
+    def load_pegel(self):
+        dir_name = os.path.dirname(self.wst)
+        pegel_glt = find_file(dir_name, "PEGEL.GLT")
+        if not pegel_glt:
+            print >> sys.stderr, "Missing PEGEL.GLT for %r" % self.name
+            return
+
+        print >> sys.stderr, "pegel_glt: %r" % pegel_glt
+
+        with codecs.open(pegel_glt, "r", "latin-1") as f:
+            for line in f:
+                line = line.strip()
+                if not line or line.startswith("#"):
+                    continue
+                # using re to cope with quoted columns,
+                # shlex has unicode problems.
+                parts = [p for p in re.split("( |\\\".*?\\\"|'.*?')", line) 
+                         if p.strip()]
+                if len(parts) < 7:
+                    print >> sys.stderr, "too few columns (need 7): %r" % line
+                    continue
+
+                print >> sys.stderr, "%r" % parts
+                self.pegel.append(Pegel(
+                    parts[0],
+                    min(float(parts[2]), float(parts[3])),
+                    max(float(parts[2]), float(parts[3])),
+                    norm_path(parts[4], dir_name),
+                    norm_path(parts[5], dir_name),
+                    parts[6]))
+
+
+    def __repr__(self):
+        return u"Gewaesser(name=%r, b_b=%r, wst=%r)" % (
+            self.name, self.b_b, self.wst)
+
+def norm_path(path, ref):
+    if not os.path.isabs(path):
+        path = os.path.normpath(os.path.join(ref, path))
+    return path
+
+def find_file(path, what):
+    what = what.lower()
+    for filename in os.listdir(path):
+        p = os.path.join(path, filename)
+        if os.path.isfile(p) and filename.lower() == what:
+            return p
+    return None
+    
+
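+# Parses an 'info gew' file: '*' lines are comments, a 'Gewässer:' line
+# starts a new river block, and 'B+B-Info:'/'WSTDatei:' lines attach paths
+# (resolved relative to the gew file's directory) to the current block.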
+def read_gew(filename):
+
+    gewaesser = []
+
+    current = Gewaesser()
+
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+
+    with codecs.open(filename, "r", "latin-1") as f:
+        for line in f:
+            line = line.strip()
+            if not line or line.startswith("*"):
+                continue
+
+            if line.startswith(u"Gewässer:"):
+                if current.name:
+                    gewaesser.append(current)
+                    current = Gewaesser()
+                current.name = line[len(u"Gewässer:"):].strip()
+            elif line.startswith(u"B+B-Info:"):
+                current.b_b = norm_path(line[len(u"B+B-Info:"):].strip(),
+                                        dirname)
+            elif line.startswith(u"WSTDatei:"):
+                current.wst = norm_path(line[len(u"WSTDatei:"):].strip(),
+                                        dirname)
+
+        if current.name:
+            gewaesser.append(current)
+
+    return gewaesser
+
+def main():
+
+    if len(sys.argv) < 2:
+        print >> sys.stderr, "missing gew file"
+        sys.exit(1)
+
+    gew_filename = sys.argv[1]
+
+    if not os.path.isfile(gew_filename):
+        print >> sys.stderr, "'%s' is not a file" % gew_filename
+        sys.exit(1)
+
+    gewaesser = read_gew(gew_filename)
+
+    for gew in gewaesser:
+        gew.load_pegel()
+
+    
+
+if __name__ == '__main__':
+    main()
+# vim: set fileencoding=utf-8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/import-kms.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+
+import sys
+import logging
+import re
+import os
+
+import sqlite3 as db
+import locale
+import codecs
+
+from optparse import OptionParser
+
+log = logging.getLogger(__name__) 
+log.setLevel(logging.WARNING)
+log.addHandler(logging.StreamHandler(sys.stderr))
+
+RANGE = re.compile("([^#]*)#(.*)")
+DEFAULT_DATABASE = "flys.db"
+
+SQL_NEXT_ID   = "SELECT coalesce(max(id), -1) + 1 FROM %s"
+SQL_SELECT_ID = "SELECT id FROM %s WHERE %s = ?"
+SQL_INSERT_ID = "INSERT INTO %s (id, %s) VALUES (?, ?)"
+
+SQL_SELECT_RANGE_ID = """
+SELECT id FROM ranges WHERE river_id = ? AND a = ? AND b = ?
+"""
+SQL_INSERT_RANGE_ID = """
+INSERT INTO ranges (id, river_id, a, b) VALUES (?, ?, ?, ?)
+"""
+SQL_SELECT_ANNOTATION_ID = """
+SELECT id FROM annotations
+WHERE range_id = ? AND attribute_id = ? AND position_id = ?
+"""
+SQL_INSERT_ANNOTATION_ID = """
+INSERT INTO annotations (id, range_id, attribute_id, position_id) 
+VALUES (?, ?, ?, ?)
+"""
+
+def encode(s):
+    try:
+        return unicode(s, "latin-1")
+    except UnicodeDecodeError:
+        return unicode.encode(s, locale.getpreferredencoding())
+
+class hashabledict(dict):
+    def __key(self):
+        return tuple((k, self[k]) for k in sorted(self))
+    def __hash__(self):
+        return hash(self.__key())
+    def __eq__(self, other):
+        return self.__key() == other.__key()
+
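+# Simple memoization decorator: results are cached per full argument tuple,
+# with hashabledict making keyword arguments usable as part of the key.
+# Currently unused -- the @cache decorations below are commented out.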
+def cache(f):
+    def func(*args, **kw):
+        key = (args, hashabledict(kw))
+        try:
+            return f.__cache__[key]
+        except KeyError:
+            value = f(*args, **kw)
+            f.__cache__[key] = value
+            return value
+    f.__cache__ = {}
+    return func
+
+NEXT_IDS = {}
+def next_id(cur, relation):
+    idx = NEXT_IDS.get(relation)
+    if idx is None:
+        cur.execute(SQL_NEXT_ID % relation)
+        idx = cur.fetchone()[0]
+    NEXT_IDS[relation] = idx + 1
+    return idx
+
+def get_id(cur, relation, attribute, value):
+    select_stmt = SQL_SELECT_ID % (relation, attribute)
+    #log.debug(select_stmt)
+    cur.execute(select_stmt, (value,))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, relation)
+    insert_stmnt = SQL_INSERT_ID % (relation, attribute)
+    #log.debug(insert_stmnt)
+    cur.execute(insert_stmnt, (idx, value))
+    cur.connection.commit()
+    log.debug("insert %s '%s' id: '%d'" % (relation, value, idx))
+    return idx
+
+#@cache
+def get_river_id(cur, name):
+    return get_id(cur, "rivers", "name", name)
+
+#@cache
+def get_attribute_id(cur, value):
+    return get_id(cur, "attributes", "value", value)
+
+#@cache
+def get_position_id(cur, value):
+    return get_id(cur, "positions", "value", value)
+
+#@cache
+def get_range_id(cur, river_id, a, b):
+    cur.execute(SQL_SELECT_RANGE_ID, (river_id, a, b))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "ranges")
+    cur.execute(SQL_INSERT_RANGE_ID, (idx, river_id, a, b))
+    cur.connection.commit()
+    return idx
+
+#@cache
+def get_annotation_id(cur, range_id, attribute_id, position_id):
+    cur.execute(SQL_SELECT_ANNOTATION_ID, (
+        range_id, attribute_id, position_id))
+    row = cur.fetchone()
+    if row: return row[0]
+    idx = next_id(cur, "annotations")
+    cur.execute(SQL_INSERT_ANNOTATION_ID, (
+        idx, range_id, attribute_id, position_id))
+    cur.connection.commit()
+    return idx
+
+def files(root, accept=lambda x: True):
+    if os.path.isfile(root):
+        if accept(root): yield root
+    elif os.path.isdir(root):
+        stack = [ root ]
+        while stack:
+            cur = stack.pop()
+            for f in os.listdir(cur):
+                p = os.path.join(cur, f)
+                if os.path.isdir(p):
+                    stack.append(p)
+                elif os.path.isfile(p) and accept(p):
+                    yield p
+
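+# Imports one *.KM annotation file: every non-comment line is expected to be
+# 'attribute;position;station' (semicolon separated), where the station field
+# is either a single km or a range 'a#b' (see RANGE) and decimal commas are
+# accepted; empty attribute/position fields end up as NULL references.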
+def feed_km(cur, river_id, km_file):
+
+    log.info("processing: %s" % km_file)
+
+    for line in codecs.open(km_file, "r", "latin-1"):
+        line = line.strip()
+        if not line or line.startswith('*'):
+            continue
+        parts = [x.strip() for x in line.split(';')]
+        if len(parts) < 3:
+            log.error("cannot process: '%s'" % line)
+            continue
+        m = RANGE.match(parts[2])
+        try:
+            if m:
+                x = [float(x.replace(",", ".")) for x in m.groups()]
+                a, b = min(x), max(x)
+                if a == b: b = None
+            else:
+                a, b = float(parts[2].replace(",", ".")), None
+        except ValueError:
+            log.error("cannot process: '%s'" % line)
+            continue
+
+        attribute = parts[0]
+        position  = parts[1]
+        attribute_id = get_attribute_id(cur, attribute) if attribute else None
+        position_id  = get_position_id(cur, position) if position else None
+
+        range_id = get_range_id(cur, river_id, a, b)
+
+        get_annotation_id(cur, range_id, attribute_id, position_id)
+
+def main():
+
+    usage = "usage: %prog [options] river km-file ..."
+    parser = OptionParser(usage=usage)
+    parser.add_option(
+        "-v", "--verbose", action="store_true",
+        dest="verbose",
+        help="verbose output")
+    parser.add_option(
+        "-r", "--recursive", action="store_true",
+        dest="recursive", default=False,
+        help="recursive")
+    parser.add_option(
+        "-d", "--database", action="store",
+        dest="database",
+        help="database to connect with",
+        default=DEFAULT_DATABASE)
+
+    options, args = parser.parse_args()
+
+    if options.verbose:
+        log.setLevel(logging.INFO)
+    
+    if len(args) < 1:
+        log.error("missing river argument")
+        sys.exit(1)
+
+    river = unicode(args[0], locale.getpreferredencoding())
+
+    with db.connect(options.database) as con:
+        cur = con.cursor()
+        river_id = get_river_id(cur, river)
+
+        for arg in args[1:]:
+            if options.recursive:
+                for km_file in files(
+                    arg, lambda x: x.lower().endswith(".km")):
+                    feed_km(cur, river_id, km_file)
+            else:
+                feed_km(cur, river_id, arg)
+        
+
+if __name__ == '__main__':
+    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/axis.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,46 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="river_axes"
+PATH="Geodaesie/Flussachse+km"
+
+
+class Axis(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return name == "achse"
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            riverId = feat.GetField("river_id")
+        else:
+            riverId = self.river_id
+
+        if self.IsFieldSet(feat, "kind"):
+            kind = feat.GetField("kind")
+        else:
+            kind = 0
+
+        newFeat.SetField("river_id", riverId)
+        newFeat.SetField("kind", kind)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/boundaries.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,82 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="hydr_boundaries"
+TABLE_NAME_POLY="hydr_boundaries_poly"
+PATH="Hydrologie/Hydr.Grenzen/Linien"
+
+
+class HydrBoundary(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def getKind(self, path):
+        if path.find("BfG") > 0:
+            return 1
+        else:
+            return 2
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+        newFeat.SetField("kind", kind)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
+
+
+class HydrBoundaryPoly(HydrBoundary):
+
+    def getTablename(self):
+        return TABLE_NAME_POLY
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+        newFeat.SetField("kind", kind)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/buildings.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,42 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="buildings"
+PATH="Geodaesie/Bauwerke"
+
+
+class Building(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("Name"))
+        elif self.IsFieldSet(feat, "KWNAAM"):
+            newFeat.SetField("name", feat.GetField("KWNAAM"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/catchments.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,48 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="catchment"
+PATH="Hydrologie/Einzugsgebiet"
+
+
+class Catchment(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("name"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "AREA"):
+            newFeat.SetField("area", feat.GetField("area"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/crosssectiontracks.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,52 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="cross_section_tracks"
+PATH="Geodaesie/Querprofile"
+
+
+class CrosssectionTrack(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "KILOMETER"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KILOMETER"))
+        elif self.IsFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        elif self.IsFieldSet(feat, "STATION"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("STATION"))
+        else:
+            return None
+
+        if self.IsFieldSet(feat, "ELEVATION"):
+            newFeat.SetField("z", feat.GetFieldAsDouble("ELEVATION"))
+        else:
+            newFeat.SetField("z", 0)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/fixpoints.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,56 @@
+import ogr, osr
+
+from importer import Importer
+
+TABLE_NAME="fixpoints"
+PATH="Geodaesie/Festpunkte"
+
+
+class Fixpoint(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        elif self.IsFieldSet(feat, "ELBE_KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("ELBE_KM"))
+        else:
+            return None
+
+        if self.IsFieldSet(feat, "X"):
+            newFeat.SetField("x", feat.GetFieldAsDouble("X"))
+
+        if self.IsFieldSet(feat, "Y"):
+            newFeat.SetField("y", feat.GetFieldAsDouble("Y"))
+
+        if self.IsFieldSet(feat, "HPGP"):
+            newFeat.SetField("HPGP", feat.GetField("HPGP"))
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/floodplains.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,40 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="floodplain"
+PATH="Hydrologie/Hydr.Grenzen"
+
+
+class Floodplain(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 3 or geomType == 6
+
+
+    def isShapeRelevant(self, name, path):
+        return name.find("talaue") >= 0
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/gauges.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,47 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="gauge_location"
+PATH="Hydrologie/Streckendaten"
+
+
+class GaugeLocation(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("name"))
+        elif self.IsFieldSet(feat, "MPNAAM"):
+            newFeat.SetField("name", feat.GetField("MPNAAM"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/hws.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,51 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="hws"
+PATH="Hydrologie/HW-Schutzanlagen"
+
+
+class HWS(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "TYP"):
+            newFeat.SetField("type", feat.GetField("TYP"))
+
+        if self.IsFieldSet(feat, "Bauart"):
+            newFeat.SetField("hws_facility", feat.GetField("Bauart"))
+
+        if self.IsFieldSet(feat, "Name"):
+            newFeat.SetField("name", feat.GetField("name"))
+        else:
+            newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/importer.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,139 @@
+import ogr, osr
+
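+# Base class of the shapefile importers in this directory. Subclasses
+# provide getPath(), getTablename(), isGeometryValid() and
+# createNewFeature(); walkOverShapes() opens a shapefile and
+# shape2Database() copies every feature with an accepted geometry type into
+# the destination table, reprojecting it to dest_srs (an EPSG code) via
+# transform() when the source geometry carries an SRS.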
+class Importer:
+
+    def __init__(self, dbconn, river_id, dest_srs):
+        self.dbconn   = dbconn
+        self.river_id = river_id
+        self.dest_srs = osr.SpatialReference()
+        self.dest_srs.ImportFromEPSG(dest_srs)
+
+
+    def getKind(self, path):
+        raise NotImplementedError("Importer.getKind is abstract!")
+
+
+    def getPath(self, base):
+        raise NotImplementedError("Importer.getPath is abstract!")
+
+
+    def getTablename(self):
+        raise NotImplementedError("Importer.getTablename is abstract!")
+
+
+    def IsFieldSet(self, feat, name):
+        try:
+            isset = feat.GetField(name)
+            return isset is not None
+        except:
+            return False
+
+
+    def IsDoubleFieldSet(self, feat, name):
+        try:
+            isset = feat.GetFieldAsDouble(name)
+            return isset is not None
+        except:
+            return False
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def walkOverShapes(self, shape):
+        print "---"
+        (name, path) = shape
+        if not self.isShapeRelevant(name, path):
+            print "Skip shapefile '%s'" % name
+            return
+
+        shp = ogr.Open(shape[1])
+        if shp is None:
+            print "Shapefile '%s' could not be opened!" % path
+            return
+
+        print "Opened shapefile '%s'" % path
+        srcLayer = shp.GetLayerByName(name)
+
+        if srcLayer is None:
+            print "Layer '%s' was not found!" % name
+            return
+
+        return self.shape2Database(srcLayer, name, path)
+
+
+    def transform(self, feat):
+        geometry = feat.GetGeometryRef()
+        src_srs  = geometry.GetSpatialReference()
+
+        if src_srs is None:
+            print "Error: No source SRS given! No transformation possible!"
+            return feat
+
+        transformer = osr.CoordinateTransformation(src_srs, self.dest_srs)
+        geometry.Transform(transformer)
+
+        return feat
+
+
+    def shape2Database(self, srcLayer, name, path):
+        table     = ogr.Open(self.dbconn)
+        destLayer = table.GetLayerByName(self.getTablename())
+
+        if srcLayer is None:
+            print "Shapefile is None!"
+            return -1
+
+        if destLayer is None:
+            print "No destination layer given!"
+            return -1
+
+        count = srcLayer.GetFeatureCount()
+        print "Try to add %i features to database." % count
+
+        srcLayer.ResetReading()
+
+        geomType    = -1
+        success     = 0
+        unsupported = 0
+        creationFailed = 0
+        featureDef  = destLayer.GetLayerDefn()
+
+        for feat in srcLayer:
+            geom     = feat.GetGeometryRef()
+
+            if geom is None:
+                continue
+
+            geomType = geom.GetGeometryType()
+
+            if self.isGeometryValid(geomType):
+                newFeat = self.createNewFeature(featureDef,
+                                                feat,
+                                                name=name,
+                                                path=path)
+
+                if newFeat is not None:
+                    newFeat = self.transform(newFeat)
+                    res = destLayer.CreateFeature(newFeat)
+                    if res is None or res > 0:
+                        print "Error while inserting feature: %r" % res
+                    else:
+                        success = success + 1
+                else:
+                    creationFailed = creationFailed + 1
+            else:
+                unsupported = unsupported + 1
+
+        print "Inserted %i features" % success
+        print "Failed to create %i features" % creationFailed
+        print "Found %i unsupported features" % unsupported
+
+        try:
+            destLayer.CommitTransaction()
+        except Exception:
+            print "Exception while committing transaction."
+
+        return geomType
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/km.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,45 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="river_axes_km"
+PATH="Geodaesie/Flussachse+km"
+
+
+class KM(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 1
+
+
+    def isShapeRelevant(self, name, path):
+        return name == "km"
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsDoubleFieldSet(feat, "km"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("km"))
+        elif self.IsDoubleFieldSet(feat, "KM"):
+            newFeat.SetField("km", feat.GetFieldAsDouble("KM"))
+        else:
+            return None
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/lines.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,51 @@
+import ogr
+
+from importer import Importer
+
+TABLE_NAME="lines"
+PATH="Geodaesie/Linien"
+
+
+class Line(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        return geomType == 2 or geomType == -2147483646
+
+
+    def isShapeRelevant(self, name, path):
+        return True
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        newFeat  = ogr.Feature(featureDef)
+        geometry = feat.GetGeometryRef()
+        geometry.SetCoordinateDimension(2)
+
+        newFeat.SetGeometry(geometry)
+        newFeat.SetField("name", args['name'])
+
+        if self.IsFieldSet(feat, "river_id"):
+            newFeat.SetField("river_id", feat.GetField("river_id"))
+        else:
+            newFeat.SetField("river_id", self.river_id)
+
+        if self.IsFieldSet(feat, "TYP"):
+            newFeat.SetField("kind", feat.GetFieldAsDouble("TYP"))
+        else:
+            newFeat.SetField("kind", "DAMM")
+
+        if self.IsFieldSet(feat, "Z"):
+            newFeat.SetField("z", feat.GetFieldAsDouble("Z"))
+        else:
+            newFeat.SetField("z", 9999)
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/shpimporter.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,60 @@
+import ogr
+
+import utils
+from uesg  import UESG
+from axis  import Axis
+from km    import KM
+from lines import Line
+from fixpoints import Fixpoint
+from buildings import Building
+from crosssectiontracks import CrosssectionTrack
+from floodplains import Floodplain
+from boundaries import HydrBoundary, HydrBoundaryPoly
+from hws import HWS
+from gauges import GaugeLocation
+from catchments import Catchment
+
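+# Configuration placeholders -- adjust before running: DBCONN is an OGR
+# connection string (here Oracle OCI), PATH points to the river's
+# 'Gewaesser' directory, RIVER_ID has to be set to the database id of the
+# river being imported, and DEST_SRS is the EPSG code of the target
+# projection.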
+DBCONN='OCI:user/pass@host'
+PATH='/path/to/Gewaesser/Elbe'
+RIVER_ID=the_river_id
+DEST_SRS=31467
+
+
+def getImporters():
+    return [
+        Axis(DBCONN, RIVER_ID, DEST_SRS),
+        KM(DBCONN, RIVER_ID, DEST_SRS),
+        CrosssectionTrack(DBCONN, RIVER_ID, DEST_SRS),
+        Line(DBCONN, RIVER_ID, DEST_SRS),
+        Fixpoint(DBCONN, RIVER_ID, DEST_SRS),
+        Building(DBCONN, RIVER_ID, DEST_SRS),
+        Floodplain(DBCONN, RIVER_ID, DEST_SRS),
+        HydrBoundary(DBCONN, RIVER_ID, DEST_SRS),
+        HydrBoundaryPoly(DBCONN, RIVER_ID, DEST_SRS),
+        HWS(DBCONN, RIVER_ID, DEST_SRS),
+        GaugeLocation(DBCONN, RIVER_ID, DEST_SRS),
+        Catchment(DBCONN, RIVER_ID, DEST_SRS),
+        UESG(DBCONN, RIVER_ID, DEST_SRS)
+        ]
+
+
+if __name__ == '__main__':
+    importers = getImporters()
+
+    types = {}
+
+    for importer in importers:
+        shapes = utils.findShapefiles(importer.getPath(PATH))
+        print "Found %i Shapefiles" % len(shapes)
+
+        for shpTuple in shapes:
+            geomType = importer.walkOverShapes(shpTuple)
+            types[geomType] = types.get(geomType, 0) + 1
+
+    for key in types:
+        print "%i x geometry type %s" % (types[key], key)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/uesg.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,89 @@
+import ogr
+
+from importer import Importer
+
+
+TABLE_NAME="floodmaps"
+PATH="Hydrologie/UeSG/Berechnung"
+
+
+class UESG(Importer):
+
+    def getPath(self, base):
+        return "%s/%s" % (base, PATH)
+
+
+    def getTablename(self):
+        return TABLE_NAME
+
+
+    def isGeometryValid(self, geomType):
+        if geomType == 3 or geomType == 6:
+            return True
+        else:
+            return False
+
+
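+    # Derives the floodmap 'kind' from the directory path: paths containing
+    # 'Berechnung' start at 100, plus 10 for 'Aktuell' (20 otherwise), plus 2
+    # for 'Land' (1 otherwise); all other paths are classified as 200.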
+    def getKind(self, path):
+        kind = 0
+        if path.find("Berechnung") > 0:
+            kind = kind + 100
+
+            if path.find("Aktuell") > 0:
+                kind = kind + 10
+            else:
+                kind = kind + 20
+
+            if path.find("Land") > 0:
+                kind = kind + 2
+            else:
+                kind = kind + 1
+        else:
+            kind = kind + 200
+
+        return kind
+
+
+    def createNewFeature(self, featureDef, feat, **args):
+        kind  = self.getKind(args['path'])
+
+        newFeat = ogr.Feature(featureDef)
+        newFeat.SetGeometry(feat.GetGeometryRef())
+
+        if self.IsFieldSet(feat, "river_id"):
+            riverId = feat.GetField("river_id")
+        else:
+            riverId = self.river_id
+
+        if self.IsFieldSet(feat, "diff"):
+            diff = feat.GetFieldAsDouble("diff")
+        else:
+            diff = 0
+
+        if self.IsFieldSet(feat, "count"):
+            count = feat.GetFieldAsInteger("count")
+        else:
+            count = 0
+
+        if self.IsFieldSet(feat, "area"):
+            area = feat.GetFieldAsDouble("area")
+        else:
+            area = 0
+
+        if self.IsFieldSet(feat, "perimeter"):
+            perimeter = feat.GetFieldAsDouble("perimeter")
+        else:
+            perimeter = 0
+
+        groupId = 2
+
+        newFeat.SetField("river_id", riverId)
+        newFeat.SetField("diff", diff)
+        newFeat.SetField("count", count)
+        newFeat.SetField("area", area)
+        newFeat.SetField("perimeter", perimeter)
+        newFeat.SetField("kind", kind)
+        newFeat.SetField("name", args['name'])
+
+        return newFeat
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/shpimporter/utils.py	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,20 @@
+import os
+
+SHP='.shp'
+
+def findShapefiles(path):
+    shapes = []
+
+    for root, dirs, files in os.walk(path):
+        if len(files) == 0:
+            continue
+
+        print "Processing directory '%s' with %i files " % (root, len(files))
+
+        for f in files:
+            if f.endswith(SHP):
+                shapes.append((f[:-len(SHP)], os.path.join(root, f)))
+
+    return shapes
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/spatial-info.sh	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+mvn -e \
+    -Dlog4j.configuration=file://`readlink -f contrib/log4j.properties` \
+    -Dflys.backend.user=USER \
+    -Dflys.backend.password=PASSWORD \
+    -Dflys.backend.url=jdbc:postgresql://THE.DATABASE.HOST:PORT/DBNAME \
+    -Dflys.backend.spatial.river=Saar \
+    -Dexec.mainClass=de.intevation.flys.backend.SpatialInfo \
+    exec:java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/annotation-types.xml	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<annotation>
+    <types>
+        <type name="Abzweigung"/>
+        <type name="Berechnungsstrecke"/>
+        <type name="Brücke"/>
+        <type name="Bundesland"/>
+        <type name="Deich-Kilometrierung"/>
+        <type name="Einmündung"/>
+        <type name="Fähre"/>
+        <type name="Grenze"/>
+        <type name="Hafen"/>
+        <type name="Hauptpegel"/>
+        <type name="Hauptzufluss"/>
+        <type name="HW-Marken"/>
+        <type name="HW-Schutz"/>
+        <type name="Insel"/>
+        <type name="Kraftwerk"/>
+        <type name="Landkreis"/>
+        <type name="Landesgewässerkilometrierung"/>
+        <type name="Meldestelle"/>
+        <type name="Nebenarm / Altarm"/>
+        <type name="sonstige Messstelle"/>
+        <type name="Morphologische Messstelle"/>
+        <type name="Pegel (inkl. Nebenpegel)"/>
+        <type name="Ort"/>
+        <type name="Schleuse"/>
+        <type name="Stauwehr"/>
+        <type name="Staatsgrenze"/>
+        <type name="Staat"/>
+        <type name="WSA"/>
+        <type name="WSD"/>
+        <type name="Zufluss (inkl. kleinerer Gewässer)"/>
+        <type name="Sonstige" default="true"/>
+    </types>
+
+    <patterns>
+        <file pattern="^Verzweigung$" type="Abzweigung"/>
+        <file pattern="^Brücken$" type="Brücke"/>
+        <file pattern="^Deich.*$" type="Deich"/>
+        <file pattern="^Hafen$" type="Hafen"/>
+        <file pattern="^Hauptpegel$" type="Hauptpegel"/>
+        <file pattern="^Zufluss$" type="Hauptzufluss"/>
+        <file pattern="^Zufluß$"  type="Hauptzufluss"/>
+        <file pattern="^HW-Marken$" type="HW-Marken"/>
+        <file pattern="^HW-Schutzanlagen$" type="HW-Schutz"/>
+        <file pattern="^Morphologische_Messstelle$" type="Morphologische Messstelle"/>
+        <file pattern="^Pegel$" type="Pegel (inkl. Nebenpegel)"/>
+        <file pattern="^Pegel-alle$" type="Pegel (inkl. Nebenpegel)"/>
+        <file pattern="^Stauwehr$" type="Stauwehr"/>
+        <file pattern="^Wehr$" type="Stauwehr"/>
+        <file pattern="^Zufluss-alle$" type="Zufluss (inkl. kleinerer Gewässer)"/>
+        <file pattern="^Zufluß-alle$" type="Zufluss (inkl. kleinerer Gewässer)"/>
+
+        <line pattern="^Abz\.?[:\s].*$" type="Abzweigung"/>
+        <line pattern="^Berechnungsstrecke.*$" type="Berechnungsstrecke"/>
+        <line pattern="^Brücke[:\s].*$" type="Brücke"/>
+        <line pattern="^Bundesland[:\s].*$" type="Bundesland"/>
+        <line pattern="^Einmündung[:\s].*$" type="Einmündung"/>
+        <line pattern="^Fähre[:\s].*$" type="Fähre"/>
+        <line pattern="^Grenze[:\s].*$" type="Grenze"/>
+        <line pattern="^Hafen[:\s].*$" type="Hafen"/>
+        <line pattern="^Pegel[:\s].*$" type="Hauptpegel"/>
+        <line pattern="^Zufluss[:\s].*$" type="Hauptzufluss"/>
+        <line pattern="^Zufluß[:\s].*$" type="Hauptzufluss"/>
+        <line pattern="^HW-Marken[:\s].*$" type="HW-Marken"/>
+        <line pattern="^Deich[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^HW-Schutz[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^HW-Schutzanlage[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^Sommerdeich[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^Hauptdeich[:\s].*$" type="HW-Schutz"/>
+        <line pattern="^Insel[:\s].*$" type="Insel"/>
+        <line pattern="^Kraftwerk[:\s].*$" type="Kraftwerk"/>
+        <line pattern="^Landkreis[:\s].*$" type="Landkreis"/>
+        <line pattern="^Meldestelle[:\s].*$" type="Meldestelle"/>
+        <line pattern="^Nebenarm[:\s].*$" type="Nebenarm / Altarm"/>
+        <line pattern="^Altarm[:\s].*$" type="Nebenarm / Altarm"/>
+        <line pattern="^Messstelle[:\s].*$" type="sonstige Messstelle"/>
+        <line pattern="^Messtelle[:\s].*$" type="sonstige Messstelle"/>
+        <line pattern="^Gemeinde[:\s].*$" type="Ort"/>
+        <line pattern="^Ort[:\s].*$" type="Ort"/>
+        <line pattern="^Orte[:\s].*$" type="Ort"/>
+        <line pattern="^Stadt[:\s].*$" type="Ort"/>
+        <line pattern="^Schleuse[:\s].*$" type="Schleuse"/>
+        <line pattern="^Stauwehr[:\s].*$" type="Stauwehr"/>
+        <line pattern="^Wehr[:\s].*$" type="Stauwehr"/>
+        <line pattern="^Nadelwehr[:\s].*$" type="Stauwehr"/>
+        <line pattern="^Hauptwehr[:\s].*$" type="Stauwehr"/>
+        <line pattern="^Staatsgrenze[:\s].*$" type="Staatsgrenze"/>
+        <line pattern="^Staat[:\s].*$" type="Staat"/>
+        <line pattern="^WSA[:\s].*$" type="WSA"/>
+        <line pattern="^WSD[:\s].*$" type="WSD"/>
+    </patterns>
+</annotation>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/import-dems.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,165 @@
+-- SAAR
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    0,
+    7.9,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0000-0079_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    8.0,
+    20.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0080-0204_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    20.5,
+    31.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0205-0314_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    31.5,
+    54.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0315-0541_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    54.2,
+    65.5,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0542-0655_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    65.6,
+    82.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0656-0828_long.txt'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Saar'),
+    82.9,
+    93.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Saar/Geodaesie/Hoehenmodelle/km0829-0931_erweitert.txt'
+);
+
+
+-- ELBE
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    0.0,
+    101.1,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_00000_10110.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    99.2,
+    203.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_09920_20300.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    202.0,
+    299.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_20200_29980.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    298.1,
+    401.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_29810_40100.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    400.0,
+    500.9,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_40000_50090.grd'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Elbe'),
+    500.1,
+    583.3,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Elbe/Geodaesie/Hoehenmodelle/m_50010_58330.grd'
+);
+
+
+-- MOSEL
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    0.0,
+    5.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0000-0580.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    5.8,
+    15.3,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0058-0153.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    15.3,
+    41.6,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0153-0416.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    41.4,
+    101.2,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012O.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    41.4,
+    101.21,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/0414-1012W.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    101.2,
+    148.8,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1012-1488.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    148.8,
+    166.6,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1488-1666.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    166.6,
+    196.0,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1666-1960.xyz'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    196.0,
+    204.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/1960-2044.XYZ'
+);
+
+INSERT INTO dem (river_id, lower, upper, path) VALUES (
+    (SELECT id FROM rivers WHERE name = 'Mosel'),
+    204.4,
+    218.4,
+    '/vol1/projects/Geospatial/flys-3.0/testdaten/Gewaesser/Mosel/Geodaesie/Hoehenmodelle/DGMW-ASCII/2044-2184.XYZ'
+);
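Note that none of these INSERTs list an id; it is filled by the dem trigger defined in oracle-spatial.sql from DEM_ID_SEQ. A minimal sketch of how the matching DEM tile for a station could be looked up afterwards (the river name 'Saar' is taken from above, the km value 42.0 is only a placeholder):

    SELECT path
      FROM dem
     WHERE river_id = (SELECT id FROM rivers WHERE name = 'Saar')
       AND 42.0 BETWEEN lower AND upper;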
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop-minfo.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,96 @@
+SET AUTOCOMMIT ON;
+
+ALTER TABLE elevation_model DROP CONSTRAINT fk_unit;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_bed_single_river_id;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_type;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_location_system;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_cur_elevation_model;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_old_elevation_model;
+ALTER TABLE bed_height_single DROP CONSTRAINT fk_range;
+ALTER TABLE bed_height_single_values DROP CONSTRAINT fk_bed_single_values_parent;
+ALTER TABLE bed_height_epoch_values DROP CONSTRAINT fk_bed_epoch_values_parent;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_cur_elevation_model;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_old_elevation_model;
+ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_range;
+ALTER TABLE depths DROP CONSTRAINT fk_depths_unit_id;
+ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_depth_id;
+ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_unit_id;
+ALTER TABLE sediment_density_values DROP CONSTRAINT fk_sdv_sediment_density_id;
+ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_river_id;
+ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_unit_id;
+ALTER TABLE morphologic_width_values DROP CONSTRAINT fk_mwv_morphologic_width_id;
+ALTER TABLE flow_velocity_model_values DROP CONSTRAINT fk_fvv_flow_velocity_model_id;
+ALTER TABLE flow_velocity_model DROP CONSTRAINT fk_fvm_river_id;
+ALTER TABLE flow_velocity_model DROP CONSTRAINT fk_fvm_discharge_zone_id;
+ALTER TABLE discharge_zone DROP CONSTRAINT fk_dz_river_id;
+ALTER TABLE flow_velocity_measurements DROP CONSTRAINT fk_fvm_rivers_id;
+ALTER TABLE flow_velocity_measure_values DROP CONSTRAINT fk_fvmv_measurements_id;
+ALTER TABLE grain_fraction DROP CONSTRAINT fk_gf_unit_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_grain_fraction_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_unit_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_time_interval_id;
+ALTER TABLE sediment_yield DROP CONSTRAINT fk_sy_river_id;
+ALTER TABLE sediment_yield_values DROP CONSTRAINT fk_syv_sediment_yield_id;
+ALTER TABLE waterlevel DROP CONSTRAINT fk_w_river_id;
+ALTER TABLE waterlevel DROP CONSTRAINT fk_w_unit_id;
+ALTER TABLE waterlevel_q_range DROP CONSTRAINT fk_wqr_waterlevel_id;
+ALTER TABLE waterlevel_values DROP CONSTRAINT fk_wv_waterlevel_q_range_id;
+ALTER TABLE waterlevel_difference DROP CONSTRAINT fk_wd_river_id;
+ALTER TABLE waterlevel_difference DROP CONSTRAINT fk_wd_unit_id;
+ALTER TABLE waterlevel_difference_column DROP CONSTRAINT fk_wdc_difference_id;
+ALTER TABLE waterlevel_difference_values DROP CONSTRAINT fk_wdv_column_id;
+
+DROP TABLE bed_height_type;
+DROP TABLE location_system;
+DROP TABLE elevation_model;
+DROP TABLE bed_height_single;
+DROP TABLE bed_height_single_values;
+DROP TABLE bed_height_epoch_values;
+DROP TABLE bed_height_epoch;
+DROP TABLE depths;
+DROP TABLE sediment_density;
+DROP TABLE sediment_density_values;
+DROP TABLE morphologic_width;
+DROP TABLE morphologic_width_values;
+DROP TABLE discharge_zone;
+DROP TABLE flow_velocity_model;
+DROP TABLE flow_velocity_model_values;
+DROP TABLE flow_velocity_measurements;
+DROP TABLE flow_velocity_measure_values;
+DROP TABLE grain_fraction;
+DROP TABLE sediment_yield;
+DROP TABLE sediment_yield_values;
+DROP TABLE waterlevel;
+DROP TABLE waterlevel_q_range;
+DROP TABLE waterlevel_values;
+DROP TABLE waterlevel_difference;
+DROP TABLE waterlevel_difference_column;
+DROP TABLE waterlevel_difference_values;
+
+DROP SEQUENCE BED_HEIGHT_TYPE_SEQ;
+DROP SEQUENCE LOCATION_SYSTEM_SEQ;
+DROP SEQUENCE ELEVATION_MODEL_SEQ;
+DROP SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+DROP SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+DROP SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+DROP SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+DROP SEQUENCE DEPTHS_ID_SEQ;
+DROP SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+DROP SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+DROP SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+DROP SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+DROP SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+DROP SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+DROP SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+DROP SEQUENCE FV_MEASURE_ID_SEQ;
+DROP SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+DROP SEQUENCE GRAIN_FRACTION_ID_SEQ;
+DROP SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+DROP SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+DROP SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop-spatial.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,68 @@
+DROP TRIGGER river_axes_trigger;
+DROP TABLE river_axes;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'RIVER_AXES';
+DROP SEQUENCE RIVER_AXES_ID_SEQ;
+
+DROP TRIGGER river_axes_km_trigger;
+DROP TABLE river_axes_km;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'RIVER_AXES_KM';
+DROP SEQUENCE RIVER_AXES_KM_ID_SEQ;
+
+DROP TRIGGER cross_section_tracks_trigger;
+DROP TABLE cross_section_tracks;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CROSS_SECTION_TRACKS';
+DROP SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+
+DROP TRIGGER lines_trigger;
+DROP TABLE lines;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'LINES';
+DROP SEQUENCE LINES_ID_SEQ;
+
+DROP TRIGGER buildings_trigger;
+DROP TABLE buildings;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'BUILDINGS';
+DROP SEQUENCE BUILDINGS_ID_SEQ;
+
+DROP TRIGGER fixpoints_trigger;
+DROP TABLE fixpoints;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FIXPOINTS';
+DROP SEQUENCE FIXPOINTS_ID_SEQ;
+
+DROP TRIGGER floodplain_trigger;
+DROP TABLE floodplain;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FLOODPLAIN';
+DROP SEQUENCE FLOODPLAIN_ID_SEQ;
+
+DROP TRIGGER dem_trigger;
+DROP TABLE dem;
+DROP SEQUENCE DEM_ID_SEQ;
+
+DROP TRIGGER catchment_trigger;
+DROP TABLE catchment;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'CATCHMENT';
+DROP SEQUENCE CATCHMENT_ID_SEQ;
+
+DROP TRIGGER hws_trigger;
+DROP TABLE hws;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HWS';
+DROP SEQUENCE HWS_ID_SEQ;
+
+DROP TRIGGER floodmaps_trigger;
+DROP TABLE floodmaps;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FLOODMAPS';
+DROP SEQUENCE FLOODMAPS_ID_SEQ;
+
+DROP TRIGGER hydr_boundaries_trigger;
+DROP TABLE hydr_boundaries;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HYDR_BOUNDARIES';
+DROP SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
+
+DROP TRIGGER hydr_boundaries_poly_trigger;
+DROP TABLE hydr_boundaries_poly;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'HYDR_BOUNDARIES_POLY';
+DROP SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
+
+DROP TRIGGER gauge_location_trigger;
+DROP TABLE gauge_location;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'GAUGE_LOCATION';
+DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-drop.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,90 @@
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsRanges;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsEdges;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsPositions;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsAttributes;
+ALTER TABLE annotations DROP CONSTRAINT cAnnotationsTypes;
+ALTER TABLE cross_section_lines DROP CONSTRAINT cQPSLinesCrossSections;
+ALTER TABLE cross_section_points DROP CONSTRAINT cQPSPointsCrossSectionLines;
+ALTER TABLE cross_sections DROP CONSTRAINT cCrossSectionsRivers;
+ALTER TABLE cross_sections DROP CONSTRAINT cCrossSectionsTimeIntervals;
+ALTER TABLE discharge_tables DROP CONSTRAINT cDischargeTablesTime_intervals;
+ALTER TABLE discharge_tables DROP CONSTRAINT cDischargeTablesGauges;
+ALTER TABLE gauges DROP CONSTRAINT cGaugesRivers;
+ALTER TABLE gauges DROP CONSTRAINT cGaugesRanges;
+ALTER TABLE hyk_entries DROP CONSTRAINT cHykEntriesHyks;
+ALTER TABLE hyk_flow_zones DROP CONSTRAINT cHykFlowZonesHykFormations;
+ALTER TABLE hyk_flow_zones DROP CONSTRAINT cHykFlowZonesHykFlowZoneTypes;
+ALTER TABLE hyks DROP CONSTRAINT cHyksRivers;
+ALTER TABLE hyk_formations DROP CONSTRAINT cHykFormationsHykEntries;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesTimeIntervals;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesGauges;
+ALTER TABLE main_values DROP CONSTRAINT cMainValuesNamedMainValues;
+ALTER TABLE named_main_values DROP CONSTRAINT cNamedMainValuesMainValueTypes;
+ALTER TABLE ranges DROP CONSTRAINT cRangesRivers;
+ALTER TABLE rivers DROP CONSTRAINT cRiversUnits;
+ALTER TABLE wst_column_q_ranges DROP CONSTRAINT cWstColumnQRangesWstColums;
+ALTER TABLE wst_column_q_ranges DROP CONSTRAINT cWstColumnQRangesWstQRanges;
+ALTER TABLE wst_column_values DROP CONSTRAINT cWstColumnValuesWstColumns;
+ALTER TABLE wst_columns DROP CONSTRAINT cWstColumnsTime_intervals;
+ALTER TABLE wst_columns DROP CONSTRAINT cWstColumnsWsts;
+ALTER TABLE wst_q_ranges DROP CONSTRAINT cWstQRangesRanges;
+ALTER TABLE wsts DROP CONSTRAINT cWstsRivers;
+DROP TABLE annotation_types;
+DROP TABLE annotations;
+DROP TABLE attributes;
+DROP TABLE cross_section_lines;
+DROP TABLE cross_section_points;
+DROP TABLE cross_sections;
+DROP TABLE discharge_table_values;
+DROP TABLE discharge_tables;
+DROP TABLE edges;
+DROP TABLE gauges;
+DROP TABLE hyk_entries;
+DROP TABLE hyk_flow_zone_types;
+DROP TABLE hyk_flow_zones;
+DROP TABLE hyk_formations;
+DROP TABLE hyks;
+DROP TABLE main_value_types;
+DROP TABLE main_values;
+DROP TABLE named_main_values;
+DROP TABLE positions;
+DROP TABLE ranges;
+DROP TABLE rivers;
+DROP TABLE time_intervals;
+DROP TABLE units;
+DROP TABLE wst_column_q_ranges;
+DROP TABLE wst_column_values;
+DROP TABLE wst_columns;
+DROP TABLE wst_q_ranges;
+DROP TABLE wsts;
+DROP SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+DROP SEQUENCE ANNOTATIONS_ID_SEQ;
+DROP SEQUENCE ATTRIBUTES_ID_SEQ;
+DROP SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+DROP SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+DROP SEQUENCE CROSS_SECTIONS_ID_SEQ;
+DROP SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+DROP SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+DROP SEQUENCE EDGES_ID_SEQ;
+DROP SEQUENCE GAUGES_ID_SEQ;
+DROP SEQUENCE HYK_ENTRIES_ID_SEQ;
+DROP SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+DROP SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+DROP SEQUENCE HYK_FORMATIONS_ID_SEQ;
+DROP SEQUENCE HYKS_ID_SEQ;
+DROP SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+DROP SEQUENCE MAIN_VALUES_ID_SEQ;
+DROP SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+DROP SEQUENCE POSITIONS_ID_SEQ;
+DROP SEQUENCE RANGES_ID_SEQ;
+DROP SEQUENCE RIVERS_ID_SEQ;
+DROP SEQUENCE TIME_INTERVALS_ID_SEQ;
+DROP SEQUENCE UNITS_ID_SEQ;
+DROP SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+DROP SEQUENCE WST_COLUMNS_ID_SEQ;
+DROP SEQUENCE WST_Q_RANGES_ID_SEQ;
+DROP SEQUENCE WSTS_ID_SEQ;
+DROP VIEW wst_value_table;
+DROP VIEW wst_w_values;
+DROP VIEW wst_q_values;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-minfo.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,353 @@
+SET AUTOCOMMIT ON;
+
+CREATE SEQUENCE LOCATION_SYSTEM_SEQ;
+
+CREATE TABLE location_system (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+CREATE SEQUENCE ELEVATION_MODEL_SEQ;
+
+CREATE TABLE elevation_model (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
+
+CREATE TABLE bed_height_type (
+    id          NUMBER(38,0) NOT NULL,
+    name        VARCHAR(16)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
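Unlike the spatial tables in oracle-spatial.sql, the MINFO tables in this file have no BEFORE INSERT triggers, so the sequences created here have to be referenced explicitly when rows are inserted. A minimal sketch, with placeholder values:

    INSERT INTO bed_height_type (id, name, description)
    VALUES (BED_HEIGHT_TYPE_SEQ.NEXTVAL, 'Peilung', 'example entry');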
+
+
+
+CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+
+CREATE TABLE bed_height_single (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    year                    NUMBER(38,0) NOT NULL,
+    sounding_width          NUMBER(38,0) NOT NULL,
+    type_id                 NUMBER(38,0) NOT NULL,
+    location_system_id      NUMBER(38,0) NOT NULL,
+    cur_elevation_model_id  NUMBER(38,0) NOT NULL,
+    old_elevation_model_id  NUMBER(38,0),
+    range_id                NUMBER(38,0) NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id),
+    CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id),
+    CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+
+CREATE TABLE bed_height_epoch (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    time_interval_id        NUMBER(38,0) NOT NULL,
+    -- sounding_width          NUMBER(38,0) NOT NULL,
+    -- type_id                 NUMBER(38,0) NOT NULL,
+    cur_elevation_model_id  NUMBER(38,0) NOT NULL,
+    old_elevation_model_id  NUMBER(38,0),
+    range_id                NUMBER(38,0) NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id),
+    CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_single_values (
+    id                      NUMBER(38,0) NOT NULL,
+    bed_height_single_id    NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,2) NOT NULL,
+    height                  NUMBER(38,2),
+    uncertainty             NUMBER(38,2),
+    data_gap                NUMBER(38,2) NOT NULL,
+    sounding_width          NUMBER(38,2) NOT NULL,
+    width                   NUMBER(38,2) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id)
+);
+
+
+CREATE SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_epoch_values (
+    id                      NUMBER(38,0) NOT NULL,
+    bed_height_epoch_id     NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,2) NOT NULL,
+    height                  NUMBER(38,2),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id)
+);
+
+
+CREATE SEQUENCE DEPTHS_ID_SEQ;
+
+CREATE TABLE depths (
+    id      NUMBER(38,0) NOT NULL,
+    lower   NUMBER(38,2) NOT NULL,
+    upper   NUMBER(38,2) NOT NULL,
+    unit_id NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+
+CREATE TABLE sediment_density (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    depth_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
+    CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_density_values (
+    id                  NUMBER(38,0) NOT NULL,
+    sediment_density_id NUMBER(38,0) NOT NULL,
+    station             NUMBER(38,2) NOT NULL,
+    density             NUMBER(38,2) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id)
+);
+
+
+CREATE SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+
+CREATE TABLE morphologic_width (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+
+CREATE TABLE morphologic_width_values (
+    id                      NUMBER(38,0) NOT NULL,
+    morphologic_width_id    NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    width                   NUMBER(38,3) NOT NULL,
+    description             VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id)
+);
+
+
+CREATE SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+
+CREATE TABLE discharge_zone (
+    id                      NUMBER(38,0) NOT NULL,
+    river_id                NUMBER(38,0) NOT NULL,
+    gauge_name              VARCHAR(64)  NOT NULL, -- not a proper foreign key, because some gauges have no record in the gauges table
+    value                   NUMBER(38,3) NOT NULL,
+    lower_discharge         VARCHAR(16)  NOT NULL,
+    upper_discharge         VARCHAR(16),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+
+CREATE TABLE flow_velocity_model (
+    id                  NUMBER(38,0) NOT NULL,
+    river_id            NUMBER(38,0) NOT NULL,
+    discharge_zone_id   NUMBER(38,0) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_model_values (
+    id                      NUMBER(38,0) NOT NULL,
+    flow_velocity_model_id  NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    q                       NUMBER(38,3) NOT NULL,
+    total_channel           NUMBER(38,3) NOT NULL,
+    main_channel            NUMBER(38,3) NOT NULL,
+    shear_stress            NUMBER(38,3) NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id)
+);
+
+
+
+CREATE SEQUENCE FV_MEASURE_ID_SEQ;
+
+CREATE TABLE flow_velocity_measurements (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_measure_values (
+    id              NUMBER(38,0) NOT NULL,
+    measurements_id NUMBER(38,0) NOT NULL,
+    station         NUMBER(38,3) NOT NULL,
+    datetime        TIMESTAMP,
+    w               NUMBER(38,3) NOT NULL,
+    q               NUMBER(38,3) NOT NULL,
+    v               NUMBER(38,3) NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id)
+);
+
+
+CREATE SEQUENCE GRAIN_FRACTION_ID_SEQ;
+
+CREATE TABLE grain_fraction (
+    id      NUMBER(38,0)   NOT NULL,
+    name    VARCHAR(64)    NOT NULL,
+    lower   NUMBER(38,3),
+    upper   NUMBER(38,3),
+    unit_id NUMBER (38,0),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_gf_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+
+CREATE TABLE sediment_yield (
+    id                  NUMBER(38,0) NOT NULL,
+    river_id            NUMBER(38,0) NOT NULL,
+    grain_fraction_id   NUMBER(38,0),
+    unit_id             NUMBER(38,0) NOT NULL,
+    time_interval_id    NUMBER(38,0) NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
+    CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id),
+    CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_yield_values (
+    id                  NUMBER(38,0) NOT NULL,
+    sediment_yield_id   NUMBER(38,0) NOT NULL,
+    station             NUMBER(38,3) NOT NULL,
+    value               NUMBER(38,3) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_ID_SEQ;
+
+CREATE TABLE waterlevel (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+
+CREATE TABLE waterlevel_q_range (
+    id              NUMBER(38,0) NOT NULL,
+    waterlevel_id   NUMBER(38,0) NOT NULL,
+    q               NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_values(
+    id                      NUMBER(38,0) NOT NULL,
+    waterlevel_q_range_id   NUMBER(38,0) NOT NULL,
+    station                 NUMBER(38,3) NOT NULL,
+    w                       NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+
+CREATE TABLE waterlevel_difference (
+    id          NUMBER(38,0) NOT NULL,
+    river_id    NUMBER(38,0) NOT NULL,
+    unit_id     NUMBER(38,0) NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
+    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_column (
+    id              NUMBER(38,0) NOT NULL,
+    difference_id   NUMBER(38,0) NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_values (
+    id          NUMBER(38,0) NOT NULL,
+    column_id   NUMBER(38,0) NOT NULL,
+    station     NUMBER(38,3) NOT NULL,
+    value       NUMBER(38,2) NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+);
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-spatial.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,276 @@
+-- Geodaesie/Flussachse+km/achse
+CREATE SEQUENCE RIVER_AXES_ID_SEQ;
+CREATE TABLE river_axes(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    kind     NUMBER(38) DEFAULT 0 NOT NULL,
+    name     VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('river_axes', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER river_axes_trigger BEFORE INSERT ON river_axes FOR each ROW
+    BEGIN
+        SELECT RIVER_AXES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
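Because the trigger above fills id from RIVER_AXES_ID_SEQ, an importer only needs to supply the geometry and attributes. A hedged sketch with placeholder coordinates and river_id (gtype 3002 = 3D line string, SRID 31467 as in the metadata entry above):

    INSERT INTO river_axes (OGR_FID, GEOM, river_id, kind, name)
    VALUES (
        1,
        MDSYS.SDO_GEOMETRY(3002, 31467, NULL,
            MDSYS.SDO_ELEM_INFO_ARRAY(1, 2, 1),
            MDSYS.SDO_ORDINATE_ARRAY(3500000, 5500000, 0, 3500100, 5500100, 0)),
        1, 0, 'example axis');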
+
+
+-- Geodaesie/Flussachse+km/km.shp
+CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ;
+CREATE TABLE river_axes_km(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    km NUMBER(6,3),
+    name     VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('river_axes_km', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER river_axes_km_trigger BEFORE INSERT ON river_axes_km FOR each ROW
+    BEGIN
+        SELECT river_axes_km_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
+
+
+--Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+CREATE TABLE cross_section_tracks (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    km       NUMBER(38,12) NOT NULL,
+    z        NUMBER(38,12) DEFAULT 0 NOT NULL,
+    name     VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('cross_section_tracks', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER cross_section_tracks_trigger BEFORE INSERT ON cross_section_tracks FOR each ROW
+    BEGIN
+        SELECT CROSS_SECTION_TRACKS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+-- TODO: Test me. Fix the importer script. Fix the oracle_spatial_idx.sql script.
+-- Geodaesie/Linien/rohre-und-speeren
+CREATE SEQUENCE LINES_ID_SEQ;
+CREATE TABLE lines (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    kind     VARCHAR2(16) NOT NULL,
+    z        NUMBER(38,12) DEFAULT 0,
+    name     VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('lines', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER lines_trigger BEFORE INSERT ON lines FOR each ROW
+    BEGIN
+        SELECT LINES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+-- NOTE: Should lines be 3D?
+-- TODO: Test index. 
+--CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+-- 'kind':
+-- 0: ROHR1
+-- 1: DAMM
+
+
+-- Geodaesie/Bauwerke/Wehre.shp
+CREATE SEQUENCE BUILDINGS_ID_SEQ;
+CREATE TABLE buildings(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR2(255),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('buildings', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER buildings_trigger BEFORE INSERT ON buildings FOR each ROW
+    BEGIN
+        SELECT BUILDINGS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+-- Geodaesie/Festpunkte/Festpunkte.shp
+CREATE SEQUENCE FIXPOINTS_ID_SEQ;
+CREATE TABLE fixpoints (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    x NUMBER(38,11),
+    y NUMBER(38,11),
+    km NUMBER(38,11) NOT NULL,
+    HPGP VARCHAR2(255),
+    name VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('fixpoints', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER fixpoints_trigger BEFORE INSERT ON fixpoints FOR each ROW
+    BEGIN
+        SELECT FIXPOINTS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+
+
+-- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
+CREATE TABLE floodplain(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name     VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('floodplain', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER floodplain_trigger BEFORE INSERT ON floodplain FOR each ROW
+    BEGIN
+        SELECT FLOODPLAIN_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
+
+
+-- TODO: Test me. Fix the importer script.
+-- NOTE: This table is not spatial; it only stores file paths to the DEMs.
+-- Geodaesie/Hoehenmodelle/*
+CREATE SEQUENCE DEM_ID_SEQ;
+CREATE TABLE dem (
+    ID NUMBER PRIMARY KEY NOT NULL,
+    river_id NUMBER(38),
+    -- XXX Should we use the ranges table instead?
+    lower    NUMBER(19,5),
+    upper    NUMBER(19,5),
+    path     VARCHAR(256),
+    UNIQUE (river_id, lower, upper)
+);
+CREATE OR REPLACE TRIGGER dem_trigger BEFORE INSERT ON dem FOR each ROW
+    BEGIN
+        SELECT DEM_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+
+
+-- Hydrologie/Einzugsgebiete/EZG.shp
+CREATE SEQUENCE CATCHMENT_ID_SEQ;
+CREATE TABLE catchment(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    area NUMBER(19,5),
+    name VARCHAR2(255),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('CATCHMENT', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+
+CREATE TRIGGER catchment_trigger BEFORE INSERT ON catchment FOR each ROW
+    BEGIN
+        SELECT CATCHMENT_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=polygon');
+
+--Hydrologie/HW-Schutzanlagen/hws.shp
+CREATE SEQUENCE HWS_ID_SEQ;
+CREATE TABLE hws(
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    hws_facility VARCHAR2(255),
+    type VARCHAR2(255),
+    name VARCHAR(64),
+    ID NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hws', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hws_trigger BEFORE INSERT ON hws FOR each ROW
+    BEGIN
+        SELECT HWS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+--CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+
+--Hydrologie/UeSG
+CREATE SEQUENCE FLOODMAPS_ID_SEQ;
+CREATE TABLE floodmaps (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    diff NUMBER(19,5),
+    count NUMBER(38),
+    area NUMBER(19,5),
+    perimeter NUMBER(19,5),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('floodmaps', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER floodmaps_trigger BEFORE INSERT ON floodmaps FOR each ROW
+    BEGIN
+        SELECT FLOODMAPS_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX floodmaps_spatial_idx ON floodmaps(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+
+
+--Hydrologie/Hydr.Grenzen/Linien
+CREATE SEQUENCE HYDR_BOUNDARIES_ID_SEQ;
+CREATE TABLE hydr_boundaries (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hydr_boundaries', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hydr_boundaries_trigger BEFORE INSERT ON hydr_boundaries FOR each ROW
+    BEGIN
+        SELECT HYDR_BOUNDARIES_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX hydr_boundaries_idx ON hydr_boundaries(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+
+CREATE SEQUENCE HYDR_BOUNDARIES_POLY_ID_SEQ;
+CREATE TABLE hydr_boundaries_poly (
+    OGR_FID NUMBER(38),
+    GEOM MDSYS.SDO_GEOMETRY,
+    river_id NUMBER(38),
+    name VARCHAR(255),
+    kind NUMBER(38),
+    id NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('hydr_boundaries_poly', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER hydr_boundaries_poly_trigger BEFORE INSERT ON hydr_boundaries_poly FOR each ROW
+    BEGIN
+        SELECT HYDR_BOUNDARIES_POLY_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX hydr_boundaries_poly_idx ON hydr_boundaries_poly(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=MULTIPOLYGON');
+
+
+-- Hydrologie/Streckendaten/
+CREATE SEQUENCE GAUGE_LOCATION_ID_SEQ;
+CREATE TABLE gauge_location (
+    OGR_FID     NUMBER(38),
+    GEOM        MDSYS.SDO_GEOMETRY,
+    river_id    NUMBER(38),
+    name        VARCHAR(64),
+    id          NUMBER PRIMARY KEY NOT NULL
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('gauge_location', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001),MDSYS.SDO_DIM_ELEMENT('Z',-100000,100000,0.002)), 31467);
+CREATE OR REPLACE TRIGGER gauge_location_trigger BEFORE INSERT ON gauge_location FOR EACH ROW
+    BEGIN
+        SELECT GAUGE_LOCATION_ID_SEQ.nextval INTO :new.id FROM dual;
+    END;
+/
+CREATE INDEX gauge_location_idx ON gauge_location(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle-spatial_idx.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,9 @@
+CREATE INDEX catchment_spatial_idx ON catchment(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=multipolygon');
+CREATE INDEX river_axes_km_spatial_idx ON river_axes_km(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=point');
+CREATE INDEX buildings_spatial_idx ON buildings(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX fixpoints_spatial_idx ON fixpoints(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+CREATE INDEX river_axes_spatial_idx ON river_axes(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX CrossSectionTracks_spatial_idx ON cross_section_tracks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX hws_spatial_idx ON hws(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
+CREATE INDEX floodplain_spatial_idx ON floodplain(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POLYGON');
+CREATE INDEX lines_idx ON lines(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=LINE');
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,414 @@
+-- ANNOTATION_TYPES
+CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+
+CREATE TABLE annotation_types (
+    id              NUMBER(38,0) NOT NULL, 
+    name            VARCHAR2(255),
+    PRIMARY KEY     (id)
+);
+
+
+-- ANNOTATIONS
+CREATE SEQUENCE ANNOTATIONS_ID_SEQ;
+
+CREATE TABLE annotations (
+    id              NUMBER(38,0) NOT NULL, 
+    attribute_id    NUMBER(38,0),
+    edge_id         NUMBER(38,0),
+    position_id     NUMBER(38,0),
+    range_id        NUMBER(38,0),
+    type_id         NUMBER(38,0),
+    PRIMARY KEY     (id)
+);
+
+
+-- ATTRIBUTES 
+CREATE SEQUENCE ATTRIBUTES_ID_SEQ;
+
+CREATE TABLE attributes (
+    id              NUMBER(38,0) NOT NULL, 
+    value           VARCHAR2(255), 
+    primary key     (id)
+);
+
+
+-- CROSS_SECTION_LINES
+CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+CREATE TABLE cross_section_lines (
+    id                  NUMBER(38,0) NOT NULL,
+    km                  NUMBER(38,2),
+    cross_section_id    NUMBER(38,0), 
+    PRIMARY KEY         (id)
+);
+
+
+-- CROSS_SECTION_POINTS
+CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+
+CREATE TABLE cross_section_points (
+    id                      NUMBER(38,0) NOT NULL,
+    col_pos                 NUMBER(38,0),
+    x                       NUMBER(38,2),
+    y                       NUMBER(38,2),
+    cross_section_line_id   NUMBER(38,0),
+    PRIMARY KEY             (id)
+);
+
+
+-- CROSS_SECTIONS
+CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+
+CREATE TABLE cross_sections (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    river_id            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- DISCHARGE_TABLE_VALUES
+CREATE SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+
+CREATE TABLE discharge_table_values (
+    id                  NUMBER(38,0) NOT NULL,
+    q                   NUMBER(38,2),
+    w                   NUMBER(38,2),
+    table_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- DISCHARGE_TABLES
+CREATE SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+
+CREATE TABLE discharge_tables (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    kind                NUMBER(38,0),
+    gauge_id            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- EDGES
+CREATE SEQUENCE EDGES_ID_SEQ;
+
+CREATE TABLE edges (
+    id                  NUMBER(38,0) NOT NULL,
+    bottom              NUMBER(38,2),
+    top                 NUMBER(38,2),
+    PRIMARY KEY         (id)
+);
+
+
+-- GAUGES
+CREATE SEQUENCE GAUGES_ID_SEQ;
+
+CREATE TABLE gauges (
+    id                  NUMBER(38,0) NOT NULL,
+    aeo                 NUMBER(38,2),
+    datum               NUMBER(38,2), 
+    name                VARCHAR2(255),
+    station             NUMBER(38,2),
+    official_number     NUMBER(38,0),
+    range_id            NUMBER(38,0),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_ENTRIES
+CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+
+CREATE TABLE hyk_entries (
+    id                  NUMBER(38,0) NOT NULL,
+    km                  NUMBER(38,2),
+    measure             TIMESTAMP,
+    hyk_id              NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_FLOW_ZONE_TYPES
+CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zone_types (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYK_FLOW_ZONES
+CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zones (
+    id                  NUMBER(38,0) NOT NULL,
+    a                   NUMBER(38,2),
+    b                   NUMBER(38,2),
+    formation_id        NUMBER(38,0),
+    type_id             NUMBER(38,0),
+    primary key         (id)
+);
+
+
+-- HYK_FORMATIONS
+CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+
+CREATE TABLE hyk_formations (
+    id                  NUMBER(38,0) NOT NULL,
+    bottom              NUMBER(38,2),
+    distance_hf         NUMBER(38,2),
+    distance_vl         NUMBER(38,2),
+    distance_vr         NUMBER(38,2),
+    formation_num       NUMBER(38,0),
+    top                 NUMBER(38,2),
+    hyk_entry_id        NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- HYKS
+CREATE SEQUENCE HYKS_ID_SEQ;
+
+CREATE TABLE hyks (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    river_id            NUMBER(38,0),
+    primary key         (id)
+);
+
+
+-- MAIN_VALUE_TYPES
+CREATE SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+
+CREATE TABLE main_value_types (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- MAIN_VALUES
+CREATE SEQUENCE MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE main_values (
+    id                  NUMBER(38,0) NOT NULL,
+    value               NUMBER(38,2),
+    gauge_id            NUMBER(38,0),
+    named_value_id      NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- NAMED_MAIN_VALUES
+CREATE SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE named_main_values (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    type_id             NUMBER(38,0),
+    PRIMARY KEY (id)
+);
+
+
+-- POSITIONS
+CREATE SEQUENCE POSITIONS_ID_SEQ;
+
+CREATE TABLE positions (
+    id                  NUMBER(38,0) NOT NULL,
+    value               VARCHAR2(255 char),
+    PRIMARY KEY         (id)
+);
+
+
+-- RANGES
+CREATE SEQUENCE RANGES_ID_SEQ;
+
+CREATE TABLE ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    a                   NUMBER(38,10),
+    b                   NUMBER(38,10),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- RIVERS
+CREATE SEQUENCE RIVERS_ID_SEQ;
+
+CREATE TABLE rivers (
+    id                  NUMBER(38,0) NOT NULL,
+    km_up               NUMBER(38,0),
+    name                VARCHAR2(255),
+    wst_unit_id         NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- TIME_INTERVALS
+CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
+
+CREATE TABLE time_intervals (
+    id                  NUMBER(38,0) NOT NULL, 
+    start_time          TIMESTAMP,
+    stop_time           TIMESTAMP,
+    PRIMARY KEY         (id)
+);
+
+
+-- UNITS
+CREATE SEQUENCE UNITS_ID_SEQ;
+
+CREATE TABLE units (
+    id                  NUMBER(38,0) NOT NULL,
+    name                VARCHAR2(255),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMN_Q_RANGES
+CREATE SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_column_q_ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    wst_column_id       NUMBER(38,0),
+    wst_q_range_id      NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMN_VALUES
+CREATE SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+
+CREATE TABLE wst_column_values (
+    id                  NUMBER(38,0) NOT NULL,
+    position            NUMBER(38,2),
+    w                   NUMBER(38,2),
+    wst_column_id       NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_COLUMNS
+CREATE SEQUENCE WST_COLUMNS_ID_SEQ;
+
+CREATE TABLE wst_columns (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    name                VARCHAR2(255),
+    position            NUMBER(38,0),
+    time_interval_id    NUMBER(38,0),
+    wst_id              NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WST_Q_RANGES
+CREATE SEQUENCE WST_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_q_ranges (
+    id                  NUMBER(38,0) NOT NULL,
+    q                   NUMBER(38,2),
+    range_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- WSTS
+CREATE SEQUENCE WSTS_ID_SEQ;
+
+CREATE TABLE wsts (
+    id                  NUMBER(38,0) NOT NULL,
+    description         VARCHAR2(255),
+    kind                NUMBER(38,0),
+    river_id            NUMBER(38,0),
+    PRIMARY KEY         (id)
+);
+
+
+-- ADD CONSTRAINTs
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsEdges FOREIGN KEY (edge_id) REFERENCES edges;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsPositions FOREIGN KEY (position_id) REFERENCES positions;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsAttributes FOREIGN KEY (attribute_id) REFERENCES attributes;
+ALTER TABLE annotations ADD CONSTRAINT cAnnotationsTypes FOREIGN KEY (type_id) REFERENCES annotation_types;
+ALTER TABLE cross_section_lines ADD CONSTRAINT cQPSLinesCrossSections FOREIGN KEY (cross_section_id) REFERENCES cross_sections;
+ALTER TABLE cross_section_points ADD CONSTRAINT cQPSPointsCrossSectionLines FOREIGN KEY (cross_section_line_id) REFERENCES cross_section_lines;
+ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE cross_sections ADD CONSTRAINT cCrossSectionsTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE discharge_table_values ADD CONSTRAINT cTableValuesDischargeTables foreign key (table_id) REFERENCES discharge_tables;
+ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE discharge_tables ADD CONSTRAINT cDischargeTablesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE gauges ADD CONSTRAINT cGaugesRanges FOREIGN KEY (range_id) REFERENCES ranges;
+ALTER TABLE hyk_entries ADD CONSTRAINT cHykEntriesHyks FOREIGN KEY (hyk_id) REFERENCES hyks;
+ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFormations FOREIGN KEY (formation_id) REFERENCES hyk_formations;
+ALTER TABLE hyk_flow_zones ADD CONSTRAINT cHykFlowZonesHykFlowZoneTypes FOREIGN KEY (type_id) REFERENCES hyk_flow_zone_types;
+ALTER TABLE hyks ADD CONSTRAINT cHyksRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE hyk_formations ADD CONSTRAINT cHykFormationsHykEntries FOREIGN KEY (hyk_entry_id) REFERENCES hyk_entries;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesTimeIntervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesGauges FOREIGN KEY (gauge_id) REFERENCES gauges;
+ALTER TABLE main_values ADD CONSTRAINT cMainValuesNamedMainValues FOREIGN KEY (named_value_id) REFERENCES named_main_values;
+ALTER TABLE named_main_values ADD CONSTRAINT cNamedMainValuesMainValueTypes FOREIGN KEY (type_id) REFERENCES main_value_types;
+ALTER TABLE ranges ADD CONSTRAINT cRangesRivers FOREIGN KEY (river_id) REFERENCES rivers;
+ALTER TABLE rivers ADD CONSTRAINT cRiversUnits FOREIGN KEY (wst_unit_id) REFERENCES units;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstColums FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
+ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges;
+ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns;
+ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsTime_intervals FOREIGN KEY (time_interval_id) REFERENCES time_intervals;
+ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts;
+ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES;
+ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers;
+
+-- VIEWS
+
+CREATE VIEW wst_value_table AS
+    SELECT wcv.position AS position,
+           w,
+           (SELECT q
+            FROM   wst_column_q_ranges wcqr
+                   JOIN wst_q_ranges wqr
+                     ON wcqr.wst_q_range_id = wqr.id
+                   JOIN ranges r
+                     ON r.id = wqr.range_id
+            WHERE  wcqr.wst_column_id = wc.id
+                   AND wcv.position BETWEEN r.a AND r.b) AS q,
+           wc.position                                   AS column_pos,
+           w.id                                          AS wst_id
+    FROM   wst_column_values wcv
+           JOIN wst_columns wc
+             ON wcv.wst_column_id = wc.id
+           JOIN wsts w
+             ON wc.wst_id = w.id
+    ORDER  BY wcv.position ASC,
+          wc.position DESC;
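The correlated subquery resolves, for each W value, the Q whose km range (ranges.a .. ranges.b) covers the row's station. A usage sketch (the wst_id filter value 1 is a placeholder):

    SELECT position, w, q, column_pos
      FROM wst_value_table
     WHERE wst_id = 1;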
+
+-- view to select the w values of a WST
+CREATE VIEW wst_w_values  AS
+    SELECT wcv.position   AS km, 
+           wcv.w          AS w,  
+           wc.position    AS column_pos, 
+           w.id           AS wst_id
+        FROM wst_column_values wcv
+        JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+        JOIN wsts w         ON wc.wst_id = w.id
+    ORDER BY wcv.position, wc.position;
+
+-- view to select the q values of a WST
+CREATE VIEW wst_q_values AS
+    SELECT wc.position AS column_pos,
+           wqr.q       AS q, 
+           r.a         AS a, 
+           r.b         AS b,
+           wc.wst_id   AS wst_id
+    FROM wst_column_q_ranges wcqr
+    JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+    JOIN ranges r         ON wqr.range_id        = r.id
+    JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+    ORDER BY wc.position, wcqr.wst_column_id, r.a;
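Together the two views expose the W side and the Q side of a WST. A sketch of combining them for one dataset (wst_id 1 is a placeholder):

    SELECT wv.km, wv.w, qv.q
      FROM wst_w_values wv
      JOIN wst_q_values qv
        ON  qv.wst_id     = wv.wst_id
        AND qv.column_pos = wv.column_pos
        AND wv.km BETWEEN qv.a AND qv.b
     WHERE wv.wst_id = 1;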
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/oracle_create_user.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,12 @@
+--CREATE TABLESPACE for user
+CREATE TABLESPACE "test" DATAFILE '/u01/app/oracle/oradata/XE/test.dbf' SIZE 500M AUTOEXTEND ON NEXT 100M MAXSIZE 1G LOGGING ONLINE PERMANENT BLOCKSIZE 8192 EXTENT MANAGEMENT LOCAL AUTOALLOCATE DEFAULT NOCOMPRESS SEGMENT SPACE MANAGEMENT AUTO;
+--CREATE USER
+CREATE USER test IDENTIFIED BY test;
+-- USER SQL
+ALTER USER test DEFAULT TABLESPACE "test" TEMPORARY TABLESPACE "TEMP" ACCOUNT UNLOCK ;
+-- QUOTA for user on TABLESPACE
+ALTER USER test QUOTA UNLIMITED ON "test";
+GRANT ALL on "MDSYS"."ALL_SDO_GEOM_METADATA" to test ;
+GRANT CREATE SESSION TO test ;
+GRANT CREATE VIEW TO test;
+GRANT CONNECT, RESOURCE TO test;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql-minfo.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,354 @@
+BEGIN;
+
+CREATE SEQUENCE LOCATION_SYSTEM_SEQ;
+
+CREATE TABLE location_system (
+    id          int NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
+
+
+CREATE SEQUENCE ELEVATION_MODEL_SEQ;
+
+CREATE TABLE elevation_model (
+    id          int NOT NULL,
+    name        VARCHAR(32)  NOT NULL,
+    unit_id     int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_unit FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+CREATE SEQUENCE BED_HEIGHT_TYPE_SEQ;
+
+CREATE TABLE bed_height_type (
+    id          int NOT NULL,
+    name        VARCHAR(16)  NOT NULL,
+    description VARCHAR(255),
+    PRIMARY KEY(id)
+);
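As in the Oracle variant, the sequences are not wired up as column defaults, so inserts have to call nextval() explicitly. A minimal PostgreSQL sketch with placeholder values:

    INSERT INTO bed_height_type (id, name, description)
    VALUES (nextval('bed_height_type_seq'), 'Peilung', 'example entry');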
+
+
+
+CREATE SEQUENCE BED_HEIGHT_SINGLE_ID_SEQ;
+
+CREATE TABLE bed_height_single (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    year                    int NOT NULL,
+    sounding_width          int NOT NULL,
+    type_id                 int NOT NULL,
+    location_system_id      int NOT NULL,
+    cur_elevation_model_id  int NOT NULL,
+    old_elevation_model_id  int,
+    range_id                int NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_type FOREIGN KEY (type_id) REFERENCES bed_height_type(id),
+    CONSTRAINT fk_location_system FOREIGN KEY (location_system_id) REFERENCES location_system(id),
+    CONSTRAINT fk_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_HEIGHT_EPOCH_ID_SEQ;
+
+CREATE TABLE bed_height_epoch (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    time_interval_id        int NOT NULL,
+    -- sounding_width          int NOT NULL,
+    -- type_id                 int NOT NULL,
+    cur_elevation_model_id  int NOT NULL,
+    old_elevation_model_id  int,
+    range_id                int NOT NULL,
+    evaluation_by           VARCHAR(255),
+    description             VARCHAR(255),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_time_interval FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id),
+    CONSTRAINT fk_epoch_cur_elevation_model FOREIGN KEY (cur_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_old_elevation_model FOREIGN KEY (old_elevation_model_id) REFERENCES elevation_model(id),
+    CONSTRAINT fk_epoch_range FOREIGN KEY (range_id) REFERENCES ranges(id)
+);
+
+
+CREATE SEQUENCE BED_SINGLE_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_single_values (
+    id                      int NOT NULL,
+    bed_height_single_id    int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    height                  NUMERIC,
+    uncertainty             NUMERIC,
+    data_gap                NUMERIC NOT NULL,
+    sounding_width          NUMERIC NOT NULL,
+    width                   NUMERIC NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_single_values_parent FOREIGN KEY (bed_height_single_id) REFERENCES bed_height_single(id)
+);
+
+
+CREATE SEQUENCE BED_EPOCH_VALUES_ID_SEQ;
+
+CREATE TABLE bed_height_epoch_values (
+    id                      int NOT NULL,
+    bed_height_epoch_id     int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    height                  NUMERIC,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_bed_epoch_values_parent FOREIGN KEY (bed_height_epoch_id) REFERENCES bed_height_epoch(id)
+);
+
+
+CREATE SEQUENCE DEPTHS_ID_SEQ;
+
+CREATE TABLE depths (
+    id      int NOT NULL,
+    lower   NUMERIC NOT NULL,
+    upper   NUMERIC NOT NULL,
+    unit_id int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_ID_SEQ;
+
+CREATE TABLE sediment_density (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    depth_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
+    CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_DENSITY_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_density_values (
+    id                  int NOT NULL,
+    sediment_density_id int NOT NULL,
+    station             NUMERIC NOT NULL,
+    density             NUMERIC NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_sdv_sediment_density_id FOREIGN KEY(sediment_density_id) REFERENCES sediment_density(id)
+);
+
+
+CREATE SEQUENCE MORPHOLOGIC_WIDTH_ID_SEQ;
+
+CREATE TABLE morphologic_width (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mw_river_id FOREIGN KEY(river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_mw_unit_id FOREIGN KEY(unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE MORPH_WIDTH_VALUES_ID_SEQ;
+
+CREATE TABLE morphologic_width_values (
+    id                      int NOT NULL,
+    morphologic_width_id    int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    width                   NUMERIC NOT NULL,
+    description             VARCHAR(256),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_mwv_morphologic_width_id FOREIGN KEY (morphologic_width_id) REFERENCES morphologic_width(id)
+);
+
+
+CREATE SEQUENCE DISCHARGE_ZONE_ID_SEQ;
+
+CREATE TABLE discharge_zone (
+    id                      int NOT NULL,
+    river_id                int NOT NULL,
+    gauge_name              VARCHAR(64)  NOT NULL, -- not ideal, but some gauges have no record in the gauges table
+    value                   NUMERIC NOT NULL,
+    lower_discharge         VARCHAR(16)  NOT NULL,
+    upper_discharge         VARCHAR(16),
+    PRIMARY KEY(id),
+    CONSTRAINT fk_dz_river_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_MODEL_ID_SEQ;
+
+CREATE TABLE flow_velocity_model (
+    id                  int NOT NULL,
+    river_id            int NOT NULL,
+    discharge_zone_id   int NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_fvm_discharge_zone_id FOREIGN KEY (discharge_zone_id) REFERENCES discharge_zone (id)
+);
+
+
+CREATE SEQUENCE FLOW_VELOCITY_M_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_model_values (
+    id                      int NOT NULL,
+    flow_velocity_model_id  int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    q                       NUMERIC NOT NULL,
+    total_channel           NUMERIC NOT NULL,
+    main_channel            NUMERIC NOT NULL,
+    shear_stress            NUMERIC NOT NULL,
+    PRIMARY KEY(id),
+    CONSTRAINT fk_fvv_flow_velocity_model_id FOREIGN KEY (flow_velocity_model_id) REFERENCES flow_velocity_model(id)
+);
+
+
+
+CREATE SEQUENCE FV_MEASURE_ID_SEQ;
+
+CREATE TABLE flow_velocity_measurements (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvm_rivers_id FOREIGN KEY (river_id) REFERENCES rivers(id)
+);
+
+CREATE SEQUENCE FV_MEASURE_VALUES_ID_SEQ;
+
+CREATE TABLE flow_velocity_measure_values (
+    id              int NOT NULL,
+    measurements_id int NOT NULL,
+    station         NUMERIC NOT NULL,
+    datetime        TIMESTAMP,
+    w               NUMERIC NOT NULL,
+    q               NUMERIC NOT NULL,
+    v               NUMERIC NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_fvmv_measurements_id FOREIGN KEY (measurements_id) REFERENCES flow_velocity_measurements (id)
+);
+
+
+CREATE SEQUENCE GRAIN_FRACTION_ID_SEQ;
+
+CREATE TABLE grain_fraction (
+    id      int   NOT NULL,
+    name    VARCHAR(64)    NOT NULL,
+    lower   NUMERIC,
+    upper   NUMERIC,
+    unit_id int,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_gf_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_ID_SEQ;
+
+CREATE TABLE sediment_yield (
+    id                  int NOT NULL,
+    river_id            int NOT NULL,
+    grain_fraction_id   int,
+    unit_id             int NOT NULL,
+    time_interval_id    int NOT NULL,
+    description         VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
+    CONSTRAINT fk_sy_unit_id FOREIGN KEY (unit_id) REFERENCES units(id),
+    CONSTRAINT fk_sy_time_interval_id FOREIGN KEY (time_interval_id) REFERENCES time_intervals(id)
+);
+
+
+CREATE SEQUENCE SEDIMENT_YIELD_VALUES_ID_SEQ;
+
+CREATE TABLE sediment_yield_values (
+    id                  int NOT NULL,
+    sediment_yield_id   int NOT NULL,
+    station             NUMERIC NOT NULL,
+    value               NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_syv_sediment_yield_id FOREIGN KEY (sediment_yield_id) REFERENCES sediment_yield(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_ID_SEQ;
+
+CREATE TABLE waterlevel (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_w_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
+    CONSTRAINT fk_w_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_Q_RANGES_ID_SEQ;
+
+CREATE TABLE waterlevel_q_range (
+    id              int NOT NULL,
+    waterlevel_id   int NOT NULL,
+    q               NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wqr_waterlevel_id FOREIGN KEY (waterlevel_id) REFERENCES waterlevel(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_values(
+    id                      int NOT NULL,
+    waterlevel_q_range_id   int NOT NULL,
+    station                 NUMERIC NOT NULL,
+    w                       NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wv_waterlevel_q_range_id FOREIGN KEY (waterlevel_q_range_id) REFERENCES waterlevel_q_range(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFFERENCE_ID_SEQ;
+
+CREATE TABLE waterlevel_difference (
+    id          int NOT NULL,
+    river_id    int NOT NULL,
+    unit_id     int NOT NULL,
+    description VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wd_river_id FOREIGN KEY (river_id) REFERENCES rivers (id),
+    CONSTRAINT fk_wd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_COLUMN_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_column (
+    id              int NOT NULL,
+    difference_id   int NOT NULL,
+    description     VARCHAR(256),
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdc_difference_id FOREIGN KEY (difference_id) REFERENCES waterlevel_difference (id)
+);
+
+
+CREATE SEQUENCE WATERLEVEL_DIFF_VALUES_ID_SEQ;
+
+CREATE TABLE waterlevel_difference_values (
+    id          int NOT NULL,
+    column_id   int NOT NULL,
+    station     NUMERIC NOT NULL,
+    value       NUMERIC NOT NULL,
+    PRIMARY KEY (id),
+    CONSTRAINT fk_wdv_column_id FOREIGN KEY (column_id) REFERENCES waterlevel_difference_column (id)
+);
+
+COMMIT;
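Note that the MINFO sequences above are created but, unlike in the spatial schema below, never attached to their tables as column defaults, so an importer has to draw ids explicitly. A minimal sketch, using purely hypothetical values:

-- Minimal sketch with hypothetical values; the sequence name is the one
-- created above.
INSERT INTO bed_height_type (id, name, description)
VALUES (NEXTVAL('BED_HEIGHT_TYPE_SEQ'), 'TYPE_X', 'hypothetical example entry');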
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql-spatial.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,161 @@
+BEGIN;
+
+-- Geodaesie/Flussachse+km/achse
+CREATE SEQUENCE RIVER_AXES_ID_SEQ;
+CREATE TABLE river_axes (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    kind     int             NOT NULL DEFAULT 0
+);
+SELECT AddGeometryColumn('river_axes', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE river_axes ALTER COLUMN id SET DEFAULT NEXTVAL('RIVER_AXES_ID_SEQ');
+
+
+-- TODO: TestMe.
+-- Geodaesie/Flussachse+km/km.shp
+CREATE SEQUENCE RIVER_AXES_KM_ID_SEQ;
+CREATE TABLE river_axes_km (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    km       NUMERIC NOT NULL
+);
+SELECT AddGeometryColumn('river_axes_km', 'geom', 31466, 'POINT', 2);
+ALTER TABLE river_axes_km ALTER COLUMN id SET DEFAULT NEXTVAL('RIVER_AXES_KM_ID_SEQ');
+
+
+--Geodaesie/Querprofile/QP-Spuren/qps.shp
+CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
+CREATE TABLE cross_section_tracks (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    km       NUMERIC NOT NULL,
+    z        NUMERIC NOT NULL DEFAULT 0
+);
+SELECT AddGeometryColumn('cross_section_tracks', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE cross_section_tracks ALTER COLUMN id SET DEFAULT NEXTVAL('CROSS_SECTION_TRACKS_ID_SEQ');
+
+
+-- Geodaesie/Linien/rohre-und-spreen
+CREATE SEQUENCE LINES_ID_SEQ;
+CREATE TABLE lines (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    kind     VARCHAR(16) NOT NULL,
+    z        NUMERIC DEFAULT 0
+);
+SELECT AddGeometryColumn('lines', 'geom', 31466, 'LINESTRING', 4);
+ALTER TABLE lines ALTER COLUMN id SET DEFAULT NEXTVAL('LINES_ID_SEQ');
+-- 'kind':
+-- 0: ROHR1
+-- 1: DAMM
+
+
+-- Geodaesie/Bauwerke/Wehre.shp
+CREATE SEQUENCE BUILDINGS_ID_SEQ;
+CREATE TABLE buildings (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    name     VARCHAR(256)
+);
+SELECT AddGeometryColumn('buildings', 'geom', 31466, 'LINESTRING', 2);
+ALTER TABLE buildings ALTER COLUMN id SET DEFAULT NEXTVAL('BUILDINGS_ID_SEQ');
+
+
+-- Geodaesie/Festpunkte/Festpunkte.shp
+CREATE SEQUENCE FIXPOINTS_ID_SEQ;
+CREATE TABLE fixpoints (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    x        int,
+    y        int,
+    km       NUMERIC NOT NULL,
+    HPGP     VARCHAR(2)
+);
+SELECT AddGeometryColumn('fixpoints', 'geom', 31466, 'POINT', 2);
+ALTER TABLE fixpoints ALTER COLUMN id SET DEFAULT NEXTVAL('FIXPOINTS_ID_SEQ');
+
+
+-- Hydrologie/Hydr. Grenzen/talaue.shp
+CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
+CREATE TABLE floodplain (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id)
+);
+SELECT AddGeometryColumn('floodplain', 'geom', 31466, 'POLYGON', 2);
+ALTER TABLE floodplain ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODPLAIN_ID_SEQ');
+
+
+-- Geodaesie/Hoehenmodelle/*
+CREATE SEQUENCE DEM_ID_SEQ;
+CREATE TABLE dem (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    -- XXX Should we use the ranges table instead?
+    lower    NUMERIC,
+    upper    NUMERIC,
+    path     VARCHAR(256),
+    UNIQUE (river_id, lower, upper)
+);
+ALTER TABLE dem ALTER COLUMN id SET DEFAULT NEXTVAL('DEM_ID_SEQ');
+
+
+-- Hydrologie/Einzugsgebiete/EZG.shp
+CREATE SEQUENCE CATCHMENT_ID_SEQ;
+CREATE TABLE catchment (
+    id int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    area NUMERIC,
+    name VARCHAR(256)
+);
+SELECT AddGeometryColumn('catchment','geom',31466,'POLYGON',2);
+ALTER TABLE catchment ALTER COLUMN id SET DEFAULT NEXTVAL('CATCHMENT_ID_SEQ');
+
+
+--Hydrologie/HW-Schutzanlagen/hws.shp
+CREATE SEQUENCE HWS_ID_SEQ;
+CREATE TABLE hws (
+    id int PRIMARY KEY NOT NULL,
+    river_id int REFERENCES rivers(id),
+    hws_facility VARCHAR(256),
+    type VARCHAR(256)
+);
+SELECT AddGeometryColumn('hws','geom',31466,'LINESTRING',2);
+ALTER TABLE hws ALTER COLUMN id SET DEFAULT NEXTVAL('HWS_ID_SEQ');
+
+
+--
+--Hydrologie/UeSG
+--
+-- 'kind' can be one of:
+-- 200 = Messung
+-- 111 = Berechnung->Aktuell->BfG
+-- 112 = Berechnung->Aktuell->Land
+-- 121 = Berechnung->Potenziell->BfG
+-- 122 = Berechnung->Potenziell->Land
+--
+CREATE SEQUENCE FLOODMAPS_SEQ;
+CREATE FUNCTION floodmaps_id_func() RETURNS trigger AS $floodmaps_id_func$
+    BEGIN
+        NEW.id := nextval('floodmaps_seq');
+        RETURN NEW;
+    END;
+$floodmaps_id_func$ LANGUAGE plpgsql;
+
+CREATE TABLE floodmaps (
+    id         int PRIMARY KEY NOT NULL,
+    river_id   int REFERENCES rivers(id),
+    name       varchar(64) NOT NULL,
+    kind       int NOT NULL,
+    diff       real,
+    count      int,
+    area       real,
+    perimeter  real
+);
+SELECT AddGeometryColumn('floodmaps', 'geom', 31466, 'MULTIPOLYGON', 2);
+ALTER TABLE floodmaps DROP CONSTRAINT enforce_geotype_geom;
+ALTER TABLE floodmaps ADD CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POLYGON'::text OR geometrytype(geom) = 'MULTIPOLYGON'::text);
+ALTER TABLE floodmaps ALTER COLUMN id SET DEFAULT NEXTVAL('FLOODMAPS_SEQ');
+
+CREATE TRIGGER floodmaps_id_trigger BEFORE INSERT OR UPDATE ON floodmaps
+    FOR EACH ROW EXECUTE PROCEDURE floodmaps_id_func();
+END;
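The spatial tables wire their id sequences up as column defaults and fix SRID 31466 through AddGeometryColumn. The statements below are a sketch only, assuming a PostGIS database and a hypothetical river with id 1; they are not part of the schema script.

-- Hedged example: insert a river axis (id comes from the sequence default,
-- geometry must be in SRID 31466).
INSERT INTO river_axes (river_id, kind, geom)
VALUES (1, 0, ST_GeomFromText('LINESTRING(2577000 5607000, 2577500 5607400)', 31466));

-- Hedged example: current official (BfG) flood map computations, using the
-- UeSG kind codes documented above.
SELECT name, area, perimeter FROM floodmaps WHERE kind = 111;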
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/doc/schema/postgresql.sql	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,356 @@
+BEGIN;
+
+CREATE SEQUENCE UNITS_ID_SEQ;
+
+CREATE TABLE units (
+    id   int PRIMARY KEY NOT NULL,
+    name VARCHAR(32)     NOT NULL UNIQUE
+);
+
+-- Gewaesser (rivers)
+CREATE SEQUENCE RIVERS_ID_SEQ;
+
+CREATE TABLE rivers (
+    id          int PRIMARY KEY NOT NULL,
+    name        VARCHAR(256)    NOT NULL UNIQUE,
+    km_up       BOOLEAN         NOT NULL DEFAULT true,
+    wst_unit_id int             NOT NULL REFERENCES units(id)
+);
+
+-- Bruecke (bridge), Haefen (harbours), etc.
+CREATE SEQUENCE ATTRIBUTES_ID_SEQ;
+
+CREATE TABLE attributes (
+    id    int PRIMARY KEY NOT NULL,
+    value VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- from/to segments along a river
+CREATE SEQUENCE RANGES_ID_SEQ;
+
+CREATE TABLE ranges (
+    id       int PRIMARY KEY NOT NULL,
+    river_id int             NOT NULL REFERENCES rivers(id),
+    a        NUMERIC         NOT NULL,
+    b        NUMERIC,
+    UNIQUE (river_id, a, b)
+);
+
+-- Lage: 'links' (left), 'rechts' (right), etc.
+CREATE SEQUENCE POSITIONS_ID_SEQ;
+
+CREATE TABLE positions (
+    id    int PRIMARY KEY NOT NULL,
+    value VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Kante: 'oben' (top), 'unten' (bottom)
+CREATE SEQUENCE EDGES_ID_SEQ;
+
+CREATE TABLE edges (
+    id     int PRIMARY KEY NOT NULL,
+    top    NUMERIC,
+    bottom NUMERIC
+);
+
+-- Types of annotations (Hafen, Bruecke, Zufluss, ...)
+CREATE SEQUENCE ANNOTATION_TYPES_ID_SEQ;
+
+CREATE TABLE annotation_types (
+    id    int PRIMARY KEY NOT NULL,
+    name  VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Some object (e.g. Hafen) at a segment of a river
+-- plus its position.
+CREATE SEQUENCE ANNOTATIONS_ID_SEQ;
+
+CREATE TABLE annotations (
+    id           int PRIMARY KEY NOT NULL,
+    range_id     int             NOT NULL REFERENCES ranges(id),
+    attribute_id int             NOT NULL REFERENCES attributes(id),
+    position_id  int REFERENCES positions(id),
+    edge_id      int REFERENCES edges(id),
+    type_id      int REFERENCES annotation_types(id)
+);
+
+-- Pegel (gauges)
+CREATE SEQUENCE GAUGES_ID_SEQ;
+
+CREATE TABLE gauges (
+    id              int PRIMARY KEY NOT NULL,
+    name            VARCHAR(256)    NOT NULL,
+    river_id        int             NOT NULL REFERENCES rivers(id),
+    station         NUMERIC         NOT NULL UNIQUE,
+    aeo             NUMERIC         NOT NULL,
+    official_number int8                     UNIQUE,
+
+    -- Pegelnullpunkt (gauge datum)
+    datum    NUMERIC NOT NULL,
+    -- Streckengueltigkeit (range of validity along the river)
+    range_id int REFERENCES ranges (id),
+
+    UNIQUE (name, river_id),
+    UNIQUE (river_id, station)
+);
+
+-- Type of a Hauptwert (main value): 'W', 'Q', 'D', etc.
+CREATE SEQUENCE MAIN_VALUE_TYPES_ID_SEQ;
+
+CREATE TABLE main_value_types (
+    id   int PRIMARY KEY NOT NULL,
+    name VARCHAR(256)    NOT NULL UNIQUE
+);
+
+-- Named type of a Hauptwert (e.g. HQ100)
+CREATE SEQUENCE NAMED_MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE named_main_values (
+    id      int PRIMARY KEY NOT NULL,
+    name    VARCHAR(256)    NOT NULL UNIQUE,
+    type_id int NOT NULL REFERENCES main_value_types(id),
+    UNIQUE (name, type_id)
+);
+
+-- Table for time intervals
+CREATE SEQUENCE TIME_INTERVALS_ID_SEQ;
+
+CREATE TABLE time_intervals (
+    id         int PRIMARY KEY NOT NULL,
+    start_time TIMESTAMP       NOT NULL,
+    stop_time  TIMESTAMP,
+    CHECK (start_time <= stop_time)
+);
+
+-- Stammdaten (master data)
+CREATE SEQUENCE MAIN_VALUES_ID_SEQ;
+
+CREATE TABLE main_values (
+    id             int PRIMARY KEY NOT NULL,
+    gauge_id       int NOT NULL REFERENCES gauges(id),
+    named_value_id int NOT NULL REFERENCES named_main_values(id),
+    value          NUMERIC NOT NULL,
+
+    time_interval_id int REFERENCES time_intervals(id),
+
+    -- TODO: better checks
+    UNIQUE (gauge_id, named_value_id, time_interval_id)
+);
+
+-- Abflusstafeln (discharge tables)
+CREATE SEQUENCE DISCHARGE_TABLES_ID_SEQ;
+
+CREATE TABLE discharge_tables (
+    id               int PRIMARY KEY NOT NULL,
+    gauge_id         int NOT NULL REFERENCES gauges(id),
+    description      VARCHAR(256) NOT NULL,
+    kind             int NOT NULL DEFAULT 0,
+    time_interval_id int REFERENCES time_intervals(id)
+
+    -- TODO: better checks
+    -- UNIQUE (gauge_id, kind, time_interval_id)
+);
+
+-- Values of the Abflusstafeln
+CREATE SEQUENCE DISCHARGE_TABLE_VALUES_ID_SEQ;
+
+CREATE TABLE discharge_table_values (
+    id       int PRIMARY KEY NOT NULL,
+    table_id int NOT NULL REFERENCES discharge_tables(id),
+    q        NUMERIC NOT NULL,
+    w        NUMERIC NOT NULL,
+
+    UNIQUE (table_id, q, w)
+);
+
+-- WST files
+CREATE SEQUENCE WSTS_ID_SEQ;
+
+CREATE TABLE wsts (
+    id          int PRIMARY KEY NOT NULL,
+    river_id    int NOT NULL REFERENCES rivers(id),
+    description VARCHAR(256) NOT NULL,
+    kind        int NOT NULL DEFAULT 0,
+    -- TODO: more meta infos
+    UNIQUE (river_id, description)
+);
+
+-- columns of WST files
+CREATE SEQUENCE WST_COLUMNS_ID_SEQ;
+
+CREATE TABLE wst_columns (
+    id          int PRIMARY KEY NOT NULL,
+    wst_id      int NOT NULL REFERENCES wsts(id),
+    name        VARCHAR(256) NOT NULL,
+    description VARCHAR(256),
+    position    int NOT NULL DEFAULT 0,
+
+    time_interval_id int REFERENCES time_intervals(id),
+
+    UNIQUE (wst_id, name),
+    UNIQUE (wst_id, position)
+);
+
+-- w values in a WST file column
+CREATE SEQUENCE WST_COLUMN_VALUES_ID_SEQ;
+
+CREATE TABLE wst_column_values (
+    id            int PRIMARY KEY NOT NULL,
+    wst_column_id int NOT NULL REFERENCES wst_columns(id),
+    position      NUMERIC NOT NULL,
+    w             NUMERIC NOT NULL,
+
+    UNIQUE (position, wst_column_id),
+    UNIQUE (position, wst_column_id, w)
+);
+
+-- bind q values to range
+CREATE SEQUENCE WST_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_q_ranges (
+    id       int PRIMARY KEY NOT NULL,
+    range_id int NOT NULL REFERENCES ranges(id),
+    q        NUMERIC NOT NULL
+);
+
+-- bind q ranges to wst columns
+CREATE SEQUENCE WST_COLUMN_Q_RANGES_ID_SEQ;
+
+CREATE TABLE wst_column_q_ranges (
+    id             int PRIMARY KEY NOT NULL,
+    wst_column_id  int NOT NULL REFERENCES wst_columns(id),
+    wst_q_range_id int NOT NULL REFERENCES wst_q_ranges(id),
+
+    UNIQUE (wst_column_id, wst_q_range_id)
+);
+
+CREATE VIEW wst_value_table AS
+    SELECT wcv.position AS position,
+           wcv.w AS w,
+           (SELECT q
+            FROM   wst_column_q_ranges wcqr
+                   JOIN wst_q_ranges wqr
+                     ON wcqr.wst_q_range_id = wqr.id
+                   JOIN ranges r
+                     ON r.id = wqr.range_id
+            WHERE  wcqr.wst_column_id = wc.id
+                   AND wcv.position BETWEEN r.a AND r.b) AS q,
+           wc.position                                   AS column_pos,
+           w.id                                          AS wst_id
+    FROM   wst_column_values wcv
+           JOIN wst_columns wc
+             ON wcv.wst_column_id = wc.id
+           JOIN wsts w
+             ON wc.wst_id = w.id
+    ORDER BY wcv.position ASC,
+             wc.position DESC;
+
+-- view to select the w values of a WST
+CREATE VIEW wst_w_values AS
+    SELECT wcv."position" AS km, 
+           wcv.w          AS w,  
+           wc."position"  AS column_pos, 
+           w.id           AS wst_id
+        FROM wst_column_values wcv
+        JOIN wst_columns wc ON wcv.wst_column_id = wc.id
+        JOIN wsts w         ON wc.wst_id = w.id
+    ORDER BY wcv."position", wc."position";
+
+-- view to select the q values of a WST
+CREATE VIEW wst_q_values AS
+    SELECT wc.position AS column_pos,
+           wqr.q       AS q, 
+           r.a         AS a, 
+           r.b         AS b,
+           wc.wst_id   AS wst_id
+    FROM wst_column_q_ranges wcqr
+    JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id
+    JOIN ranges r         ON wqr.range_id        = r.id
+    JOIN wst_columns wc   ON wcqr.wst_column_id  = wc.id
+    ORDER BY wc.position, wcqr.wst_column_id, r.a;
+
+-- data for the cross-sections
+
+CREATE SEQUENCE CROSS_SECTIONS_ID_SEQ;
+
+CREATE TABLE cross_sections (
+    id               int PRIMARY KEY NOT NULL,
+    river_id         int             NOT NULL REFERENCES rivers(id),
+    time_interval_id int                      REFERENCES time_intervals(id),
+    description      VARCHAR(256)
+);
+
+CREATE SEQUENCE CROSS_SECTION_LINES_ID_SEQ;
+
+CREATE TABLE cross_section_lines (
+    id               int PRIMARY KEY NOT NULL,
+    km               NUMERIC         NOT NULL,
+    cross_section_id int             NOT NULL REFERENCES cross_sections(id),
+    UNIQUE (km, cross_section_id)
+);
+
+CREATE SEQUENCE CROSS_SECTION_POINTS_ID_SEQ;
+
+CREATE TABLE cross_section_points (
+    id                    int PRIMARY KEY NOT NULL,
+    cross_section_line_id int             NOT NULL REFERENCES cross_section_lines(id),
+    col_pos               int             NOT NULL,
+    x                     NUMERIC         NOT NULL,
+    y                     NUMERIC         NOT NULL,
+    UNIQUE (cross_section_line_id, col_pos)
+);
+
+-- Hydraulische Kenngroessen (hydraulic parameters)
+
+CREATE SEQUENCE HYKS_ID_SEQ;
+
+CREATE TABLE hyks (
+    id          int PRIMARY KEY NOT NULL,
+    river_id    int             NOT NULL REFERENCES rivers(id),
+    description VARCHAR(256)    NOT NULL
+);
+
+CREATE SEQUENCE HYK_ENTRIES_ID_SEQ;
+
+CREATE TABLE hyk_entries (
+    id          int PRIMARY KEY NOT NULL,
+    hyk_id      int             NOT NULL REFERENCES hyks(id),
+    km          NUMERIC         NOT NULL,
+    measure     TIMESTAMP,
+    UNIQUE (hyk_id, km)
+);
+
+CREATE SEQUENCE HYK_FORMATIONS_ID_SEQ;
+
+CREATE TABLE hyk_formations (
+    id            int PRIMARY KEY NOT NULL,
+    formation_num int             NOT NULL DEFAULT 0,
+    hyk_entry_id  int             NOT NULL REFERENCES hyk_entries(id),
+    top           NUMERIC         NOT NULL,
+    bottom        NUMERIC         NOT NULL,
+    distance_vl   NUMERIC         NOT NULL,
+    distance_hf   NUMERIC         NOT NULL,
+    distance_vr   NUMERIC         NOT NULL,
+    UNIQUE (hyk_entry_id, formation_num)
+);
+
+CREATE SEQUENCE HYK_FLOW_ZONE_TYPES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zone_types (
+    id          int PRIMARY KEY NOT NULL,
+    name        VARCHAR(50)     NOT NULL UNIQUE,
+    description VARCHAR(256)
+);
+
+CREATE SEQUENCE HYK_FLOW_ZONES_ID_SEQ;
+
+CREATE TABLE hyk_flow_zones (
+    id           int PRIMARY KEY NOT NULL,
+    formation_id int             NOT NULL REFERENCES hyk_formations(id),
+    type_id      int             NOT NULL REFERENCES hyk_flow_zone_types(id),
+    a            NUMERIC         NOT NULL,
+    b            NUMERIC         NOT NULL,
+    CHECK (a <= b)
+);
+
+COMMIT;
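The wst_value_table view defined above exposes a WST as a flat W/Q table keyed by km and column position. A hedged example query (the wst id 1 is purely illustrative):

-- W, Q and column position per km for one WST, read through the helper view.
SELECT vt.position AS km, vt.w, vt.q, vt.column_pos
  FROM wst_value_table vt
 WHERE vt.wst_id = 1
 ORDER BY km, vt.column_pos;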
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/pom.xml	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,113 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>de.intevation.flys</groupId>
+  <artifactId>flys-backend</artifactId>
+  <version>1.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <name>flys-backend</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>hibernate3-maven-plugin</artifactId>
+        <version>2.2</version>
+        <!--
+        <configuration>
+            <componentProperties>
+                <propertyfile>src/main/config/hbm.properties</propertyfile>
+            </componentProperties>
+        </configuration>
+        -->
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.5</source>
+          <target>1.5</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>de.intevation.artifacts.common</groupId>
+      <artifactId>artifacts-common</artifactId>
+      <version>1.0-SNAPSHOT</version>
+    </dependency>    
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-core</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate</groupId>
+      <artifactId>hibernate-entitymanager</artifactId>
+      <version>3.6.5.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.14</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-dbcp</groupId>
+      <artifactId>commons-dbcp</artifactId>
+      <version>1.4</version>
+    </dependency>
+    <dependency>
+      <groupId>postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>8.4-702.jdbc4</version>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-postgis</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.hibernatespatial</groupId>
+        <artifactId>hibernate-spatial-oracle</artifactId>
+        <version>1.1</version>
+    </dependency>
+    <dependency>
+        <groupId>org.postgis</groupId>
+        <artifactId>postgis-jdbc</artifactId>
+        <version>1.3.3</version>
+    </dependency>
+  </dependencies>
+
+  <repositories>
+    <repository>
+      <id>repository.jboss.org/nexus</id>
+      <name>JBoss Repository - Nexus</name>
+      <url>http://repository.jboss.org/nexus/content/groups/public/</url>
+    </repository>
+    <repository>
+        <id>OSGEO GeoTools repo</id>
+        <url>http://download.osgeo.org/webdav/geotools</url>
+    </repository>
+    <repository>
+        <id>Hibernate Spatial repo</id>
+        <url>http://www.hibernatespatial.org/repository</url>
+    </repository>
+  </repositories>
+</project>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/App.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,29 @@
+package de.intevation.flys;
+
+import de.intevation.flys.backend.SessionFactoryProvider;
+
+import org.hibernate.cfg.Configuration;
+
+import org.hibernate.dialect.resolver.DialectFactory;
+
+public class App
+{
+    public static void dumpSchema(Configuration cfg) {
+        System.out.println("BEGIN;");
+
+        String [] setupScript = cfg.generateSchemaCreationScript(
+            DialectFactory.constructDialect(
+                SessionFactoryProvider.DEFAULT_DIALECT));
+
+        for (String line: setupScript) {
+            System.out.println(line + ";");
+        }
+
+        System.out.println("COMMIT;");
+    }
+
+    public static void main(String [] args) {
+        dumpSchema(SessionFactoryProvider.createConfiguration());
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SessionFactoryProvider.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,361 @@
+package de.intevation.flys.backend;
+
+import de.intevation.artifacts.common.utils.Config;
+
+import java.lang.management.ManagementFactory;
+import java.util.Properties;
+
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MalformedObjectNameException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+
+import org.hibernate.SessionFactory;
+
+import org.hibernate.impl.SessionFactoryImpl;
+
+import org.hibernate.jmx.StatisticsService;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.Environment;
+
+import de.intevation.flys.model.Annotation;
+import de.intevation.flys.model.AnnotationType;
+import de.intevation.flys.model.Attribute;
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.BedHeightEpochValue;
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightSingleValue;
+import de.intevation.flys.model.BedHeightType;
+import de.intevation.flys.model.Building;
+import de.intevation.flys.model.Catchment;
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.CrossSectionLine;
+import de.intevation.flys.model.CrossSectionPoint;
+import de.intevation.flys.model.CrossSectionTrack;
+import de.intevation.flys.model.Depth;
+import de.intevation.flys.model.DGM;
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.DischargeTableValue;
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.Edge;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Fixpoint;
+import de.intevation.flys.model.Floodmaps;
+import de.intevation.flys.model.Floodplain;
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.FlowVelocityMeasurementValue;
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.FlowVelocityModelValue;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.GaugeLocation;
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.Hws;
+import de.intevation.flys.model.HydrBoundary;
+import de.intevation.flys.model.HydrBoundaryPoly;
+import de.intevation.flys.model.HYK;
+import de.intevation.flys.model.HYKEntry;
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.HYKFlowZoneType;
+import de.intevation.flys.model.HYKFlowZone;
+import de.intevation.flys.model.Line;
+import de.intevation.flys.model.LocationSystem;
+import de.intevation.flys.model.MainValueType;
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.MorphologicalWidthValue;
+import de.intevation.flys.model.NamedMainValue;
+import de.intevation.flys.model.MainValue;
+import de.intevation.flys.model.Position;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.RiverAxis;
+import de.intevation.flys.model.RiverAxisKm;
+import de.intevation.flys.model.SedimentDensity;
+import de.intevation.flys.model.SedimentDensityValue;
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.SedimentYieldValue;
+import de.intevation.flys.model.TimeInterval;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.Waterlevel;
+import de.intevation.flys.model.WaterlevelDifference;
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+import de.intevation.flys.model.WaterlevelDifferenceValue;
+import de.intevation.flys.model.WaterlevelQRange;
+import de.intevation.flys.model.WaterlevelValue;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.WstColumnQRange;
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.WstQRange;
+
+import org.apache.log4j.Logger;
+
+public final class SessionFactoryProvider
+{
+    private static Logger log = Logger.getLogger(SessionFactoryProvider.class);
+
+    public static final String XPATH_USER =
+        "/artifact-database/backend-database/user/text()";
+
+    public static final String XPATH_PASSWORD =
+        "/artifact-database/backend-database/password/text()";
+
+    public static final String XPATH_DIALECT =
+        "/artifact-database/backend-database/dialect/text()";
+
+    public static final String XPATH_DRIVER =
+        "/artifact-database/backend-database/driver/text()";
+
+    public static final String XPATH_URL =
+        "/artifact-database/backend-database/url/text()";
+
+    public static final String DEFAULT_USER =
+        System.getProperty("flys.backend.user", "flys");
+
+    public static final String DEFAULT_PASSWORD =
+        System.getProperty("flys.backend.password", "flys");
+
+    public static final String DEFAULT_DIALECT =
+        System.getProperty(
+            "flys.backend.dialect",
+            "org.hibernate.dialect.PostgreSQLDialect");
+
+    public static final String DEFAULT_DRIVER =
+        System.getProperty(
+            "flys.backend.driver",
+            "org.postgresql.Driver");
+
+    public static final String DEFAULT_URL =
+        System.getProperty(
+            "flys.backend.url",
+            "jdbc:postgresql://localhost:5432/flys");
+
+    public static final boolean ENABLE_JMX =
+        Boolean.getBoolean("flys.backend.enablejmx");
+
+    private static SessionFactory sessionFactory;
+
+    private SessionFactoryProvider() {
+    }
+
+    public static synchronized SessionFactory getSessionFactory() {
+        if (sessionFactory == null) {
+            String user =
+                Config.getStringXPath(XPATH_USER, DEFAULT_USER);
+            String password =
+                Config.getStringXPath(XPATH_PASSWORD, DEFAULT_PASSWORD);
+            String dialect =
+                Config.getStringXPath(XPATH_DIALECT, DEFAULT_DIALECT);
+            String driver =
+                Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
+            String url =
+                Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+
+            sessionFactory = createSessionFactory(
+                user, password, dialect, driver, url);
+        }
+        return sessionFactory;
+    }
+
+    public static SessionFactory createSessionFactory() {
+        return createSessionFactory(
+            DEFAULT_USER,
+            DEFAULT_PASSWORD,
+            DEFAULT_DIALECT,
+            DEFAULT_DRIVER,
+            DEFAULT_URL);
+    }
+
+    public static SessionFactory createSessionFactory(
+        String user,
+        String password,
+        String dialect,
+        String driver,
+        String url
+    ) {
+        Configuration cfg = createConfiguration(
+            user, password, dialect, driver, url);
+
+        SessionFactory factory = cfg.buildSessionFactory();
+
+        if (ENABLE_JMX) {
+            registerAsMBean(factory);
+        }
+        else {
+            log.info("No JMX support for hibernate.");
+        }
+
+        return factory;
+    }
+
+
+    public static void registerAsMBean(SessionFactory factory) {
+
+        StatisticsService statsMBean = new StatisticsService();
+        statsMBean.setSessionFactory(factory);
+        statsMBean.setStatisticsEnabled(true);
+
+        try {
+            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+            mbs.registerMBean(
+                statsMBean,
+                new ObjectName("Hibernate:application=Statistics"));
+
+            log.info("Enabled JMX support for hibernate.");
+        }
+        catch (MalformedObjectNameException mone) {
+            log.warn(mone, mone);
+        }
+        catch (InstanceAlreadyExistsException iaee) {
+            log.warn(iaee, iaee);
+        }
+        catch (MBeanRegistrationException mbre) {
+            log.warn(mbre, mbre);
+        }
+        catch (NotCompliantMBeanException ncmbe) {
+            log.warn(ncmbe, ncmbe);
+        }
+    }
+
+
+    public static Configuration createConfiguration() {
+        return createConfiguration(
+            DEFAULT_USER,
+            DEFAULT_PASSWORD,
+            DEFAULT_DIALECT,
+            DEFAULT_DRIVER,
+            DEFAULT_URL);
+    }
+
+    public static Configuration createConfiguration(
+        String user,
+        String password,
+        String dialect,
+        String driver,
+        String url
+    ) {
+        Configuration cfg = new Configuration();
+
+        // TODO: Use package reflection here.
+        cfg.addAnnotatedClass(Annotation.class);
+        cfg.addAnnotatedClass(AnnotationType.class);
+        cfg.addAnnotatedClass(Attribute.class);
+        cfg.addAnnotatedClass(BedHeightEpoch.class);
+        cfg.addAnnotatedClass(BedHeightEpochValue.class);
+        cfg.addAnnotatedClass(BedHeightSingle.class);
+        cfg.addAnnotatedClass(BedHeightSingleValue.class);
+        cfg.addAnnotatedClass(BedHeightType.class);
+        cfg.addAnnotatedClass(Building.class);
+        cfg.addAnnotatedClass(Catchment.class);
+        cfg.addAnnotatedClass(CrossSection.class);
+        cfg.addAnnotatedClass(CrossSectionLine.class);
+        cfg.addAnnotatedClass(CrossSectionPoint.class);
+        cfg.addAnnotatedClass(CrossSectionTrack.class);
+        cfg.addAnnotatedClass(Depth.class);
+        cfg.addAnnotatedClass(DGM.class);
+        cfg.addAnnotatedClass(DischargeTable.class);
+        cfg.addAnnotatedClass(DischargeTableValue.class);
+        cfg.addAnnotatedClass(DischargeZone.class);
+        cfg.addAnnotatedClass(Edge.class);
+        cfg.addAnnotatedClass(ElevationModel.class);
+        cfg.addAnnotatedClass(Fixpoint.class);
+        cfg.addAnnotatedClass(Floodplain.class);
+        cfg.addAnnotatedClass(Floodmaps.class);
+        cfg.addAnnotatedClass(FlowVelocityMeasurement.class);
+        cfg.addAnnotatedClass(FlowVelocityMeasurementValue.class);
+        cfg.addAnnotatedClass(FlowVelocityModel.class);
+        cfg.addAnnotatedClass(FlowVelocityModelValue.class);
+        cfg.addAnnotatedClass(Gauge.class);
+        cfg.addAnnotatedClass(GaugeLocation.class);
+        cfg.addAnnotatedClass(GrainFraction.class);
+        cfg.addAnnotatedClass(Hws.class);
+        cfg.addAnnotatedClass(HydrBoundary.class);
+        cfg.addAnnotatedClass(HydrBoundaryPoly.class);
+        cfg.addAnnotatedClass(HYK.class);
+        cfg.addAnnotatedClass(HYKEntry.class);
+        cfg.addAnnotatedClass(HYKFormation.class);
+        cfg.addAnnotatedClass(HYKFlowZoneType.class);
+        cfg.addAnnotatedClass(HYKFlowZone.class);
+        cfg.addAnnotatedClass(Line.class);
+        cfg.addAnnotatedClass(LocationSystem.class);
+        cfg.addAnnotatedClass(MainValueType.class);
+        cfg.addAnnotatedClass(MorphologicalWidth.class);
+        cfg.addAnnotatedClass(MorphologicalWidthValue.class);
+        cfg.addAnnotatedClass(NamedMainValue.class);
+        cfg.addAnnotatedClass(MainValue.class);
+        cfg.addAnnotatedClass(Position.class);
+        cfg.addAnnotatedClass(Range.class);
+        cfg.addAnnotatedClass(River.class);
+        cfg.addAnnotatedClass(RiverAxis.class);
+        cfg.addAnnotatedClass(RiverAxisKm.class);
+        cfg.addAnnotatedClass(SedimentDensity.class);
+        cfg.addAnnotatedClass(SedimentDensityValue.class);
+        cfg.addAnnotatedClass(SedimentYield.class);
+        cfg.addAnnotatedClass(SedimentYieldValue.class);
+        cfg.addAnnotatedClass(TimeInterval.class);
+        cfg.addAnnotatedClass(Unit.class);
+        cfg.addAnnotatedClass(Waterlevel.class);
+        cfg.addAnnotatedClass(WaterlevelDifference.class);
+        cfg.addAnnotatedClass(WaterlevelDifferenceColumn.class);
+        cfg.addAnnotatedClass(WaterlevelDifferenceValue.class);
+        cfg.addAnnotatedClass(WaterlevelQRange.class);
+        cfg.addAnnotatedClass(WaterlevelValue.class);
+        cfg.addAnnotatedClass(WstColumn.class);
+        cfg.addAnnotatedClass(WstColumnQRange.class);
+        cfg.addAnnotatedClass(WstColumnValue.class);
+        cfg.addAnnotatedClass(Wst.class);
+        cfg.addAnnotatedClass(WstQRange.class);
+
+        if (log.isDebugEnabled()) {
+            log.debug("user: "    + user);
+            log.debug("dialect: " + dialect);
+            log.debug("driver: "  + driver);
+            log.debug("url: "     + url);
+        }
+
+        Properties props = new Properties();
+
+        // We rely on our own connection pool
+        props.setProperty(
+            "hibernate.connection.provider_class",
+            "org.hibernate.connection.DBCPConnectionProvider");
+
+        props.setProperty(Environment.DIALECT, dialect);
+        props.setProperty(Environment.USER,    user);
+        props.setProperty(Environment.PASS,    password);
+        props.setProperty(Environment.DRIVER,  driver);
+        props.setProperty(Environment.URL,     url);
+
+        cfg.mergeProperties(props);
+
+        return cfg;
+    }
+
+
+    public static String getProperty(SessionFactoryImpl factory, String key) {
+        Properties props = factory.getProperties();
+        return props.getProperty(key);
+    }
+
+    public static String getUser(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.USER);
+    }
+
+
+    public static String getPass(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.PASS);
+    }
+
+
+    public static String getURL(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.URL);
+    }
+
+
+    public static String getDriver(SessionFactoryImpl factory) {
+        return getProperty(factory, Environment.DRIVER);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SessionHolder.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,43 @@
+package de.intevation.flys.backend;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+
+
+public class SessionHolder
+{
+    private static Logger logger =
+        Logger.getLogger(SessionHolder.class);
+
+    public static final ThreadLocal<Session> HOLDER =
+        new ThreadLocal<Session>() {
+            protected Session initialValue() {
+                return create();
+            }
+        };
+
+    private SessionHolder() {
+    }
+
+    public synchronized static Session create() {
+        logger.debug("create");
+        SessionFactory sessionFactory =
+            SessionFactoryProvider.getSessionFactory();
+        return sessionFactory.openSession();
+    }
+
+    public static Session acquire() {
+        logger.debug("acquire");
+        Session session = create();
+        HOLDER.set(session);
+        return session;
+    }
+
+    public static void release() {
+        logger.debug("release");
+        HOLDER.remove();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/SpatialInfo.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,166 @@
+package de.intevation.flys.backend;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import de.intevation.flys.backend.SessionFactoryProvider;
+import de.intevation.flys.model.Building;
+import de.intevation.flys.model.CrossSectionTrack;
+import de.intevation.flys.model.Fixpoint;
+import de.intevation.flys.model.Line;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.RiverAxis;
+
+
+public class SpatialInfo {
+
+    private static Logger logger = Logger.getLogger(SpatialInfo.class);
+
+    protected static final String RIVERNAME = System.getProperty(
+        "flys.backend.spatial.river", "Saar");
+
+    protected Session session;
+
+
+    public static void main(String[] args) {
+        logger.info("Start SpatialInfo application.");
+
+        SpatialInfo spatial = null;
+
+        try {
+            spatial = new SpatialInfo();
+
+            River river = spatial.getRiver(RIVERNAME);
+            if (river == null) {
+                logger.warn("Could not find river '" + RIVERNAME + "'!");
+                return;
+            }
+
+            logger.info("Spatial information of River '" + RIVERNAME + "'");
+            spatial.doRiverAxisInfo(river);
+            spatial.doCrossSectionTracksInfo(river);
+            spatial.doLinesInfo(river);
+            spatial.doBuildingsInfo(river);
+            spatial.doFixpointsInfo(river);
+        }
+        finally {
+            if (spatial != null) {
+                spatial.close();
+            }
+        }
+
+        logger.info("Finish SpatialInfo application.");
+    }
+
+
+    public SpatialInfo() {
+        session = SessionFactoryProvider
+            .createSessionFactory()
+            .openSession();
+    }
+
+
+    public void close() {
+        session.close();
+    }
+
+
+    protected River getRiver(String rivername) {
+        Query query = session.createQuery(
+            "from River where name =:name");
+        query.setParameter("name", rivername);
+
+        List<River> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No river '" + rivername + "' found!");
+            return null;
+        }
+
+        return list.get(0);
+    }
+
+
+    protected void doRiverAxisInfo(River river) {
+        List<RiverAxis> axis = RiverAxis.getRiverAxis(river.getName());
+        if (axis != null && axis.size() > 0) {
+            logger.debug("TODO: Compute length and boundary.");
+        }
+        else {
+            logger.warn("River has no RiverAxis.");
+        }
+    }
+
+
+    protected void doCrossSectionTracksInfo(River river) {
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river =:river");
+        query.setParameter("river", river);
+
+        List<CrossSectionTrack> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No CrossSectionTracks for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " CrossSectionTracks.");
+        }
+    }
+
+
+    protected void doLinesInfo(River river) {
+        Query query = session.createQuery(
+            "from Line where river =:river");
+        query.setParameter("river", river);
+
+        List<Line> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Lines for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Lines.");
+        }
+    }
+
+
+    protected void doBuildingsInfo(River river) {
+        Query query = session.createQuery(
+            "from Building where river =:river");
+        query.setParameter("river", river);
+
+        List<Building> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Buildings for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Buildings.");
+        }
+    }
+
+
+    protected void doFixpointsInfo(River river) {
+        Query query = session.createQuery(
+            "from Fixpoint where river =:river");
+        query.setParameter("river", river);
+
+        List<Fixpoint> list = query.list();
+
+        if (list == null || list.size() == 0) {
+            logger.warn("No Fixpoints for '" + river.getName() + "' found!");
+            return;
+        }
+        else {
+            logger.info("River contains " + list.size() + " Fixpoints.");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
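The HQL count queries in SpatialInfo have straightforward SQL equivalents against the spatial schema from postgresql-spatial.sql. A hedged sketch (river_id = 1 is illustrative):

SELECT count(*) FROM cross_section_tracks WHERE river_id = 1;
SELECT count(*) FROM lines                WHERE river_id = 1;
SELECT count(*) FROM buildings            WHERE river_id = 1;
SELECT count(*) FROM fixpoints            WHERE river_id = 1;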
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Config.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,158 @@
+package de.intevation.flys.importer;
+
+public class Config
+{
+    public static final String DRY_RUN =
+        "flys.backend.importer.dry.run";
+
+    public static final String INFO_GEW_FILE =
+        "flys.backend.importer.infogew.file";
+
+    public static final String ANNOTATION_TYPES =
+        "flys.backend.importer.annotation.types";
+
+    public static final String SKIP_GAUGES =
+        "flys.backend.importer.skip.gauges";
+
+    public static final String SKIP_ANNOTATIONS =
+        "flys.backend.importer.skip.annotations";
+
+    public static final String SKIP_PRFS =
+        "flys.backend.importer.skip.prfs";
+
+    public static final String SKIP_HYKS =
+        "flys.backend.importer.skip.hyks";
+
+    public static final String SKIP_WST =
+        "flys.backend.importer.skip.wst";
+
+    public static final String SKIP_EXTRA_WSTS =
+        "flys.backend.importer.skip.extra.wsts";
+
+    public static final String SKIP_FIXATIONS =
+        "flys.backend.importer.skip.fixations";
+
+    public static final String SKIP_OFFICIAL_LINES =
+        "flys.backend.importer.skip.official.lines";
+
+    public static final String SKIP_FLOOD_WATER =
+        "flys.backend.importer.skip.flood.water";
+
+    public static final String SKIP_FLOOD_PROTECTION =
+        "flys.backend.importer.skip.flood.protection";
+
+    public static final String SKIP_BED_HEIGHT_SINGLE =
+        "flys.backend.importer.skip.bed.height.single";
+
+    public static final String SKIP_BED_HEIGHT_EPOCH =
+        "flys.backend.importer.skip.bed.height.epoch";
+
+    public static final String SKIP_SEDIMENT_DENSITY =
+        "flys.backend.importer.skip.sediment.density";
+
+    public static final String SKIP_MORPHOLOGICAL_WIDTH =
+        "flys.backend.importer.skip.morphological.width";
+
+    public static final String SKIP_FLOW_VELOCITY =
+        "flys.backend.importer.skip.flow.velocity";
+
+    public static final String SKIP_SEDIMENT_YIELD =
+        "flys.backend.importer.skip.sediment.yield";
+
+    public static final String SKIP_WATERLEVELS =
+        "flys.backend.importer.skip.waterlevels";
+
+    public static final String SKIP_WATERLEVEL_DIFFERENCES =
+        "flys.backend.importer.skip.waterlevel.differences";
+
+
+    public static final Config INSTANCE = new Config();
+
+    private Config () {
+    }
+
+    public boolean dryRun() {
+        return Boolean.getBoolean(DRY_RUN);
+    }
+
+    public String getInfoGewFile() {
+        return System.getProperty(INFO_GEW_FILE);
+    }
+
+    public String getAnnotationTypes() {
+        return System.getProperty(ANNOTATION_TYPES);
+    }
+
+    public boolean skipGauges() {
+        return Boolean.getBoolean(SKIP_GAUGES);
+    }
+
+    public boolean skipAnnotations() {
+        return Boolean.getBoolean(SKIP_ANNOTATIONS);
+    }
+
+    public boolean skipPRFs() {
+        return Boolean.getBoolean(SKIP_PRFS);
+    }
+
+    public boolean skipHYKs() {
+        return Boolean.getBoolean(SKIP_HYKS);
+    }
+
+    public boolean skipWst() {
+        return Boolean.getBoolean(SKIP_WST);
+    }
+
+    public boolean skipExtraWsts() {
+        return Boolean.getBoolean(SKIP_EXTRA_WSTS);
+    }
+
+    public boolean skipFixations() {
+        return Boolean.getBoolean(SKIP_FIXATIONS);
+    }
+
+    public boolean skipOfficialLines() {
+        return Boolean.getBoolean(SKIP_OFFICIAL_LINES);
+    }
+
+    public boolean skipFloodWater() {
+        return Boolean.getBoolean(SKIP_FLOOD_WATER);
+    }
+
+    public boolean skipFloodProtection() {
+        return Boolean.getBoolean(SKIP_FLOOD_PROTECTION);
+    }
+
+    public boolean skipBedHeightSingle() {
+        return Boolean.getBoolean(SKIP_BED_HEIGHT_SINGLE);
+    }
+
+    public boolean skipBedHeightEpoch() {
+        return Boolean.getBoolean(SKIP_BED_HEIGHT_EPOCH);
+    }
+
+    public boolean skipSedimentDensity() {
+        return Boolean.getBoolean(SKIP_SEDIMENT_DENSITY);
+    }
+
+    public boolean skipMorphologicalWidth() {
+        return Boolean.getBoolean(SKIP_MORPHOLOGICAL_WIDTH);
+    }
+
+    public boolean skipFlowVelocity() {
+        return Boolean.getBoolean(SKIP_FLOW_VELOCITY);
+    }
+
+    public boolean skipSedimentYield() {
+        return Boolean.getBoolean(SKIP_SEDIMENT_YIELD);
+    }
+
+    public boolean skipWaterlevels() {
+        return Boolean.getBoolean(SKIP_WATERLEVELS);
+    }
+
+    public boolean skipWaterlevelDifferences() {
+        return Boolean.getBoolean(SKIP_WATERLEVEL_DIFFERENCES);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAnnotation.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Annotation;
+import de.intevation.flys.model.AnnotationType;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.Position;
+import de.intevation.flys.model.Attribute;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Edge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAnnotation
+implements   Comparable<ImportAnnotation>
+{
+    protected ImportAttribute      attribute;
+    protected ImportPosition       position;
+    protected ImportRange          range;
+    protected ImportEdge           edge;
+    protected ImportAnnotationType type;
+
+    protected Annotation      peer;
+
+    public ImportAnnotation() {
+    }
+
+    public ImportAnnotation(
+        ImportAttribute      attribute,
+        ImportPosition       position,
+        ImportRange          range,
+        ImportEdge           edge,
+        ImportAnnotationType type
+    ) {
+        this.attribute = attribute;
+        this.position  = position;
+        this.range     = range;
+        this.edge      = edge;
+        this.type      = type;
+    }
+
+    public int compareTo(ImportAnnotation other) {
+        int d = attribute.compareTo(other.attribute);
+        if (d != 0) {
+            return d;
+        }
+
+        if ((d = position.compareTo(other.position)) != 0) {
+            return d;
+        }
+
+        if ((d = range.compareTo(other.range)) != 0) {
+            return d;
+        }
+
+        if (edge == null && other.edge != null) return -1;
+        if (edge != null && other.edge == null) return +1;
+
+        // Both edges set: compare them. If both are null, fall through
+        // to the type comparison below.
+        if (edge != null && (d = edge.compareTo(other.edge)) != 0) {
+            return d;
+        }
+
+        if (type == null && other.type != null) return -1;
+        if (type != null && other.type == null) return +1;
+        if (type == null && other.type == null) return 0;
+
+        return type.compareTo(other.type);
+    }
+
+    public ImportAttribute getAttribute() {
+        return attribute;
+    }
+
+    public void setAttribute(ImportAttribute attribute) {
+        this.attribute = attribute;
+    }
+
+    public ImportPosition getPosition() {
+        return position;
+    }
+
+    public void setPosition(ImportPosition position) {
+        this.position = position;
+    }
+
+    public ImportRange getRange() {
+        return range;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public ImportEdge getEdge() {
+        return edge;
+    }
+
+    public void setEdge(ImportEdge edge) {
+        this.edge = edge;
+    }
+
+    public ImportAnnotationType getType() {
+        return type;
+    }
+
+    public void setType(ImportAnnotationType type) {
+        this.type = type;
+    }
+
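+    // Looks up an Annotation with exactly these properties in the database
+    // or creates and saves a new one; the result is cached in 'peer'. The
+    // same fetch-or-create pattern is used by most Import* classes below.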
+    public Annotation getPeer(River river) {
+        if (peer == null) {
+            Range          r = range.getPeer(river);
+            Attribute      a = attribute.getPeer();
+            Position       p = position.getPeer();
+            Edge           e = edge != null ? edge.getPeer() : null;
+            AnnotationType t = type != null ? type.getPeer() : null;
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Annotation where "    +
+                "range=:range and "         +
+                "attribute=:attribute and " +
+                "position=:position and "   +
+                "edge=:edge and "           +
+                "type=:type");
+            query.setParameter("range",     r);
+            query.setParameter("attribute", a);
+            query.setParameter("position",  p);
+            query.setParameter("edge",      e);
+            query.setParameter("type",      t);
+            List<Annotation> annotations = query.list();
+            if (annotations.isEmpty()) {
+                peer = new Annotation(r, a, p, e, t);
+                session.save(peer);
+            }
+            else {
+                peer = annotations.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAnnotationType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.AnnotationType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAnnotationType
+implements   Comparable<ImportAnnotationType>
+{
+    protected String         name;
+    protected AnnotationType peer;
+
+    public ImportAnnotationType() {
+    }
+
+    public ImportAnnotationType(String name) {
+        this.name = name;
+    }
+
+    public int compareTo(ImportAnnotationType other) {
+        return name.compareTo(other.name);
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    public AnnotationType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from AnnotationType where name=:name");
+            query.setParameter("name", name);
+            List<AnnotationType> types = query.list();
+            if (types.isEmpty()) {
+                peer = new AnnotationType(name);
+                session.save(peer);
+            }
+            else {
+                peer = types.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportAttribute.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Attribute;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportAttribute
+implements   Comparable<ImportAttribute>
+{
+    protected String value;
+
+    protected Attribute peer;
+
+    public ImportAttribute() {
+    }
+
+    public ImportAttribute(String value) {
+        this.value = value;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public int compareTo(ImportAttribute other) {
+        return value.compareTo(other.value);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) return true;
+        if (!(other instanceof ImportAttribute)) return false;
+        return value.equals(((ImportAttribute)other).value);
+    }
+
+    @Override
+    public int hashCode() {
+        return value.hashCode();
+    }
+
+    public Attribute getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Attribute where value=:value");
+            query.setString("value", value);
+            List<Attribute> attributes = query.list();
+            if (attributes.isEmpty()) {
+                peer = new Attribute(value);
+                session.save(peer);
+            }
+            else {
+                peer = attributes.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeight.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,44 @@
+package de.intevation.flys.importer;
+
+
+import java.sql.SQLException;
+
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+
+
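+// Common interface for the bed height importers (single year and epoch);
+// setters that do not apply to a concrete implementation are no-ops there.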
+public interface ImportBedHeight {
+
+    String getDescription();
+
+    void addValue(ImportBedHeightValue value);
+
+    void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException;
+
+    Object getPeer(River river);
+
+    int getValueCount();
+
+    void setYear(int year);
+
+    void setTimeInterval(ImportTimeInterval timeInterval);
+
+    void setSoundingWidth(int soundingWidth);
+
+    void setDescription(String description);
+
+    void setEvaluationBy(String evaluationBy);
+
+    void setRange(ImportRange range);
+
+    void setType(ImportBedHeightType type);
+
+    void setLocationSystem(ImportLocationSystem locationSystem);
+
+    void setCurElevationModel(ImportElevationModel model);
+
+    void setOldElevationModel(ImportElevationModel model);
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,198 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.TimeInterval;
+
+
+public class ImportBedHeightEpoch implements ImportBedHeight
+{
+    private static Logger log = Logger.getLogger(ImportBedHeightEpoch.class);
+
+    protected String evaluationBy;
+    protected String description;
+
+    protected ImportTimeInterval   timeInterval;
+    protected ImportRange          range;
+    protected ImportElevationModel curElevationModel;
+    protected ImportElevationModel oldElevationModel;
+
+    protected List<ImportBedHeightEpochValue> values;
+
+    protected BedHeightEpoch peer;
+
+
+    public ImportBedHeightEpoch(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportBedHeightEpochValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+
+    public int getValueCount() {
+        return values.size();
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setCurElevationModel(ImportElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    public void setOldElevationModel(ImportElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    public void setYear(int year) {
+        // do nothing
+    }
+
+    public void setSoundingWidth(int soundingWidth) {
+        // do nothing
+    }
+
+    public void setLocationSystem(ImportLocationSystem locationSystem) {
+        // do nothing
+    }
+
+    public void setType(ImportBedHeightType type) {
+        // do nothing
+    }
+
+    @Override
+    public void addValue(ImportBedHeightValue value) {
+        values.add((ImportBedHeightEpochValue) value);
+    }
+
+
+    @Override
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("Store dependencies for epoch: '" + getDescription() + "'");
+
+        if (curElevationModel != null) {
+            curElevationModel.storeDependencies();
+        }
+
+        if (oldElevationModel != null) {
+            oldElevationModel.storeDependencies();
+        }
+
+        BedHeightEpoch peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this file is skipped.
+            return;
+        }
+
+        log.debug("store values now...");
+
+        for (ImportBedHeightEpochValue value: values) {
+            value.storeDependencies(peer);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+
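+    // Looks up or creates the BedHeightEpoch for this river; returns null
+    // (and logs a warning) if the elevation model, time range or km range
+    // is missing.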
+    @Override
+    public BedHeightEpoch getPeer(River river) {
+        if (peer == null) {
+            ElevationModel theCurModel = curElevationModel != null
+                ? curElevationModel.getPeer()
+                : null;
+
+            if (theCurModel == null) {
+                log.warn("Skip file - invalid current elevation model.");
+                return null;
+            }
+
+            TimeInterval theTime = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            if (theTime == null) {
+                log.warn("Skip file - invalid time range.");
+                return null;
+            }
+
+            Range theRange = range != null ? range.getPeer(river) : null;
+
+            if (theRange == null) {
+                log.warn("Skip file - invalid km range.");
+                return null;
+            }
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightEpoch where " +
+                "   river=:river and " +
+                "   timeInterval=:timeInterval and " +
+                "   curElevationModel=:curElevationModel and " +
+                "   range=:range and " +
+                "   evaluationBy=:evaluationBy and " +
+                "   description=:description");
+
+            query.setParameter("river", river);
+            query.setParameter("timeInterval", theTime);
+            query.setParameter("curElevationModel", theCurModel);
+            query.setParameter("range", theRange);
+            query.setParameter("evaluationBy", evaluationBy);
+            query.setParameter("description", description);
+
+            List<BedHeightEpoch> bedHeights = query.list();
+
+            if (bedHeights.isEmpty()) {
+                log.info("Create new BedHeightEpoch DB instance.");
+
+                peer = new BedHeightEpoch(
+                    river,
+                    theTime,
+                    theRange,
+                    theCurModel,
+                    oldElevationModel != null ? oldElevationModel.getPeer() : null,
+                    evaluationBy,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = bedHeights.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpochValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightEpoch;
+import de.intevation.flys.model.BedHeightEpochValue;
+
+
+public class ImportBedHeightEpochValue implements ImportBedHeightValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightEpochValue.class);
+
+
+    private BigDecimal station;
+    private BigDecimal height;
+
+    private BedHeightEpochValue peer;
+
+
+    public ImportBedHeightEpochValue() {
+    }
+
+
+    public ImportBedHeightEpochValue(BigDecimal station, BigDecimal height) {
+        this.station = station;
+        this.height  = height;
+    }
+
+
+    public void storeDependencies(BedHeightEpoch bedHeight) {
+        getPeer(bedHeight);
+    }
+
+
+    public BedHeightEpochValue getPeer(BedHeightEpoch bedHeight) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightEpochValue where " +
+                "   bedHeight=:bedHeight and " +
+                "   station=:station and " +
+                "   height=:height");
+
+            query.setParameter("bedHeight", bedHeight);
+            query.setParameter("station", station);
+            query.setParameter("height", height);
+
+            List<BedHeightEpochValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new BedHeightEpochValue(
+                    bedHeight,
+                    station,
+                    height
+                );
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,189 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightType;
+import de.intevation.flys.model.ElevationModel;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+
+public class ImportBedHeightSingle implements ImportBedHeight
+{
+    private static Logger log = Logger.getLogger(ImportBedHeightSingle.class);
+
+    protected int year;
+    protected int soundingWidth;
+
+    protected String evaluationBy;
+    protected String description;
+
+    protected ImportRange          range;
+    protected ImportBedHeightType  type;
+    protected ImportLocationSystem locationSystem;
+    protected ImportElevationModel curElevationModel;
+    protected ImportElevationModel oldElevationModel;
+
+    protected List<ImportBedHeightSingleValue> values;
+
+    protected BedHeightSingle peer;
+
+
+    public ImportBedHeightSingle(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportBedHeightSingleValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+    public int getValueCount() {
+        return values.size();
+    }
+
+
+    public void setYear(int year) {
+        this.year = year;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        // do nothing
+    }
+
+    public void setSoundingWidth(int soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setType(ImportBedHeightType type) {
+        this.type = type;
+    }
+
+    public void setLocationSystem(ImportLocationSystem locationSystem) {
+        this.locationSystem = locationSystem;
+    }
+
+    public void setCurElevationModel(ImportElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    public void setOldElevationModel(ImportElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @Override
+    public void addValue(ImportBedHeightValue value) {
+        values.add((ImportBedHeightSingleValue) value);
+    }
+
+    @Override
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("Store dependencies for single: '" + getDescription() + "'");
+
+        if (type != null) {
+            type.storeDependencies();
+        }
+
+        if (locationSystem != null) {
+            locationSystem.storeDependencies();
+        }
+
+        if (curElevationModel != null) {
+            curElevationModel.storeDependencies();
+        }
+
+        if (oldElevationModel != null) {
+            oldElevationModel.storeDependencies();
+        }
+
+        BedHeightSingle peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this file is skipped.
+            return;
+        }
+
+        for (ImportBedHeightSingleValue value: values) {
+            value.storeDependencies(peer);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
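+    // Looks up or creates the BedHeightSingle for this river; returns null
+    // if required metadata (type, elevation model, km range) could not be
+    // resolved.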
+    @Override
+    public BedHeightSingle getPeer(River river) {
+        if (peer == null) {
+            BedHeightType  theType     = type != null ? type.getPeer() : null;
+            ElevationModel theCurModel = curElevationModel != null
+                ? curElevationModel.getPeer()
+                : null;
+            Range          theRange    = range != null ? range.getPeer(river) : null;
+
+            if (theType == null || theCurModel == null || theRange == null
+            ||  locationSystem == null) {
+                log.warn("Skip invalid file '" + description + "'");
+                return null;
+            }
+
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightSingle where " +
+                "river=:river and year=:year and soundingWidth=:soundingWidth " +
+                "and type=:type and locationSystem=:locationSystem and " +
+                "curElevationModel=:curElevationModel and range=:range");
+
+            query.setParameter("river", river);
+            query.setParameter("year", year);
+            query.setParameter("soundingWidth", soundingWidth);
+            query.setParameter("type", theType);
+            query.setParameter("locationSystem", locationSystem.getPeer());
+            query.setParameter("curElevationModel", theCurModel);
+            query.setParameter("range", range.getPeer(river));
+
+            List<BedHeightSingle> bedHeights = query.list();
+            if (bedHeights.isEmpty()) {
+                log.info("Create new BedHeightSingle DB instance.");
+
+                peer = new BedHeightSingle(
+                    river,
+                    year,
+                    soundingWidth,
+                    theType,
+                    locationSystem.getPeer(),
+                    theCurModel,
+                    oldElevationModel != null ? oldElevationModel.getPeer() : null,
+                    theRange,
+                    evaluationBy,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = bedHeights.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingleValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightSingle;
+import de.intevation.flys.model.BedHeightSingleValue;
+
+
+public class ImportBedHeightSingleValue implements ImportBedHeightValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightSingleValue.class);
+
+
+    protected ImportBedHeightSingle bedHeight;
+
+    protected BigDecimal station;
+    protected BigDecimal height;
+    protected BigDecimal uncertainty;
+    protected BigDecimal dataGap;
+    protected BigDecimal soundingWidth;
+    protected BigDecimal width;
+
+    protected BedHeightSingleValue peer;
+
+
+    public ImportBedHeightSingleValue(
+        ImportBedHeightSingle bedHeight,
+        BigDecimal station,
+        BigDecimal height,
+        BigDecimal uncertainty,
+        BigDecimal dataGap,
+        BigDecimal soundingWidth,
+        BigDecimal width
+    ) {
+        this.bedHeight     = bedHeight;
+        this.station       = station;
+        this.height        = height;
+        this.uncertainty   = uncertainty;
+        this.dataGap       = dataGap;
+        this.soundingWidth = soundingWidth;
+        this.width         = width;
+    }
+
+
+    public void storeDependencies(BedHeightSingle bedHeight) {
+        getPeer(bedHeight);
+    }
+
+
+    public BedHeightSingleValue getPeer(BedHeightSingle bedHeight) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightSingleValue where " +
+                "   bedHeight=:bedHeight and " +
+                "   station=:station and " +
+                "   height=:height and " +
+                "   uncertainty=:uncertainty and " +
+                "   dataGap=:dataGap and " +
+                "   soundingWidth=:soundingWidth and " +
+                "   width=:width");
+
+            query.setParameter("bedHeight", bedHeight);
+            query.setParameter("station", station);
+            query.setParameter("height", height);
+            query.setParameter("uncertainty", uncertainty);
+            query.setParameter("dataGap", dataGap);
+            query.setParameter("soundingWidth", soundingWidth);
+            query.setParameter("width", width);
+
+            List<BedHeightSingleValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new BedHeightSingleValue(
+                    bedHeight,
+                    station,
+                    height,
+                    uncertainty,
+                    dataGap,
+                    soundingWidth,
+                    width
+                );
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.BedHeightType;
+
+
+public class ImportBedHeightType {
+
+    private static final Logger log =
+        Logger.getLogger(ImportBedHeightType.class);
+
+    protected String name;
+    protected String description;
+
+    protected BedHeightType peer;
+
+
+    public ImportBedHeightType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+
+    public void storeDependencies() {
+        BedHeightType type = getPeer();
+    }
+
+
+    public BedHeightType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from BedHeightType where " +
+                "name=:name and description=:description");
+
+            query.setParameter("name", name);
+            query.setParameter("description", description);
+
+            List<BedHeightType> types = query.list();
+
+            if (types.isEmpty()) {
+                peer = new BedHeightType(name, description);
+                session.save(peer);
+            }
+            else {
+                peer = types.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,7 @@
+package de.intevation.flys.importer;
+
+
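+// Marker interface shared by the single and epoch bed height values; it is
+// only used to type ImportBedHeight.addValue().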
+public interface ImportBedHeightValue {
+
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSection.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+public class ImportCrossSection
+{
+    private static Logger log = Logger.getLogger(ImportCrossSection.class);
+
+    protected ImportRiver                  river;
+    protected String                       description;
+    protected ImportTimeInterval           timeInterval;
+    protected List<ImportCrossSectionLine> lines;
+
+    protected CrossSection peer;
+
+    public ImportCrossSection() {
+    }
+
+    public ImportCrossSection(
+        ImportRiver                  river,
+        String                       description,
+        ImportTimeInterval           timeInterval,
+        List<ImportCrossSectionLine> lines
+    ) {
+        this.river        = river;
+        this.description  = description;
+        this.timeInterval = timeInterval;
+        this.lines        = lines;
+        wireWithLines();
+    }
+
+    public void wireWithLines() {
+        for (ImportCrossSectionLine line: lines) {
+            line.setCrossSection(this);
+        }
+    }
+
+    public ImportRiver getRiver() {
+        return river;
+    }
+
+    public void setRiver(ImportRiver river) {
+        this.river = river;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
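+    // Persists the cross section itself and then each of its lines.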
+    public void storeDependencies() {
+
+        log.info("store cross section '" + description + "'");
+
+        getPeer();
+
+        int i = 1, N = lines.size();
+
+        for (ImportCrossSectionLine line: lines) {
+            line.storeDependencies();
+            log.info("  stored " + i + " lines. remaining: " + (N-i));
+            ++i;
+        }
+    }
+
+    public CrossSection getPeer() {
+
+        if (peer == null) {
+            River r = river.getPeer();
+            TimeInterval t = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            Session session =
+                ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from CrossSection where " +
+                "river=:r and "            +
+                "timeInterval=:t and "     +
+                "description=:d");
+
+            query.setParameter("r", r);
+            query.setParameter("t", t);
+            query.setParameter("d", description);
+
+            List<CrossSection> crossSections = query.list();
+            if (crossSections.isEmpty()) {
+                peer = new CrossSection(r, t, description);
+                session.save(peer);
+            }
+            else {
+                peer = crossSections.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportCrossSectionLine.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,118 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.CrossSection;
+import de.intevation.flys.model.CrossSectionPoint;
+import de.intevation.flys.model.CrossSectionLine;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.Comparator;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class ImportCrossSectionLine
+{
+    public static final Comparator<CrossSectionPoint> INDEX_CMP =
+        new Comparator<CrossSectionPoint>() {
+            public int compare(CrossSectionPoint a, CrossSectionPoint b) {
+                return a.getColPos().compareTo(b.getColPos());
+            }
+        };
+
+    protected Double         km;
+    protected ImportCrossSection crossSection;
+    protected List<XY>           points;
+
+    protected CrossSectionLine peer;
+
+    public ImportCrossSectionLine() {
+    }
+
+    public ImportCrossSectionLine(Double km, List<XY> points) {
+        this.km     = km;
+        this.points = points;
+    }
+
+    public ImportCrossSection getCrossSection() {
+        return crossSection;
+    }
+
+    public void setCrossSection(ImportCrossSection crossSection) {
+        this.crossSection = crossSection;
+    }
+
+    public Double getKm() {
+        return km;
+    }
+
+    public void setKm(Double km) {
+        this.km = km;
+    }
+
+    public void storeDependencies() {
+        storePoints();
+    }
+
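+    // Synchronizes the points of this line with the database: points that
+    // already exist (matched by column position) are updated in place,
+    // missing ones are created.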
+    protected void storePoints() {
+        CrossSectionLine csl = getPeer();
+
+        Map<CrossSectionPoint, CrossSectionPoint> map =
+            new TreeMap<CrossSectionPoint, CrossSectionPoint>(INDEX_CMP);
+
+        // build index for faster collision lookup
+        List<CrossSectionPoint> ps = csl.getPoints();
+        if (ps != null) {
+            for (CrossSectionPoint point: ps) {
+                map.put(point, point);
+            }
+        }
+
+        Session session =
+            ImporterSession.getInstance().getDatabaseSession();
+
+        CrossSectionPoint key = new CrossSectionPoint();
+
+        for (XY xy: points) {
+            key.setColPos(xy.getIndex());
+            CrossSectionPoint csp = map.get(key);
+            if (csp == null) { // create new
+                csp = new CrossSectionPoint(
+                    csl, key.getColPos(),
+                    Double.valueOf(xy.getX()),
+                    Double.valueOf(xy.getY()));
+            }
+            else { // update old
+                csp.setX(Double.valueOf(xy.getX()));
+                csp.setY(Double.valueOf(xy.getY()));
+            }
+            session.save(csp);
+        }
+    }
+
+    public CrossSectionLine getPeer() {
+        if (peer == null) {
+            CrossSection cs = crossSection.getPeer();
+
+            Session session =
+                ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from CrossSectionLine where crossSection=:cs and km=:km");
+            query.setParameter("cs", cs);
+            query.setParameter("km", km);
+
+            List<CrossSectionLine> lines = query.list();
+            if (lines.isEmpty()) {
+                peer = new CrossSectionLine(cs, km);
+                session.save(peer);
+            }
+            else {
+                peer = lines.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDepth.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Depth;
+
+
+public class ImportDepth {
+
+    private static Logger log = Logger.getLogger(ImportDepth.class);
+
+
+    protected Depth peer;
+
+    protected BigDecimal lower;
+    protected BigDecimal upper;
+
+    protected ImportUnit unit;
+
+
+    public ImportDepth(BigDecimal lower, BigDecimal upper, ImportUnit unit) {
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+
+    public void storeDependencies() {
+        log.info("store dependencies");
+
+        getPeer();
+    }
+
+
+    public Depth getPeer() {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from Depth where " +
+                "   lower=:lower and " +
+                "   upper=:upper and " +
+                "   unit=:unit");
+
+            query.setParameter("lower", lower);
+            query.setParameter("upper", upper);
+            query.setParameter("unit", unit.getPeer());
+
+            List<Depth> depths = query.list();
+
+            if (depths.isEmpty()) {
+                log.debug("Create new Depth DB instance.");
+
+                peer = new Depth(lower, upper, unit.getPeer());
+
+                session.save(peer);
+            }
+            else {
+                peer = depths.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeTable.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.TimeInterval;
+
+import org.apache.log4j.Logger;
+
+public class ImportDischargeTable
+{
+    private static Logger log = Logger.getLogger(ImportDischargeTable.class);
+
+    protected DischargeTable peer;
+
+    protected String         description;
+
+    protected Integer        kind;
+
+    protected List<ImportDischargeTableValue> dischargeTableValues;
+
+    protected ImportTimeInterval timeInterval;
+
+    public ImportDischargeTable() {
+        this(0, null);
+    }
+
+    public ImportDischargeTable(int kind, String description) {
+        this.kind            = kind;
+        this.description     = description;
+        dischargeTableValues = new ArrayList<ImportDischargeTableValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addDischargeTableValue(ImportDischargeTableValue value) {
+        dischargeTableValues.add(value);
+    }
+
+
+    public void setDischargeTableValues(List<ImportDischargeTableValue> values){
+        this.dischargeTableValues = values;
+    }
+
+
+    public List<ImportDischargeTableValue> getDischargeTableValues() {
+        return dischargeTableValues;
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public DischargeTable getPeer(Gauge gauge) {
+        if (peer == null) {
+            TimeInterval ti = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from DischargeTable where " +
+                "gauge.id=:gauge and kind=:kind and " +
+                "description=:description and timeInterval=:interval");
+            query.setParameter("gauge",       gauge.getId());
+            query.setParameter("description", description);
+            query.setParameter("kind",        kind);
+            query.setParameter("interval",    ti);
+
+            List<DischargeTable> dischargeTables = query.list();
+            if (dischargeTables.isEmpty()) {
+                peer = new DischargeTable(gauge, description, kind, ti);
+                session.save(peer);
+            }
+            else {
+                peer = dischargeTables.get(0);
+            }
+        }
+
+        return peer;
+    }
+
+
+    public void storeDependencies(Gauge gauge) {
+        log.info("store discharge table '" + description + "'");
+        storeDischargeTableValues(gauge);
+    }
+
+
+    public void storeDischargeTableValues(Gauge gauge) {
+        DischargeTable dischargeTable = getPeer(gauge);
+
+        for (ImportDischargeTableValue value: dischargeTableValues) {
+            value.getPeer(dischargeTable);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeTableValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,34 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.DischargeTableValue;
+
+
+public class ImportDischargeTableValue
+{
+    private BigDecimal q;
+    private BigDecimal w;
+
+    private DischargeTableValue peer;
+
+    public ImportDischargeTableValue() {
+    }
+
+
+    public ImportDischargeTableValue(BigDecimal q, BigDecimal w) {
+        this.q = q;
+        this.w = w;
+    }
+
+
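+    // Unlike most Import* classes this one delegates the fetch-or-create
+    // of the value to the shared ImporterSession.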
+    public DischargeTableValue getPeer(DischargeTable dischargeTable) {
+        if (peer == null) {
+            peer = ImporterSession.getInstance()
+                .getDischargeTableValue(dischargeTable, q, w);
+        }
+
+        return peer;
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeZone.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,90 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.River;
+
+
+public class ImportDischargeZone {
+
+    private static final Logger log =
+        Logger.getLogger(ImportDischargeZone.class);
+
+
+    private String gaugeName;
+
+    private BigDecimal value;
+
+    private String lowerDischarge;
+    private String upperDischarge;
+
+    private DischargeZone peer;
+
+
+    public ImportDischargeZone(
+        String     gaugeName,
+        BigDecimal value,
+        String     lowerDischarge,
+        String     upperDischarge
+    ) {
+        this.gaugeName      = gaugeName;
+        this.value          = value;
+        this.lowerDischarge = lowerDischarge;
+        this.upperDischarge = upperDischarge;
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        getPeer(river);
+    }
+
+
+    public DischargeZone getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from DischargeZone where " +
+                "   river=:river and " +
+                "   gaugeName=:gaugeName and " +
+                "   value=:value"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("gaugeName", gaugeName);
+            query.setParameter("value", value);
+
+            List<DischargeZone> zone = query.list();
+
+            if (zone.isEmpty()) {
+                peer = new DischargeZone(
+                    river,
+                    gaugeName,
+                    value,
+                    lowerDischarge,
+                    upperDischarge);
+
+                session.save(peer);
+            }
+            else {
+                peer = zone.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportEdge.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,75 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Edge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+public class ImportEdge
+implements   Comparable<ImportEdge>
+{
+    protected BigDecimal top;
+    protected BigDecimal bottom;
+
+    protected Edge peer;
+
+    public ImportEdge() {
+    }
+
+    public ImportEdge(BigDecimal top, BigDecimal bottom) {
+        this.top    = top;
+        this.bottom = bottom;
+    }
+
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
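+    // Null-safe comparison; a null value sorts before any non-null value.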
+    private static final int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b != null) return -1;
+        if (a != null && b == null) return +1;
+        if (a == null && b == null) return  0;
+        return a.compareTo(b);
+    }
+
+    public int compareTo(ImportEdge other) {
+        int cmp = compare(top, other.top);
+        return cmp != 0 ? cmp : compare(bottom, other.bottom);
+    }
+
+    public Edge getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Edge where top=:top and bottom=:bottom");
+            query.setParameter("top", top);
+            query.setParameter("bottom", bottom);
+            List<Edge> edges = query.list();
+            if (edges.isEmpty()) {
+                peer = new Edge(top, bottom);
+                session.save(peer);
+            }
+            else {
+                peer = edges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportElevationModel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.ElevationModel;
+
+
+public class ImportElevationModel {
+
+    private static final Logger log =
+        Logger.getLogger(ImportElevationModel.class);
+
+    protected String name;
+
+    protected ImportUnit unit;
+
+    protected ElevationModel peer;
+
+
+    public ImportElevationModel(String name, ImportUnit unit) {
+        this.name = name;
+        this.unit = unit;
+    }
+
+
+    public void storeDependencies() {
+        ElevationModel model = getPeer();
+    }
+
+    public ElevationModel getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from ElevationModel where " +
+                "name=:name and unit=:unit");
+            query.setParameter("name", name);
+            query.setParameter("unit", unit.getPeer());
+            List<ElevationModel> models = query.list();
+
+            if (models.isEmpty()) {
+                log.info("Create new ElevationModel DB instance.");
+
+                peer = new ElevationModel(name, unit.getPeer());
+                session.save(peer);
+            }
+            else {
+                peer = models.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,92 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.River;
+
+
+public class ImportFlowVelocityMeasurement {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityMeasurement.class);
+
+
+    private String description;
+
+    private List<ImportFlowVelocityMeasurementValue> values;
+
+    private FlowVelocityMeasurement peer;
+
+
+    public ImportFlowVelocityMeasurement() {
+        this(null);
+    }
+
+
+    public ImportFlowVelocityMeasurement(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportFlowVelocityMeasurementValue>();
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addValue(ImportFlowVelocityMeasurementValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        FlowVelocityMeasurement peer = getPeer(river);
+
+        for (ImportFlowVelocityMeasurementValue value: values) {
+            value.storeDependencies(peer);
+        }
+    }
+
+
+    public FlowVelocityMeasurement getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityMeasurement where " +
+                "   river=:river and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("description", description);
+
+            List<FlowVelocityMeasurement> measurement = query.list();
+
+            if (measurement.isEmpty()) {
+                peer = new FlowVelocityMeasurement(river, description);
+
+                session.save(peer);
+            }
+            else {
+                peer = measurement.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,100 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityMeasurement;
+import de.intevation.flys.model.FlowVelocityMeasurementValue;
+
+
+public class ImportFlowVelocityMeasurementValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityMeasurementValue.class);
+
+
+    private Date datetime;
+
+    private String description;
+
+    private BigDecimal station;
+    private BigDecimal w;
+    private BigDecimal q;
+    private BigDecimal v;
+
+    private FlowVelocityMeasurementValue peer;
+
+
+    public ImportFlowVelocityMeasurementValue(
+        Date       datetime,
+        BigDecimal station,
+        BigDecimal w,
+        BigDecimal q,
+        BigDecimal v,
+        String     description
+    ) {
+        this.datetime    = datetime;
+        this.station     = station;
+        this.w           = w;
+        this.q           = q;
+        this.v           = v;
+        this.description = description;
+    }
+
+
+
+    public void storeDependencies(FlowVelocityMeasurement measurement)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        getPeer(measurement);
+    }
+
+
+    public FlowVelocityMeasurementValue getPeer(FlowVelocityMeasurement m) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityMeasurementValue where " +
+                "   measurement=:measurement and " +
+                "   station=:station and " +
+                "   datetime=:datetime"
+            );
+
+            query.setParameter("measurement", m);
+            query.setParameter("station", station);
+            query.setParameter("datetime", datetime);
+
+            List<FlowVelocityMeasurementValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new FlowVelocityMeasurementValue(
+                    m,
+                    datetime,
+                    station,
+                    w,
+                    q,
+                    v,
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,118 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.DischargeZone;
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.River;
+
+
+public class ImportFlowVelocityModel {
+
+    private static final Logger log =
+        Logger.getLogger(ImportFlowVelocityModel.class);
+
+
+    private String description;
+
+    private ImportDischargeZone dischargeZone;
+
+    private List<ImportFlowVelocityModelValue> values;
+
+    private FlowVelocityModel peer;
+
+
+    public ImportFlowVelocityModel() {
+        values = new ArrayList<ImportFlowVelocityModelValue>();
+    }
+
+
+    public ImportFlowVelocityModel(
+        ImportDischargeZone dischargeZone,
+        String              description
+    ) {
+        this();
+
+        this.dischargeZone = dischargeZone;
+        this.description   = description;
+    }
+
+
+    public void setDischargeZone(ImportDischargeZone dischargeZone) {
+        this.dischargeZone = dischargeZone;
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public void addValue(ImportFlowVelocityModelValue value) {
+        this.values.add(value);
+    }
+
+
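+    // Stores the discharge zone first (the model references it) and then
+    // the model together with all of its values.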
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        if (dischargeZone == null) {
+            log.warn("skip flow velocity model: No discharge zone specified.");
+            return;
+        }
+
+        dischargeZone.storeDependencies(river);
+
+        FlowVelocityModel peer = getPeer(river);
+
+        int i = 0;
+
+        for (ImportFlowVelocityModelValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " flow velocity model values.");
+    }
+
+
+    public FlowVelocityModel getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            DischargeZone zone = dischargeZone.getPeer(river);
+
+            Query query = session.createQuery(
+                "from FlowVelocityModel where " +
+                "   river=:river and " +
+                "   dischargeZone=:dischargeZone"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("dischargeZone", zone);
+
+            List<FlowVelocityModel> model = query.list();
+
+            if (model.isEmpty()) {
+                peer = new FlowVelocityModel(river, zone);
+                session.save(peer);
+            }
+            else {
+                peer = model.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,77 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.FlowVelocityModel;
+import de.intevation.flys.model.FlowVelocityModelValue;
+
+
+public class ImportFlowVelocityModelValue {
+
+    private BigDecimal station;
+    private BigDecimal q;
+    private BigDecimal totalChannel;
+    private BigDecimal mainChannel;
+    private BigDecimal shearStress;
+
+    private FlowVelocityModelValue peer;
+
+
+    public ImportFlowVelocityModelValue(
+        BigDecimal station,
+        BigDecimal q,
+        BigDecimal totalChannel,
+        BigDecimal mainChannel,
+        BigDecimal shearStress
+    ) {
+        this.station      = station;
+        this.q            = q;
+        this.totalChannel = totalChannel;
+        this.mainChannel  = mainChannel;
+        this.shearStress  = shearStress;
+    }
+
+
+    public void storeDependencies(FlowVelocityModel model)
+    throws SQLException, ConstraintViolationException
+    {
+        getPeer(model);
+    }
+
+
+    public FlowVelocityModelValue getPeer(FlowVelocityModel model) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from FlowVelocityModelValue where " +
+                "   flowVelocity=:model and " +
+                "   station=:station"
+            );
+
+            query.setParameter("model", model);
+            query.setParameter("station", station);
+
+            List<FlowVelocityModelValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new FlowVelocityModelValue(
+                    model, station, q, totalChannel, mainChannel, shearStress);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportGauge.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,268 @@
+package de.intevation.flys.importer;
+
+import java.io.File;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Gauge;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.io.IOException;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.parsers.AtFileParser;
+import de.intevation.flys.importer.parsers.StaFileParser;
+
+public class ImportGauge
+{
+    private static Logger log = Logger.getLogger(ImportGauge.class);
+
+    public static final String HISTORICAL_DISCHARGE_TABLES =
+        "Histor.Abflusstafeln";
+
+    protected ImportRange range;
+
+    protected File        staFile;
+    protected File        atFile;
+
+    protected String      name;
+    protected BigDecimal  aeo;
+    protected BigDecimal  datum;
+    protected BigDecimal  station;
+    protected Long        officialNumber;
+
+    protected Gauge  peer;
+
+    protected ImportDischargeTable dischargeTable;
+
+    protected List<ImportMainValueType>  mainValueTypes;
+    protected List<ImportNamedMainValue> namedMainValues;
+    protected List<ImportMainValue>      mainValues;
+    protected List<ImportDischargeTable> historicalDischargeTables;
+
+    public ImportGauge() {
+        historicalDischargeTables = new ArrayList<ImportDischargeTable>();
+    }
+
+    public ImportGauge(ImportRange range, File staFile, File atFile) {
+        this();
+        this.range   = range;
+        this.staFile = staFile;
+        this.atFile  = atFile;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public void setStaFile(File staFile) {
+        this.staFile = staFile;
+    }
+
+    public File getStaFile() {
+        return staFile;
+    }
+
+    public void setAtFile(File atFile) {
+        this.atFile = atFile;
+    }
+
+    public File getAtFile() {
+        return atFile;
+    }
+
+    public BigDecimal getAeo() {
+        return aeo;
+    }
+
+    public void setAeo(BigDecimal aeo) {
+        this.aeo = aeo;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public BigDecimal getDatum() {
+        return datum;
+    }
+
+    public void setDatum(BigDecimal datum) {
+        this.datum = datum;
+    }
+
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    public Long getOfficialNumber() {
+        return officialNumber;
+    }
+
+    public void setOfficialNumber(Long officialNumber) {
+        this.officialNumber = officialNumber;
+    }
+
+    public ImportDischargeTable getDischargeTable() {
+        return dischargeTable;
+    }
+
+    public void setDischargeTable(ImportDischargeTable dischargeTable) {
+        this.dischargeTable = dischargeTable;
+    }
+
+    public List<ImportMainValueType> getMainValueTypes() {
+        return mainValueTypes;
+    }
+
+    public void setMainValueTypes(List<ImportMainValueType> mainValueTypes) {
+        this.mainValueTypes = mainValueTypes;
+    }
+
+    public List<ImportNamedMainValue> getNamedMainValues() {
+        return namedMainValues;
+    }
+
+    public void setNamedMainValues(List<ImportNamedMainValue> namedMainValues) {
+        this.namedMainValues = namedMainValues;
+    }
+
+    public List<ImportMainValue> getMainValues() {
+        return mainValues;
+    }
+
+    public void setMainValues(List<ImportMainValue> mainValues) {
+        this.mainValues = mainValues;
+    }
+
+    public void parseDependencies() throws IOException {
+        StaFileParser sfp = new StaFileParser();
+        sfp.parse(this);
+
+        AtFileParser afp = new AtFileParser();
+        setDischargeTable(afp.parse(getAtFile()));
+        parseHistoricalDischargeTables();
+    }
+
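+    /**
+     * Scans "Histor.Abflusstafeln/&lt;gauge name&gt;" below the river
+     * directory for *.at files and parses each of them into an additional
+     * ImportDischargeTable.
+     */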
+    public void parseHistoricalDischargeTables() throws IOException {
+        log.info("parse historical discharge tables");
+
+        File riverDir = atFile.getParentFile().getParentFile();
+
+        File histDischargeDir = FileTools.repair(
+            new File(riverDir, HISTORICAL_DISCHARGE_TABLES));
+
+        if (!histDischargeDir.isDirectory() || !histDischargeDir.canRead()) {
+            log.info("cannot find '" + histDischargeDir + "'");
+            return;
+        }
+
+        histDischargeDir = FileTools.repair(
+            new File(histDischargeDir, getName()));
+
+        if (!histDischargeDir.isDirectory() || !histDischargeDir.canRead()) {
+            log.info("cannot find '" + histDischargeDir + "'");
+            return;
+        }
+
+        File [] files = histDischargeDir.listFiles();
+
+        if (files == null) {
+            log.info("cannot read directory '" + histDischargeDir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!name.endsWith(".at")) {
+                continue;
+            }
+            log.info("found at file '" + file.getName() + "'");
+
+            AtFileParser afp = new AtFileParser();
+            historicalDischargeTables.add(
+                afp.parse(file, HISTORICAL_DISCHARGE_TABLES + "/", 1));
+        }
+    }
+
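+    /**
+     * Persists the gauge and its dependent data: main value types, named
+     * main values, main values and the current and historical discharge
+     * tables.
+     */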
+    public void storeDependencies(River river) {
+
+        Gauge gauge = getPeer(river);
+
+        log.info("store main value types");
+        for (ImportMainValueType mainValueType: mainValueTypes) {
+            mainValueType.getPeer();
+        }
+
+        log.info("store named main values");
+        for (ImportNamedMainValue namedMainValue: namedMainValues) {
+            namedMainValue.getPeer();
+        }
+
+        log.info("store main values");
+        for (ImportMainValue mainValue: mainValues) {
+            mainValue.getPeer(river);
+        }
+
+        storeDischargeTable(gauge);
+        storeHistoricalDischargeTable(gauge);
+    }
+
+    public void storeDischargeTable(Gauge gauge) {
+        log.info("store discharge table");
+        dischargeTable.getPeer(gauge);
+        dischargeTable.storeDependencies(gauge);
+    }
+
+    public void storeHistoricalDischargeTable(Gauge gauge) {
+        log.info("store historical discharge tables");
+        for (ImportDischargeTable hdt: historicalDischargeTables) {
+            hdt.storeDependencies(gauge);
+        }
+    }
+
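+    /**
+     * Looks up the Gauge by name and river and creates and saves a new one
+     * (including its range peer) if it does not exist yet.
+     */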
+    public Gauge getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Gauge where name=:name " +
+                "and river.id=:river");
+            query.setString("name", name);
+            query.setParameter("river", river.getId());
+            List<Gauge> gauges = query.list();
+            if (gauges.isEmpty()) {
+                peer = new Gauge(
+                    name, river,
+                    station, aeo, datum,
+                    officialNumber,
+                    range.getPeer(river));
+                session.save(peer);
+            }
+            else {
+                peer = gauges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportGrainFraction.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,89 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.Unit;
+
+
+public class ImportGrainFraction {
+
+    private static final Logger log =
+        Logger.getLogger(ImportGrainFraction.class);
+
+    private String name;
+
+    private Double lower;
+    private Double upper;
+
+    private ImportUnit unit;
+
+    private GrainFraction peer;
+
+
+    public ImportGrainFraction(String name) {
+        this.name = name;
+    }
+
+
+    public ImportGrainFraction(
+        String     name,
+        Double     lower,
+        Double     upper,
+        ImportUnit unit
+    ) {
+        this.name  = name;
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+
+    public void storeDependencies() {
+        log.debug("store dependencies");
+
+        getPeer();
+    }
+
+
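+    /**
+     * Looks up a GrainFraction matching name, lower, upper and unit and
+     * creates and saves a new one if no match exists.
+     */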
+    public GrainFraction getPeer() {
+        log.debug("get peer");
+
+        Unit u = unit != null ? unit.getPeer() : null;
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from GrainFraction where " +
+                "   name=:name and " +
+                "   lower=:lower and " +
+                "   upper=:upper and " +
+                "   unit=:unit"
+            );
+
+            query.setParameter("name", name);
+            query.setParameter("lower", lower);
+            query.setParameter("upper", upper);
+            query.setParameter("unit", u);
+
+            List<GrainFraction> fractions = query.list();
+            if (fractions.isEmpty()) {
+                log.info("create new GrainFraction");
+
+                peer = new GrainFraction(name, lower, upper, u);
+                session.save(peer);
+            }
+            else {
+                peer = fractions.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYK.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,80 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYK;
+import de.intevation.flys.model.River;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+public class ImportHYK
+{
+    private static Logger log = Logger.getLogger(ImportHYK.class);
+
+    protected ImportRiver river;
+    protected String      description;
+
+    protected List<ImportHYKEntry> entries;
+
+    protected HYK peer;
+
+    public ImportHYK() {
+        entries = new ArrayList<ImportHYKEntry>();
+    }
+
+    public ImportHYK(ImportRiver river, String description) {
+        this();
+        this.river       = river;
+        this.description = description;
+    }
+
+    public ImportRiver getRiver() {
+        return river;
+    }
+
+    public void setRiver(ImportRiver river) {
+        this.river = river;
+    }
+
+    public void addEntry(ImportHYKEntry entry) {
+        entries.add(entry);
+        entry.setHYK(this);
+    }
+
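+    /**
+     * Stores the HYK itself and then each of its entries, logging the
+     * progress per km.
+     */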
+    public void storeDependencies() {
+        log.info("store HYK '" + description + "'");
+        getPeer();
+        for (int i = 0, N = entries.size(); i < N; ++i) {
+            ImportHYKEntry entry = entries.get(i);
+            log.info("  store km " + entry.getKm() +
+                " (" + (i+1) + " of " + N + ")");
+            entry.storeDependencies();
+        }
+    }
+
+    public HYK getPeer() {
+        if (peer == null) {
+            River r = river.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYK where river=:river and description=:description");
+            query.setParameter("river", r);
+            query.setParameter("description", description);
+            List<HYK> hyks = query.list();
+            if (hyks.isEmpty()) {
+                peer = new HYK(r, description);
+                session.save(peer);
+            }
+            else {
+                peer = hyks.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKEntry.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKEntry;
+import de.intevation.flys.model.HYK;
+
+import java.util.Date;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.math.BigDecimal;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportHYKEntry
+{
+    protected ImportHYK  hyk;
+    protected BigDecimal km;
+    protected Date       measure;
+
+    protected List<ImportHYKFormation> formations;
+
+    protected HYKEntry peer;
+
+    public ImportHYKEntry() {
+        formations = new ArrayList<ImportHYKFormation>();
+    }
+
+    public ImportHYKEntry(
+        ImportHYK  hyk,
+        BigDecimal km,
+        Date       measure
+    ) {
+        this();
+        this.hyk     = hyk;
+        this.km      = km;
+        this.measure = measure;
+    }
+
+    public ImportHYK getHYK() {
+        return hyk;
+    }
+
+    public void setHYK(ImportHYK hyk) {
+        this.hyk = hyk;
+    }
+
+    public BigDecimal getKm() {
+        return km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
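+    /**
+     * Adds a formation, assigning it the next formation number and
+     * back-linking it to this entry.
+     */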
+    public void addFormation(ImportHYKFormation formation) {
+        int numFormation = formations.size();
+        formations.add(formation);
+        formation.setFormationNum(numFormation);
+        formation.setEntry(this);
+    }
+
+    public void storeDependencies() {
+        getPeer();
+        for (ImportHYKFormation formation: formations) {
+            formation.storeDependencies();
+        }
+    }
+
+    public HYKEntry getPeer() {
+        if (peer == null) {
+            HYK h = hyk.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKEntry where HYK=:hyk " +
+                "and km=:km and measure=:measure");
+            query.setParameter("hyk", h);
+            query.setParameter("km", km);
+            query.setParameter("measure", measure);
+            List<HYKEntry> entries = query.list();
+            if (entries.isEmpty()) {
+                peer = new HYKEntry(h, km, measure);
+                session.save(peer);
+            }
+            else {
+                peer = entries.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFlowZone.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,76 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.HYKFlowZone;
+import de.intevation.flys.model.HYKFlowZoneType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+public class ImportHYKFlowZone
+{
+    protected ImportHYKFormation    formation;
+    protected ImportHYKFlowZoneType type;
+    protected BigDecimal            a;
+    protected BigDecimal            b;
+
+    protected HYKFlowZone peer;
+
+    public ImportHYKFlowZone() {
+    }
+
+    public ImportHYKFlowZone(
+        ImportHYKFormation    formation,
+        ImportHYKFlowZoneType type,
+        BigDecimal            a,
+        BigDecimal            b
+    ) {
+        this.formation = formation;
+        this.type      = type;
+        this.a         = a;
+        this.b         = b;
+    }
+
+    public ImportHYKFormation getFormation() {
+        return formation;
+    }
+
+    public void setFormation(ImportHYKFormation formation) {
+        this.formation = formation;
+    }
+
+    public void storeDependencies() {
+        getPeer();
+    }
+
+    public HYKFlowZone getPeer() {
+        if (peer == null) {
+            HYKFormation    f = formation.getPeer();
+            HYKFlowZoneType t = type.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFlowZone where formation=:formation " +
+                "and type=:type and a=:a and b=:b");
+            query.setParameter("formation", f);
+            query.setParameter("type", t);
+            query.setParameter("a", a);
+            query.setParameter("b", b);
+            List<HYKFlowZone> zones = query.list();
+            if (zones.isEmpty()) {
+                peer = new HYKFlowZone(f, t, a, b);
+                session.save(peer);
+            }
+            else {
+                peer = zones.get(0);
+            }
+
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFlowZoneType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,41 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFlowZoneType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportHYKFlowZoneType
+{
+    private String          name;
+    private HYKFlowZoneType peer;
+
+    public ImportHYKFlowZoneType() {
+    }
+
+    public ImportHYKFlowZoneType(String name) {
+        this.name = name;
+    }
+
+    public HYKFlowZoneType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFlowZoneType where name=:name");
+            query.setParameter("name", name);
+            List<HYKFlowZoneType> flowZoneTypes = query.list();
+            if (flowZoneTypes.isEmpty()) {
+                peer = new HYKFlowZoneType(name);
+                session.save(peer);
+            }
+            else {
+                peer = flowZoneTypes.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportHYKFormation.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,150 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.HYKFormation;
+import de.intevation.flys.model.HYKEntry;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.math.BigDecimal;
+
+public class ImportHYKFormation
+{
+    protected int            formationNum;
+    protected ImportHYKEntry entry;
+    protected BigDecimal     top;
+    protected BigDecimal     bottom;
+    protected BigDecimal     distanceVL;
+    protected BigDecimal     distanceHF;
+    protected BigDecimal     distanceVR;
+
+    protected List<ImportHYKFlowZone> zones;
+
+    protected HYKFormation peer;
+
+    public ImportHYKFormation() {
+        zones = new ArrayList<ImportHYKFlowZone>();
+    }
+
+    public ImportHYKFormation(
+        int            formationNum,
+        ImportHYKEntry entry,
+        BigDecimal     top,
+        BigDecimal     bottom,
+        BigDecimal     distanceVL,
+        BigDecimal     distanceHF,
+        BigDecimal     distanceVR
+    ) {
+        this();
+        this.formationNum = formationNum;
+        this.entry        = entry;
+        this.top          = top;
+        this.bottom       = bottom;
+        this.distanceVL   = distanceVL;
+        this.distanceHF   = distanceHF;
+        this.distanceVR   = distanceVR;
+    }
+
+    public void addFlowZone(ImportHYKFlowZone zone) {
+        zones.add(zone);
+        zone.setFormation(this);
+    }
+
+    public int getFormationNum() {
+        return formationNum;
+    }
+
+    public void setFormationNum(int formationNum) {
+        this.formationNum = formationNum;
+    }
+
+    public ImportHYKEntry getEntry() {
+        return entry;
+    }
+
+    public void setEntry(ImportHYKEntry entry) {
+        this.entry = entry;
+    }
+
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
+    public BigDecimal getDistanceVL() {
+        return distanceVL;
+    }
+
+    public void setDistanceVL(BigDecimal distanceVL) {
+        this.distanceVL = distanceVL;
+    }
+
+    public BigDecimal getDistanceHF() {
+        return distanceHF;
+    }
+
+    public void setDistanceHF(BigDecimal distanceHF) {
+        this.distanceHF = distanceHF;
+    }
+
+    public BigDecimal getDistanceVR() {
+        return distanceVR;
+    }
+
+    public void setDistanceVR(BigDecimal distanceVR) {
+        this.distanceVR = distanceVR;
+    }
+
+    public void storeDependencies() {
+        getPeer();
+        for (ImportHYKFlowZone zone: zones) {
+            zone.storeDependencies();
+        }
+    }
+
+    public HYKFormation getPeer() {
+        if (peer == null) {
+            HYKEntry e = entry.getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            Query query = session.createQuery(
+                "from HYKFormation where formationNum=:formationNum " +
+                "and entry=:entry and top=:top and bottom=:bottom " +
+                "and distanceVL=:distanceVL and distanceHF=:distanceHF " +
+                "and distanceVR=:distanceVR");
+            query.setParameter("formationNum", formationNum);
+            query.setParameter("entry", e);
+            query.setParameter("top", top);
+            query.setParameter("bottom", bottom);
+            query.setParameter("distanceVL", distanceVL);
+            query.setParameter("distanceHF", distanceHF);
+            query.setParameter("distanceVR", distanceVR);
+            List<HYKFormation> formations = query.list();
+            if (formations.isEmpty()) {
+                peer = new HYKFormation(
+                    formationNum, e, top, bottom,
+                    distanceVL, distanceHF, distanceVR);
+                session.save(peer);
+            }
+            else {
+                peer = formations.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportLocationSystem.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.LocationSystem;
+
+
+public class ImportLocationSystem {
+
+    private static final Logger log =
+        Logger.getLogger(ImportLocationSystem.class);
+
+
+    protected String name;
+    protected String description;
+
+    protected LocationSystem peer;
+
+
+    public ImportLocationSystem(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
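+    /**
+     * Makes sure the LocationSystem exists in the database and flushes the
+     * session afterwards.
+     */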
+    public void storeDependencies() {
+        log.info("store LocationSystem '" + name + "'");
+        getPeer();
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+    public LocationSystem getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from LocationSystem where " +
+                "name=:name and description=:description");
+            query.setParameter("name", name);
+            query.setParameter("description", description);
+
+            List<LocationSystem> lss = query.list();
+            if (lss.isEmpty()) {
+                peer = new LocationSystem(name, description);
+                session.save(peer);
+            }
+            else {
+                peer = lss.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMainValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,83 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.MainValue;
+import de.intevation.flys.model.Gauge;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.NamedMainValue;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportMainValue
+{
+    protected ImportGauge          gauge;
+    protected ImportNamedMainValue mainValue;
+    protected BigDecimal           value;
+
+    protected MainValue peer;
+
+    public ImportMainValue() {
+    }
+
+    public ImportMainValue(
+        ImportGauge          gauge,
+        ImportNamedMainValue mainValue,
+        BigDecimal           value
+    ) {
+        this.gauge     = gauge;
+        this.mainValue = mainValue;
+        this.value     = value;
+    }
+
+    public ImportGauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(ImportGauge gauge) {
+        this.gauge = gauge;
+    }
+
+    public ImportNamedMainValue getMainValue() {
+        return mainValue;
+    }
+
+    public void setMainValue(ImportNamedMainValue mainValue) {
+        this.mainValue = mainValue;
+    }
+
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
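+    /**
+     * Fetches the MainValue matching gauge, named main value and value from
+     * the database, or creates and saves a new one.
+     */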
+    public MainValue getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from MainValue where "
+                + "gauge.id=:gauge_id and mainValue.id=:name_id "
+                + "and value=:value");
+            Gauge          g = gauge.getPeer(river);
+            NamedMainValue n = mainValue.getPeer();
+            query.setParameter("gauge_id", g.getId());
+            query.setParameter("name_id",  n.getId());
+            query.setParameter("value",    value);
+            List<MainValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new MainValue(g, n, value, null);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMainValueType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,65 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.MainValueType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportMainValueType
+implements   Comparable<ImportMainValueType>
+{
+    protected String name;
+
+    protected MainValueType peer;
+
+    public ImportMainValueType() {
+    }
+
+    public ImportMainValueType(String name) {
+        this.name = name;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public int compareTo(ImportMainValueType other) {
+        return name.compareTo(other.name);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) return true;
+        if (!(other instanceof ImportMainValueType)) return false;
+        return name.equals(((ImportMainValueType)other).name);
+    }
+
+    @Override
+    public int hashCode() {
+        return name.hashCode();
+    }
+
+    public MainValueType getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from MainValueType where name=:name");
+            query.setString("name", name);
+            List<MainValueType> values = query.list();
+            if (values.isEmpty()) {
+                peer = new MainValueType(name);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidth.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,91 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.River;
+
+
+public class ImportMorphWidth {
+
+    private static Logger log = Logger.getLogger(ImportMorphWidth.class);
+
+
+    protected MorphologicalWidth peer;
+
+    protected ImportUnit unit;
+
+    protected List<ImportMorphWidthValue> values;
+
+
+    public ImportMorphWidth() {
+        this.values = new ArrayList<ImportMorphWidthValue>();
+    }
+
+
+    public void addValue(ImportMorphWidthValue value) {
+        this.values.add(value);
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        MorphologicalWidth peer = getPeer(river);
+
+        log.info("store morphological width values");
+
+        for (ImportMorphWidthValue value: values) {
+            value.storeDependencies(peer);
+        }
+    }
+
+
+    public MorphologicalWidth getPeer(River river) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from MorphologicalWidth where " +
+                "   river=:river and " +
+                "   unit=:unit");
+
+            query.setParameter("river", river);
+            query.setParameter("unit", unit.getPeer());
+
+            List<MorphologicalWidth> widths = query.list();
+
+            if (widths.isEmpty()) {
+                log.debug("Create new MorphologicalWidth DB instance.");
+
+                peer = new MorphologicalWidth(river, unit.getPeer());
+
+                session.save(peer);
+            }
+            else {
+                peer = widths.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidthValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.MorphologicalWidthValue;
+
+
+public class ImportMorphWidthValue {
+
+    private static Logger log = Logger.getLogger(ImportMorphWidthValue.class);
+
+
+    protected MorphologicalWidthValue peer;
+
+    protected BigDecimal station;
+    protected BigDecimal width;
+
+    protected String description;
+
+
+    public ImportMorphWidthValue(
+        BigDecimal station,
+        BigDecimal width,
+        String     description
+    ) {
+        this.station     = station;
+        this.width       = width;
+        this.description = description;
+    }
+
+
+    public void storeDependencies(MorphologicalWidth parent) {
+        getPeer(parent);
+    }
+
+
+    public MorphologicalWidthValue getPeer(MorphologicalWidth parent) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from MorphologicalWidthValue where " +
+                "   morphologicalWidth=:morphologicalWidth and " +
+                "   station=:station and " +
+                "   width=:width and " +
+                "   description=:description");
+
+            query.setParameter("morphologicalWidth", parent);
+            query.setParameter("station", station);
+            query.setParameter("width", width);
+            query.setParameter("description", description);
+
+            List<MorphologicalWidthValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new MorphologicalWidthValue(
+                    parent,
+                    station,
+                    width,
+                    description
+                );
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportNamedMainValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,66 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import de.intevation.flys.model.NamedMainValue;
+import de.intevation.flys.model.MainValueType;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+public class ImportNamedMainValue
+{
+    protected ImportMainValueType mainValueType;
+    protected String              name;
+
+    protected NamedMainValue      peer;
+
+    public ImportNamedMainValue() {
+    }
+
+    public ImportNamedMainValue(
+        ImportMainValueType mainValueType,
+        String              name
+    ) {
+        this.mainValueType = mainValueType;
+        this.name          = name;
+    }
+
+    public ImportMainValueType getMainValueType() {
+        return mainValueType;
+    }
+
+    public void setMainValueType(ImportMainValueType mainValueType) {
+        this.mainValueType = mainValueType;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public NamedMainValue getPeer() {
+        if (peer == null) {
+            MainValueType type = mainValueType.getPeer();
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from NamedMainValue where " +
+                "name=:name and type.id=:id");
+            query.setString("name", name);
+            query.setParameter("id", type.getId());
+            List<NamedMainValue> named = query.list();
+            if (named.isEmpty()) {
+                peer = new NamedMainValue(name, type);
+                session.save(peer);
+            }
+            else {
+                peer = named.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportPosition.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Position;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportPosition
+implements   Comparable<ImportPosition>
+{
+    protected String value;
+
+    protected Position peer;
+
+    public ImportPosition() {
+    }
+
+    public ImportPosition(String value) {
+        this.value = value;
+    }
+
+    public int compareTo(ImportPosition other) {
+        return value.compareTo(other.value);
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public Position getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Position where value=:value");
+            query.setString("value", value);
+            List<Position> positions = query.list();
+            if (positions.isEmpty()) {
+                peer = new Position(value);
+                session.save(peer);
+            }
+            else {
+                peer = positions.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,70 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportRange
+implements   Comparable<ImportRange>
+{
+    private static Logger log = Logger.getLogger(ImportRange.class);
+
+    protected BigDecimal a;
+    protected BigDecimal b;
+
+    protected Range peer;
+
+    public ImportRange() {
+    }
+
+    public ImportRange(BigDecimal a, BigDecimal b) {
+        this.a = a;
+        this.b = b;
+    }
+
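+    /** Null-safe BigDecimal comparison; null is ordered before any non-null value. */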
+    private static final int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b == null) {
+            return 0;
+        }
+        if (a == null && b != null) {
+            return -1;
+        }
+        if (a != null && b == null) {
+            return +1;
+        }
+        return a.compareTo(b);
+    }
+
+    public int compareTo(ImportRange other) {
+        int cmp = compare(a, other.a);
+        if (cmp != 0) return cmp;
+        return compare(b, other.b);
+    }
+
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+
+    public Range getPeer(River river) {
+        if (peer == null) {
+            peer = ImporterSession.getInstance().getRange(river, a, b);
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,1234 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Calendar;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+import de.intevation.artifacts.common.utils.FileTools.HashedFile;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+
+import de.intevation.flys.importer.parsers.BedHeightEpochParser;
+import de.intevation.flys.importer.parsers.BedHeightSingleParser;
+import de.intevation.flys.importer.parsers.FlowVelocityModelParser;
+import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser;
+import de.intevation.flys.importer.parsers.PRFParser;
+import de.intevation.flys.importer.parsers.HYKParser;
+import de.intevation.flys.importer.parsers.MorphologicalWidthParser;
+import de.intevation.flys.importer.parsers.AnnotationsParser;
+import de.intevation.flys.importer.parsers.AnnotationClassifier;
+import de.intevation.flys.importer.parsers.PegelGltParser;
+import de.intevation.flys.importer.parsers.SedimentDensityParser;
+import de.intevation.flys.importer.parsers.SedimentYieldParser;
+import de.intevation.flys.importer.parsers.WstParser;
+import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
+import de.intevation.flys.importer.parsers.WaterlevelParser;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+public class ImportRiver
+{
+    private static Logger log = Logger.getLogger(ImportRiver.class);
+
+    public static final String PEGEL_GLT = "PEGEL.GLT";
+
+    public static final String FIXATIONS = "Fixierungen";
+
+    public static final String EXTRA_LONGITUDINALS =
+        "Zus.L\u00e4ngsschnitte";
+
+    public static final String [] OFFICIAL_LINES_FOLDERS = {
+        "Basisdaten",
+        "Fixierungen" };
+
+    public static final String OFFICIAL_LINES =
+        "Amtl_Linien.wst";
+
+    public static final String FLOOD_WATER = "HW-Marken";
+
+    public static final String FLOOD_PROTECTION =
+        "HW-Schutzanlagen";
+
+    public static final String MINFO_DIR = "Morphologie";
+
+    public static final String BED_HEIGHT_DIR = "Sohlhoehen";
+
+    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";
+
+    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";
+
+    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";
+
+    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";
+
+    public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";
+
+    public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";
+
+    public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";
+
+    public static final String SEDIMENT_YIELD_DIR = "Fracht";
+
+    public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";
+
+    public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";
+
+    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";
+
+    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";
+
+    public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";
+
+
+    protected String name;
+
+    protected File   wstFile;
+
+    protected File   bbInfoFile;
+
+    protected List<ImportGauge> gauges;
+
+    protected List<ImportAnnotation> annotations;
+
+    protected List<ImportHYK> hyks;
+
+    protected List<ImportCrossSection> crossSections;
+
+    protected List<ImportWst> extraWsts;
+
+    protected List<ImportWst> fixations;
+
+    protected List<ImportWst> officialLines;
+
+    protected List<ImportWst> floodWater;
+
+    protected List<ImportWst> floodProtection;
+
+    protected List<ImportBedHeight> bedHeightSingles;
+
+    protected List<ImportBedHeight> bedHeightEpochs;
+
+    protected List<ImportSedimentDensity> sedimentDensities;
+
+    protected List<ImportMorphWidth> morphologicalWidths;
+
+    protected List<ImportFlowVelocityModel> flowVelocityModels;
+
+    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;
+
+    protected List<ImportSedimentYield> sedimentYields;
+
+    protected List<ImportWaterlevel> waterlevels;
+
+    protected List<ImportWaterlevelDifference> waterlevelDiffs;
+
+    protected ImportWst wst;
+
+    protected ImportUnit wstUnit;
+
+    protected AnnotationClassifier annotationClassifier;
+
+    protected River peer;
+
+    public ImportRiver() {
+        hyks                      = new ArrayList<ImportHYK>();
+        crossSections             = new ArrayList<ImportCrossSection>();
+        extraWsts                 = new ArrayList<ImportWst>();
+        fixations                 = new ArrayList<ImportWst>();
+        officialLines             = new ArrayList<ImportWst>();
+        floodWater                = new ArrayList<ImportWst>();
+        floodProtection           = new ArrayList<ImportWst>();
+        sedimentDensities         = new ArrayList<ImportSedimentDensity>();
+        morphologicalWidths       = new ArrayList<ImportMorphWidth>();
+        flowVelocityModels        = new ArrayList<ImportFlowVelocityModel>();
+        flowVelocityMeasurements  = new ArrayList<ImportFlowVelocityMeasurement>();
+        sedimentYields            = new ArrayList<ImportSedimentYield>();
+        waterlevels               = new ArrayList<ImportWaterlevel>();
+        waterlevelDiffs           = new ArrayList<ImportWaterlevelDifference>();
+    }
+
+    public ImportRiver(
+        String               name,
+        File                 wstFile,
+        File                 bbInfoFile,
+        AnnotationClassifier annotationClassifier
+    ) {
+        this();
+        this.name                 = name;
+        this.wstFile              = wstFile;
+        this.bbInfoFile           = bbInfoFile;
+        this.annotationClassifier = annotationClassifier;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public File getWstFile() {
+        return wstFile;
+    }
+
+    public void setWstFile(File wstFile) {
+        this.wstFile = wstFile;
+    }
+
+    public File getBBInfo() {
+        return bbInfoFile;
+    }
+
+    public void setBBInfo(File bbInfoFile) {
+        this.bbInfoFile = bbInfoFile;
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
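+    /**
+     * Returns the "Morphologie" directory, located three parent directories
+     * above the wst file.
+     */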
+    public File getMinfoDir() {
+        File riverDir  = wstFile.getParentFile().getParentFile().getParentFile();
+        return new File(riverDir, MINFO_DIR);
+    }
+
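+    /** Parses all data files belonging to this river in a fixed order. */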
+    public void parseDependencies() throws IOException {
+        parseGauges();
+        parseAnnotations();
+        parsePRFs();
+        parseHYKs();
+        parseWst();
+        parseExtraWsts();
+        parseFixations();
+        parseOfficialLines();
+        parseFloodWater();
+        parseFloodProtection();
+        parseBedHeight();
+        parseSedimentDensity();
+        parseMorphologicalWidth();
+        parseFlowVelocity();
+        parseSedimentYield();
+        parseWaterlevels();
+        parseWaterlevelDifferences();
+    }
+
+    public void parseFloodProtection() throws IOException {
+        if (Config.INSTANCE.skipFloodProtection()) {
+            log.info("skip parsing flood protection");
+            return;
+        }
+
+        log.info("Parse flood protection wst file");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));
+
+        if (!dir.isDirectory() || !dir.canRead()) {
+            log.info("no directory '" + dir + "' found");
+            return;
+        }
+
+        File [] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read '" + dir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.info("found file '" + file.getName() + "'");
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(5);
+            iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription());
+            floodProtection.add(iw);
+        }
+    }
+
+
+    public void parseBedHeight() throws IOException {
+        File minfoDir     = getMinfoDir();
+        File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
+        File singlesDir   = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
+        File epochDir     = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR);
+
+        if (Config.INSTANCE.skipBedHeightSingle()) {
+            log.info("skip parsing bed height single.");
+        }
+        else {
+            log.info("Parse bed height single.");
+            parseBedHeightSingles(singlesDir);
+        }
+
+        if (Config.INSTANCE.skipBedHeightEpoch()) {
+            log.info("skip parsing bed height epochs.");
+        }
+        else {
+            log.info("Parse bed height epochs.");
+            parseBedHeightEpochs(epochDir);
+        }
+    }
+
+
+    protected void parseSedimentDensity() throws IOException {
+        log.debug("Parse sediment density");
+
+        if (Config.INSTANCE.skipSedimentDensity()) {
+            log.info("skip parsing sediment density.");
+            return;
+        }
+
+        File minfoDir = getMinfoDir();
+        File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);
+
+        File[] files = sediment.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot parse directory '" + sediment + "'");
+            return;
+        }
+
+        SedimentDensityParser parser = new SedimentDensityParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        sedimentDensities = parser.getSedimentDensities();
+
+        log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
+    }
+
+
+    protected void parseMorphologicalWidth() throws IOException {
+        log.debug("Parse morphological width");
+
+        if (Config.INSTANCE.skipMorphologicalWidth()) {
+            log.info("skip parsing morphological width.");
+            return;
+        }
+
+        File minfoDir = getMinfoDir();
+        File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);
+
+        File[] files = morphDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot parse directory '" + morphDir + "'");
+            return;
+        }
+
+        MorphologicalWidthParser parser = new MorphologicalWidthParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        morphologicalWidths = parser.getMorphologicalWidths();
+
+        log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
+    }
+
+
+    protected void parseFlowVelocity() throws IOException {
+        log.debug("Parse flow velocity");
+
+        if (Config.INSTANCE.skipFlowVelocity()) {
+            log.info("skip parsing flow velocity");
+            return;
+        }
+
+        File minfoDir   = getMinfoDir();
+        File flowDir    = new File(minfoDir, FLOW_VELOCITY_DIR);
+        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
+        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);
+
+        File[] modelFiles   = modelDir.listFiles();
+        File[] measureFiles = measureDir.listFiles();
+
+        if (modelFiles == null) {
+            log.warn("Cannot parse directory '" + modelDir + "'");
+        }
+        else {
+            FlowVelocityModelParser parser = new FlowVelocityModelParser();
+
+            for (File model: modelFiles) {
+                log.debug("Parse file '" + model + "'");
+                parser.parse(model);
+            }
+
+            flowVelocityModels = parser.getModels();
+        }
+
+        if (measureFiles == null) {
+            log.warn("Cannot parse directory '" + measureDir + "'");
+        }
+        else {
+            FlowVelocityMeasurementParser parser =
+                new FlowVelocityMeasurementParser();
+
+            for (File measurement: measureFiles) {
+                log.debug("Parse file '" + measurement + "'");
+                parser.parse(measurement);
+            }
+
+            flowVelocityMeasurements = parser.getMeasurements();
+        }
+    }
+
+
+    protected void parseSedimentYield() throws IOException {
+        log.debug("Parse sediment yield data");
+
+        if (Config.INSTANCE.skipSedimentYield()) {
+            log.info("skip parsing sediment yield data");
+            return;
+        }
+
+        File minfoDir         = getMinfoDir();
+        File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);
+
+        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
+        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+
+        File[] singles = singleDir.listFiles();
+        File[] epochs  = epochDir.listFiles();
+
+        SedimentYieldParser parser = new SedimentYieldParser();
+
+        if (singles == null || singles.length == 0) {
+            log.warn("Cannot parse directory '" + singleDir + "'");
+        }
+        else {
+            for (File file: singles) {
+                if (file.isDirectory()) {
+                    for (File child: file.listFiles()) {
+                        parser.parse(child);
+                    }
+                }
+                else {
+                    parser.parse(file);
+                }
+            }
+        }
+
+        if (epochs == null || epochs.length == 0) {
+            log.warn("Cannot parse directory '" + epochDir + "'");
+        }
+        else {
+            for (File file: epochs) {
+                if (file.isDirectory()) {
+                    for (File child: file.listFiles()) {
+                        parser.parse(child);
+                    }
+                }
+                else {
+                    parser.parse(file);
+                }
+            }
+        }
+
+        sedimentYields = parser.getSedimentYields();
+    }
+
+
+    protected void parseWaterlevels() throws IOException {
+        if (Config.INSTANCE.skipWaterlevels()) {
+            log.info("skip parsing waterlevels");
+            return;
+        }
+
+        log.info("Parse waterlevels");
+
+        File minfo  = getMinfoDir();
+        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);
+
+        File[] files = wspDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + wspDir + "'");
+            return;
+        }
+
+        WaterlevelParser parser = new WaterlevelParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        waterlevels = parser.getWaterlevels();
+    }
+
+
+    protected void parseWaterlevelDifferences() throws IOException {
+        if (Config.INSTANCE.skipWaterlevelDifferences()) {
+            log.info("skip parsing waterlevel differences");
+            return;
+        }
+
+        log.info("Parse waterlevel differences");
+
+        File minfo  = getMinfoDir();
+        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);
+
+        File[] files = diffDir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot read directory '" + diffDir + "'");
+            return;
+        }
+
+        WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        waterlevelDiffs = parser.getDifferences();
+    }
+
+
+    protected void parseBedHeightSingles(File dir) throws IOException {
+        log.debug("Parse bed height singles");
+
+        File[] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot parse directory '" + dir + "'");
+            return;
+        }
+
+        BedHeightSingleParser parser = new BedHeightSingleParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        bedHeightSingles = parser.getBedHeights();
+    }
+
+
+    protected void parseBedHeightEpochs(File dir) throws IOException {
+        log.debug("Parse bed height epochs");
+
+        File[] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("Cannot parse directory '" + dir + "'");
+            return;
+        }
+
+        BedHeightEpochParser parser = new BedHeightEpochParser();
+
+        for (File file: files) {
+            parser.parse(file);
+        }
+
+        bedHeightEpochs = parser.getBedHeights();
+    }
+
+
+    public void parseFloodWater() throws IOException {
+        if (Config.INSTANCE.skipFloodWater()) {
+            log.info("skip parsing flod water");
+            return;
+        }
+
+        log.info("Parse flood water wst file");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));
+
+        if (!dir.isDirectory() || !dir.canRead()) {
+            log.info("no directory '" + dir + "' found");
+            return;
+        }
+
+        File [] files = dir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read '" + dir + "'");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.info("found file '" + file.getName() + "'");
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(4);
+            iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
+            floodWater.add(iw);
+        }
+    }
+
+    public void parseOfficialLines() throws IOException {
+        if (Config.INSTANCE.skipOfficialLines()) {
+            log.info("skip parsing official lines");
+            return;
+        }
+
+        log.info("Parse official wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        for (String folder: OFFICIAL_LINES_FOLDERS) {
+            File dir = FileTools.repair(new File(riverDir, folder));
+
+            if (!dir.isDirectory() || !dir.canRead()) {
+                log.info("no directory '" + folder + "' found");
+                continue;
+            }
+
+            File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
+            if (!file.isFile() || !file.canRead()) {
+                log.warn("no official lines wst file found");
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(3);
+            iw.setDescription(folder + "/" + iw.getDescription());
+            officialLines.add(iw);
+        } // for all folders
+
+    }
+
+    public void parseFixations() throws IOException {
+        if (Config.INSTANCE.skipFixations()) {
+            log.info("skip parsing fixations");
+            return;
+        }
+
+        log.info("Parse fixation wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File fixDir = FileTools.repair(
+            new File(riverDir, FIXATIONS));
+
+        if (!fixDir.isDirectory() || !fixDir.canRead()) {
+            log.info("no fixation wst file directory found");
+            return;
+        }
+
+        File [] files = fixDir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read fixations wst file directory");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!name.endsWith(".wst")) {
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(2);
+            iw.setDescription(FIXATIONS + "/" + iw.getDescription());
+            fixations.add(iw);
+        }
+    }
+
+    public void parseExtraWsts() throws IOException {
+        if (Config.INSTANCE.skipExtraWsts()) {
+            log.info("skip parsing extra WST files");
+            return;
+        }
+
+        log.info("Parse extra longitudinal wst files");
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+
+        File extraDir = FileTools.repair(
+            new File(riverDir, EXTRA_LONGITUDINALS));
+
+        if (!extraDir.isDirectory() || !extraDir.canRead()) {
+            log.info("no extra longitudinal wst file directory found");
+            return;
+        }
+
+        File [] files = extraDir.listFiles();
+
+        if (files == null) {
+            log.warn("cannot read extra longitudinal wst file directory");
+            return;
+        }
+
+        for (File file: files) {
+            if (!file.isFile() || !file.canRead()) {
+                continue;
+            }
+            String name = file.getName().toLowerCase();
+            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
+                continue;
+            }
+            log.debug("Found WST file: " + file);
+
+            WstParser wstParser = new WstParser();
+            wstParser.parse(file);
+            ImportWst iw = wstParser.getWst();
+            iw.setKind(1);
+            iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription());
+            extraWsts.add(iw);
+        }
+
+    }
+
+    public void parseWst() throws IOException {
+        if (Config.INSTANCE.skipWst()) {
+            log.info("skip parsing WST file");
+            return;
+        }
+
+        WstParser wstParser = new WstParser();
+        wstParser.parse(wstFile);
+        wst = wstParser.getWst();
+    }
+
+    public void parseGauges() throws IOException {
+        if (Config.INSTANCE.skipGauges()) {
+            log.info("skip parsing gauges");
+            return;
+        }
+
+        File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
+        gltFile = FileTools.repair(gltFile);
+
+        if (!gltFile.isFile() || !gltFile.canRead()) {
+            log.warn("cannot read gauges from '" + gltFile + "'");
+            return;
+        }
+
+        PegelGltParser pgltp = new PegelGltParser();
+        pgltp.parse(gltFile);
+
+        gauges = pgltp.getGauges();
+
+        for (ImportGauge gauge: gauges) {
+            gauge.parseDependencies();
+        }
+    }
+
+    public void parseAnnotations() throws IOException {
+        if (Config.INSTANCE.skipAnnotations()) {
+            log.info("skip parsing annotations");
+            return;
+        }
+
+        File riverDir = wstFile.getParentFile().getParentFile();
+        AnnotationsParser aparser =
+            new AnnotationsParser(annotationClassifier);
+        aparser.parse(riverDir);
+
+        annotations = aparser.getAnnotations();
+    }
+
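+    // HYK files found below the river directory are offered via the callback;
+    // files with identical content (compared through HashedFile) are reported
+    // as duplicates and skipped. parsePRFs() below uses the same mechanism.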
+    public void parseHYKs() {
+        if (Config.INSTANCE.skipHYKs()) {
+            log.info("skip parsing HYK files");
+            return;
+        }
+
+        log.info("looking for HYK files");
+        HYKParser parser = new HYKParser();
+        File riverDir = wstFile
+            .getParentFile()  // Basisdaten
+            .getParentFile()  // Hydrologie
+            .getParentFile(); // <river>
+
+        parser.parseHYKs(riverDir, new HYKParser.Callback() {
+
+            Set<HashedFile> hfs = new HashSet<HashedFile>();
+
+            @Override
+            public boolean hykAccept(File file) {
+                HashedFile hf = new HashedFile(file);
+                boolean success = hfs.add(hf);
+                if (!success) {
+                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
+                }
+                return success;
+            }
+
+            @Override
+            public void hykParsed(HYKParser parser) {
+                log.debug("callback from HYK parser");
+                ImportHYK hyk = parser.getHYK();
+                hyk.setRiver(ImportRiver.this);
+                hyks.add(hyk);
+            }
+        });
+    }
+
+    public void parsePRFs() {
+        if (Config.INSTANCE.skipPRFs()) {
+            log.info("skip parsing PRFs");
+            return;
+        }
+
+        log.info("looking for PRF files");
+        PRFParser parser = new PRFParser();
+        File riverDir = wstFile
+            .getParentFile()  // Basisdaten
+            .getParentFile()  // Hydrologie
+            .getParentFile(); // <river>
+
+        parser.parsePRFs(riverDir, new PRFParser.Callback() {
+
+            Set<HashedFile> prfs = new HashSet<HashedFile>();
+
+            @Override
+            public boolean prfAccept(File file) {
+                HashedFile hf = new HashedFile(file);
+                boolean success = prfs.add(hf);
+                if (!success) {
+                    log.warn("PRF file '" + file + "' seems to be a duplicate.");
+                }
+                return success;
+            }
+
+            @Override
+            public void prfParsed(PRFParser parser) {
+                log.debug("callback from PRF parser");
+
+                String  description = parser.getDescription();
+                Integer year        = parser.getYear();
+                ImportTimeInterval ti = year != null
+                    ? new ImportTimeInterval(yearToDate(year))
+                    : null;
+
+                List<ImportCrossSectionLine> lines =
+                    new ArrayList<ImportCrossSectionLine>();
+
+                for (Map.Entry<Double, List<XY>> entry: parser.getData().entrySet()) {
+                    Double km         = entry.getKey();
+                    List<XY>   points = entry.getValue();
+                    lines.add(new ImportCrossSectionLine(km, points));
+                }
+
+                crossSections.add(new ImportCrossSection(
+                    ImportRiver.this, description, ti, lines));
+            }
+        });
+    }
+
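+    /**
+     * Maps a survey year to a fixed reference date: June 15th, 12:00:00 of
+     * that year (Calendar month 5 is June), truncated to whole seconds.
+     * Example: yearToDate(2003) yields 2003-06-15 12:00:00.
+     */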
+    public static Date yearToDate(int year) {
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 5, 15, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+
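+    // Note on ordering: storeWstUnit() has to run first, because getPeer()
+    // needs that unit when it creates a new River; all following store*()
+    // methods call getPeer() themselves.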
+    public void storeDependencies() {
+        storeWstUnit();
+        storeAnnotations();
+        storeHYKs();
+        storeCrossSections();
+        storeGauges();
+        storeWst();
+        storeExtraWsts();
+        storeFixations();
+        storeOfficialLines();
+        storeFloodWater();
+        storeFloodProtection();
+        storeBedHeight();
+        storeSedimentDensity();
+        storeMorphologicalWidth();
+        storeFlowVelocity();
+        storeSedimentYield();
+        storeWaterlevels();
+        storeWaterlevelDifferences();
+    }
+
+    public void storeWstUnit() {
+        if (wst == null) {
+            wstUnit = new ImportUnit("NN + m");
+        }
+        else {
+            wstUnit = wst.getUnit();
+        }
+    }
+
+    public void storeHYKs() {
+        if (!Config.INSTANCE.skipHYKs()) {
+            log.info("store HYKs");
+            getPeer();
+            for (ImportHYK hyk: hyks) {
+                hyk.storeDependencies();
+            }
+        }
+    }
+
+    public void storeCrossSections() {
+        if (!Config.INSTANCE.skipPRFs()) {
+            log.info("store cross sections");
+            getPeer();
+            for (ImportCrossSection crossSection: crossSections) {
+                crossSection.storeDependencies();
+            }
+        }
+    }
+
+    public void storeWst() {
+        if (!Config.INSTANCE.skipWst()) {
+            River river = getPeer();
+            wst.storeDependencies(river);
+        }
+    }
+
+    public void storeFixations() {
+        if (!Config.INSTANCE.skipFixations()) {
+            log.info("store fixation wsts");
+            River river = getPeer();
+            for (ImportWst wst: fixations) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeExtraWsts() {
+        if (!Config.INSTANCE.skipExtraWsts()) {
+            log.info("store extra wsts");
+            River river = getPeer();
+            for (ImportWst wst: extraWsts) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeOfficialLines() {
+        if (!Config.INSTANCE.skipOfficialLines()) {
+            log.info("store official lines wsts");
+            River river = getPeer();
+            for (ImportWst wst: officialLines) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeFloodWater() {
+        if (!Config.INSTANCE.skipFloodWater()) {
+            log.info("store flood water wsts");
+            River river = getPeer();
+            for (ImportWst wst: floodWater) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+    public void storeFloodProtection() {
+        if (!Config.INSTANCE.skipFloodProtection()) {
+            log.info("store flood protection wsts");
+            River river = getPeer();
+            for (ImportWst wst: floodProtection) {
+                log.debug("name: " + wst.getDescription());
+                wst.storeDependencies(river);
+            }
+        }
+    }
+
+
+    public void storeBedHeight() {
+        if (!Config.INSTANCE.skipBedHeightSingle()) {
+            log.info("store bed heights single");
+            storeBedHeightSingle();
+        }
+
+        if (!Config.INSTANCE.skipBedHeightEpoch()) {
+            log.info("store bed height epochs");
+            storeBedHeightEpoch();
+        }
+    }
+
+
+    private void storeBedHeightSingle() {
+        River river = getPeer();
+
+        if (bedHeightSingles != null) {
+            for (ImportBedHeight tmp: bedHeightSingles) {
+                ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;
+
+                String desc = single.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    single.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!", cve);
+                }
+            }
+        }
+        else {
+            log.info("No single bed heights to store.");
+        }
+    }
+
+
+    private void storeBedHeightEpoch() {
+        River river = getPeer();
+
+        if (bedHeightEpochs != null) {
+            for (ImportBedHeight tmp: bedHeightEpochs) {
+                ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;
+
+                String desc = epoch.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    epoch.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!", cve);
+                }
+            }
+        }
+        else {
+            log.info("No epoch bed heights to store.");
+        }
+    }
+
+    public void storeSedimentDensity() {
+        if (!Config.INSTANCE.skipSedimentDensity()) {
+            log.info("store sediment density");
+
+            River river = getPeer();
+
+            for (ImportSedimentDensity density: sedimentDensities) {
+                String desc = density.getDescription();
+
+                log.debug("name: " + desc);
+
+                try {
+                    density.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("File '" + desc + "' is broken!", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("File '" + desc + "' is broken!", cve);
+                }
+            }
+        }
+    }
+
+    public void storeMorphologicalWidth() {
+        if (!Config.INSTANCE.skipMorphologicalWidth()) {
+            log.info("store morphological width");
+
+            River river = getPeer();
+
+            for (ImportMorphWidth width: morphologicalWidths) {
+                try {
+                    width.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing morphological width.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing morphological width.", cve);
+                }
+            }
+        }
+    }
+
+    public void storeFlowVelocity() {
+        if (!Config.INSTANCE.skipFlowVelocity()) {
+            log.info("store flow velocity");
+
+            River river = getPeer();
+
+            for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels) {
+                try {
+                    flowVelocityModel.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing flow velocity model.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing flow velocity model.", cve);
+                }
+            }
+
+            for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) {
+                try {
+                    m.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing flow velocity measurement.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing flow velocity measurement.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeSedimentYield() {
+        if (!Config.INSTANCE.skipSedimentYield()) {
+            log.info("store sediment yield data");
+
+            River river = getPeer();
+
+            for (ImportSedimentYield sedimentYield: sedimentYields) {
+                try {
+                    sedimentYield.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing sediment yield.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing sediment yield.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeWaterlevels() {
+        if (!Config.INSTANCE.skipWaterlevels()) {
+            log.info("store waterlevels");
+
+            River river = getPeer();
+
+            for (ImportWaterlevel waterlevel: waterlevels) {
+                waterlevel.storeDependencies(river);
+            }
+        }
+    }
+
+
+    public void storeWaterlevelDifferences() {
+        if (!Config.INSTANCE.skipWaterlevelDifferences()) {
+            log.info("store waterlevel differences");
+
+            River river = getPeer();
+
+            for (ImportWaterlevelDifference diff: waterlevelDiffs) {
+                try {
+                    diff.storeDependencies(river);
+                }
+                catch (SQLException sqle) {
+                    log.error("Error while storing waterlevel diff.", sqle);
+                }
+                catch (ConstraintViolationException cve) {
+                    log.error("Error while storing waterlevel diff.", cve);
+                }
+            }
+        }
+    }
+
+
+    public void storeAnnotations() {
+        if (!Config.INSTANCE.skipAnnotations()) {
+            River river = getPeer();
+            for (ImportAnnotation annotation: annotations) {
+                annotation.getPeer(river);
+            }
+        }
+    }
+
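+    // Gauges are stored one at a time and the session is flushed after each
+    // gauge, so its data is written out before the next gauge is processed.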
+    public void storeGauges() {
+        if (!Config.INSTANCE.skipGauges()) {
+            log.info("store gauges:");
+            River river = getPeer();
+            Session session = ImporterSession.getInstance()
+                .getDatabaseSession();
+            for (ImportGauge gauge: gauges) {
+                log.info("\tgauge: " + gauge.getName());
+                gauge.storeDependencies(river);
+                session.flush();
+            }
+        }
+    }
+
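+    /**
+     * Fetch-or-create pattern used throughout the importer: look the river up
+     * by name and create it (with the unit determined by storeWstUnit()) if it
+     * does not exist yet; the result is cached in 'peer'.
+     */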
+    public River getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from River where name=:name");
+
+            Unit u = wstUnit.getPeer();
+
+            query.setString("name", name);
+            List<River> rivers = query.list();
+            if (rivers.isEmpty()) {
+                log.info("Store new river '" + name + "'");
+                peer = new River(name, u);
+                session.save(peer);
+            }
+            else {
+                peer = rivers.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.SedimentDensity;
+
+
+public class ImportSedimentDensity {
+
+    private static Logger log = Logger.getLogger(ImportSedimentDensity.class);
+
+
+    protected SedimentDensity peer;
+
+    protected ImportDepth depth;
+
+    protected ImportUnit unit;
+
+    protected String description;
+
+    protected List<ImportSedimentDensityValue> values;
+
+
+    public ImportSedimentDensity(String description) {
+        this.description = description;
+        this.values      = new ArrayList<ImportSedimentDensityValue>();
+    }
+
+
+    public String getDescription() {
+        return description;
+    }
+
+
+    public void setDepth(ImportDepth depth) {
+        this.depth = depth;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportSedimentDensityValue value) {
+        values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        if (depth != null) {
+            depth.storeDependencies();
+        }
+
+        log.info("store sediment density values.");
+
+        SedimentDensity peer = getPeer(river);
+
+        for (ImportSedimentDensityValue value: values) {
+            value.storeDependencies(peer);
+        }
+    }
+
+
+    public SedimentDensity getPeer(River river) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from SedimentDensity where " +
+                "   river=:river and " +
+                "   depth=:depth and " +
+                "   unit=:unit");
+
+            query.setParameter("river", river);
+            query.setParameter("depth", depth.getPeer());
+            query.setParameter("unit", unit.getPeer());
+
+            List<SedimentDensity> density = query.list();
+
+            if (density.isEmpty()) {
+                log.debug("Create new SedimentDensity DB instance.");
+
+                peer = new SedimentDensity(
+                    river,
+                    depth.getPeer(),
+                    unit.getPeer(),
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = density.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,87 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.SedimentDensity;
+import de.intevation.flys.model.SedimentDensityValue;
+
+
+public class ImportSedimentDensityValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportSedimentDensityValue.class);
+
+
+    protected SedimentDensityValue peer;
+
+    protected BigDecimal station;
+
+    protected BigDecimal density;
+
+    protected String description;
+
+
+    public ImportSedimentDensityValue(
+        BigDecimal station,
+        BigDecimal density,
+        String     description
+    ) {
+        this.station     = station;
+        this.density     = density;
+        this.description = description;
+    }
+
+
+    public void storeDependencies(SedimentDensity sedimentDensity) {
+        log.info("store dependencies");
+
+        getPeer(sedimentDensity);
+    }
+
+
+    public SedimentDensityValue getPeer(SedimentDensity sedimentDensity) {
+        log.info("get peer");
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+
+            Query query = session.createQuery(
+                "from SedimentDensityValue where " +
+                "   sedimentDensity=:sedimentDensity and " +
+                "   station=:station and " +
+                "   density=:density and " +
+                "   description=:description");
+
+            query.setParameter("sedimentDensity", sedimentDensity);
+            query.setParameter("station", station);
+            query.setParameter("density", density);
+            query.setParameter("description", description);
+
+            List<SedimentDensityValue> values = query.list();
+            if (values.isEmpty()) {
+                log.debug("Create new SedimentDensityValue DB instance.");
+
+                peer = new SedimentDensityValue(
+                    sedimentDensity,
+                    station,
+                    density,
+                    description);
+
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYield.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,133 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.GrainFraction;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.TimeInterval;
+import de.intevation.flys.model.Unit;
+
+
+public class ImportSedimentYield {
+
+    private static Logger log = Logger.getLogger(ImportSedimentYield.class);
+
+    private ImportGrainFraction grainFraction;
+
+    private ImportUnit unit;
+
+    private ImportTimeInterval timeInterval;
+
+    private String description;
+
+    private List<ImportSedimentYieldValue> values;
+
+    private SedimentYield peer;
+
+
+    public ImportSedimentYield(String description) {
+        this.values      = new ArrayList<ImportSedimentYieldValue>();
+        this.description = description;
+    }
+
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void setGrainFraction(ImportGrainFraction grainFraction) {
+        this.grainFraction = grainFraction;
+    }
+
+
+    public void addValue(ImportSedimentYieldValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.debug("store dependencies");
+
+        if (grainFraction != null) {
+            grainFraction.storeDependencies();
+        }
+
+        SedimentYield peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this sediment yield is skipped.
+            return;
+        }
+
+        int i = 0;
+
+        for (ImportSedimentYieldValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " sediment yield values.");
+    }
+
+
+    public SedimentYield getPeer(River river) {
+        log.debug("get peer");
+
+        GrainFraction gf = grainFraction != null
+            ? grainFraction.getPeer()
+            : null;
+
+        Unit u = unit != null ? unit.getPeer() : null;
+
+        TimeInterval ti = timeInterval != null ? timeInterval.getPeer() : null;
+
+        if (ti == null || u == null) {
+            log.warn("Skip invalid SedimentYield: time interval or unit null!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from SedimentYield where " +
+                "   river=:river and " +
+                "   grainFraction=:grainFraction and " +
+                "   unit=:unit and " +
+                "   timeInterval=:timeInterval and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("grainFraction", gf);
+            query.setParameter("unit", u);
+            query.setParameter("timeInterval", ti);
+            query.setParameter("description", description);
+
+            List<SedimentYield> yields = query.list();
+            if (yields.isEmpty()) {
+                log.debug("create new SedimentYield");
+
+                peer = new SedimentYield(river, u, ti, gf, description);
+                session.save(peer);
+            }
+            else {
+                peer = yields.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYieldValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,58 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.SedimentYield;
+import de.intevation.flys.model.SedimentYieldValue;
+
+
+public class ImportSedimentYieldValue {
+
+    private Double station;
+    private Double value;
+
+    private SedimentYieldValue peer;
+
+
+    public ImportSedimentYieldValue(Double station, Double value) {
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    public void storeDependencies(SedimentYield sedimentYield) {
+        getPeer(sedimentYield);
+    }
+
+
+    public SedimentYieldValue getPeer(SedimentYield sedimentYield) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from SedimentYieldValue where " +
+                "   sedimentYield=:sedimentYield and " +
+                "   station=:station and " +
+                "   value=:value"
+            );
+
+            query.setParameter("sedimentYield", sedimentYield);
+            query.setParameter("station", station);
+            query.setParameter("value", value);
+
+            List<SedimentYieldValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new SedimentYieldValue(sedimentYield, station, value);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportTimeInterval.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.Date;
+
+import org.apache.log4j.Logger;
+
+public class ImportTimeInterval
+{
+    private static Logger log = Logger.getLogger(ImportTimeInterval.class);
+
+    protected Date startTime;
+    protected Date stopTime;
+
+    protected TimeInterval peer;
+
+    public ImportTimeInterval() {
+    }
+
+    public ImportTimeInterval(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    public ImportTimeInterval(Date startTime, Date stopTime) {
+        this.startTime = startTime;
+        this.stopTime  = stopTime;
+    }
+
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    public Date getStopTime() {
+        return stopTime;
+    }
+
+    public void setStopTime(Date stopTime) {
+        this.stopTime = stopTime;
+    }
+
+    public TimeInterval getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from TimeInterval where startTime=:a and stopTime=:b");
+            query.setParameter("a", startTime);
+            query.setParameter("b", stopTime);
+            List<TimeInterval> intervals = query.list();
+            if (intervals.isEmpty()) {
+                peer = new TimeInterval(startTime, stopTime);
+                session.save(peer);
+            }
+            else {
+                peer = intervals.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportUnit.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,52 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Unit;
+
+
+public class ImportUnit
+{
+    private static final Logger log = Logger.getLogger(ImportUnit.class);
+
+    protected String name;
+
+    protected Unit peer;
+
+
+    public ImportUnit(String name) {
+        this.name = name;
+    }
+
+
+    public String getName() {
+        return name;
+    }
+
+
+    public Unit getPeer() {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery("from Unit where name=:name");
+            query.setParameter("name", name);
+
+            List<Unit> units = query.list();
+            if (units.isEmpty()) {
+                log.info("Store new unit '" + name + "'");
+
+                peer = new Unit(name);
+                session.save(peer);
+            }
+            else {
+                peer = units.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,96 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.Waterlevel;
+
+
+public class ImportWaterlevel {
+
+    private static final Logger log = Logger.getLogger(ImportWaterlevel.class);
+
+
+    private ImportUnit unit;
+
+    private String description;
+
+    private List<ImportWaterlevelQRange> qRanges;
+
+    private Waterlevel peer;
+
+
+    public ImportWaterlevel(String description) {
+        this.qRanges = new ArrayList<ImportWaterlevelQRange>();
+
+        this.description = description;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportWaterlevelQRange qRange) {
+        this.qRanges.add(qRange);
+    }
+
+
+    public void storeDependencies(River river) {
+        log.info("store dependencies");
+
+        Waterlevel peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this waterlevel is skipped.
+            return;
+        }
+
+        int i = 0;
+
+        for (ImportWaterlevelQRange qRange: qRanges) {
+            qRange.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel q ranges");
+    }
+
+
+    public Waterlevel getPeer(River river) {
+        Unit u = unit != null ? unit.getPeer() : null;
+        if (u == null) {
+            log.warn("skip invalid waterlevel - no unit set!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Waterlevel where " +
+                "   river=:river and " +
+                "   unit=:unit and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("unit", u);
+            query.setParameter("description", description);
+
+            List<Waterlevel> wsts = query.list();
+            if (wsts.isEmpty()) {
+                peer = new Waterlevel(river, u, description);
+                session.save(peer);
+            }
+            else {
+                peer = wsts.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifference.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.importer;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.exception.ConstraintViolationException;
+
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Unit;
+import de.intevation.flys.model.WaterlevelDifference;
+
+
+public class ImportWaterlevelDifference {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifference.class);
+
+
+    private ImportUnit unit;
+
+    private String description;
+
+    private List<ImportWaterlevelDifferenceColumn> columns;
+
+    private WaterlevelDifference peer;
+
+
+    public ImportWaterlevelDifference(String description) {
+        this.columns = new ArrayList<ImportWaterlevelDifferenceColumn>();
+
+        this.description = description;
+    }
+
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+
+    public void addValue(ImportWaterlevelDifferenceColumn column) {
+        this.columns.add(column);
+    }
+
+
+    public void storeDependencies(River river)
+    throws SQLException, ConstraintViolationException
+    {
+        log.info("store dependencies");
+
+        WaterlevelDifference peer = getPeer(river);
+
+        if (peer == null) {
+            // getPeer() has already logged why this difference is skipped.
+            return;
+        }
+
+        int i = 0;
+
+        for (ImportWaterlevelDifferenceColumn column: columns) {
+            column.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel difference columns");
+    }
+
+
+    public WaterlevelDifference getPeer(River river) {
+        Unit u = unit != null ? unit.getPeer() : null;
+        if (u == null) {
+            log.warn("skip invalid waterlevel difference - no unit set!");
+            return null;
+        }
+
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifference where " +
+                "   river=:river and " +
+                "   unit=:unit and " +
+                "   description=:description"
+            );
+
+            query.setParameter("river", river);
+            query.setParameter("unit", u);
+            query.setParameter("description", description);
+
+            List<WaterlevelDifference> diffs = query.list();
+            if (diffs.isEmpty()) {
+                peer = new WaterlevelDifference(river, u, description);
+                session.save(peer);
+            }
+            else {
+                peer = diffs.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceColumn.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelDifference;
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+
+
+public class ImportWaterlevelDifferenceColumn {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifferenceColumn.class);
+
+
+    private String description;
+
+    private List<ImportWaterlevelDifferenceValue> values;
+
+    private WaterlevelDifferenceColumn peer;
+
+
+    public ImportWaterlevelDifferenceColumn(String description) {
+        this.values = new ArrayList<ImportWaterlevelDifferenceValue>();
+
+        this.description = description;
+    }
+
+
+    public void addValue(ImportWaterlevelDifferenceValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(WaterlevelDifference difference) {
+        log.info("store dependencies");
+
+        WaterlevelDifferenceColumn peer = getPeer(difference);
+
+        int i = 0;
+
+        for (ImportWaterlevelDifferenceValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel difference values");
+    }
+
+
+    public WaterlevelDifferenceColumn getPeer(WaterlevelDifference diff) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifferenceColumn where " +
+                "   difference=:difference and " +
+                "   description=:description"
+            );
+
+            query.setParameter("difference", diff);
+            query.setParameter("description", description);
+
+            List<WaterlevelDifferenceColumn> cols = query.list();
+            if (cols.isEmpty()) {
+                peer = new WaterlevelDifferenceColumn(diff, description);
+                session.save(peer);
+            }
+            else {
+                peer = cols.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelDifferenceValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,64 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelDifferenceColumn;
+import de.intevation.flys.model.WaterlevelDifferenceValue;
+
+
+public class ImportWaterlevelDifferenceValue {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelDifferenceValue.class);
+
+
+    private Double station;
+    private Double value;
+
+    private WaterlevelDifferenceValue peer;
+
+
+    public ImportWaterlevelDifferenceValue(Double station, Double value) {
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    public void storeDependencies(WaterlevelDifferenceColumn column) {
+        getPeer(column);
+    }
+
+
+    public WaterlevelDifferenceValue getPeer(WaterlevelDifferenceColumn column) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelDifferenceValue where " +
+                "   column=:column and " +
+                "   station=:station and " +
+                "   value=:value"
+            );
+
+            query.setParameter("column", column);
+            query.setParameter("station", station);
+            query.setParameter("value", value);
+
+            List<WaterlevelDifferenceValue> values = query.list();
+            if (values.isEmpty()) {
+                peer = new WaterlevelDifferenceValue(column, station, value);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,79 @@
+package de.intevation.flys.importer;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.Waterlevel;
+import de.intevation.flys.model.WaterlevelQRange;
+
+
+public class ImportWaterlevelQRange {
+
+    private static final Logger log =
+        Logger.getLogger(ImportWaterlevelQRange.class);
+
+
+    private Double q;
+
+    private List<ImportWaterlevelValue> values;
+
+    private WaterlevelQRange peer;
+
+
+    public ImportWaterlevelQRange(Double q) {
+        this.values = new ArrayList<ImportWaterlevelValue>();
+        this.q      = q;
+    }
+
+    public void addValue(ImportWaterlevelValue value) {
+        this.values.add(value);
+    }
+
+
+    public void storeDependencies(Waterlevel waterlevel) {
+        log.info("store dependencies");
+
+        WaterlevelQRange peer = getPeer(waterlevel);
+
+        int i = 0;
+
+        for (ImportWaterlevelValue value: values) {
+            value.storeDependencies(peer);
+            i++;
+        }
+
+        log.info("stored " + i + " waterlevel values");
+    }
+
+
+    public WaterlevelQRange getPeer(Waterlevel waterlevel) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelQRange where " +
+                "   waterlevel=:waterlevel and " +
+                "   q=:q"
+            );
+
+            query.setParameter("waterlevel", waterlevel);
+            query.setParameter("q", q);
+
+            List<WaterlevelQRange> qRanges = query.list();
+            if (qRanges.isEmpty()) {
+                peer = new WaterlevelQRange(waterlevel, q);
+                session.save(peer);
+            }
+            else {
+                peer = qRanges.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWaterlevelValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer;
+
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.model.WaterlevelQRange;
+import de.intevation.flys.model.WaterlevelValue;
+
+
+public class ImportWaterlevelValue {
+
+    private Double station;
+    private Double w;
+
+    private WaterlevelValue peer;
+
+
+    public ImportWaterlevelValue(Double station, Double w) {
+        this.station = station;
+        this.w       = w;
+    }
+
+
+    public void storeDependencies(WaterlevelQRange qRange) {
+        getPeer(qRange);
+    }
+
+
+    public WaterlevelValue getPeer(WaterlevelQRange qRange) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WaterlevelValue where " +
+                "   qrange=:qrange and " +
+                "   station=:station and " +
+                "   w=:w"
+            );
+
+            query.setParameter("qrange", qRange);
+            query.setParameter("station", station);
+            query.setParameter("w", w);
+
+            List<WaterlevelValue> values = query.list();
+
+            if (values.isEmpty()) {
+                peer = new WaterlevelValue(qRange, station, w);
+                session.save(peer);
+            }
+            else {
+                peer = values.get(0);
+            }
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWst.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,112 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.River;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.ArrayList;
+import java.util.List;
+
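+/**
+ * In-memory representation of a WST file during import.
+ *
+ * Hedged usage sketch (hypothetical driver code; the actual caller is the
+ * WST parser):
+ *
+ *   ImportWst wst = new ImportWst("example.wst");
+ *   wst.setNumberColumns(2);
+ *   wst.setUnit(new ImportUnit("NN + m"));
+ *   wst.getColumn(0).addColumnValue(
+ *       new BigDecimal("10.000"), new BigDecimal("55.30"));
+ *   // later, once the River peer exists:
+ *   // wst.storeDependencies(river);
+ */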
+public class ImportWst
+{
+    private static Logger log = Logger.getLogger(ImportWst.class);
+
+    protected String description;
+
+    protected Integer kind;
+
+    protected List<ImportWstColumn> columns;
+
+    protected ImportUnit unit;
+
+    protected Wst peer;
+
+    public ImportWst() {
+        kind = 0;
+        columns = new ArrayList<ImportWstColumn>();
+    }
+
+    public ImportWst(String description) {
+        this();
+        this.description = description;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public void setNumberColumns(int numColumns) {
+        for (int i = 0; i < numColumns; ++i) {
+            columns.add(new ImportWstColumn(this, null, null, i));
+        }
+    }
+
+    public int getNumberColumns() {
+        return columns.size();
+    }
+
+    public ImportWstColumn getColumn(int index) {
+        return columns.get(index);
+    }
+
+    public ImportUnit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(ImportUnit unit) {
+        this.unit = unit;
+    }
+
+    public void storeDependencies(River river) {
+
+        log.info("store '" + description + "'");
+        getPeer(river); // ensure the Wst peer exists before storing its columns
+
+        for (ImportWstColumn column: columns) {
+            column.storeDependencies(river);
+        }
+
+        Session session = ImporterSession.getInstance().getDatabaseSession();
+        session.flush();
+    }
+
+    public Wst getPeer(River river) {
+        if (peer == null) {
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from Wst where " +
+                "river=:river and description=:description and kind=:kind");
+            query.setParameter("river",       river);
+            query.setParameter("description", description);
+            query.setParameter("kind",        kind);
+            List<Wst> wsts = query.list();
+            if (wsts.isEmpty()) {
+                peer = new Wst(river, description, kind);
+                session.save(peer);
+            }
+            else {
+                peer = wsts.get(0);
+            }
+
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumn.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,144 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.Wst;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.TimeInterval;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportWstColumn
+{
+    private static Logger log = Logger.getLogger(ImportWstColumn.class);
+
+    protected ImportWst wst;
+    protected String    name;
+    protected String    description;
+    protected Integer   position;
+
+    protected ImportTimeInterval timeInterval;
+
+    protected List<ImportWstColumnQRange> columnQRanges;
+    protected List<ImportWstColumnValue>  columnValues;
+
+    protected WstColumn peer;
+
+    public ImportWstColumn() {
+        columnQRanges = new ArrayList<ImportWstColumnQRange>();
+        columnValues  = new ArrayList<ImportWstColumnValue>();
+    }
+
+    public ImportWstColumn(
+        ImportWst wst,
+        String    name,
+        String    description,
+        Integer   position
+    ) {
+        this();
+        this.wst         = wst;
+        this.name        = name;
+        this.description = description;
+        this.position    = position;
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public Integer getPosition() {
+        return position;
+    }
+
+    public void setPosition(Integer position) {
+        this.position = position;
+    }
+
+    public void addColumnValue(BigDecimal position, BigDecimal w) {
+        columnValues.add(
+            new ImportWstColumnValue(this, position, w));
+    }
+
+    public void addColumnQRange(ImportWstQRange columnQRange) {
+        columnQRanges.add(
+            new ImportWstColumnQRange(this, columnQRange));
+    }
+
+    public void storeDependencies(River river) {
+        log.info("store column '" + name + "'");
+        getPeer(river); // ensure the WstColumn peer exists before storing Q ranges and values
+
+        for (ImportWstColumnQRange columnQRange: columnQRanges) {
+            columnQRange.getPeer(river);
+        }
+
+        for (ImportWstColumnValue columnValue: columnValues) {
+            columnValue.getPeer(river);
+        }
+    }
+
+    public ImportTimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(ImportTimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    public WstColumn getPeer(River river) {
+        if (peer == null) {
+            Wst w = wst.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstColumn where " +
+                "wst=:wst and name=:name and description=:description" +
+                " and position=:position");
+            query.setParameter("wst",         w);
+            query.setParameter("name",        name);
+            query.setParameter("description", description);
+            query.setParameter("position",    position);
+
+            TimeInterval ti = timeInterval != null
+                ? timeInterval.getPeer()
+                : null;
+
+            List<WstColumn> columns = query.list();
+            if (columns.isEmpty()) {
+                peer = new WstColumn(w, name, description, position, ti);
+                session.save(peer);
+            }
+            else {
+                peer = columns.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumnQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.WstColumnQRange;
+import de.intevation.flys.model.WstQRange;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportWstColumnQRange
+{
+    protected ImportWstColumn wstColumn;
+    protected ImportWstQRange qRange;
+
+    protected WstColumnQRange peer;
+
+    public ImportWstColumnQRange() {
+    }
+
+    public ImportWstColumnQRange(
+        ImportWstColumn wstColumn,
+        ImportWstQRange qRange
+    ) {
+        this.wstColumn = wstColumn;
+        this.qRange    = qRange;
+    }
+
+    public ImportWstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(ImportWstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    public ImportWstQRange getQRange() {
+        return qRange;
+    }
+
+    public void setQRange(ImportWstQRange qRange) {
+        this.qRange = qRange;
+    }
+
+    public WstColumnQRange getPeer(River river) {
+        if (peer == null) {
+            WstColumn c = wstColumn.getPeer(river);
+            WstQRange q = qRange.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstColumnQRange where " +
+                "wstColumn=:c and wstQRange=:q");
+            query.setParameter("c", c);
+            query.setParameter("q", q);
+            List<WstColumnQRange> cols = query.list();
+            if (cols.isEmpty()) {
+                peer = new WstColumnQRange(c, q);
+                session.save(peer);
+            }
+            else {
+                peer = cols.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstColumnValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.importer;
+
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.River;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class ImportWstColumnValue
+{
+    protected Logger logger = Logger.getLogger(ImportWstColumnValue.class);
+
+    protected BigDecimal      position;
+    protected BigDecimal      w;
+    protected ImportWstColumn wstColumn;
+
+    protected WstColumnValue  peer;
+
+    public ImportWstColumnValue() {
+    }
+
+    public ImportWstColumnValue(
+        ImportWstColumn wstColumn,
+        BigDecimal      position,
+        BigDecimal      w
+    ) {
+        this.wstColumn = wstColumn;
+        this.position  = position;
+        this.w         = w;
+    }
+
+    public BigDecimal getPosition() {
+        return position;
+    }
+
+    public void setPosition(BigDecimal position) {
+        this.position = position;
+    }
+
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+
+    public ImportWstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(ImportWstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
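+    // Resolve the peer via the ImporterSession value cache; missing values are created and saved on demand.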
+    public WstColumnValue getPeer(River river) {
+        if (peer == null) {
+            WstColumn c = wstColumn.getPeer(river);
+            peer = ImporterSession.getInstance().getWstColumnValue(
+                c, position, w);
+        }
+
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportWstQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.model.WstQRange;
+import de.intevation.flys.model.River;
+import de.intevation.flys.model.Range;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import java.util.List;
+
+public class ImportWstQRange
+{
+    protected ImportRange range;
+    protected BigDecimal  q;
+
+    protected WstQRange peer;
+
+    public ImportWstQRange() {
+    }
+
+    public ImportWstQRange(
+        ImportRange range,
+        BigDecimal  q
+    ) {
+        this.range = range;
+        this.q     = q;
+    }
+
+    public ImportRange getRange() {
+        return range;
+    }
+
+    public void setRange(ImportRange range) {
+        this.range = range;
+    }
+
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    public WstQRange getPeer(River river) {
+        if (peer == null) {
+            Range r = range.getPeer(river);
+            Session session = ImporterSession.getInstance().getDatabaseSession();
+            Query query = session.createQuery(
+                "from WstQRange where " +
+                "range=:range and q=:q");
+            query.setParameter("range", r);
+            query.setParameter("q",     q);
+            List<WstQRange> wstQRanges = query.list();
+            if (wstQRanges.isEmpty()) {
+                peer = new WstQRange(r, q);
+                session.save(peer);
+            }
+            else {
+                peer = wstQRanges.get(0);
+            }
+        }
+        return peer;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/Importer.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.importer;
+
+import de.intevation.artifacts.common.utils.XMLUtils;
+
+import de.intevation.flys.importer.parsers.InfoGewParser;
+import de.intevation.flys.importer.parsers.AnnotationClassifier;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.util.List;
+
+import java.sql.SQLException;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.HibernateException;
+
+import org.w3c.dom.Document;
+
+public class Importer
+{
+    private static Logger log = Logger.getLogger(Importer.class);
+
+    protected List<ImportRiver> rivers;
+
+    public Importer() {
+    }
+
+    public Importer(List<ImportRiver> rivers) {
+        this.rivers = rivers;
+    }
+
+    public List<ImportRiver> getRivers() {
+        return rivers;
+    }
+
+    public void setRivers(List<ImportRiver> rivers) {
+        this.rivers = rivers;
+    }
+
+    public void writeRivers() {
+        log.debug("write rivers started");
+
+        for (ImportRiver river: rivers) {
+            log.debug("writing river '" + river.getName() + "'");
+            river.storeDependencies();
+            ImporterSession.getInstance().getDatabaseSession().flush();
+        }
+
+        log.debug("write rivers finished");
+    }
+
+    public void writeToDatabase() {
+
+        Transaction tx = null;
+
+        try {
+            tx = ImporterSession.getInstance()
+                .getDatabaseSession().beginTransaction();
+
+            try {
+                writeRivers();
+            }
+            catch (HibernateException he) {
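+                // Log the complete chain of nested SQLExceptions before rethrowing.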
+                Throwable t = he.getCause();
+                while (t instanceof SQLException) {
+                    SQLException sqle = (SQLException)t;
+                    log.error("SQL exeception chain:", sqle);
+                    t = sqle.getNextException();
+                }
+                throw he;
+            }
+
+            tx.commit();
+        }
+        catch (RuntimeException re) {
+            if (tx != null) {
+                tx.rollback();
+            }
+            throw re;
+        }
+    }
+
+    public static AnnotationClassifier getAnnotationClassifier() {
+        String annotationTypes = Config.INSTANCE.getAnnotationTypes();
+
+        if (annotationTypes == null) {
+            log.info("no annotation types file configured.");
+            return null;
+        }
+
+        File file = new File(annotationTypes);
+
+        log.info("use annotation types file '" + file + "'");
+
+        if (!(file.isFile() && file.canRead())) {
+            log.warn("annotation type file '" + file + "' is not readable.");
+            return null;
+        }
+
+        Document rules = XMLUtils.parseDocument(file);
+
+        if (rules == null) {
+            log.warn("cannot parse annotation types file.");
+            return null;
+        }
+
+        return new AnnotationClassifier(rules);
+    }
+
+    public static void main(String [] args) {
+
+        InfoGewParser infoGewParser = new InfoGewParser(
+            getAnnotationClassifier());
+
+        log.info("Start parsing rivers...");
+
+        for (String gew: args) {
+            log.info("parsing info gew file: " + gew);
+            try {
+                infoGewParser.parse(new File(gew));
+            }
+            catch (IOException ioe) {
+                log.error("cannot while parsing: " + gew);
+            }
+        }
+
+        String gew = Config.INSTANCE.getInfoGewFile();
+        if (gew != null && gew.length() > 0) {
+            log.info("parsing info gew file: " + gew);
+            try {
+                infoGewParser.parse(new File(gew));
+            }
+            catch (IOException ioe) {
+                log.error("cannot parse file: " + gew);
+            }
+        }
+
+        if (!Config.INSTANCE.dryRun()) {
+            new Importer(infoGewParser.getRivers()).writeToDatabase();
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImporterSession.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,187 @@
+package de.intevation.flys.importer;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeMap;
+
+import java.math.BigDecimal;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.FlushMode;
+
+import de.intevation.flys.backend.SessionFactoryProvider;
+
+import de.intevation.flys.model.WstColumnValue;
+import de.intevation.flys.model.WstColumn;
+import de.intevation.flys.model.DischargeTableValue;
+import de.intevation.flys.model.DischargeTable;
+import de.intevation.flys.model.Range;
+import de.intevation.flys.model.River;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.LRUCache;
+
+public class ImporterSession
+{
+    private static Logger log = Logger.getLogger(ImporterSession.class);
+
+    private static final ThreadLocal<ImporterSession> SESSION =
+        new ThreadLocal<ImporterSession>() {
+            @Override
+            protected ImporterSession initialValue() {
+                return new ImporterSession();
+            }
+        };
+
+    protected Session databaseSession;
+
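+    // Caches keyed by the id of the owning entity; the inner maps use
+    // (a, b) value pairs compared with an epsilon tolerance.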
+    protected LRUCache<Integer, Map<ValueKey, WstColumnValue>>
+        wstColumnValues;
+
+    protected LRUCache<Integer, Map<ValueKey, DischargeTableValue>>
+        dischargeTableValues;
+
+    protected LRUCache<Integer, Map<ValueKey, Range>>
+        ranges;
+
+    public static ImporterSession getInstance() {
+        return SESSION.get();
+    }
+
+    public ImporterSession() {
+        SessionFactory sessionFactory =
+            SessionFactoryProvider.createSessionFactory();
+        databaseSession = sessionFactory.openSession();
+        //databaseSession.setFlushMode(FlushMode.MANUAL);
+
+        wstColumnValues =
+            new LRUCache<Integer, Map<ValueKey, WstColumnValue>>();
+
+        dischargeTableValues =
+            new LRUCache<Integer, Map<ValueKey, DischargeTableValue>>();
+
+        ranges = new LRUCache<Integer, Map<ValueKey, Range>>();
+    }
+
+    public Session getDatabaseSession() {
+        return databaseSession;
+    }
+
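+    // Return the WstColumnValue for (position, w) of the given column. All
+    // existing values of the column are loaded into the cache on first
+    // access; missing values are created and saved on demand.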
+    public WstColumnValue getWstColumnValue(
+        WstColumn  column,
+        BigDecimal position,
+        BigDecimal w
+    ) {
+        Integer c = column.getId();
+
+        Map<ValueKey, WstColumnValue> map = wstColumnValues.get(c);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, WstColumnValue>(
+                ValueKey.EPSILON_COMPARATOR);
+            wstColumnValues.put(c, map);
+            Query query = databaseSession.createQuery(
+                "from WstColumnValue where wstColumn.id=:cid");
+            query.setParameter("cid", c);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                WstColumnValue wcv = (WstColumnValue)iter.next();
+                map.put(new ValueKey(wcv.getPosition(), wcv.getW()), wcv);
+            }
+        }
+
+        ValueKey key = new ValueKey(position, w);
+
+        WstColumnValue wcv = map.get(key);
+
+        if (wcv != null) {
+            return wcv;
+        }
+
+        wcv = new WstColumnValue(column, position, w);
+
+        databaseSession.save(wcv);
+
+        map.put(key, wcv);
+
+        return wcv;
+    }
+
+    public DischargeTableValue getDischargeTableValue(
+        DischargeTable table,
+        BigDecimal     q,
+        BigDecimal     w
+    ) {
+        Integer t = table.getId();
+
+        Map<ValueKey, DischargeTableValue> map =
+            dischargeTableValues.get(t);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, DischargeTableValue>(
+                ValueKey.EPSILON_COMPARATOR);
+            dischargeTableValues.put(t, map);
+            Query query = databaseSession.createQuery(
+                "from DischargeTableValue where dischargeTable.id=:tid");
+            query.setParameter("tid", t);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                DischargeTableValue dctv = (DischargeTableValue)iter.next();
+                map.put(new ValueKey(dctv.getQ(), dctv.getW()), dctv);
+            }
+        }
+
+        ValueKey key = new ValueKey(q, w);
+
+        DischargeTableValue dctv = map.get(key);
+
+        if (dctv != null) {
+            return dctv;
+        }
+
+        dctv = new DischargeTableValue(table, q, w);
+
+        databaseSession.save(dctv);
+
+        map.put(key, dctv);
+
+        return dctv;
+    }
+
+    public Range getRange(River river, BigDecimal a, BigDecimal b) {
+        Integer r = river.getId();
+
+        Map<ValueKey, Range> map = ranges.get(r);
+
+        if (map == null) {
+            map = new TreeMap<ValueKey, Range>(
+                ValueKey.EPSILON_COMPARATOR);
+            ranges.put(r, map);
+            Query query = databaseSession.createQuery(
+                "from Range where river.id=:rid");
+            query.setParameter("rid", r);
+            for (Iterator iter = query.iterate(); iter.hasNext();) {
+                Range range = (Range)iter.next();
+                map.put(new ValueKey(range.getA(), range.getB()), range);
+            }
+        }
+
+        ValueKey key = new ValueKey(a, b);
+
+        Range range = map.get(key);
+
+        if (range != null) {
+            return range;
+        }
+
+        range = new Range(a, b, river);
+
+        databaseSession.save(range);
+
+        map.put(key, range);
+
+        return range;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ValueKey.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,63 @@
+package de.intevation.flys.importer;
+
+import java.math.BigDecimal;
+
+import java.util.Comparator;
+
+public class ValueKey
+{
+    public static final double EPSILON = 1e-6;
+
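+    // Compares keys component-wise; values closer than EPSILON are considered
+    // equal and null sorts before non-null.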
+    public static final Comparator<ValueKey> EPSILON_COMPARATOR =
+        new Comparator<ValueKey>()
+    {
+        public int compare(ValueKey x, ValueKey y) {
+            int cmp = ValueKey.compare(x.a, y.a);
+            if (cmp != 0) return cmp;
+            return ValueKey.compare(x.b, y.b);
+        }
+    };
+
+    public static int compare(BigDecimal a, BigDecimal b) {
+        if (a == null && b == null) return  0;
+        if (a != null && b == null) return +1;
+        if (a == null && b != null) return -1;
+
+        double diff = a.doubleValue() - b.doubleValue();
+        if (diff < -EPSILON) return -1;
+        return diff > EPSILON ? +1 : 0;
+    }
+
+    protected BigDecimal a;
+    protected BigDecimal b;
+
+    public ValueKey() {
+    }
+
+    public ValueKey(BigDecimal a, BigDecimal b) {
+        this.a = a;
+        this.b = b;
+    }
+
+    @Override
+    public int hashCode() {
+        return ((a != null ? a.hashCode() : 0) << 16)
+              | (b != null ? b.hashCode() : 0);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (!(other instanceof ValueKey)) {
+            return false;
+        }
+        ValueKey o = (ValueKey)other;
+        return !(
+               (a == null && o.a != null)
+            || (a != null && o.a == null)
+            || (a != null && !a.equals(o.a))
+            || (b == null && o.b != null)
+            || (b != null && o.b == null)
+            || (b != null && !b.equals(o.b)));
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/XY.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.importer;
+
+public class XY
+implements   Comparable<XY>
+{
+    public static final double X_EPSILON = 1e-4;
+
+    protected double x;
+    protected double y;
+    protected int    index;
+
+    public XY() {
+    }
+
+    public XY(double x, double y, int index) {
+        this.x     = x;
+        this.y     = y;
+        this.index = index;
+    }
+
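+    // Order primarily by x (with a tolerance of X_EPSILON); ties are broken
+    // by the insertion index.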
+    @Override
+    public int compareTo(XY other) {
+        if (x + X_EPSILON < other.x) return -1;
+        if (x > other.x + X_EPSILON) return +1;
+        if (index < other.index)     return -1;
+        if (index > other.index)     return +1;
+        return 0;
+    }
+
+    public double getX() {
+        return x;
+    }
+
+    public void setX(double x) {
+        this.x = x;
+    }
+
+    public double getY() {
+        return y;
+    }
+
+    public void setY(double y) {
+        this.y = y;
+    }
+
+    public int getIndex() {
+        return index;
+    }
+
+    public void setIndex(int index) {
+        this.index = index;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationClassifier.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,232 @@
+package de.intevation.flys.importer.parsers;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Element;
+
+import javax.xml.xpath.XPathConstants;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.XMLUtils;
+
+import de.intevation.flys.importer.ImportAnnotationType;
+import de.intevation.flys.importer.Importer;
+
+public class AnnotationClassifier
+{
+    private static Logger log = Logger.getLogger(AnnotationClassifier.class);
+
+    public static final String TYPES_XPATH =
+        "/annotation/types/type";
+
+    public static final String FILE_PATTERNS_XPATH =
+        "/annotation/patterns/file";
+
+    public static final String DESCRIPTION_PATTERNS_XPATH =
+        "/annotation/patterns/line";
+
+
+    public static class Pair {
+
+        protected Pattern              pattern;
+        protected ImportAnnotationType annType;
+
+        public Pair(Pattern pattern, ImportAnnotationType annType) {
+            this.pattern  = pattern;
+            this.annType  = annType;
+        }
+
+        public ImportAnnotationType match(String s) {
+            Matcher m = pattern.matcher(s);
+            return m.matches() ? annType : null;
+        }
+    } // class Pair
+
+
+    protected Map<String, ImportAnnotationType> types;
+    protected List<Pair>                        filePatterns;
+    protected List<Pair>                        descPatterns;
+
+    protected ImportAnnotationType defaultType;
+
+    public AnnotationClassifier() {
+    }
+
+    public AnnotationClassifier(Document rules) {
+        types        = new HashMap<String, ImportAnnotationType>();
+        filePatterns = new ArrayList<Pair>();
+        descPatterns = new ArrayList<Pair>();
+
+        buildRules(rules);
+    }
+
+    protected void buildRules(Document rules) {
+        buildTypes(rules);
+        buildFilePatterns(rules);
+        buildDescriptionPatterns(rules);
+    }
+
+    protected void buildTypes(Document rules) {
+
+        NodeList typeList = (NodeList)XMLUtils.xpath(
+            rules,
+            TYPES_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (typeList == null) {
+            log.info("no rules found.");
+            return;
+        }
+
+        for (int i = 0, N = typeList.getLength(); i < N; ++i) {
+            Element typeElement = (Element)typeList.item(i);
+            String name = typeElement.getAttribute("name");
+            if (name.length() == 0) {
+                log.warn("rule has no name");
+                continue;
+            }
+
+            ImportAnnotationType aic = new ImportAnnotationType(name);
+
+            types.put(name, aic);
+
+            if (typeElement.getAttribute("default").equals("true")) {
+                defaultType = aic;
+            }
+        }
+    }
+
+    protected void buildFilePatterns(Document rules) {
+
+        NodeList patternList = (NodeList)XMLUtils.xpath(
+            rules,
+            FILE_PATTERNS_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (patternList == null) {
+            log.info("no file patterns found.");
+            return;
+        }
+
+        for (int i = 0, N = patternList.getLength(); i < N; ++i) {
+            Element element = (Element)patternList.item(i);
+            Pair pair = buildPair(element);
+            if (pair != null) {
+                filePatterns.add(pair);
+            }
+        }
+    }
+
+    protected void buildDescriptionPatterns(Document rules) {
+
+        NodeList patternList = (NodeList)XMLUtils.xpath(
+            rules,
+            DESCRIPTION_PATTERNS_XPATH,
+            XPathConstants.NODESET,
+            null);
+
+        if (patternList == null) {
+            log.info("no line patterns found.");
+            return;
+        }
+
+        for (int i = 0, N = patternList.getLength(); i < N; ++i) {
+            Element element = (Element)patternList.item(i);
+            Pair pair = buildPair(element);
+            if (pair != null) {
+                descPatterns.add(pair);
+            }
+        }
+    }
+
+    protected Pair buildPair(Element element) {
+        String pattern = element.getAttribute("pattern");
+        String type    = element.getAttribute("type");
+
+        if (pattern.length() == 0) {
+            log.warn("pattern has no 'pattern' attribute.");
+            return null;
+        }
+
+        if (type.length() == 0) {
+            log.warn("pattern has no 'type' attribute.");
+            return null;
+        }
+
+        ImportAnnotationType annType = types.get(type);
+
+        if (annType == null) {
+            log.warn("pattern has unknown type '" + type + "'");
+            return null;
+        }
+
+        Pattern p;
+
+        try {
+            p = Pattern.compile(pattern,
+                    Pattern.CASE_INSENSITIVE|Pattern.UNICODE_CASE);
+        }
+        catch (IllegalArgumentException iae) {
+            log.warn("pattern '" + pattern + "' is invalid.", iae);
+            return null;
+        }
+
+        return new Pair(p, annType);
+    }
+
+    public ImportAnnotationType getDefaultType() {
+        return defaultType;
+    }
+
+    public ImportAnnotationType classifyFile(String filename) {
+        return classifyFile(filename, null);
+    }
+
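+    // Strip a trailing ".km" from the filename and return the type of the
+    // first matching file pattern, or def if none matches.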
+    public ImportAnnotationType classifyFile(
+        String                filename,
+        ImportAnnotationType def
+    ) {
+        if (filename.toLowerCase().endsWith(".km")) {
+            filename = filename.substring(0, filename.length()-3);
+        }
+
+        for (Pair pair: filePatterns) {
+            ImportAnnotationType annType = pair.match(filename);
+            if (annType != null) {
+                return annType;
+            }
+        }
+
+        return def;
+    }
+
+    public ImportAnnotationType classifyDescription(String description) {
+        return classifyDescription(description, null);
+    }
+
+    public ImportAnnotationType classifyDescription(
+        String                description,
+        ImportAnnotationType def
+    ) {
+        for (Pair pair: descPatterns) {
+            ImportAnnotationType annType = pair.match(description);
+            if (annType != null) {
+                return annType;
+            }
+        }
+
+        return def;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AnnotationsParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,196 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.HashMap;
+import java.util.TreeSet;
+import java.util.List;
+import java.util.ArrayList;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.LineNumberReader;
+import java.io.InputStreamReader;
+import java.io.FileInputStream;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportAnnotation;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportEdge;
+import de.intevation.flys.importer.ImportAnnotationType;
+import de.intevation.flys.importer.ImportAttribute;
+import de.intevation.flys.importer.ImportPosition;
+
+public class AnnotationsParser
+{
+    private static Logger log = Logger.getLogger(AnnotationsParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final String [] TO_SCAN = {
+        "Basisdaten",
+        "Streckendaten"
+    };
+
+    protected HashMap<String, ImportAttribute> attributes;
+    protected HashMap<String, ImportPosition>  positions;
+    protected TreeSet<ImportAnnotation>        annotations;
+    protected AnnotationClassifier             classifier;
+
+    public AnnotationsParser() {
+        this(null);
+    }
+
+    public AnnotationsParser(AnnotationClassifier classifier) {
+        attributes  = new HashMap<String, ImportAttribute>();
+        positions   = new HashMap<String, ImportPosition>();
+        annotations = new TreeSet<ImportAnnotation>();
+        this.classifier = classifier;
+    }
+
+    public void parseFile(File file) throws IOException {
+        log.info("parsing km file: '" + file + "'");
+
+        ImportAnnotationType defaultIAT = null;
+
+        if (classifier != null) {
+            defaultIAT = classifier.classifyFile(
+                file.getName(),
+                classifier.getDefaultType());
+        }
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0
+                || line.startsWith("*")) {
+                    continue;
+                }
+
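+                // Expected columns: position ; attribute ; km[#km] [; Unterkante [; Oberkante]]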
+                String [] parts = line.split("\\s*;\\s*");
+
+                if (parts.length < 3) {
+                    log.warn("not enough columns in line "
+                        + in.getLineNumber());
+                    continue;
+                }
+
+                ImportPosition position = positions.get(parts[0]);
+                if (position == null) {
+                    position = new ImportPosition(parts[0]);
+                    positions.put(parts[0], position);
+                }
+
+                ImportAttribute attribute = attributes.get(parts[1]);
+                if (attribute == null) {
+                    attribute = new ImportAttribute(parts[1]);
+                    attributes.put(parts[1], attribute);
+                }
+
+                String [] r = parts[2].replace(",", ".").split("\\s*#\\s*");
+
+                BigDecimal from, to;
+
+                try {
+                    from = new BigDecimal(r[0]);
+                    to   = r.length < 2 ? null : new BigDecimal(r[1]);
+                    if (to != null && from.compareTo(to) > 0) {
+                        BigDecimal t = from; from = to; to = t;
+                    }
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("invalid number in line " + in.getLineNumber());
+                    continue;
+                }
+
+                ImportEdge edge = null;
+
+                if (parts.length == 4) { // Only 'Unterkante'
+                    try {
+                        edge = new ImportEdge(
+                            null,
+                            new BigDecimal(parts[3].trim().replace(',', '.')));
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn("cannot parse 'Unterkante' in line " +
+                            in.getLineNumber());
+                    }
+                }
+                else if (parts.length > 4) { // 'Unterkante' and 'Oberkante'
+                    String bottom = parts[3].trim().replace(',', '.');
+                    String top    = parts[4].trim().replace(',', '.');
+                    try {
+                        BigDecimal b = bottom.length() == 0
+                            ? null
+                            : new BigDecimal(bottom);
+                        BigDecimal t = top.length() == 0
+                            ? null
+                            : new BigDecimal(top);
+                        edge = new ImportEdge(t, b);
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn(
+                            "cannot parse 'Unterkante' or 'Oberkante' in line "
+                            + in.getLineNumber());
+                    }
+                }
+
+                ImportRange range = new ImportRange(from, to);
+
+                ImportAnnotationType type = classifier != null
+                    ? classifier.classifyDescription(line, defaultIAT)
+                    : null;
+
+                ImportAnnotation annotation = new ImportAnnotation(
+                    attribute, position, range, edge, type);
+
+                if (!annotations.add(annotation)) {
+                    log.warn("duplicated annotation '" + parts[0] +
+                        "' in line " + in.getLineNumber());
+                }
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+    public void parse(File root) throws IOException {
+
+        for (String toScan: TO_SCAN) {
+            File directory = FileTools.repair(new File(root, toScan));
+            if (!directory.isDirectory()) {
+                log.warn("'" + directory + "' is not a directory.");
+                continue;
+            }
+            File [] files = directory.listFiles();
+            if (files == null) {
+                log.warn("cannot list directory '" + directory + "'");
+                continue;
+            }
+
+            for (File file: files) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".km")) {
+                    parseFile(file);
+                }
+            }
+        } // for all directories to scan
+    }
+
+    public List<ImportAnnotation> getAnnotations() {
+        return new ArrayList<ImportAnnotation>(annotations);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/AtFileParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,206 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDischargeTable;
+import de.intevation.flys.importer.ImportDischargeTableValue;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.util.Date;
+import java.util.Calendar;
+
+import de.intevation.flys.importer.ImportTimeInterval;
+
+public class AtFileParser {
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    private static Logger logger = Logger.getLogger(AtFileParser.class);
+
+
+    // regular expression from hell to find out time range
+    public static final Pattern DATE_LINE = Pattern.compile(
+        "^\\*\\s*Abflu[^t]+tafel?\\s*([^\\d]+)"  +
+        "(\\d{1,2})?\\.?(\\d{1,2})?\\.?(\\d{2,4})\\s*(?:(?:bis)|-)?\\s*" +
+        "(?:(\\d{1,2})?\\.?(\\d{1,2})?\\.?(\\d{2,4}))?\\s*.*$");
+
+    public AtFileParser() {
+    }
+
+
+    public ImportDischargeTable parse(File file) throws IOException {
+        return parse(file, "", 0);
+    }
+
+    public ImportDischargeTable parse(
+        File   file,
+        String prefix,
+        int    kind
+    )
+    throws IOException {
+
+        logger.info("parsing AT file: " + file);
+
+        BufferedReader br = null;
+
+        String line       = null;
+
+        boolean beginning = true;
+
+        ImportDischargeTable dischargeTable =
+            new ImportDischargeTable(kind, prefix + file.getName());
+
+        Date from = null;
+        Date to   = null;
+
+        try {
+            br = new BufferedReader(
+                 new InputStreamReader(
+                 new FileInputStream(file), ENCODING));
+
+            while ((line = br.readLine()) != null) {
+
+                String tmp = line.trim();
+
+                if (tmp.length() == 0) {
+                    continue;
+                }
+
+                Matcher m = DATE_LINE.matcher(tmp);
+                if (m.matches()) {
+                    from = guessDate(m.group(2), m.group(3), m.group(4));
+                    to   = guessDate(m.group(5), m.group(6), m.group(7));
+                    if (from == null) {
+                        Date t = from; from = to; to = t;
+                    }
+                    continue;
+                }
+
+                if (tmp.startsWith("#! name=")) {
+                    // XXX Skip the name, because we don't know where to save
+                    // it at the moment
+
+                    //String name = tmp.substring(8);
+                    continue;
+                }
+
+                if (tmp.startsWith("#") || tmp.startsWith("*")) {
+                    continue;
+                }
+
+                String[] splits = tmp.replace(',', '.').split("\\s+");
+
+                if ((splits.length < 2) || (splits.length > 11)) {
+                    logger.warn("Found an invalid row in the AT file.");
+                    continue;
+                }
+
+                String strW = splits[0].trim();
+                double W    = Double.parseDouble(strW);
+
+                /* shift is used to differentiate between lines with
+                 * exactly 10 Qs and lines with less than 10 Qs. The shift
+                 * is only modified when it is the first line.
+                 */
+                int shift = -1;
+
+                if (splits.length != 11 && beginning) {
+                    shift = 10 - splits.length;
+                }
+
+
+                for (int i = 1; i < splits.length; i++) {
+                    double iW = W + shift + i;
+                    double iQ = Double.parseDouble(splits[i].trim());
+
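+                    // Scale both values by 1/100 before storing (the AT file
+                    // presumably lists them in hundredths).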
+                    dischargeTable.addDischargeTableValue(
+                        new ImportDischargeTableValue(
+                            new BigDecimal(iQ/100.0),
+                            new BigDecimal(iW/100.0)));
+                }
+
+                beginning = false;
+            }
+        }
+        catch (NumberFormatException pe) {
+            logger.warn(pe.getMessage());
+        }
+        finally {
+            if (br != null) {
+                br.close();
+            }
+        }
+
+        if (from != null) {
+            if (to != null && from.compareTo(to) > 0) {
+                Date t = from; from = to; to = t;
+            }
+            logger.info("from: " + from + " to: " + to);
+            ImportTimeInterval interval = new ImportTimeInterval(from, to);
+            dischargeTable.setTimeInterval(interval);
+        }
+
+        logger.info("Finished parsing AT file: " + file);
+
+        return dischargeTable;
+    }
+
+    public static Date guessDate(String day, String month, String year) {
+        if (day == null && month == null && year == null) {
+            return null;
+        }
+
+        logger.debug("day: " + day + " month: " + month + " year: " + year);
+
+        int dayI = 15;
+        if (day != null) {
+            try {
+                dayI = Integer.parseInt(day.trim());
+            }
+            catch (NumberFormatException nfe) {
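+                // ignore and keep the default day (15)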
+            }
+        }
+
+        int monthI = 6;
+        if (month != null) {
+            try {
+                monthI = Integer.parseInt(month.trim());
+            }
+            catch (NumberFormatException nfe) {
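+                // ignore and keep the default month (June)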
+            }
+        }
+
+        int yearI = 1900;
+        if (year != null) {
+            try {
+                yearI = Integer.parseInt(year.trim());
+                if (yearI < 100) {
+                    if (yearI < 20) {
+                        yearI += 2000;
+                    }
+                    else {
+                        yearI += 1900;
+                    }
+                }
+            }
+            catch (NumberFormatException nfe) {
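+                // ignore and keep the default year (1900)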
+            }
+        }
+
+        Calendar cal = Calendar.getInstance();
+        cal.set(yearI, monthI-1, dayI, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,52 @@
+package de.intevation.flys.importer.parsers;
+
+
+import java.math.BigDecimal;
+
+import java.text.ParseException;
+
+
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightEpoch;
+import de.intevation.flys.importer.ImportBedHeightEpochValue;
+
+
+public class BedHeightEpochParser extends BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightEpochParser.class);
+
+
+
+    @Override
+    protected ImportBedHeight newImportBedHeight(String description) {
+        return new ImportBedHeightEpoch(description);
+    }
+
+
+    @Override
+    protected void handleDataLine(ImportBedHeight obj, String line) {
+        String[] values = line.split(SEPERATOR_CHAR);
+
+        if (values == null || values.length < 2 || values[0].length() == 0 || values[1].length() == 0) {
+            //log.warn("Skip invalid data line: " + line);
+            return;
+        }
+
+        try {
+            ImportBedHeightEpochValue value = new ImportBedHeightEpochValue(
+                new BigDecimal(nf.parse(values[0]).doubleValue()),
+                new BigDecimal(nf.parse(values[1]).doubleValue())
+            );
+
+            obj.addValue(value);
+        }
+        catch (ParseException e) {
+            log.warn("Error while parsing number from data row: " + line);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,405 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightType;
+import de.intevation.flys.importer.ImportElevationModel;
+import de.intevation.flys.importer.ImportLocationSystem;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.model.BedHeightType;
+
+
+public abstract class BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Locale DEFAULT_LOCALE = Locale.GERMAN;
+
+    public static final String START_META_CHAR = "#";
+    public static final String SEPERATOR_CHAR  = ";";
+
+    public static final Pattern META_YEAR =
+        Pattern.compile("^Jahr: (\\d*).*");
+
+    public static final Pattern META_TIMEINTERVAL =
+        Pattern.compile("^Zeitraum: Epoche (\\d*)-(\\d*).*");
+
+    public static final Pattern META_TYPE =
+        Pattern.compile("^Aufnahmeart: (.*).*");
+
+    public static final Pattern META_LOCATION_SYSTEM =
+        Pattern.compile("^Lagesystem: (.*).*");
+
+    public static final Pattern META_CUR_ELEVATION_SYSTEM =
+        Pattern.compile("^H.hensystem:\\s(\\w++) (.* )??\\[(.*)\\].*");
+
+    public static final Pattern META_OLD_ELEVATION_SYSTEM =
+        Pattern.compile("^urspr.ngliches H.hensystem:\\s(\\w++) (.* )??\\[(.*)\\].*");
+
+    public static final Pattern META_SOUNDING_WIDTH =
+        Pattern.compile("^ausgewertete Peilbreite: (\\d*).*");
+
+    public static final Pattern META_RANGE =
+        Pattern.compile("^Strecke:\\D*(\\d++.\\d*)-(\\d++.\\d*).*");
+
+    public static final Pattern META_EVALUATION_BY =
+        Pattern.compile("^Auswerter: (.*).*");
+
+    public static final Pattern META_COMMENTS =
+        Pattern.compile("^Weitere Bemerkungen: (.*).*");
+
+
+    protected static NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    protected List<ImportBedHeight> bedHeights;
+
+
+    protected abstract ImportBedHeight newImportBedHeight(String description);
+
+    protected abstract void handleDataLine(
+        ImportBedHeight importBedHeight,
+        String          line
+    );
+
+
+
+    public BedHeightParser() {
+        this.bedHeights = new ArrayList<ImportBedHeight>();
+    }
+
+
+    public List<ImportBedHeight> getBedHeights() {
+        return bedHeights;
+    }
+
+
+    public void parse(File file) throws IOException {
+        log.info("Parsing bed height single file '" + file + "'");
+
+        ImportBedHeight obj = newImportBedHeight(file.getName());
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                if (line.startsWith(START_META_CHAR)) {
+                    handleMetaLine(obj, line);
+                }
+                else {
+                    handleDataLine(obj, line);
+                }
+            }
+
+            log.info("File contained " + obj.getValueCount() + " values.");
+            bedHeights.add(obj);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+
+    protected static String stripMetaLine(String line) {
+        String tmp = line.substring(1, line.length());
+
+        if (tmp.startsWith(" ")) {
+            return tmp.substring(1, tmp.length());
+        }
+        else {
+            return tmp;
+        }
+    }
+
+
+    public static Date getDateFromYear(int year) {
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1);
+
+        return cal.getTime();
+    }
+
+
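+    // Dispatch a meta line (the leading '#' is stripped first) to the first
+    // matching handler; unknown meta lines are only logged.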
+    protected void handleMetaLine(ImportBedHeight obj, String line) {
+        String meta = stripMetaLine(line);
+
+        if (handleMetaYear(obj, meta)) {
+            return;
+        }
+        else if (handleMetaTimeInterval(obj, meta)) {
+            return;
+        }
+        else if (handleMetaSoundingWidth(obj, meta)) {
+            return;
+        }
+        else if (handleMetaComment(obj, meta)) {
+            return;
+        }
+        else if (handleMetaEvaluationBy(obj, meta)) {
+            return;
+        }
+        else if (handleMetaRange(obj, meta)) {
+            return;
+        }
+        else if (handleMetaType(obj, meta)) {
+            return;
+        }
+        else if (handleMetaLocationSystem(obj, meta)) {
+            return;
+        }
+        else if (handleMetaCurElevationModel(obj, meta)) {
+            return;
+        }
+        else if (handleMetaOldElevationModel(obj, meta)) {
+            return;
+        }
+        else {
+            log.warn("Meta line did not match any known type: " + line);
+        }
+    }
+
+
+    protected boolean handleMetaYear(ImportBedHeight obj, String line) {
+        Matcher m = META_YEAR.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            try {
+                obj.setYear(Integer.valueOf(tmp));
+                return true;
+            }
+            catch (NumberFormatException e) {
+                log.warn("Error while parsing year!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaTimeInterval(ImportBedHeight obj, String line) {
+        Matcher m = META_TIMEINTERVAL.matcher(line);
+
+        if (m.matches()) {
+            String lo = m.group(1);
+            String up = m.group(2);
+
+            log.debug("Found time interval: " + lo + " - " + up);
+
+            try {
+                int lower = Integer.valueOf(lo);
+                int upper = Integer.valueOf(up);
+
+                Date fromYear = getDateFromYear(lower);
+                Date toYear   = getDateFromYear(upper);
+
+                obj.setTimeInterval(new ImportTimeInterval(fromYear, toYear));
+            }
+            catch (NumberFormatException e) {
+                log.warn("Error while parsing timeinterval!", e);
+            }
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaSoundingWidth(ImportBedHeight obj, String line) {
+        Matcher m = META_SOUNDING_WIDTH.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            try {
+                obj.setSoundingWidth(Integer.valueOf(tmp));
+                return true;
+            }
+            catch (NumberFormatException e) {
+                log.warn("Error while parsing sounding width!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaComment(ImportBedHeight obj, String line) {
+        Matcher m = META_COMMENTS.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            obj.setDescription(tmp);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaEvaluationBy(
+        ImportBedHeight obj,
+        String                line
+    ) {
+        Matcher m = META_EVALUATION_BY.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+            tmp = tmp.replace(";", "");
+
+            obj.setEvaluationBy(tmp);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaRange(ImportBedHeight obj, String line) {
+        Matcher m = META_RANGE.matcher(line);
+
+        if (m.matches() && m.groupCount() >= 2) {
+            String a = m.group(1).replace(";", "");
+            String b = m.group(2).replace(";", "");
+
+            try {
+                BigDecimal lower = new BigDecimal(nf.parse(a).doubleValue());
+                BigDecimal upper = new BigDecimal(nf.parse(b).doubleValue());
+
+                obj.setRange(new ImportRange(lower, upper));
+
+                return true;
+            }
+            catch (ParseException e) {
+                log.warn("Error while parsing range!", e);
+            }
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaType(ImportBedHeight obj, String line) {
+        Matcher m = META_TYPE.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1).replace(";", "");
+
+            obj.setType(new ImportBedHeightType(
+                BedHeightType.getBedHeightName(tmp),
+                tmp));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaLocationSystem(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_LOCATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1).replace(";", "");
+
+            obj.setLocationSystem(new ImportLocationSystem(tmp, tmp));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaCurElevationModel(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_CUR_ELEVATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String num  = m.group(2);
+            String unit = m.group(3);
+
+            obj.setCurElevationModel(new ImportElevationModel(
+                name + " " + num,
+                new ImportUnit(unit)
+            ));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaOldElevationModel(
+        ImportBedHeight obj,
+        String          line
+    ) {
+        Matcher m = META_OLD_ELEVATION_SYSTEM.matcher(line);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String num  = m.group(2);
+            String unit = m.group(3);
+
+            obj.setOldElevationModel(new ImportElevationModel(
+                name + " " + num,
+                new ImportUnit(unit)
+            ));
+
+            return true;
+        }
+
+        return false;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,59 @@
+package de.intevation.flys.importer.parsers;
+
+
+import java.math.BigDecimal;
+
+import java.text.ParseException;
+
+
+
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportBedHeight;
+import de.intevation.flys.importer.ImportBedHeightSingle;
+import de.intevation.flys.importer.ImportBedHeightSingleValue;
+
+
+public class BedHeightSingleParser extends BedHeightParser {
+
+    private static final Logger log =
+        Logger.getLogger(BedHeightSingleParser.class);
+
+
+
+    @Override
+    protected ImportBedHeight newImportBedHeight(String description) {
+        return new ImportBedHeightSingle(description);
+    }
+
+
+
+    @Override
+    protected void handleDataLine(ImportBedHeight obj, String line) {
+        String[] values = line.split(SEPERATOR_CHAR);
+
+        if (values == null || values.length < 6) {
+            //log.warn("Error while parsing data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            ImportBedHeightSingleValue value = new ImportBedHeightSingleValue(
+                (ImportBedHeightSingle) obj,
+                new BigDecimal(nf.parse(values[0]).doubleValue()),
+                new BigDecimal(nf.parse(values[1]).doubleValue()),
+                new BigDecimal(nf.parse(values[2]).doubleValue()),
+                new BigDecimal(nf.parse(values[3]).doubleValue()),
+                new BigDecimal(nf.parse(values[4]).doubleValue()),
+                new BigDecimal(nf.parse(values[5]).doubleValue())
+            );
+
+            obj.addValue(value);
+        }
+        catch (ParseException e) {
+            log.warn("Error while parsing data row.", e);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.importer.parsers;
+
+import de.intevation.flys.importer.ImportFlowVelocityMeasurement;
+import de.intevation.flys.importer.ImportFlowVelocityMeasurementValue;
+
+import java.math.BigDecimal;
+
+import java.text.DateFormat;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+public class FlowVelocityMeasurementParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(FlowVelocityMeasurementParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+    private static final DateFormat df =
+        new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
+
+
+    private List<ImportFlowVelocityMeasurement> measurements;
+
+    private ImportFlowVelocityMeasurement current;
+
+
+    public FlowVelocityMeasurementParser() {
+        measurements = new ArrayList<ImportFlowVelocityMeasurement>();
+    }
+
+
+    public List<ImportFlowVelocityMeasurement> getMeasurements() {
+        return measurements;
+    }
+
+    @Override
+    protected void reset() {
+        current = new ImportFlowVelocityMeasurement();
+    }
+
+
+    @Override
+    protected void finish() {
+        measurements.add(current);
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    public void handleMetaLine(String line) {
+        line = line.replace(";", "");
+        current.setDescription(line);
+    }
+
+
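+    // Columns used: [1] km, [3]+[4] date and time, [5] W, [6] Q, [7] v and
+    // optionally [8] a description.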
+    public void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols.length < 8) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            double km     = nf.parse(cols[1]).doubleValue();
+            double w      = nf.parse(cols[5]).doubleValue();
+            double q      = nf.parse(cols[6]).doubleValue();
+            double v      = nf.parse(cols[7]).doubleValue();
+
+            String timestr     = cols[3] + " " + cols[4];
+            String description = cols.length > 8 ? cols[8] : null;
+
+            current.addValue(new ImportFlowVelocityMeasurementValue(
+                df.parse(timestr),
+                new BigDecimal(km),
+                new BigDecimal(w),
+                new BigDecimal(q),
+                new BigDecimal(v),
+                description
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing flow velocity values.", pe);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,239 @@
+package de.intevation.flys.importer.parsers;
+
+import java.math.BigDecimal;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDischargeZone;
+import de.intevation.flys.importer.ImportFlowVelocityModel;
+import de.intevation.flys.importer.ImportFlowVelocityModelValue;
+
+
+public class FlowVelocityModelParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(FlowVelocityModelParser.class);
+
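+    // Meta lines are expected to look like
+    // "... Rechnung <main value(s)> (Pegel <gauge name> Q=<value>m3/s) ...";
+    // META_REGEX extracts the main value and gauge parts, META_GAUGE then
+    // splits the gauge part into name and Q value.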
+    private static final Pattern META_REGEX =
+        Pattern.compile(".*Rechnung (.*) \\(Pegel (.*)\\).*");
+
+    private static final Pattern META_GAUGE =
+        Pattern.compile("(.*) Q=(\\w*)m3/s");
+
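+    // The main value part comes in several notations, e.g. "HQ100" (A),
+    // "MNQ - MHQ" (B), "5 MNQ" or "1,5 MQ" (C), "1 MNQ bis 5 MHQ" (D)
+    // and "MNQ bis MHQ" (E); one pattern per notation.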
+    private static final Pattern META_MAINVALUE_A =
+        Pattern.compile("([a-zA-Z]+)+(\\d+)*");
+
+    private static final Pattern META_MAINVALUE_B =
+        Pattern.compile("(([a-zA-Z]+)+(\\d+)*)\\s*-\\s*(([a-zA-Z]+)+(\\d+)*)");
+
+    private static final Pattern META_MAINVALUE_C =
+        Pattern.compile("([0-9]++)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*)");
+
+    private static final Pattern META_MAINVALUE_D =
+        Pattern.compile("(([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*)) bis (([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*))");
+
+    private static final Pattern META_MAINVALUE_E =
+        Pattern.compile("(([a-zA-Z]+)+(\\d+)*) bis (([a-zA-Z]+)+(\\d+)*)");
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    private List<ImportFlowVelocityModel> models;
+
+    private ImportFlowVelocityModel current;
+
+
+    public FlowVelocityModelParser() {
+        models = new ArrayList<ImportFlowVelocityModel>();
+    }
+
+
+    public List<ImportFlowVelocityModel> getModels() {
+        return models;
+    }
+
+    @Override
+    protected void reset() {
+        current = new ImportFlowVelocityModel();
+    }
+
+
+    @Override
+    protected void finish() {
+        models.add(current);
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        Matcher m = META_REGEX.matcher(line);
+
+        if (m.matches()) {
+            String mainValueStr = m.group(1);
+            String gaugeStr     = m.group(2);
+
+            Object[] valueData = handleMainValueString(mainValueStr);
+            Object[] gaugeData = handleGaugeString(gaugeStr);
+
+            if (valueData == null || valueData.length < 2) {
+                log.warn("skip invalid MainValue part: '" + line + "'");
+                return;
+            }
+
+            if (gaugeData == null || gaugeData.length < 2) {
+                log.warn("skip invalid gauge part: '" + line + "'");
+                return;
+            }
+
+            if (log.isDebugEnabled()) {
+                log.debug("Found meta information:");
+                log.debug("   Gauge: " + gaugeData[0]);
+                log.debug("   Value: " + gaugeData[1]);
+                log.debug("   Lower: " + valueData[0]);
+                log.debug("   upper: " + valueData[1]);
+            }
+
+            current.setDischargeZone(new ImportDischargeZone(
+                (String) gaugeData[0],
+                (BigDecimal) gaugeData[1],
+                (String) valueData[0],
+                (String) valueData[1]
+            ));
+        }
+    }
+
+
+    protected Object[] handleMainValueString(String mainValueStr) {
+        Matcher mA = META_MAINVALUE_A.matcher(mainValueStr);
+        if (mA.matches()) {
+            String name = mA.group(0);
+
+            return new Object[] { name, name };
+        }
+
+        Matcher mB = META_MAINVALUE_B.matcher(mainValueStr);
+        if (mB.matches()) {
+            String lower = mB.group(1);
+            String upper = mB.group(4);
+
+            return new Object[] { lower, upper };
+        }
+
+        Matcher mC = META_MAINVALUE_C.matcher(mainValueStr);
+        if (mC.matches()) {
+            String facA  = mC.group(1);
+            String nameA = mC.group(2);
+            String facB  = mC.group(3);
+            String nameB = mC.group(4);
+
+            String fac  = facA  != null ? facA  : facB;
+            String name = nameA != null ? nameA : nameB;
+
+            String mainValue = fac + " " + name;
+
+            return new Object[] { mainValue, mainValue };
+        }
+
+        Matcher mD = META_MAINVALUE_D.matcher(mainValueStr);
+        if (mD.matches()) {
+            String loFacA  = mD.group(2);
+            String loNameA = mD.group(3);
+            String loFacB  = mD.group(4);
+            String loNameB = mD.group(5);
+
+            String upFacA  = mD.group(7);
+            String upNameA = mD.group(8);
+            String upFacB  = mD.group(9);
+            String upNameB = mD.group(10);
+
+            String loFac  = loFacA  != null ? loFacA  : loFacB;
+            String loName = loNameA != null ? loNameA : loNameB;
+
+            String upFac  = upFacA  != null ? upFacA  : upFacB;
+            String upName = upNameA != null ? upNameA : upNameB;
+
+            String loMainValue = loFac + " " + loName;
+            String upMainValue = upFac + " " + upName;
+
+            return new Object[] { loMainValue, upMainValue };
+        }
+
+        Matcher mE = META_MAINVALUE_E.matcher(mainValueStr);
+        if (mE.matches()) {
+            String lower = mE.group(1);
+            String upper = mE.group(4);
+
+            return new Object[] { lower, upper };
+        }
+
+        return null;
+    }
+
+
+    protected Object[] handleGaugeString(String gaugeStr) {
+        Matcher m = META_GAUGE.matcher(gaugeStr);
+
+        if (m.matches()) {
+            String name = m.group(1);
+            String qStr = m.group(2);
+
+            try {
+                return new Object[] {
+                    name,
+                    new BigDecimal(nf.parse(qStr).doubleValue()) };
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing Q value: '" + qStr + "'");
+            }
+        }
+
+        return null;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols.length < 5) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            double km     = nf.parse(cols[0]).doubleValue();
+            double q      = nf.parse(cols[1]).doubleValue();
+            double total  = nf.parse(cols[2]).doubleValue();
+            double main   = nf.parse(cols[3]).doubleValue();
+            double stress = nf.parse(cols[4]).doubleValue();
+
+            current.addValue(new ImportFlowVelocityModelValue(
+                new BigDecimal(km),
+                new BigDecimal(q),
+                new BigDecimal(total),
+                new BigDecimal(main),
+                new BigDecimal(stress)
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing flow velocity values.", pe);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/HYKParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,344 @@
+package de.intevation.flys.importer.parsers;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportHYK;
+import de.intevation.flys.importer.ImportHYKEntry;
+import de.intevation.flys.importer.ImportHYKFormation;
+import de.intevation.flys.importer.ImportHYKFlowZone;
+import de.intevation.flys.importer.ImportHYKFlowZoneType;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Date;
+import java.util.Calendar;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+public class HYKParser
+{
+    private static Logger log = Logger.getLogger(HYKParser.class);
+
+    public interface Callback {
+        boolean hykAccept(File file);
+        void    hykParsed(HYKParser parser);
+    } // interface Callback
+
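+    // A HYK entry is read as a fixed sequence of records:
+    // LINE_1: km, optional year, number of formations
+    // LINE_2: number of flow zones, bottom and top height
+    // LINE_3: flow zone types
+    // LINE_4/LINE_5: flow zone boundary coordinates
+    // LINE_6: distances VL/HF/VR; then the next formation or entry follows.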
+    public static enum State {
+        LINE_1, LINE_2, LINE_3, LINE_4, LINE_5, LINE_6
+    };
+
+    private static final String ENCODING = "ISO-8859-1";
+
+    protected Map<String, ImportHYKFlowZoneType> flowZoneTypes;
+
+    protected ImportHYK hyk;
+
+    public HYKParser() {
+        flowZoneTypes = new HashMap<String, ImportHYKFlowZoneType>();
+    }
+
+    public ImportHYK getHYK() {
+        return hyk;
+    }
+
+    private static Date yearToDate(Integer year) {
+        if (year == null) {
+            return null;
+        }
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1, 12, 0, 0);
+        long ms = cal.getTimeInMillis();
+        cal.setTimeInMillis(ms - ms%1000);
+        return cal.getTime();
+    }
+
+    public boolean parse(File file) {
+
+        boolean debug = log.isDebugEnabled();
+
+        log.info("Parsing HYK file '" + file + "'");
+
+        LineNumberReader in = null;
+
+        String description =
+            file.getParentFile().getName() + "/" + file.getName();
+
+        hyk = new ImportHYK(null, description);
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line;
+
+            State state = State.LINE_1;
+
+            int numFormations = 0;
+
+            BigDecimal km         = null;
+            BigDecimal top        = null;
+            BigDecimal bottom     = null;
+            BigDecimal distanceVL = null;
+            BigDecimal distanceHF = null;
+            BigDecimal distanceVR = null;
+
+            Integer    year       = null;
+            int        numZones   = 0;
+
+            ImportHYKFlowZoneType [] fzts     = null;
+            BigDecimal            [] coords   = null;
+            int                      coordPos = 0;
+
+            ImportHYKEntry     entry     = null;
+            ImportHYKFormation formation = null;
+
+            while ((line = in.readLine()) != null) {
+
+                if (line.startsWith("*") || line.startsWith("----")) {
+                    continue;
+                }
+
+                line = line.trim();
+
+                if (state != State.LINE_5 && line.length() == 0) {
+                    continue;
+                }
+
+                String [] parts = line.split("\\s+");
+
+                if (debug) {
+                    log.debug("'" + line + "': " + state);
+                }
+
+                switch (state) {
+                    case LINE_1:
+                        if (parts.length < 2) {
+                            log.error("1: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+
+                        if (parts.length == 2) {
+                            // no year given
+                            year = null;
+                        }
+                        else {
+                            try {
+                                year = Integer.valueOf(parts[1]);
+                            }
+                            catch (NumberFormatException nfe) {
+                                log.error(
+                                    "year is not an integer in line " +
+                                    in.getLineNumber());
+                                return false;
+                            }
+                        }
+                        try {
+                            km = new BigDecimal(parts[0]);
+                            numFormations = Integer.parseInt(
+                                parts[parts.length > 2 ? 2 : 1]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error(
+                                "parsing number of formations " +
+                                "or km failed in line " + in.getLineNumber());
+                            return false;
+                        }
+                        entry = new ImportHYKEntry(hyk, km, yearToDate(year));
+                        hyk.addEntry(entry);
+
+                        state = State.LINE_2;
+                        break;
+
+                    case LINE_2:
+                        if (parts.length < 3) {
+                            log.error("2: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            numZones = Integer.parseInt(parts[0]);
+                            bottom   = new BigDecimal(parts[1]);
+                            top      = new BigDecimal(parts[2]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error(
+                                "parsing num zones, bottom or top height " +
+                                "failed in line " + in.getLineNumber());
+                            return false;
+                        }
+                        formation = new ImportHYKFormation();
+                        formation.setBottom(bottom);
+                        formation.setTop(top);
+                        entry.addFormation(formation);
+
+                        state = State.LINE_3;
+                        break;
+
+                    case LINE_3:
+                        if (parts.length != numZones) {
+                            log.error(
+                                "number of flow zones mismatches " +
+                                "in line " + in.getLineNumber());
+                            return false;
+                        }
+
+                        fzts = new ImportHYKFlowZoneType[parts.length];
+                        for (int i = 0; i < fzts.length; ++i) {
+                            fzts[i] = getFlowZoneType(parts[i]);
+                        }
+                        coords = new BigDecimal[numZones];
+                        state = State.LINE_4;
+                        break;
+
+                    case LINE_4:
+                        try {
+                            int N = Math.min(parts.length, coords.length);
+                            for (coordPos = 0; coordPos < N; ++coordPos) {
+                                coords[coordPos] =
+                                    new BigDecimal(parts[coordPos]);
+                            }
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("cannot parse number in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        state = State.LINE_5;
+                        break;
+
+                    case LINE_5:
+                        if (parts.length + coordPos < coords.length) {
+                            log.error("5: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            for (int i = 0; 
+                                i < parts.length && coordPos < coords.length;
+                                ++i, ++coordPos
+                            ) {
+                                coords[coordPos] = new BigDecimal(parts[i]);
+                            }
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("cannot parse number in line " + 
+                                in.getLineNumber());
+                            return false;
+                        }
+                        for (int i = 0; i < coords.length; ++i) {
+                            BigDecimal a = coords[i];
+                            BigDecimal b = coords[i == coords.length-1 ? i : i+1];
+                            if (a.compareTo(b) > 0) {
+                                log.warn("zone coordinates swapped in line " + 
+                                    in.getLineNumber());
+                                BigDecimal c = a; a = b; b = c;
+                            }
+                            ImportHYKFlowZone zone = new ImportHYKFlowZone(
+                                formation, fzts[i], a, b);
+                            formation.addFlowZone(zone);
+                        }
+                        state = State.LINE_6;
+                        break;
+
+                    case LINE_6:
+                        if (parts.length < 3) {
+                            log.error("6: not enough elements in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        try {
+                            distanceVL = new BigDecimal(parts[0]);
+                            distanceHF = new BigDecimal(parts[1]);
+                            distanceVR = new BigDecimal(parts[2]);
+                        }
+                        catch (NumberFormatException nfe) {
+                            log.error("cannot parse number in line " +
+                                in.getLineNumber());
+                            return false;
+                        }
+                        formation.setDistanceVL(distanceVL);
+                        formation.setDistanceHF(distanceHF);
+                        formation.setDistanceVR(distanceVR);
+
+                        // continue with next formation.
+                        state = --numFormations > 0 // formations left?
+                            ? State.LINE_2
+                            : State.LINE_1;
+                        break;
+                }
+            }
+        }
+        catch (IOException ioe) {
+            log.error(ioe);
+            return false;
+        }
+        finally {
+            if (in != null) {
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {
+                    log.error(ioe);
+                }
+            }
+        }
+        return true;
+    }
+
+    protected ImportHYKFlowZoneType getFlowZoneType(String name) {
+        name = name.toUpperCase();
+        ImportHYKFlowZoneType fzt = flowZoneTypes.get(name);
+        if (fzt == null) {
+            log.info("New flow zone type: " + name);
+            fzt = new ImportHYKFlowZoneType(name);
+            flowZoneTypes.put(name, fzt);
+        }
+        return fzt;
+    }
+
+    protected void reset() {
+        hyk = null;
+    }
+
+    public void parseHYKs(File root, final Callback callback) {
+
+        FileTools.walkTree(root, new FileTools.FileVisitor() {
+            @Override
+            public boolean visit(File file) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".hyk")
+                && (callback == null || callback.hykAccept(file))) {
+                    reset();
+                    boolean success = parse(file);
+                    log.info("parsing " + (success ? "succeeded" : "failed"));
+                    if (success && callback != null) {
+                        callback.hykParsed(HYKParser.this);
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    public static void main(String [] args) {
+
+        HYKParser parser = new HYKParser();
+
+        for (String arg: args) {
+            parser.parseHYKs(new File(arg), null);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/InfoGewParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,135 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportRiver;
+
+public class InfoGewParser
+{
+    private static Logger log = Logger.getLogger(InfoGewParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
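+    // The river info file consists of lines like "Gewässer: <name>",
+    // "WSTDatei: <path>" and "B+B-Info: <path>"; each river section
+    // starts with a "Gewässer" line.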
+    public static final Pattern GEWAESSER =
+        Pattern.compile("^\\s*Gew\u00e4sser\\s*:\\s*(.+)");
+
+    public static final Pattern WST_DATEI =
+        Pattern.compile("^\\s*WSTDatei\\s*:\\s*(.+)");
+
+    public static final Pattern BB_INFO =
+        Pattern.compile("^\\s*B\\+B-Info\\s*:\\s*(.+)");
+
+    protected ArrayList<ImportRiver> rivers;
+
+    protected AnnotationClassifier annotationClassifier;
+
+    public InfoGewParser() {
+        this(null);
+    }
+
+    public InfoGewParser(AnnotationClassifier annotationClassifier) {
+        rivers = new ArrayList<ImportRiver>();
+        this.annotationClassifier = annotationClassifier;
+    }
+
+    public List<ImportRiver> getRivers() {
+        return rivers;
+    }
+
+    public static final String normalize(String f) {
+        return f.replace("\\", "/").replace("/", File.separator);
+    }
+
+    public void parse(File file) throws IOException {
+
+        LineNumberReader in = null;
+
+        File root = file.getParentFile();
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+
+            String riverName  = null;
+            File   wstFile    = null;
+            File   bbInfoFile = null;
+
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+                Matcher m = GEWAESSER.matcher(line);
+
+                if (m.matches()) {
+                    String river = m.group(1);
+                    log.info("Found river '" + river + "'");
+                    if (riverName != null) {
+                        rivers.add(new ImportRiver(
+                            riverName,
+                            wstFile,
+                            bbInfoFile,
+                            annotationClassifier));
+                    }
+                    riverName  = river;
+                    wstFile    = null;
+                    bbInfoFile = null;
+                }
+                else if ((m = WST_DATEI.matcher(line)).matches()) {
+                    String wstFilename = m.group(1);
+                    File wst = new File(wstFilename = normalize(wstFilename));
+                    if (!wst.isAbsolute()) {
+                        wst = new File(root, wstFilename);
+                    }
+                    wst = FileTools.repair(wst);
+                    log.info("Found wst file '" + wst + "'");
+                    if (!wst.isFile() || !wst.canRead()) {
+                        log.warn("cannot access WST file '" + wstFilename + "'");
+                        continue;
+                    }
+                    wstFile = wst;
+                }
+                else if ((m = BB_INFO.matcher(line)).matches()) {
+                    //TODO: Make it relative to the wst file.
+                    String bbInfo = m.group(1);
+                    bbInfoFile = new File(normalize(bbInfo));
+                }
+            }
+            if (riverName != null) {
+                rivers.add(new ImportRiver(
+                    riverName,
+                    wstFile,
+                    bbInfoFile,
+                    annotationClassifier));
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+
+        for (ImportRiver river: rivers) {
+            river.parseDependencies();
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/LineParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Locale;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import org.apache.log4j.Logger;
+
+
+public abstract class LineParser {
+
+    private static final Logger log = Logger.getLogger(LineParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Locale DEFAULT_LOCALE = Locale.GERMAN;
+
+    public static final String START_META_CHAR = "#";
+    public static final String SEPERATOR_CHAR  = ";";
+
+
+    protected abstract void handleLine(String line);
+
+    protected abstract void reset();
+
+    protected abstract void finish();
+
+
+    /**
+     * This method reads each line of <i>file</i>. At the beginning,
+     * <i>reset()</i> is called; afterwards <i>handleLine()</i> is called for
+     * each line; at the end <i>finish()</i> is called.
+     *
+     * @param file The file which should be parsed.
+     */
+    public void parse(File file) throws IOException {
+        log.info("Parsing file '" + file + "'");
+
+        reset();
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                handleLine(line);
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+
+        finish();
+    }
+
+
+    protected static String stripMetaLine(String line) {
+        String tmp = line.substring(1, line.length());
+
+        if (tmp.startsWith(" ")) {
+            return tmp.substring(1, tmp.length());
+        }
+        else {
+            return tmp;
+        }
+    }
+
+
+    public static Date getDateFromYear(int year) {
+        Calendar cal = Calendar.getInstance();
+        cal.set(year, 0, 1);
+
+        return cal.getTime();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,123 @@
+package de.intevation.flys.importer.parsers;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportMorphWidth;
+import de.intevation.flys.importer.ImportMorphWidthValue;
+import de.intevation.flys.importer.ImportUnit;
+
+
+public class MorphologicalWidthParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(MorphologicalWidthParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    protected List<ImportMorphWidth> morphWidths;
+
+    protected ImportMorphWidth current;
+
+
+    public MorphologicalWidthParser() {
+        morphWidths = new ArrayList<ImportMorphWidth>();
+    }
+
+
+    @Override
+    protected void reset() {
+        current = new ImportMorphWidth();
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            morphWidths.add(current);
+        }
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else {
+            log.warn("Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < 2) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            BigDecimal km    = new BigDecimal(nf.parse(vals[0]).doubleValue());
+            BigDecimal width = new BigDecimal(nf.parse(vals[1]).doubleValue());
+
+            String desc = vals.length > 2 ? vals[2] : null;
+
+            current.addValue(new ImportMorphWidthValue(
+                km,
+                width,
+                desc
+            ));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing numbers in '" + line + "'");
+        }
+    }
+
+
+    public List<ImportMorphWidth> getMorphologicalWidths() {
+        return morphWidths;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/PRFParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,416 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.io.File;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.XY;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+public class PRFParser
+{
+    private static Logger log = Logger.getLogger(PRFParser.class);
+
+    public static final String ENCODING =
+        System.getProperty("flys.backend.prf.encoding", "ISO-8859-1");
+
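+    // The PRF header contains Fortran-like format descriptors, e.g.
+    // "(13x,21(f7.0,f7.1))" for the data lines and "(10x,f8.3)" for the
+    // km line; DATA_PATTERN and KM_PATTERN pick these descriptors apart.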
+    public static final Pattern DATA_PATTERN =
+        Pattern.compile(
+            "\\((\\d+)x\\s*,\\s*(\\d+)\\(" +
+            "\\s*f(\\d+)\\.(\\d+)\\s*,\\s*f(\\d+)\\.(\\d+)\\s*\\)?\\)?");
+
+    public static final Pattern KM_PATTERN =
+        Pattern.compile("\\((\\d+)x\\s*,\\s*f(\\d+)\\.(\\d+)\\s*\\)?");
+
+    public static final Pattern YEAR_PATTERN =
+        Pattern.compile("(\\d{4})");
+
+    public static final int MIN_YEAR = 1800;
+    public static final int MAX_YEAR = 2100;
+
+    public interface Callback {
+        boolean prfAccept(File file);
+        void    prfParsed(PRFParser parser);
+    } // interface Parser
+
+    public static class DataFormat {
+
+        protected int deleteChars;
+        protected int maxRepetitions;
+        protected int firstIntegerPlaces;
+        protected int firstFractionPlaces;
+        protected int secondIntegerPlaces;
+        protected int secondFractionPlaces;
+
+        protected double firstShift;
+        protected double secondShift;
+
+        public DataFormat() {
+        }
+
+        public DataFormat(Matcher m) {
+            deleteChars          = Integer.parseInt(m.group(1));
+            maxRepetitions       = Integer.parseInt(m.group(2));
+            firstIntegerPlaces   = Integer.parseInt(m.group(3));
+            firstFractionPlaces  = Integer.parseInt(m.group(4));
+            secondIntegerPlaces  = Integer.parseInt(m.group(5));
+            secondFractionPlaces = Integer.parseInt(m.group(6));
+
+            firstShift  = Math.pow(10, firstFractionPlaces);
+            secondShift = Math.pow(10, secondFractionPlaces);
+        }
+
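+        // Extracts up to maxRepetitions (x, y) pairs from a fixed width
+        // data line. Returns 1 if the full number of pairs was read, 0 if
+        // the line ended early and -1 if a value could not be parsed.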
+        public int extractData(String line, List<XY> kmData) {
+            int L = line.length();
+            if (L <= deleteChars) {
+                return -1;
+            }
+
+            int pos = deleteChars;
+
+            boolean debug = log.isDebugEnabled();
+
+
+            int rep = 0;
+            for (;rep < maxRepetitions; ++rep) {
+                if (pos >= L || pos + firstIntegerPlaces >= L) {
+                    break;
+                }
+                String first = line.substring(
+                    pos, pos + firstIntegerPlaces);
+
+                String second = line.substring(
+                    pos + firstIntegerPlaces, 
+                    Math.min(L, pos+firstIntegerPlaces+secondIntegerPlaces));
+
+                double x, y;
+                try {
+                    x = Double.parseDouble(first);
+                    y = Double.parseDouble(second);
+                }
+                catch (NumberFormatException nfe) {
+                    // broken line -> subtract from data set skip count
+                    return -1;
+                }
+
+                if (first.indexOf('.') < 0) {
+                    x /= firstShift;
+                }
+
+                if (firstFractionPlaces > 0) {
+                    x = (int)(x*firstShift)/firstShift;
+                }
+
+                if (second.indexOf('.') < 0) {
+                    y /= secondShift;
+                }
+
+                if (secondFractionPlaces > 0) {
+                    y = (int)(y*secondShift)/secondShift;
+                }
+
+                kmData.add(new XY(x, y, kmData.size()));
+
+                pos += firstIntegerPlaces + secondIntegerPlaces;
+            }
+
+            return rep == maxRepetitions ? 1 : 0;
+        }
+    } // class DataFormat
+
+    public static class KMFormat {
+
+        protected int deleteChars;
+        protected int integerPlaces;
+        protected int fractionPlaces;
+
+        protected double shift;
+
+        public KMFormat() {
+        }
+
+        public KMFormat(Matcher m) {
+            deleteChars    = Integer.parseInt(m.group(1));
+            integerPlaces  = Integer.parseInt(m.group(2));
+            fractionPlaces = Integer.parseInt(m.group(3));
+
+            shift = Math.pow(10, fractionPlaces);
+        }
+
+        public double extractKm(String line) throws NumberFormatException {
+
+            if (line.length() <= deleteChars) {
+                throw new NumberFormatException("line too short");
+            }
+
+            String kmS =
+                line.substring(deleteChars, deleteChars+integerPlaces);
+
+            double km = Double.parseDouble(kmS.trim());
+
+            if (kmS.indexOf('.') < 0) {
+                km /= shift;
+            }
+
+            return fractionPlaces > 0
+                ? ((int)(km*shift))/shift
+                : km;
+        }
+    } // class KMFormat
+
+    protected Map<Double, List<XY>> data;
+
+    protected Integer year;
+
+    protected String description;
+
+
+    public PRFParser() {
+        data = new TreeMap<Double, List<XY>>();
+    }
+
+    public Integer getYear() {
+        return year;
+    }
+
+    public void setYear(Integer year) {
+        this.year = year;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public Map<Double, List<XY>> getData() {
+        return data;
+    }
+
+    public void setData(Map<Double, List<XY>> data) {
+        this.data = data;
+    }
+
+    protected void sortLists() {
+        for (List<XY> xy: data.values()) {
+            Collections.sort(xy);
+        }
+    }
+
+    public static final Integer findYear(String s) {
+        Matcher m = YEAR_PATTERN.matcher(s);
+        while (m.find()) {
+            int year = Integer.parseInt(m.group(1));
+            if (year >= MIN_YEAR && year <= MAX_YEAR) {
+                return Integer.valueOf(year);
+            }
+        }
+        return null;
+    }
+
+    public boolean parse(File file) {
+
+        if (!(file.isFile() && file.canRead())) {
+            log.warn("cannot open file '" + file + "'");
+            return false;
+        }
+
+        log.info("parsing PRF file: '" + file + "'");
+
+        description = file.getName();
+
+        year = findYear(file.getName());
+
+        if (year == null) {
+            File parent = file.getParentFile();
+            if (parent != null) {
+                description = parent.getName() + "/" + description;
+                year = findYear(parent.getName());
+            }
+        }
+
+        if (year != null) {
+            log.info("year of sounding: " + year);
+        }
+
+        LineNumberReader in = null;
+
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = in.readLine();
+
+            if (line == null || (line = line.trim()).length() == 0) {
+                log.warn("file is empty.");
+                return false;
+            }
+
+            Matcher m = DATA_PATTERN.matcher(line);
+
+            if (!m.matches()) {
+                log.warn("First line does not look like a PRF data pattern.");
+                return false;
+            }
+
+            DataFormat dataFormat = new DataFormat(m);
+
+            if ((line = in.readLine()) == null
+            || (line = line.trim()).length() == 0) {
+                log.warn("premature EOF. Expected integer in line 2");
+                return false;
+            }
+
+            try {
+                if (Integer.parseInt(line) != dataFormat.maxRepetitions) {
+                    log.warn("Expected " +
+                        dataFormat.maxRepetitions + " in line 2");
+                    return false;
+                }
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("invalid integer in line 2", nfe);
+                return false;
+            }
+
+            if ((line = in.readLine()) == null) {
+                log.warn(
+                    "premature EOF. Expected pattern for km extraction");
+                return false;
+            }
+
+            m = KM_PATTERN.matcher(line);
+
+            if (!m.matches()) {
+                log.warn(
+                    "line 4 does not look like a PRF km extraction pattern.");
+                return false;
+            }
+
+            KMFormat kmFormat = new KMFormat(m);
+
+            if ((line = in.readLine()) == null
+            || (line = line.trim()).length() == 0) {
+                log.warn("premature EOF. Expected skip row count.");
+                return false;
+            }
+
+            int lineSkipCount;
+            try {
+                if ((lineSkipCount = Integer.parseInt(line)) < 0) {
+                    throw new NumberFormatException(lineSkipCount + " < 0");
+                }
+            }
+            catch (NumberFormatException nfe) {
+                log.warn(
+                    "line 5 is not an positive integer.");
+                return false;
+            }
+
+            int skip = lineSkipCount;
+
+            while ((line = in.readLine()) != null) {
+                if (skip > 0) {
+                    --skip;
+                    continue;
+                }
+                double km;
+                try {
+                    km = kmFormat.extractKm(line);
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("cannot extract km in line " + in.getLineNumber());
+                    return false;
+                }
+
+                Double station = Double.valueOf(km);
+
+                List<XY> kmData = data.get(station);
+
+                if (kmData == null) {
+                    //log.debug("found new km: " + station);
+                    kmData = new ArrayList<XY>();
+                    data.put(station, kmData);
+                }
+
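+                // Re-arm the skip counter when extractData() signals the end
+                // of a km block (0) or a broken line (-1).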
+                int c = dataFormat.extractData(line, kmData);
+                if (c < 1) {
+                    skip = lineSkipCount + c;
+                }
+            }
+
+            // sort all the lists by x and index
+            sortLists();
+        }
+        catch (IOException ioe) {
+            log.error(ioe);
+            return false;
+        }
+        finally {
+            if (in != null) {
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {
+                    log.error(ioe);
+                }
+            }
+        }
+
+        return true;
+    }
+
+    public void reset() {
+        data.clear();
+        year        = null;
+        description = null;
+    }
+
+    public void parsePRFs(File root, final Callback callback) {
+
+        FileTools.walkTree(root, new FileTools.FileVisitor() {
+            @Override
+            public boolean visit(File file) {
+                if (file.isFile() && file.canRead()
+                && file.getName().toLowerCase().endsWith(".prf")
+                && (callback == null || callback.prfAccept(file))) {
+                    reset();
+                    boolean success = parse(file);
+                    log.info("parsing " + (success ? "succeeded" : "failed"));
+                    if (success && callback != null) {
+                        callback.prfParsed(PRFParser.this);
+                    }
+                }
+                return true;
+            }
+        });
+    }
+
+    public static void main(String [] args) {
+
+        PRFParser parser = new PRFParser();
+
+        for (String arg: args) {
+            parser.parsePRFs(new File(arg), null);
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/PegelGltParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,102 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import java.math.BigDecimal;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifacts.common.utils.FileTools;
+
+import de.intevation.flys.importer.ImportGauge;
+import de.intevation.flys.importer.ImportRange;
+
+public class PegelGltParser
+{
+    private static Logger log = Logger.getLogger(PegelGltParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final String KM = "km:";
+
+    protected List<ImportGauge> gauges;
+
+    public PegelGltParser() {
+        gauges = new ArrayList<ImportGauge>();
+    }
+
+    public List<ImportGauge> getGauges() {
+        return gauges;
+    }
+
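+    // A GLT line has the form
+    // "<gauge name> km: <from> <to> <sta file> <at file>"; decimal commas
+    // are accepted for the km values.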
+    public void parse(File file) throws IOException {
+
+        File parent = file.getParentFile();
+
+        log.info("parsing GLT file '" + file + "'");
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if ((line = line.trim()).length() == 0) {
+                    continue;
+                }
+
+                int kmPos = line.indexOf(KM);
+                if (kmPos < 0) {
+                    log.warn("no gauge found in line " + in.getLineNumber());
+                    continue;
+                }
+
+                String gaugeName = line.substring(0, kmPos).trim();
+                log.info("Found gauge '" + gaugeName + "'");
+
+                line = line.substring(kmPos + KM.length()).trim();
+
+                String [] parts = line.split("\\s+");
+                if (parts.length < 4) {
+                    log.warn("line " + in.getLineNumber()
+                        + " has not enough columns");
+                    continue;
+                }
+
+                BigDecimal from = new BigDecimal(parts[0].replace(",", "."));
+                BigDecimal to   = new BigDecimal(parts[1].replace(",", "."));
+                if (from.compareTo(to) > 0) {
+                    BigDecimal t = from; from = to; to = t;
+                }
+                ImportRange range = new ImportRange(from, to);
+                File staFile = FileTools.repair(new File(parent, parts[2]));
+                File atFile  = FileTools.repair(new File(parent, parts[3]));
+
+                if (log.isDebugEnabled()) {
+                    log.debug("\tfrom: " + from);
+                    log.debug("\tto: " + to);
+                    log.debug("\tsta: " + staFile);
+                    log.debug("\tat: " + atFile);
+                }
+
+                gauges.add(new ImportGauge(range, staFile, atFile));
+            }
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,169 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.math.BigDecimal;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportDepth;
+import de.intevation.flys.importer.ImportSedimentDensity;
+import de.intevation.flys.importer.ImportSedimentDensityValue;
+import de.intevation.flys.importer.ImportUnit;
+
+
+public class SedimentDensityParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(SedimentDensityParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
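+    // Meta lines carry the unit ("Einheit: [<unit>]") and the depth range
+    // ("Tiefe: <lower>-<upper> <unit>") of the density measurements.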
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+    public static final Pattern META_DEPTH =
+        Pattern.compile("^Tiefe: (\\w++)-(\\w++)( (\\w++))?.*");
+
+
+    protected List<ImportSedimentDensity> sedimentDensities;
+
+    protected ImportSedimentDensity current;
+
+    protected String currentDescription;
+
+
+    public SedimentDensityParser() {
+        sedimentDensities = new ArrayList<ImportSedimentDensity>();
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        currentDescription = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        current = new ImportSedimentDensity(currentDescription);
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            sedimentDensities.add(current);
+        }
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else if (handleMetaDepth(line)) {
+            return;
+        }
+        else {
+            log.warn("Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected boolean handleMetaDepth(String line) {
+        Matcher m = META_DEPTH.matcher(line);
+
+        if (m.matches()) {
+            String lo   = m.group(1);
+            String up   = m.group(2);
+            String unit = m.group(4);
+
+            try {
+                ImportDepth depth = new ImportDepth(
+                    new BigDecimal(nf.parse(lo).doubleValue()),
+                    new BigDecimal(nf.parse(up).doubleValue()),
+                    new ImportUnit(unit)
+                );
+
+                current.setDepth(depth);
+
+                return true;
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing numbers in: '" + line + "'");
+            }
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < 3) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            BigDecimal km      = new BigDecimal(nf.parse(vals[0]).doubleValue());
+            BigDecimal density = new BigDecimal(nf.parse(vals[1]).doubleValue());
+
+            current.addValue(new ImportSedimentDensityValue(
+                km,
+                density,
+                vals[2])
+            );
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing numbers in '" + line + "'");
+        }
+    }
+
+
+    public List<ImportSedimentDensity> getSedimentDensities() {
+        return sedimentDensities;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,390 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportGrainFraction;
+import de.intevation.flys.importer.ImportSedimentYield;
+import de.intevation.flys.importer.ImportSedimentYieldValue;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.model.GrainFraction;
+
+
+public class SedimentYieldParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(SedimentYieldParser.class);
+
+
+    public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
+
+
+    public static final String FRAKTION_START = "Fraktion:";
+
+    public static final String FRACTION_COARSE_STR =
+        "_Grobkorn.csv";
+
+    public static final String FRACTION_FINE_MIDDLE_STR =
+        "_Fein-Mittel-Kies.csv";
+
+    public static final String FRACTION_SAND =
+        "_Sand.csv";
+
+    public static final String FRACTION_SUSP_SAND =
+        "_susp_Sand.csv";
+
+    public static final String FRACTION_SUSP_SAND_BED =
+        "_susp_Sand_bettbildAnteil.csv";
+
+    public static final String FRACTION_SUSPENDED_SEDIMENT =
+        "_Schwebstoff.csv";
+
+
+    public static final Pattern TIMEINTERVAL_SINGLE =
+        Pattern.compile("\\D*([0-9]+?)\\D*");
+
+    public static final Pattern TIMEINTERVAL_EPOCH =
+        Pattern.compile("\\D*([0-9]+?)\\s*-\\s*([0-9]+?)\\D*");
+
+    public static final Pattern META_FRACTION =
+        Pattern.compile("^Fraktion: (.*)");
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+    public static final Pattern META_COLUMN_NAMES =
+        Pattern.compile("^Fluss-km.*");
+
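+    // Grain fraction ranges appear in several notations, e.g.
+    // "2,0-6,3 mm" (A), "< Feinkorn 0,063-0,2 mm" (B) and
+    // "> 6,3 mm" (C); one pattern per notation.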
+    public static final Pattern META_GRAIN_FRACTION_A =
+        Pattern.compile("\\D*(([0-9]+?,[0-9]+?)\\s*-|([0-9]++)\\s*-)(([0-9]+?,[0-9]+?)|([0-9]++))\\s*([a-zA-Z]+?)\\W*\\D*");
+
+    public static final Pattern META_GRAIN_FRACTION_B =
+        Pattern.compile("(<|>){1}\\s*(\\w++)\\s*(([0-9]+?,[0-9]+?)\\s*-|([0-9]++)\\s*-)(([0-9]+?,[0-9]+?)|([0-9]++))\\s*([a-zA-Z]+?)");
+
+    public static final Pattern META_GRAIN_FRACTION_C =
+        Pattern.compile("(<|>){1}\\s*((([0-9]+?,[0-9]+?)|([0-9]++))\\s*(\\w+))");
+
+
+    protected List<ImportSedimentYield> sedimentYields;
+
+    protected ImportSedimentYield[] current;
+
+    protected ImportGrainFraction grainFraction;
+
+    protected ImportUnit unit;
+
+    protected String description;
+
+    protected String[] columnNames;
+
+
+    public SedimentYieldParser() {
+        sedimentYields = new ArrayList<ImportSedimentYield>();
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        description = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        current       = null;
+        grainFraction = null;
+        unit          = null;
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            for (ImportSedimentYield isy: current) {
+                sedimentYields.add(isy);
+            }
+        }
+
+        description = null;
+    }
+
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    protected void handleMetaLine(String line) {
+        if (handleMetaUnit(line)) {
+            return;
+        }
+        else if (handleMetaFraction(line)) {
+            return;
+        }
+        else if (handleColumnNames(line)) {
+            return;
+        }
+        else {
+            log.warn("Unknown meta line: '" + line + "'");
+        }
+    }
+
+
+    protected boolean handleMetaUnit(String line) {
+        Matcher m = META_UNIT.matcher(line);
+
+        if (m.matches()) {
+            unit = new ImportUnit(m.group(1));
+            return true;
+        }
+
+        return false;
+    }
+
+
+    public boolean handleMetaFraction(String line) {
+        Matcher m = META_FRACTION.matcher(line);
+
+        if (m.matches()) {
+            String tmp = m.group(1);
+
+            this.grainFraction = buildGrainFraction(tmp);
+
+            return true;
+        }
+        else if (line.startsWith(FRAKTION_START)) {
+            String newLine = line.replace(FRAKTION_START, "").trim();
+            if (newLine.length() == 0) {
+                log.debug("Found total grain fraction.");
+                this.grainFraction = new ImportGrainFraction(GrainFraction.TOTAL);
+
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+
+    public boolean handleColumnNames(String line) {
+        Matcher m = META_COLUMN_NAMES.matcher(line);
+
+        if (m.matches()) {
+            columnNames = line.split(SEPERATOR_CHAR);
+
+            initializeSedimentYields();
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    protected void handleDataLine(String line) {
+        String[] vals = line.split(SEPERATOR_CHAR);
+
+        if (vals == null || vals.length < columnNames.length-1) {
+            log.warn("skip invalid data line: '" + line + "'");
+            return;
+        }
+
+        try {
+            Double km = nf.parse(vals[0]).doubleValue();
+
+            for (int i = 1, n = columnNames.length-1; i < n; i++) {
+                String curVal = vals[i];
+
+                if (curVal != null && curVal.length() > 0) {
+                    current[i-1].addValue(new ImportSedimentYieldValue(
+                        km, nf.parse(vals[i]).doubleValue()
+                    ));
+                }
+            }
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing numbers in '" + line + "':", pe);
+        }
+    }
+
+
+    private void initializeSedimentYields() {
+        // skip first column (Fluss-km) and last column (Hinweise)
+        current = new ImportSedimentYield[columnNames.length-2];
+
+        for (int i = 0, n = columnNames.length; i < n-2; i++) {
+            current[i] = new ImportSedimentYield(this.description);
+            current[i].setTimeInterval(getTimeInterval(columnNames[i+1]));
+            current[i].setUnit(unit);
+            current[i].setGrainFraction(grainFraction);
+        }
+    }
+
+
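+    /**
+     * Extracts the time interval from a column header: either an epoch
+     * spanning two years or a single year.
+     */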
+    private ImportTimeInterval getTimeInterval(String column) {
+        try {
+            Matcher a = TIMEINTERVAL_EPOCH.matcher(column);
+            if (a.matches()) {
+                int yearA = nf.parse(a.group(1)).intValue();
+                int yearB = nf.parse(a.group(2)).intValue();
+
+                return new ImportTimeInterval(
+                    getDateFromYear(yearA),
+                    getDateFromYear(yearB)
+                );
+            }
+
+            Matcher b = TIMEINTERVAL_SINGLE.matcher(column);
+            if (b.matches()) {
+                int year = nf.parse(b.group(1)).intValue();
+
+                return new ImportTimeInterval(getDateFromYear(year));
+            }
+
+            log.warn("Unknown time interval string: '" + column + "'");
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing years: " + column, pe);
+        }
+
+        return null;
+    }
+
+
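+    /**
+     * Builds a grain fraction from the given meta string: variants A and
+     * B describe a closed range (lower and upper value plus unit),
+     * variant C an open range introduced by '<' or '>'.
+     */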
+    private ImportGrainFraction buildGrainFraction(String gfStr) {
+        Matcher a = META_GRAIN_FRACTION_A.matcher(gfStr);
+        if (a.matches()) {
+            String lowerA = a.group(2);
+            String lowerB = a.group(3);
+
+            String upperA = a.group(4);
+            String upperB = a.group(5);
+
+            String unitStr = a.group(7);
+            String lower = lowerA != null ? lowerA : lowerB;
+            String upper = upperA != null ? upperA : upperB;
+
+            try {
+                return new ImportGrainFraction(
+                    getGrainFractionTypeName(this.description),
+                    nf.parse(lower).doubleValue(),
+                    nf.parse(upper).doubleValue(),
+                    new ImportUnit(unitStr)
+                );
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        Matcher b = META_GRAIN_FRACTION_B.matcher(gfStr);
+        if (b.matches()) {
+            String lowerA  = b.group(4);
+            String lowerB  = b.group(5);
+            String upperA  = b.group(6);
+            String upperB  = b.group(7);
+            String unitStr = b.group(9);
+
+            String lower = lowerA != null ? lowerA : lowerB;
+            String upper = upperA != null ? upperA : upperB;
+
+            try {
+                return new ImportGrainFraction(
+                    getGrainFractionTypeName(this.description),
+                    nf.parse(lower).doubleValue(),
+                    nf.parse(upper).doubleValue(),
+                    new ImportUnit(unitStr)
+                );
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        Matcher c = META_GRAIN_FRACTION_C.matcher(gfStr);
+        if (c.matches()) {
+            String oper     = c.group(1);
+            String valueStr = c.group(3);
+            String unitStr  = c.group(6);
+
+            try {
+                Double value = nf.parse(valueStr).doubleValue();
+
+                if (oper.equals(">")) {
+                    return new ImportGrainFraction(
+                        getGrainFractionTypeName(this.description),
+                        value,
+                        null,
+                        new ImportUnit(unitStr)
+                    );
+                }
+                else {
+                    return new ImportGrainFraction(
+                        getGrainFractionTypeName(this.description),
+                        null,
+                        value,
+                        new ImportUnit(unitStr)
+                    );
+                }
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing ranges of: '" + gfStr + "'");
+            }
+        }
+
+        log.warn("Unknow grain fraction: '" + gfStr + "'");
+
+        return null;
+    }
+
+
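+    /** Derives the grain fraction type name from the file name suffix. */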
+    public static String getGrainFractionTypeName(String filename) {
+        if (filename.endsWith(FRACTION_COARSE_STR)) {
+            return GrainFraction.COARSE;
+        }
+        else if (filename.endsWith(FRACTION_FINE_MIDDLE_STR)) {
+            return GrainFraction.FINE_MIDDLE;
+        }
+        else if (filename.endsWith(FRACTION_SAND)) {
+            return GrainFraction.SAND;
+        }
+        else if (filename.endsWith(FRACTION_SUSP_SAND)) {
+            return GrainFraction.SUSP_SAND;
+        }
+        else if (filename.endsWith(FRACTION_SUSP_SAND_BED)) {
+            return GrainFraction.SUSP_SAND_BED;
+        }
+        else if (filename.endsWith(FRACTION_SUSPENDED_SEDIMENT)) {
+            return GrainFraction.SUSPENDED_SEDIMENT;
+        }
+        else {
+            log.warn("Unknown grain fraction type: '" + filename + "'");
+            return "unknown";
+        }
+    }
+
+
+    public List<ImportSedimentYield> getSedimentYields() {
+        return sedimentYields;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/StaFileParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,190 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+import java.math.BigDecimal;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.util.HashMap;
+import java.util.ArrayList;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportMainValueType;
+import de.intevation.flys.importer.ImportMainValue;
+import de.intevation.flys.importer.ImportNamedMainValue;
+import de.intevation.flys.importer.ImportGauge;
+
+public class StaFileParser
+{
+    private static Logger log = Logger.getLogger(StaFileParser.class);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final String TYPES =
+        System.getProperty("flys.backend.main.value.types", "QWTD");
+
+    public static final boolean PARSE_GAUGE_NUMBERS =
+        Boolean.getBoolean("flys.backend.sta.parse.gauge.numbers");
+
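+    // A main value line: name, value and one type character out of TYPES.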
+    public static final Pattern QWTD_ =
+        Pattern.compile("\\s*([^\\s]+)\\s+([^\\s]+)\\s+([" +
+            Pattern.quote(TYPES) + "]).*");
+
+    public StaFileParser() {
+    }
+
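+    /**
+     * Parses the STA file of the given gauge: the header lines provide
+     * the gauge name, an optional official number, AEO, datum and
+     * station; the remaining lines are parsed as main values. Returns
+     * false if the file is too short or cannot be parsed.
+     */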
+    public boolean parse(ImportGauge gauge) throws IOException {
+
+        File file = gauge.getStaFile();
+
+        log.info("parsing STA file: " + file);
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String line = in.readLine();
+
+            if (line == null) {
+                log.warn("STA file is empty.");
+                return false;
+            }
+
+            if (line.length() < 37) {
+                log.warn("first line in STA file is too short.");
+                return false;
+            }
+
+            String gaugeName = line.substring(16, 37).trim();
+
+            Long gaugeNumber = null;
+
+            if (PARSE_GAUGE_NUMBERS) {
+                String gaugeNumberString = line.substring(0, 16).trim();
+
+                try {
+                    gaugeNumber = Long.parseLong(gaugeNumberString);
+                }
+                catch (NumberFormatException nfe) {
+                    log.warn("'" + gaugeNumberString +
+                        "' is not a valid long number.");
+                }
+            }
+
+            gauge.setName(gaugeName);
+            gauge.setOfficialNumber(gaugeNumber);
+
+            if (log.isDebugEnabled()) {
+                log.debug(
+                    "name/number: '" + gaugeName + "' '" + gaugeNumber + "'");
+            }
+
+            String [] values = line.substring(38).trim().split("\\s+", 2);
+
+            if (values.length < 2) {
+                log.warn("Not enough columns for AEO and datum");
+                return false;
+            }
+            try {
+                gauge.setAeo(new BigDecimal(values[0].replace(",", ".")));
+                gauge.setDatum(new BigDecimal(values[1].replace(",", ".")));
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("cannot parse aeo or datum");
+                return false;
+            }
+
+            line = in.readLine();
+
+            if (line == null) {
+                log.warn("STA file has not enough lines");
+                return false;
+            }
+
+            if (line.length() < 36) {
+                log.warn("second line is too short");
+                return false;
+            }
+
+            try {
+                gauge.setStation(
+                    new BigDecimal(line.substring(29, 36).trim()));
+            }
+            catch (NumberFormatException nfe) {
+                log.warn("parsing of the datum of the gauge failed");
+                return false;
+            }
+
+            // skip over the next six lines
+            for (int i = 0; i < 6; ++i) {
+                if ((line = in.readLine()) == null) {
+                    log.warn("STA file is too short");
+                    return false;
+                }
+            }
+
+            HashMap<String, ImportMainValueType> types =
+                new HashMap<String, ImportMainValueType>();
+
+            ArrayList<ImportNamedMainValue> namedMainValues =
+                new ArrayList<ImportNamedMainValue>();
+
+            ArrayList<ImportMainValue> mainValues =
+                new ArrayList<ImportMainValue>();
+
+            while ((line = in.readLine()) != null) {
+                Matcher m = QWTD_.matcher(line);
+                if (m.matches()) {
+                    BigDecimal value;
+                    try {
+                        value = new BigDecimal(m.group(2).replace(",", "."));
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn("value not parseable in line "
+                            + in.getLineNumber());
+                        continue;
+                    }
+                    String typeString = m.group(3);
+                    log.debug("\t type: " + typeString);
+                    ImportMainValueType type = types.get(typeString);
+                    if (type == null) {
+                        type = new ImportMainValueType(typeString);
+                        types.put(typeString, type);
+                    }
+                    String name = m.group(1);
+                    ImportNamedMainValue namedMainValue =
+                        new ImportNamedMainValue(type, name);
+                    namedMainValues.add(namedMainValue);
+
+                    ImportMainValue mainValue =
+                        new ImportMainValue(gauge, namedMainValue, value);
+
+                    mainValues.add(mainValue);
+                }
+                else {
+                    // TODO: treat as a comment
+                }
+            }
+            gauge.setMainValueTypes(
+                new ArrayList<ImportMainValueType>(types.values()));
+            gauge.setNamedMainValues(namedMainValues);
+            gauge.setMainValues(mainValues);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+        log.info("finished parsing STA file: " + file);
+        return true;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,174 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWaterlevelDifference;
+import de.intevation.flys.importer.ImportWaterlevelDifferenceColumn;
+import de.intevation.flys.importer.ImportWaterlevelDifferenceValue;
+
+
+public class WaterlevelDifferencesParser extends LineParser {
+
+    private static final Logger log =
+        Logger.getLogger(WaterlevelDifferencesParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    private List<ImportWaterlevelDifference> differences;
+
+    private ImportWaterlevelDifferenceColumn[] columns;
+
+    private ImportWaterlevelDifference current;
+
+
+    public WaterlevelDifferencesParser() {
+        differences = new ArrayList<ImportWaterlevelDifference>();
+    }
+
+
+    public List<ImportWaterlevelDifference> getDifferences() {
+        return differences;
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        current = new ImportWaterlevelDifference(file.getName());
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+    }
+
+
+    @Override
+    protected void finish() {
+        if (columns != null && current != null) {
+            for (ImportWaterlevelDifferenceColumn col: columns) {
+                current.addValue(col);
+            }
+
+            differences.add(current);
+        }
+
+        current = null;
+        columns = null;
+    }
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+        }
+        else {
+            handleDataLine(line);
+        }
+    }
+
+
+    private void handleMetaLine(String meta) {
+        if (handleMetaUnit(meta)) {
+            return;
+        }
+        else {
+            handleMetaColumnNames(meta);
+        }
+    }
+
+
+    private boolean handleMetaUnit(String meta) {
+        Matcher m = META_UNIT.matcher(meta);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+            log.debug("Found unit: '" + unit + "'");
+
+            current.setUnit(new ImportUnit(unit));
+
+            return true;
+        }
+
+        return false;
+    }
+
+
+    private boolean handleMetaColumnNames(String meta) {
+        Pattern META_COLUMN_NAMES = Pattern.compile("Fluss-km;(.*)");
+        Matcher m = META_COLUMN_NAMES.matcher(meta);
+
+        if (m.matches()) {
+            String colStr = m.group(1);
+            String[] cols = colStr.split(SEPERATOR_CHAR);
+
+            log.debug("Found " + cols.length + " columns.");
+
+            initColumns(cols);
+
+            return true;
+        }
+
+        return false;
+    }
+
+
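+    /** Creates one difference column per name, stripping surrounding quotes. */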
+    private void initColumns(String[] cols) {
+        columns = new ImportWaterlevelDifferenceColumn[cols.length];
+
+        for (int i = 0; i < cols.length; i++) {
+            String name = cols[i].replace("\"", "");
+
+            log.debug("Create new column '" + name + "'");
+            columns[i] = new ImportWaterlevelDifferenceColumn(name);
+        }
+    }
+
+
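+    /**
+     * Handles a data line: the first column is the station (km), the
+     * remaining columns carry one difference value per column.
+     */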
+    private void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols == null || cols.length < 2) {
+            log.warn("skip invalid waterlevel line: '" + line + "'");
+            return;
+        }
+
+        try {
+            Double station = nf.parse(cols[0]).doubleValue();
+
+            for (int i = 0; i < columns.length && i + 1 < cols.length; i++) {
+                String value = cols[i+1];
+
+                try {
+                    columns[i].addValue(new ImportWaterlevelDifferenceValue(
+                        station,
+                        nf.parse(value).doubleValue()
+                    ));
+                }
+                catch (ParseException pe) {
+                    log.warn("Error while parsing value: '" + value + "'");
+                }
+            }
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing station: '" + line + "'");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,160 @@
+package de.intevation.flys.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWaterlevel;
+import de.intevation.flys.importer.ImportWaterlevelQRange;
+import de.intevation.flys.importer.ImportWaterlevelValue;
+
+
+public class WaterlevelParser extends LineParser {
+
+    private static final Logger log = Logger.getLogger(WaterlevelParser.class);
+
+    private static final NumberFormat nf =
+        NumberFormat.getInstance(DEFAULT_LOCALE);
+
+    private static final Pattern META_Q_RANGE =
+        Pattern.compile("Abfluss\\s\\[(.*)\\];(.*)");
+
+    public static final Pattern META_UNIT =
+        Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
+    private List<ImportWaterlevel> waterlevels;
+
+    private ImportWaterlevel current;
+
+    private ImportWaterlevelQRange currentQ;
+
+    private String currentDescription;
+
+
+    public WaterlevelParser() {
+        waterlevels = new ArrayList<ImportWaterlevel>();
+    }
+
+
+    public List<ImportWaterlevel> getWaterlevels() {
+        return waterlevels;
+    }
+
+
+    @Override
+    public void parse(File file) throws IOException {
+        currentDescription = file.getName();
+
+        super.parse(file);
+    }
+
+
+    @Override
+    protected void reset() {
+        currentQ = null;
+        current  = new ImportWaterlevel(currentDescription);
+    }
+
+
+    @Override
+    protected void finish() {
+        if (current != null) {
+            if (currentQ != null) {
+                current.addValue(currentQ);
+            }
+
+            waterlevels.add(current);
+        }
+    }
+
+    @Override
+    protected void handleLine(String line) {
+        if (line.startsWith(START_META_CHAR)) {
+            handleMetaLine(stripMetaLine(line));
+            return;
+        }
+        else if (handleQRange(line)) {
+            return;
+        }
+        else {
+            handleDataLine(line);
+            return;
+        }
+    }
+
+
+    private void handleMetaLine(String meta) {
+        Matcher m = META_UNIT.matcher(meta);
+
+        if (m.matches()) {
+            String unit = m.group(1);
+            log.debug("Found unit: '" + unit + "'");
+
+            current.setUnit(new ImportUnit(unit));
+        }
+    }
+
+
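+    /**
+     * Handles a Q range meta line ("Abfluss [unit];value"): a previously
+     * open Q range is attached to the current waterlevel before a new
+     * one is created from the parsed value.
+     */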
+    private boolean handleQRange(String line) {
+        Matcher m = META_Q_RANGE.matcher(line);
+
+        if (m.matches()) {
+            String unitStr  = m.group(1);
+            String valueStr = m.group(2);
+
+            if (currentQ != null) {
+                if (current != null) {
+                    current.addValue(currentQ);
+                }
+                else {
+                    // this should never happen
+                    log.warn("Try to add Q range without waterlevel!");
+                }
+            }
+
+            try {
+                log.debug("Found new Q range: Q=" + valueStr);
+
+                currentQ = new ImportWaterlevelQRange(
+                    nf.parse(valueStr).doubleValue());
+
+                return true;
+            }
+            catch (ParseException pe) {
+                log.warn("Error while parsing Q range: '" + line + "'");
+            }
+        }
+
+        return false;
+    }
+
+
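+    /**
+     * Handles a data line: the first column is the station, the second
+     * the W value; the value is added to the currently open Q range.
+     */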
+    private void handleDataLine(String line) {
+        String[] cols = line.split(SEPERATOR_CHAR);
+
+        if (cols == null || cols.length < 2) {
+            log.warn("skip invalid waterlevel line: '" + line + "'");
+            return;
+        }
+
+        try {
+            Double station = nf.parse(cols[0]).doubleValue();
+            Double value   = nf.parse(cols[1]).doubleValue();
+
+            if (currentQ == null) {
+                log.warn("Data line without preceding Q range: '" + line + "'");
+                return;
+            }
+
+            currentQ.addValue(new ImportWaterlevelValue(station, value));
+        }
+        catch (ParseException pe) {
+            log.warn("Error while parsing number values: '" + line + "'");
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WstParser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,448 @@
+package de.intevation.flys.importer.parsers;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.io.InputStreamReader;
+import java.io.FileInputStream;
+
+import java.text.NumberFormat;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.utils.StringUtil;
+import de.intevation.flys.utils.DateGuesser;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import java.math.BigDecimal;
+
+import de.intevation.flys.importer.ImportWstQRange;
+import de.intevation.flys.importer.ImportWstColumn;
+import de.intevation.flys.importer.ImportTimeInterval;
+import de.intevation.flys.importer.ImportRange;
+import de.intevation.flys.importer.ImportUnit;
+import de.intevation.flys.importer.ImportWst;
+
+public class WstParser
+{
+    private static Logger log = Logger.getLogger(WstParser.class);
+
+    public static final String COLUMN_BEZ_TEXT   = "column-bez-text";
+    public static final String COLUMN_BEZ_BREITE = "column-bez-breite";
+    public static final String COLUMN_QUELLE     = "column-quelle";
+    public static final String COLUMN_DATUM      = "column-datum";
+
+    public static final BigDecimal UNDEFINED_ZERO =
+        new BigDecimal(0.0);
+    public static final BigDecimal MIN_RANGE =
+        new BigDecimal(-Double.MAX_VALUE);
+    public static final BigDecimal MAX_RANGE =
+        new BigDecimal(Double.MAX_VALUE);
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    public static final Pattern UNIT_COMMENT =
+        Pattern.compile("\\*\\s*[kK][mM]\\s+(.+)");
+
+    public static final Pattern UNIT =
+        Pattern.compile("[^\\[]*\\[([^]]+)\\].*");
+
+    public static final BigDecimal INTERVAL_GAP =
+        new BigDecimal(0.00001);
+
+    protected ImportWst wst;
+
+    protected ImportRange lastRange;
+
+    public WstParser() {
+    }
+
+    public ImportWst getWst() {
+        return wst;
+    }
+
+    public void setWst(ImportWst wst) {
+        this.wst = wst;
+    }
+
+    public ImportTimeInterval guessDate(String string) {
+        try {
+            return new ImportTimeInterval(
+                DateGuesser.guessDate(string));
+        }
+        catch (IllegalArgumentException iae) {
+            // date could not be guessed; fall through and return null
+        }
+        return null;
+    }
+
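+    /**
+     * Parses a WST file: the first non-empty line contains the number of
+     * columns, lines starting with '*' + 0x1f open a new Q range, "*!"
+     * lines carry column meta data (names, widths, sources, dates),
+     * other "*" lines may contain the unit comment, and the remaining
+     * lines are fixed-width data rows (station followed by one value
+     * per column).
+     */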
+    public void parse(File file) throws IOException {
+
+        log.info("Parsing WST file '" + file + "'");
+
+        wst = new ImportWst(file.getName());
+
+        LineNumberReader in = null;
+        try {
+            in =
+                new LineNumberReader(
+                new InputStreamReader(
+                new FileInputStream(file), ENCODING));
+
+            String input;
+            boolean first = true;
+            int columnCount = 0;
+
+            String [] lsBezeichner   = null;
+            String [] langBezeichner = null;
+            int    [] colNaWidths    = null;
+            String [] quellen        = null;
+            String [] daten          = null;
+
+            BigDecimal [] aktAbfluesse   = null;
+            BigDecimal [] firstAbfluesse = null;
+
+            BigDecimal minKm = MAX_RANGE;
+            BigDecimal maxKm = MIN_RANGE;
+
+            boolean columnHeaderChecked = false;
+
+            String einheit = "Wasserstand [NN + m]";
+
+            HashSet<BigDecimal> kms = new HashSet<BigDecimal>();
+
+            while ((input = in.readLine()) != null) {
+                String line = input;
+                if (first) { // fetch number of columns
+                    if ((line = line.trim()).length() == 0) {
+                        continue;
+                    }
+                    try {
+                        columnCount = Integer.parseInt(line);
+                        if (columnCount <= 0) {
+                            throw new NumberFormatException(
+                                "number columns <= 0");
+                        }
+                        log.debug("Number of columns: " + columnCount);
+                        wst.setNumberColumns(columnCount);
+                        lsBezeichner = new String[columnCount];
+                    }
+                    catch (NumberFormatException nfe) {
+                        log.warn(nfe);
+                        continue;
+                    }
+                    first = false;
+                    continue;
+                }
+
+                line = line.replace(',', '.');
+
+                if (line.startsWith("*\u001f")) {
+                    BigDecimal [] data =
+                        parseLineAsDouble(line, columnCount, false, true);
+
+                    if (aktAbfluesse != null) {
+                        addInterval(minKm, maxKm, aktAbfluesse);
+                        minKm = MAX_RANGE;
+                        maxKm = MIN_RANGE;
+                    }
+
+                    aktAbfluesse = new BigDecimal[columnCount];
+                    log.debug("new q range: " + columnCount);
+                    for (int i = 0; i < Math.min(columnCount, data.length); ++i) {
+                        if (data[i] != null) {
+                            log.debug("  column: " + data[i]);
+                            aktAbfluesse[i] = data[i];
+                        }
+                    }
+
+                    if (firstAbfluesse == null) {
+                        firstAbfluesse = (BigDecimal [])aktAbfluesse.clone();
+                    }
+                    continue;
+                }
+
+                if (line.startsWith("*!")) {
+                    String spezial = line.substring(2).trim();
+
+                    if (spezial.length() == 0) {
+                        continue;
+                    }
+
+                    if (spezial.startsWith(COLUMN_BEZ_TEXT)) {
+                        spezial = spezial.substring(COLUMN_BEZ_TEXT.length()).trim();
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        langBezeichner = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    else if (spezial.startsWith(COLUMN_BEZ_BREITE)) {
+                        spezial = spezial.substring(COLUMN_BEZ_BREITE.length()).trim();
+
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+
+                        String[] split = spezial.split("\\s+");
+
+                        colNaWidths = new int[split.length];
+                        for (int i=0; i < split.length; i++) {
+                            colNaWidths[i] = Integer.parseInt(split[i]);
+                        }
+                    }
+                    else if (spezial.startsWith(COLUMN_QUELLE)) {
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        quellen = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    else if (spezial.startsWith(COLUMN_DATUM)) {
+                        spezial = spezial.substring(COLUMN_DATUM.length()).trim();
+                        if (spezial.length() == 0) {
+                            continue;
+                        }
+                        daten = StringUtil.splitQuoted(spezial, '"');
+                    }
+                    continue;
+                }
+
+                if (line.length() < 11) {
+                    continue;
+                }
+
+                if (line.startsWith("*")) {
+                    Matcher m = UNIT_COMMENT.matcher(line);
+                    if (m.matches()) {
+                        log.debug("unit comment found");
+                        // XXX: This hack is needed because desktop
+                        // FLYS is broken when it comes to figuring out the unit
+                        String [] units = m.group(1).split("\\s{2,}");
+                        m = UNIT.matcher(units[0]);
+                        einheit = m.matches() ? m.group(1) : units[0];
+                        log.debug("unit: " + einheit);
+                    }
+                    continue;
+                }
+
+                if (firstAbfluesse != null) {
+                    if (!columnHeaderChecked) {
+                        int unknownCount = 0;
+                        HashSet<String> uniqueColumnNames =
+                            new HashSet<String>();
+                        for (int i = 0; i < lsBezeichner.length; ++i) {
+                            if (lsBezeichner[i] == null
+                            || lsBezeichner[i].length() == 0) {
+                                double q = firstAbfluesse[i].doubleValue();
+                                if (q < 0.001) {
+                                    lsBezeichner[i] =
+                                        "<unbekannt #" + unknownCount + ">";
+                                    ++unknownCount;
+                                }
+                                else {
+                                    lsBezeichner[i] = "Q="+format(q);
+                                }
+                            }
+                            String candidate = lsBezeichner[i];
+                            int collision = 1;
+                            while (!uniqueColumnNames.add(candidate)) {
+                                candidate = lsBezeichner[i] +
+                                    " (" + collision + ")";
+                                ++collision;
+                            }
+                            ImportWstColumn iwc = wst.getColumn(i);
+                            iwc.setName(candidate);
+                            iwc.setTimeInterval(guessDate(candidate));
+                        }
+                        columnHeaderChecked = true;
+                    }
+
+                    BigDecimal [] data =
+                        parseLineAsDouble(line, columnCount, true, false);
+
+                    BigDecimal kaem = data[0];
+
+                    if (!kms.add(kaem)) {
+                        log.warn(
+                            "km " + kaem +
+                            " (line " + in.getLineNumber() +
+                            ") found more than once. -> ignored");
+                        continue;
+                    }
+
+                    if (kaem.compareTo(minKm) < 0) {
+                        minKm = kaem;
+                    }
+                    if (kaem.compareTo(maxKm) > 0) {
+                        maxKm = kaem;
+                    }
+
+                    // extract values
+                    for (int i = 0; i < columnCount; ++i) {
+                        addValue(kaem, data[i+1], i);
+                    }
+
+                }
+                else { // firstAbfluesse == null
+                    if (langBezeichner != null) {
+                        lsBezeichner = StringUtil.fitArray(
+                            langBezeichner, lsBezeichner);
+                    }
+                    else if (colNaWidths != null) {
+                        for (int j = 0, i = 0, N = input.length();
+                             j < colNaWidths.length && i < N;
+                             i += colNaWidths[j++]
+                        ) {
+                            lsBezeichner[j] = input.substring(
+                                i, i+colNaWidths[j]).trim();
+                        }
+                    }
+                    else {
+                        // first column begins at position 8 in line
+                        for (int i = 8, col = 0; i < input.length(); i += 9) {
+                            if ((i + 9) > input.length()) {
+                                i = input.length() - 10;
+                            }
+                            // one column header is 9 chars wide
+                            lsBezeichner[col++] =
+                                input.substring(i, i + 9).trim();
+
+                            if (col == lsBezeichner.length) {
+                                break;
+                            }
+                        }
+                    }
+                }
+
+            }
+
+            wst.setUnit(new ImportUnit(einheit));
+
+            addInterval(minKm, maxKm, aktAbfluesse);
+        }
+        finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
+    protected void addValue(BigDecimal km, BigDecimal w, int index) {
+        if (w != null) {
+            ImportWstColumn column = wst.getColumn(index);
+            column.addColumnValue(km, w);
+        }
+    }
+
+    private static final NumberFormat NF = getNumberFormat();
+
+    private static final NumberFormat getNumberFormat() {
+        NumberFormat nf = NumberFormat.getInstance();
+        nf.setMinimumFractionDigits(2);
+        nf.setMaximumFractionDigits(2);
+        return nf;
+    }
+
+    protected static String format(double value) {
+        return NF.format(value);
+    }
+
+    protected void addInterval(
+        BigDecimal    from,
+        BigDecimal    to,
+        BigDecimal [] values
+    ) {
+        log.debug("addInterval: " + from + " " + to);
+
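+        // 'from' is still the MAX_RANGE sentinel (same instance) if no km was read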
+        if (values == null || from == MAX_RANGE) {
+            return;
+        }
+
+        if (to.compareTo(from) < 0) {
+            BigDecimal t = from; from = to; to = t;
+        }
+
+        ImportRange range = new ImportRange(from, to);
+
+        // little workaround to make the q ranges tightly fit.
+        // Leave a very small gap to ensure that the range queries
+        // still work.
+
+        if (lastRange != null) {
+            double d1 = Math.abs(
+                lastRange.getB().doubleValue() - range.getA().doubleValue());
+            double d2 = Math.abs(
+                range.getB().doubleValue() - lastRange.getA().doubleValue());
+
+            if (d1 < d2) {
+                lastRange.setB(range.getA().subtract(INTERVAL_GAP));
+            }
+            else {
+                range.setA(lastRange.getB().subtract(INTERVAL_GAP));
+            }
+        }
+
+        for (int i = 0; i < values.length; ++i) {
+            ImportWstColumn column = wst.getColumn(i);
+            ImportWstQRange wstQRange = new ImportWstQRange(range, values[i]);
+            column.addColumnQRange(wstQRange);
+        }
+
+        lastRange = range;
+    }
+
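+    /**
+     * Parses a fixed-width line into BigDecimals: an optional 8 character
+     * station field followed by 8 character value fields, each separated
+     * by one character; empty fields become null or zero.
+     */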
+    private static final BigDecimal [] parseLineAsDouble(
+        String  line,
+        int     count,
+        boolean bStation,
+        boolean bParseEmptyAsZero
+    ) {
+        String [] tokens = parseLine(line, count, bStation);
+
+        BigDecimal [] doubles = new BigDecimal[tokens.length];
+
+        for (int i = 0; i < doubles.length; ++i) {
+            String token = tokens[i].trim();
+            if (token.length() != 0) {
+                doubles[i] = new BigDecimal(token);
+            }
+            else if (bParseEmptyAsZero) {
+                doubles[i] = UNDEFINED_ZERO;
+            }
+        }
+
+        return doubles;
+    }
+
+    private static String [] parseLine(
+        String  line,
+        int     tokenCount,
+        boolean bParseStation
+    ) {
+        ArrayList<String> strings = new ArrayList<String>();
+
+        if (bParseStation) {
+            if (line.length() < 8) {
+                throw new IllegalArgumentException("station too short");
+            }
+            strings.add(line.substring(0, 8));
+        }
+
+        int pos = 9;
+        for (int i = 0; i < tokenCount; ++i) {
+            if (line.length() >= pos + 8) {
+                strings.add(line.substring(pos, pos + 8));
+            }
+            else {
+                strings.add("");
+            }
+            pos += 9;
+        }
+
+        return strings.toArray(new String[strings.size()]);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Annotation.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,111 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "annotations")
+public class Annotation
+implements   Serializable
+{
+    private Integer        id;
+    private Range          range;
+    private Attribute      attribute;
+    private Position       position;
+    private Edge           edge;
+    private AnnotationType type;
+
+    public Annotation() {
+    }
+
+    public Annotation(
+        Range          range,
+        Attribute      attribute,
+        Position       position,
+        Edge           edge,
+        AnnotationType type
+    ) {
+        this.range     = range;
+        this.attribute = attribute;
+        this.position  = position;
+        this.edge      = edge;
+        this.type      = type;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ANNOTATIONS_ID_SEQ",
+        sequenceName   = "ANNOTATIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ANNOTATIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "attribute_id")
+    public Attribute getAttribute() {
+        return attribute;
+    }
+
+    public void setAttribute(Attribute attribute) {
+        this.attribute = attribute;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "position_id")
+    public Position getPosition() {
+        return position;
+    }
+
+    public void setPosition(Position position) {
+        this.position = position;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "edge_id")
+    public Edge getEdge() {
+        return edge;
+    }
+
+    public void setEdge(Edge edge) {
+        this.edge = edge;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public AnnotationType getType() {
+        return type;
+    }
+
+    public void setType(AnnotationType type) {
+        this.type = type;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/AnnotationType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "annotation_types")
+public class AnnotationType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+
+    public AnnotationType() {
+    }
+
+    public AnnotationType(String name) {
+        this.name = name;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ANNOTATION_TYPES_ID_SEQ",
+        sequenceName   = "ANNOTATION_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ANNOTATION_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Attribute.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,55 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "attributes")
+public class Attribute
+implements   Serializable
+{
+    private Integer id;
+
+    private String  value;
+
+    public Attribute() {
+    }
+
+    public Attribute(String value) {
+        this.value = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ATTRIBUTES_ID_SEQ",
+        sequenceName   = "ATTRIBUTES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ATTRIBUTES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "value")
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightEpoch.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,212 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_epoch")
+public class BedHeightEpoch implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private TimeInterval timeInterval;
+
+    private ElevationModel curElevationModel;
+    private ElevationModel oldElevationModel;
+
+    private Range range;
+
+    private String evaluationBy;
+    private String description;
+
+    private List<BedHeightEpochValue> values;
+
+
+    public BedHeightEpoch() {
+    }
+
+
+    public BedHeightEpoch(
+        River          river,
+        TimeInterval   timeInterval,
+        Range          range,
+        ElevationModel curElevationModel,
+        ElevationModel oldElevationModel,
+        String         evaluationBy,
+        String         description
+    ) {
+        this.river             = river;
+        this.timeInterval      = timeInterval;
+        this.range             = range;
+        this.curElevationModel = curElevationModel;
+        this.oldElevationModel = oldElevationModel;
+        this.evaluationBy      = evaluationBy;
+        this.description       = description;
+        this.values            = new ArrayList<BedHeightEpochValue>();
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_EPOCH_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_EPOCH_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_EPOCH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cur_elevation_model_id")
+    public ElevationModel getCurElevationModel() {
+        return curElevationModel;
+    }
+
+    public void setCurElevationModel(ElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "old_elevation_model_id")
+    public ElevationModel getOldElevationModel() {
+        return oldElevationModel;
+    }
+
+    public void setOldElevationModel(ElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluationBy() {
+        return evaluationBy;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "bed_height_epoch_id")
+    public List<BedHeightEpochValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<BedHeightEpochValue> values) {
+        this.values = values;
+    }
+
+
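+    /**
+     * Returns all BedHeightEpochs of the given river that contain at
+     * least one value whose station lies within [kmLo, kmHi]; the km
+     * filtering is currently done in memory (see TODO below).
+     */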
+    public static List<BedHeightEpoch> getBedHeightEpochs(
+        River  river,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpoch where river=:river");
+
+        query.setParameter("river", river);
+
+        // TODO Do km range filtering in SQL statement
+
+        List<BedHeightEpoch> epochs = query.list();
+        List<BedHeightEpoch> good   = new ArrayList<BedHeightEpoch>();
+
+        for (BedHeightEpoch e: epochs) {
+            for (BedHeightEpochValue value: e.getValues()) {
+                double station = value.getStation().doubleValue();
+
+                if (station >= kmLo && station <= kmHi) {
+                    // add each epoch only once
+                    good.add(e);
+                    break;
+                }
+            }
+        }
+
+        return good;
+    }
+
+
+    public static BedHeightEpoch getBedHeightEpochById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpoch where id=:id");
+
+        query.setParameter("id", id);
+
+        List<BedHeightEpoch> epochs = query.list();
+
+        return epochs.isEmpty() ? null : epochs.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightEpochValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_epoch_values")
+public class BedHeightEpochValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(BedHeightEpochValue.class);
+
+    private Integer id;
+
+    private BedHeightEpoch bedHeight;
+
+    private BigDecimal station;
+    private BigDecimal height;
+
+
+    public BedHeightEpochValue() {
+    }
+
+    public BedHeightEpochValue(
+        BedHeightEpoch bedHeight,
+        BigDecimal station,
+        BigDecimal height
+    ) {
+        this.bedHeight = bedHeight;
+        this.station   = station;
+        this.height    = height;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_EPOCH_VALUE_ID_SEQ",
+        sequenceName   = "BED_EPOCH_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_EPOCH_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "bed_height_epoch_id" )
+    public BedHeightEpoch getBedHeight() {
+        return bedHeight;
+    }
+
+    public void setBedHeight(BedHeightEpoch bedHeight) {
+        this.bedHeight = bedHeight;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "height")
+    public BigDecimal getHeight() {
+        return height;
+    }
+
+    public void setHeight(BigDecimal height) {
+        this.height = height;
+    }
+
+
+    public static List<BedHeightEpochValue> getBedHeightEpochValues(
+        BedHeightEpoch epoch,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightEpochValue where bedHeight=:epoch " +
+            "   and station >= :kmLo and station <= :kmHi");
+
+        query.setParameter("epoch", epoch);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightSingle.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,273 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_single")
+public class BedHeightSingle implements Serializable {
+
+    private Integer id;
+    private Integer year;
+    private Integer soundingWidth;
+
+    private String evaluationBy;
+    private String description;
+
+    private River river;
+
+    private BedHeightType  type;
+
+    private LocationSystem locationSystem;
+
+    private ElevationModel curElevationModel;
+
+    private ElevationModel oldElevationModel;
+
+    private Range range;
+
+    private List<BedHeightSingleValue> values;
+
+
+    public BedHeightSingle() {
+    }
+
+
+    public BedHeightSingle(
+        River          river,
+        Integer        year,
+        Integer        soundingWidth,
+        BedHeightType  type,
+        LocationSystem locationSystem,
+        ElevationModel curElevationModel,
+        Range          range
+    ) {
+        this(
+            river,
+            year,
+            soundingWidth,
+            type,
+            locationSystem,
+            curElevationModel,
+            null,
+            range,
+            null,
+            null);
+    }
+
+
+    public BedHeightSingle(
+        River          river,
+        Integer        year,
+        Integer        soundingWidth,
+        BedHeightType  type,
+        LocationSystem locationSystem,
+        ElevationModel curElevationModel,
+        ElevationModel oldElevationModel,
+        Range          range,
+        String         evaluationBy,
+        String         description
+    ) {
+        this.river             = river;
+        this.year              = year;
+        this.soundingWidth     = soundingWidth;
+        this.type              = type;
+        this.locationSystem    = locationSystem;
+        this.curElevationModel = curElevationModel;
+        this.oldElevationModel = oldElevationModel;
+        this.range             = range;
+        this.evaluationBy      = evaluationBy;
+        this.description       = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_SINGLE_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_SINGLE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_SINGLE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return year;
+    }
+
+    public void setYear(Integer year) {
+        this.year = year;
+    }
+
+    @Column(name = "sounding_width")
+    public Integer getSoundingWidth() {
+        return soundingWidth;
+    }
+
+    public void setSoundingWidth(Integer soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public BedHeightType getType() {
+        return type;
+    }
+
+    public void setType(BedHeightType type) {
+        this.type = type;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "location_system_id")
+    public LocationSystem getLocationSystem() {
+        return locationSystem;
+    }
+
+    public void setLocationSystem(LocationSystem locationSystem) {
+        this.locationSystem = locationSystem;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cur_elevation_model_id")
+    public ElevationModel getCurElevationModel() {
+        return curElevationModel;
+    }
+
+    public void setCurElevationModel(ElevationModel curElevationModel) {
+        this.curElevationModel = curElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "old_elevation_model_id")
+    public ElevationModel getOldElevationModel() {
+        return oldElevationModel;
+    }
+
+    public void setOldElevationModel(ElevationModel oldElevationModel) {
+        this.oldElevationModel = oldElevationModel;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id")
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluationBy() {
+        return evaluationBy;
+    }
+
+    public void setEvaluationBy(String evaluationBy) {
+        this.evaluationBy = evaluationBy;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "bed_height_single_id")
+    public List<BedHeightSingleValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<BedHeightSingleValue> values) {
+        this.values = values;
+    }
+
+
+    public static List<BedHeightSingle> getBedHeightSingles(
+        River  river,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingle where river=:river");
+
+        query.setParameter("river", river);
+
+        // TODO Do km range filtering in SQL statement
+
+        List<BedHeightSingle> singles = query.list();
+        List<BedHeightSingle> good    = new ArrayList<BedHeightSingle>();
+
+        for (BedHeightSingle s: singles) {
+            for (BedHeightSingleValue value: s.getValues()) {
+                double station = value.getStation().doubleValue();
+
+                if (station >= kmLo && station <= kmHi) {
+                    good.add(s);
+                    break;
+                }
+            }
+        }
+
+        return good;
+    }
+
+
+    public static BedHeightSingle getBedHeightSingleById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingle where id=:id");
+
+        query.setParameter("id", id);
+
+        List<BedHeightSingle> singles = query.list();
+
+        return singles.isEmpty() ? null : singles.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
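+// Illustrative usage sketch for the static helpers above. It assumes the
+// caller has already bound an open Hibernate session via SessionHolder and
+// holds a River instance; the id 42 and the km bounds 10.0/25.0 are
+// placeholder values.
+//
+//     List<BedHeightSingle> singles =
+//         BedHeightSingle.getBedHeightSingles(river, 10.0, 25.0);
+//
+//     BedHeightSingle single = BedHeightSingle.getBedHeightSingleById(42);
+//     if (single != null) {
+//         List<BedHeightSingleValue> values = single.getValues();
+//     }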
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightSingleValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,167 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "bed_height_single_values")
+public class BedHeightSingleValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(BedHeightSingleValue.class);
+
+    private Integer id;
+
+    private BedHeightSingle bedHeight;
+
+    private BigDecimal station;
+    private BigDecimal height;
+    private BigDecimal uncertainty;
+    private BigDecimal dataGap;
+    private BigDecimal soundingWidth;
+    private BigDecimal width;
+
+
+    public BedHeightSingleValue() {
+    }
+
+    public BedHeightSingleValue(
+        BedHeightSingle bedHeight,
+        BigDecimal station,
+        BigDecimal height,
+        BigDecimal uncertainty,
+        BigDecimal dataGap,
+        BigDecimal soundingWidth,
+        BigDecimal width
+    ) {
+        this.bedHeight     = bedHeight;
+        this.station       = station;
+        this.height        = height;
+        this.uncertainty   = uncertainty;
+        this.dataGap       = dataGap;
+        this.soundingWidth = soundingWidth;
+        this.width         = width;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_SINGLE_VALUE_ID_SEQ",
+        sequenceName   = "BED_SINGLE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_SINGLE_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "bed_height_single_id" )
+    public BedHeightSingle getBedHeight() {
+        return bedHeight;
+    }
+
+    public void setBedHeight(BedHeightSingle bedHeight) {
+        this.bedHeight = bedHeight;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "height")
+    public BigDecimal getHeight() {
+        return height;
+    }
+
+    public void setHeight(BigDecimal height) {
+        this.height = height;
+    }
+
+    @Column(name="uncertainty")
+    public BigDecimal getUncertainty() {
+        return uncertainty;
+    }
+
+    public void setUncertainty(BigDecimal uncertainty) {
+        this.uncertainty = uncertainty;
+    }
+
+    @Column(name="data_gap")
+    public BigDecimal getDataGap() {
+        return dataGap;
+    }
+
+    public void setDataGap(BigDecimal dataGap) {
+        this.dataGap = dataGap;
+    }
+
+    @Column(name="sounding_width")
+    public BigDecimal getSoundingWidth() {
+        return soundingWidth;
+    }
+
+    public void setSoundingWidth(BigDecimal soundingWidth) {
+        this.soundingWidth = soundingWidth;
+    }
+
+    @Column(name="width")
+    public BigDecimal getWidth() {
+        return width;
+    }
+
+    public void setWidth(BigDecimal width) {
+        this.width = width;
+    }
+
+
+    public static List<BedHeightSingleValue> getBedHeightSingleValues(
+        BedHeightSingle single,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from BedHeightSingleValue where bedHeight=:single " +
+            "   and station >= :kmLo and station <= :kmHi");
+
+        query.setParameter("single", single);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
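+// Illustrative sketch: fetching the values of one sounding restricted to a
+// km window. Assumes a session bound via SessionHolder; 'single' is a
+// previously loaded BedHeightSingle and the bounds are placeholder values.
+//
+//     List<BedHeightSingleValue> values =
+//         BedHeightSingleValue.getBedHeightSingleValues(single, 10.0, 25.0);
+//     for (BedHeightSingleValue v: values) {
+//         double station = v.getStation().doubleValue();
+//         double height  = v.getHeight().doubleValue();
+//     }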
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/BedHeightType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,91 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "bed_height_type")
+public class BedHeightType
+implements   Serializable
+{
+    private static Logger log = Logger.getLogger(BedHeightType.class);
+
+    private Integer id;
+    private String  name;
+    private String  description;
+
+
+    public BedHeightType() {
+    }
+
+    public BedHeightType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_BED_HEIGHT_TYPE_ID_SEQ",
+        sequenceName   = "BED_HEIGHT_TYPE_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_BED_HEIGHT_TYPE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public static String getBedHeightName(String description) {
+        if ("Flächenpeilung".equals(description)) {
+            return "FP";
+        }
+        else if ("Querprofile".equals(description)) {
+            return "QP";
+        }
+        else if ("TIN".equals(description)) {
+            return "TIN";
+        }
+        else if ("Flächen- u. Querprofilpeilungen".equals(description)) {
+            return "FP-QP";
+        }
+        else {
+            log.warn("Unknown bed height type: " + description);
+            return null;
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
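+// Illustrative mapping sketch for getBedHeightName(): the German importer
+// descriptions are mapped to short type codes; unknown descriptions yield
+// null and a warning.
+//
+//     BedHeightType.getBedHeightName("Flächenpeilung");  // "FP"
+//     BedHeightType.getBedHeightName("Querprofile");     // "QP"
+//     BedHeightType.getBedHeightName("unbekannt");       // null + warning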
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Building.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "buildings")
+public class Building
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private String     name;
+    private LineString geom;
+
+    public Building() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Building> getBuildings(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Building where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Catchment.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,107 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "catchment")
+public class Catchment
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal area;
+    private String     name;
+    private River      river;
+    private Geometry   geom;
+
+    public Catchment() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "area")
+    public BigDecimal getArea() {
+        return area;
+    }
+
+
+    public void setArea(BigDecimal area) {
+        this.area = area;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Catchment> getCatchments(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Catchment where river.id =:river_id AND name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSection.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,198 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import java.awt.geom.Point2D;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+import java.math.MathContext;
+import java.math.BigDecimal;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+import org.hibernate.Query;
+
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+@Entity
+@Table(name = "cross_sections")
+public class CrossSection
+implements   Serializable
+{
+    public static final MathContext PRECISION = new MathContext(6);
+
+    public static final String SQL_FAST_CROSS_SECTION_LINES =
+        "SELECT km, x, y, csl.id AS csl_id " +
+        "FROM cross_section_lines csl JOIN cross_section_points csp " +
+        "ON csp.cross_section_line_id = csl.id " +
+        "WHERE csl.cross_section_id = :cs_id AND " +
+        "km between :from_km AND :to_km " + 
+        "ORDER BY csl.id, csp.col_pos";
+
+    private Integer                id;
+    private River                  river;
+    private TimeInterval           timeInterval;
+    private String                 description;
+    private List<CrossSectionLine> lines;
+
+    public CrossSection() {
+    }
+
+    public CrossSection(
+        River        river,
+        TimeInterval timeInterval,
+        String       description
+    ) {
+        this.river        = river;
+        this.timeInterval = timeInterval;
+        this.description  = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTIONS_ID_SEQ",
+        sequenceName   = "CROSS_SECTIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @OrderBy("km")
+    @JoinColumn(name="cross_section_id")
+    public List<CrossSectionLine> getLines() {
+        return lines;
+    }
+
+    public void setLines(List<CrossSectionLine> lines) {
+        this.lines = lines;
+    }
+
+    public List<CrossSectionLine> getLines(double startKm, double endKm) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from CrossSectionLine where crossSection=:crossSection " +
+            "and km between :startKm and :endKm order by km");
+        query.setParameter("crossSection", this);
+        query.setParameter("startKm", new BigDecimal(startKm, PRECISION));
+        query.setParameter("endKm", new BigDecimal(endKm, PRECISION));
+
+        return query.list();
+    }
+
+    public List<FastCrossSectionLine> getFastLines(
+        double startKm,
+        double endKm
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        SQLQuery sqlQuery = session.createSQLQuery(SQL_FAST_CROSS_SECTION_LINES)
+            .addScalar("km",     StandardBasicTypes.DOUBLE)
+            .addScalar("x",      StandardBasicTypes.DOUBLE)
+            .addScalar("y",      StandardBasicTypes.DOUBLE)
+            .addScalar("csl_id", StandardBasicTypes.INTEGER);
+
+        sqlQuery
+            .setInteger("cs_id",  getId())
+            .setDouble("from_km", startKm)
+            .setDouble("to_km",   endKm);
+
+        List<Object []> results = sqlQuery.list();
+
+        ArrayList<Point2D> points = new ArrayList<Point2D>(500);
+        ArrayList<FastCrossSectionLine> lines =
+            new ArrayList<FastCrossSectionLine>();
+
+        Integer lastId = null;
+        Double  lastKm = null;
+
+        for (Object [] result: results) {
+            Double  km = (Double)result[0];
+            Double  x  = (Double)result[1];
+            Double  y  = (Double)result[2];
+            Integer id = (Integer)result[3];
+
+            if (lastId != null && !lastId.equals(id)) {
+                points.trimToSize();
+                FastCrossSectionLine line =
+                    new FastCrossSectionLine(lastKm, points);
+                lines.add(line);
+                points = new ArrayList<Point2D>(500);
+            }
+
+            Point2D p = new Point2D.Double(x, y);
+
+            if (CrossSectionLine.isValid(p)) {
+                points.add(p);
+            }
+
+            lastKm = km;
+            lastId = id;
+        }
+
+        if (lastId != null) {
+            points.trimToSize();
+            FastCrossSectionLine line =
+                new FastCrossSectionLine(lastKm, points);
+            lines.add(line);
+        }
+
+        lines.trimToSize();
+
+        return lines;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
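+// Illustrative sketch contrasting the two line accessors above. Assumes a
+// session bound via SessionHolder and a loaded CrossSection; the km bounds
+// are placeholders. getFastLines() bypasses the CrossSectionLine entities
+// and maps the native SQL result directly to lightweight
+// FastCrossSectionLine objects.
+//
+//     List<CrossSectionLine> mapped = crossSection.getLines(10.0, 12.5);
+//
+//     List<FastCrossSectionLine> fast = crossSection.getFastLines(10.0, 12.5);
+//     for (FastCrossSectionLine line: fast) {
+//         double [][] profile = line.fetchCrossSectionProfile(); // {xs, ys}
+//     }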
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionLine.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,174 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collections;
+import java.util.Comparator;
+
+import java.awt.geom.Point2D;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.JoinColumn;
+
+import org.apache.log4j.Logger;
+
+@Entity
+@Table(name = "cross_section_lines")
+public class CrossSectionLine
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(CrossSectionLine.class);
+
+    public static final double EPSILON   = 1e-4;
+
+    public static final double TOO_SMALL = 0.2;
+    public static final double TOO_BIG   = 500;
+
+    private Integer                 id;
+    private Double                  km;
+    private CrossSectionLine        crossSection;
+
+    private List<CrossSectionPoint> points;
+
+    public static final Comparator<CrossSectionPoint> COL_POS_CMP =
+        new Comparator<CrossSectionPoint>() {
+            @Override
+            public int compare(CrossSectionPoint a, CrossSectionPoint b) {
+                double xa = a.getX().doubleValue();
+                double xb = b.getX().doubleValue();
+                double d = xa - xb;
+                if (d < -EPSILON) return -1;
+                if (d > +EPSILON) return +1;
+                int diff = a.getColPos() - b.getColPos();
+                return diff < 0 ? -1 : diff > 0 ? +1 : 0;
+            }
+        };
+
+
+    public static final boolean isValid(double x) {
+        x = Math.abs(x);
+        return x > TOO_SMALL && x < TOO_BIG;
+    }
+
+    public static final boolean isValid(Point2D p) {
+        return isValid(p.getX()) && isValid(p.getY());
+    }
+
+
+    public CrossSectionLine() {
+    }
+
+    public CrossSectionLine(CrossSection crossSection, Double km) {
+        this.crossSection = crossSection;
+        this.km           = km;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTION_LINES_ID_SEQ",
+        sequenceName   = "CROSS_SECTION_LINES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTION_LINES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "km")
+    public Double getKm() {
+        return km;
+    }
+
+    public void setKm(Double km) {
+        this.km = km;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cross_section_id")
+    public CrossSection getCrossSection() {
+        return crossSection;
+    }
+
+    public void setCrossSection(CrossSection crossSection) {
+        this.crossSection = crossSection;
+    }
+
+    @OneToMany
+    @JoinColumn(name="cross_section_line_id")
+    public List<CrossSectionPoint> getPoints() {
+        return points;
+    }
+
+    public void setPoints(List<CrossSectionPoint> points) {
+        this.points = points;
+    }
+
+
+    public List<Point2D> fetchCrossSectionLinesPoints() {
+
+        List<CrossSectionPoint> linePoints = 
+            new ArrayList<CrossSectionPoint>(getPoints());
+
+        Collections.sort(linePoints, COL_POS_CMP);
+
+        List<Point2D> points = new ArrayList<Point2D>(linePoints.size());
+        for (CrossSectionPoint p: linePoints) {
+            double x = p.getX().doubleValue();
+            double y = p.getY().doubleValue();
+            if (isValid(x) && isValid(y)) {
+                points.add(new Point2D.Double(x, y));
+            }
+        }
+
+        return points;
+    }
+
+    public double [][] fetchCrossSectionProfile() {
+        return fetchCrossSectionProfile(fetchCrossSectionLinesPoints());
+    }
+
+    public static double [][] fetchCrossSectionProfile(List<Point2D> points) {
+
+        int P = points.size();
+
+        double [] xs = new double[P];
+        double [] ys = new double[P];
+
+        if (P > 0) {
+            xs[0] = points.get(0).getX();
+            ys[0] = points.get(0).getY();
+
+            for (int i = 1; i < P; i++) {
+                Point2D p = points.get(i);
+                double x = p.getX();
+                double y = p.getY();
+
+                if (x <= xs[i-1]) {
+                    x = xs[i-1] + EPSILON;
+                }
+
+                xs[i] = x;
+                ys[i] = y; 
+            }
+        }
+
+        return new double [][] { xs, ys };
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
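+// Illustrative sketch of fetchCrossSectionProfile(): points are sorted by x
+// (ties broken by col_pos), invalid coordinates are dropped, and any
+// non-increasing x is nudged by EPSILON so the profile stays monotonic.
+//
+//     double [][] profile = crossSectionLine.fetchCrossSectionProfile();
+//     double [] xs = profile[0];   // strictly increasing station offsets
+//     double [] ys = profile[1];   // corresponding heights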
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionPoint.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,95 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "cross_section_points")
+public class CrossSectionPoint
+implements   Serializable
+{
+    private Integer          id;
+    private CrossSectionLine crossSectionLine;
+    private Integer          colPos;
+    private Double           x;
+    private Double           y;
+
+    public CrossSectionPoint() {
+    }
+
+    public CrossSectionPoint(
+        CrossSectionLine crossSectionLine,
+        Integer          colPos,
+        Double           x,
+        Double           y
+    ) {
+        this.crossSectionLine = crossSectionLine;
+        this.colPos           = colPos;
+        this.x                = x;
+        this.y                = y;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_CROSS_SECTION_POINTS_ID_SEQ",
+        sequenceName   = "CROSS_SECTION_POINTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_CROSS_SECTION_POINTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "cross_section_line_id")
+    public CrossSectionLine getCrossSectionLine() {
+        return crossSectionLine;
+    }
+
+    public void setCrossSectionLine(CrossSectionLine crossSectionLine) {
+        this.crossSectionLine = crossSectionLine;
+    }
+
+    @Column(name = "col_pos")
+    public Integer getColPos() {
+        return colPos;
+    }
+
+    public void setColPos(Integer colPos) {
+        this.colPos = colPos;
+    }
+
+    @Column(name = "x")
+    public Double getX() {
+        return x;
+    }
+
+    public void setX(Double x) {
+        this.x = x;
+    }
+
+    @Column(name = "y")
+    public Double getY() {
+        return y;
+    }
+
+    public void setY(Double y) {
+        this.y = y;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,135 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "cross_section_tracks")
+public class CrossSectionTrack
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private LineString geom;
+    private BigDecimal km;
+    private BigDecimal z;
+
+    public CrossSectionTrack() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "z")
+    public BigDecimal getZ() {
+        return z;
+    }
+
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+
+
+    public static List<CrossSectionTrack> getCrossSectionTrack(
+        String river)
+    {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river.name =:river");
+        query.setParameter("river", river);
+
+        return query.list();
+    }
+
+
+    /**
+     * Returns the nearest CrossSectionTrack of <i>river</i> to a given
+     * <i>km</i>.
+     *
+     * @param river The name of a river.
+     * @param km The kilometer value.
+     *
+     * @return the nearest CrossSectionTrack to <i>km</i> of river <i>river</i>.
+     */
+    public static CrossSectionTrack getCrossSectionTrack(
+        String river,
+        double km
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from CrossSectionTrack where river.name =:river " +
+            "order by abs( km - :mykm)");
+        query.setParameter("river", river);
+        query.setParameter("mykm", new BigDecimal(km));
+
+        List<CrossSectionTrack> cst = query.list();
+
+        return cst != null && !cst.isEmpty() ? cst.get(0) : null;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
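+// Illustrative sketch for the nearest-track lookup above. Assumes a bound
+// session; the river name and km are placeholder values. The query orders
+// by abs(km - :mykm), so the first row is the closest track.
+//
+//     CrossSectionTrack nearest =
+//         CrossSectionTrack.getCrossSectionTrack("Mosel", 85.3);
+//     if (nearest != null) {
+//         LineString geom = nearest.getGeom();
+//     }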
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DGM.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,115 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+import java.math.BigDecimal;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "dem")
+public class DGM implements Serializable {
+
+    private Integer    id;
+
+    private River      river;
+
+    private BigDecimal lower;
+    private BigDecimal upper;
+
+    private String     path;
+
+
+    public DGM() {
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setLower(BigDecimal lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "lower")
+    public BigDecimal getLower() {
+        return lower;
+    }
+
+    public void setUpper(BigDecimal upper) {
+        this.upper = upper;
+    }
+
+    @Column(name = "upper")
+    public BigDecimal getUpper() {
+        return upper;
+    }
+
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    @Column(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+
+    public static DGM getDGM(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DGM where Id =:id");
+        query.setParameter("id", id);
+
+        List<DGM> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+
+
+    public static DGM getDGM(String river, double lower, double upper) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DGM where river.name =:river and " +
+            "lower <=:lower and upper >=:lower and " +
+            "lower <=:upper and upper >=:upper");
+        query.setParameter("river", river);
+        query.setParameter("lower", new BigDecimal(lower));
+        query.setParameter("upper", new BigDecimal(upper));
+
+        List<DGM> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
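+// Illustrative sketch for getDGM(): it returns the DEM whose [lower, upper]
+// range completely covers the requested km interval, or null if none does.
+// Assumes a bound session; river name and bounds are placeholder values.
+//
+//     DGM dem = DGM.getDGM("Mosel", 80.0, 90.0);
+//     String rasterPath = dem != null ? dem.getPath() : null;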
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Depth.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "depths")
+public class Depth implements Serializable {
+
+    private Integer id;
+
+    private BigDecimal lower;
+    private BigDecimal upper;
+
+    private Unit unit;
+
+
+    public Depth() {
+    }
+
+
+    public Depth(BigDecimal lower, BigDecimal upper, Unit unit) {
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DEPTHS_ID_SEQ",
+        sequenceName   = "DEPTHS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DEPTHS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "lower")
+    public BigDecimal getLower() {
+        return lower;
+    }
+
+    public void setLower(BigDecimal lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "upper")
+    public BigDecimal getUpper() {
+        return upper;
+    }
+
+    public void setUpper(BigDecimal upper) {
+        this.upper = upper;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeTable.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,120 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+import java.util.List;
+
+@Entity
+@Table(name = "discharge_tables")
+public class DischargeTable
+implements   Serializable
+{
+    private Integer      id;
+    private Gauge        gauge;
+    private String       description;
+    private Integer      kind;
+    private TimeInterval timeInterval;
+
+    private List<DischargeTableValue> dischargeTableValues;
+
+    public DischargeTable() {
+        kind = 0;
+    }
+
+    public DischargeTable(Gauge gauge) {
+        this(gauge, null, 0, null);
+    }
+
+    public DischargeTable(
+        Gauge        gauge,
+        String       description,
+        Integer      kind,
+        TimeInterval timeInterval
+    ) {
+        this.gauge        = gauge;
+        this.description  = description;
+        this.kind         = kind;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_TABLES_ID_SEQ",
+        sequenceName   = "DISCHARGE_TABLES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_TABLES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "gauge_id" )
+    public Gauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id" )
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "table_id")
+    @OrderBy("q")
+    public List<DischargeTableValue> getDischargeTableValues() {
+        return dischargeTableValues;
+    }
+
+    public void setDischargeTableValues(
+        List<DischargeTableValue> dischargeTableValues
+    ) {
+        this.dischargeTableValues = dischargeTableValues;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeTableValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,84 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "discharge_table_values")
+public class DischargeTableValue
+implements   Serializable
+{
+    private Integer        id;
+    private DischargeTable dischargeTable;
+    private BigDecimal     q;
+    private BigDecimal     w;
+
+    public DischargeTableValue() {
+    }
+
+    public DischargeTableValue(
+        DischargeTable dischargeTable, BigDecimal q, BigDecimal w)
+    {
+        this.dischargeTable = dischargeTable;
+        this.q              = q;
+        this.w              = w;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_TABLE_VALUES_ID_SEQ",
+        sequenceName   = "DISCHARGE_TABLE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_TABLE_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "table_id" )
+    public DischargeTable getDischargeTable() {
+        return dischargeTable;
+    }
+
+    public void setDischargeTable(DischargeTable dischargeTable) {
+        this.dischargeTable = dischargeTable;
+    }
+
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "w")
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/DischargeZone.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,152 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "discharge_zone")
+public class DischargeZone
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(DischargeZone.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private String gaugeName;
+
+    private BigDecimal value;
+
+    private String lowerDischarge;
+    private String upperDischarge;
+
+
+    public DischargeZone() {
+    }
+
+
+    public DischargeZone(
+        River       river,
+        String      gaugeName,
+        BigDecimal  value,
+        String      lowerDischarge,
+        String      upperDischarge
+    ) {
+        this.river          = river;
+        this.gaugeName      = gaugeName;
+        this.value          = value;
+        this.lowerDischarge = lowerDischarge;
+        this.upperDischarge = upperDischarge;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_DISCHARGE_ZONE_ID_SEQ",
+        sequenceName   = "DISCHARGE_ZONE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_DISCHARGE_ZONE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "value")
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
+    @Column(name = "gauge_name")
+    public String getGaugeName() {
+        return gaugeName;
+    }
+
+    public void setGaugeName(String gaugeName) {
+        this.gaugeName = gaugeName;
+    }
+
+    @Column(name = "lower_discharge")
+    public String getLowerDischarge() {
+        return lowerDischarge;
+    }
+
+    public void setLowerDischarge(String lowerDischarge) {
+        this.lowerDischarge = lowerDischarge;
+    }
+
+    @Column(name = "upper_discharge")
+    public String getUpperDischarge() {
+        return upperDischarge;
+    }
+
+    public void setUpperDischarge(String upperDischarge) {
+        this.upperDischarge = upperDischarge;
+    }
+
+
+    public static List<DischargeZone> getDischargeZones(River river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DischargeZone where river=:river");
+
+        query.setParameter("river", river);
+
+        return query.list();
+    }
+
+
+    public static DischargeZone getDischargeZoneById(int id) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from DischargeZone where id=:id");
+
+        query.setParameter("id", id);
+
+        List<DischargeZone> zones = query.list();
+
+        return zones.isEmpty() ? null : zones.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
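+// Illustrative sketch for the DischargeZone helpers. Assumes a bound session
+// and a loaded River instance; the id 7 is a placeholder value.
+//
+//     List<DischargeZone> zones = DischargeZone.getDischargeZones(river);
+//
+//     DischargeZone zone = DischargeZone.getDischargeZoneById(7);
+//     if (zone != null) {
+//         String range = zone.getLowerDischarge()
+//             + " - " + zone.getUpperDischarge();
+//     }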
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Edge.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,67 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "edges")
+public class Edge
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal top;
+    private BigDecimal bottom;
+
+    public Edge() {
+    }
+
+    public Edge(BigDecimal top, BigDecimal bottom) {
+        this.top    = top;
+        this.bottom = bottom;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_EDGES_ID_SEQ",
+        sequenceName   = "EDGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_EDGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "top")
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    @Column(name = "bottom")
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/ElevationModel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,78 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "elevation_model")
+public class ElevationModel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(ElevationModel.class);
+
+    protected Integer id;
+
+    protected String name;
+
+    protected Unit unit;
+
+
+    public ElevationModel() {
+    }
+
+
+    public ElevationModel(String name, Unit unit) {
+        this.name = name;
+        this.unit = unit;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_ELEVATION_MODE_ID_SEQ",
+        sequenceName   = "ELEVATION_MODEL_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_ELEVATION_MODE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FastAnnotations.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,313 @@
+package de.intevation.flys.model;
+
+import java.util.Comparator;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+import java.io.Serializable;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+public class FastAnnotations
+implements   Serializable
+{
+    public static final String SQL_BY_RIVER_NAME =
+        "SELECT r.a AS a, r.b AS b, p.value AS position, " +
+                "at.value AS attribute, ant.name AS name, " +
+                "e.top AS top, e.bottom AS bottom " +
+        "FROM annotations an " +
+            "JOIN ranges r " +
+                "ON an.range_id = r.id " +
+            "JOIN attributes at " +
+                "ON an.attribute_id = at.id " +
+            "JOIN positions p " +
+                "ON an.position_id = p.id " +
+            "JOIN rivers riv " +
+                "ON r.river_id = riv.id " +
+            "LEFT JOIN annotation_types ant " +
+                "ON an.type_id = ant.id " +
+            "LEFT JOIN edges e " +
+                "ON an.edge_id = e.id " +
+            "WHERE riv.name = :river_name " +
+                "ORDER BY r.a";
+
+    public static final String SQL_BY_RIVER_ID =
+        "SELECT r.a AS a, r.b AS b, p.value AS position, " +
+                "at.value AS attribute, ant.name AS name, " +
+                "e.top AS top, e.bottom AS bottom " +
+        "FROM annotations an " +
+            "JOIN ranges r " +
+                "ON an.range_id = r.id " +
+            "JOIN attributes at " +
+                "ON an.attribute_id = at.id " +
+            "JOIN positions p " +
+                "ON an.position_id = p.id " +
+            "LEFT JOIN annotation_types ant " +
+                "ON an.type_id = ant.id " +
+            "LEFT JOIN edges e " +
+                "ON an.edge_id = e.id " +
+            "WHERE r.id = :river_id " +
+                "ORDER BY r.a";
+
+    public static final double EPSILON = 1e-5;
+
+    public static final Comparator<Annotation> KM_CMP =
+        new Comparator<Annotation>() {
+            @Override
+            public int compare(Annotation a, Annotation b) {
+                double diff = a.a - b.a;
+                if (diff < -EPSILON) return -1;
+                if (diff > +EPSILON) return +1;
+                return 0;
+            }
+        };
+
+    public static final class Annotation
+    implements                Serializable
+    {
+        private double a;
+        private double b;
+        private String position;
+        private String attribute;
+        private String name;
+        private double top;
+        private double bottom;
+
+        public Annotation() {
+        }
+
+        public Annotation(double a) {
+            this.a = a;
+        }
+
+        public Annotation(
+            double a,
+            double b,
+            String position,
+            String attribute,
+            String name,
+            double top,
+            double bottom
+        ) {
+            this.a         = a;
+            this.b         = b;
+            this.position  = position;
+            this.attribute = attribute;
+            this.name      = name;
+            this.top       = top;
+            this.bottom    = bottom;
+        }
+
+        public double getA() {
+            return a;
+        }
+
+        public double getB() {
+            return b;
+        }
+
+        public String getPosition() {
+            return position;
+        }
+
+        public String getAttribute() {
+            return attribute;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public double getTop() {
+            return top;
+        }
+
+        public double getBottom() {
+            return bottom;
+        }
+    } // class Annotation
+
+    public interface Filter {
+
+        boolean accept(Annotation annotation);
+
+    } // interface Filter
+
+    public static final Filter ALL = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return true;
+        }
+    };
+
+    public static final Filter IS_POINT = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return Double.isNaN(annotation.getB());
+        }
+    };
+
+    public static final Filter IS_RANGE = new Filter() {
+        @Override
+        public boolean accept(Annotation annotation) {
+            return !Double.isNaN(annotation.getB());
+        }
+    };
+
+    private Annotation [] annotations;
+
+    public FastAnnotations() {
+    }
+
+    public FastAnnotations(Annotation [] annotations) {
+        this.annotations = annotations;
+    }
+
+    public FastAnnotations(String riverName) {
+        this(loadByRiverName(riverName));
+    }
+
+    public FastAnnotations(int riverId) {
+        this(loadByRiverId(riverId));
+    }
+
+    public FastAnnotations(Iterator<Annotation> iter) {
+        this(toArray(iter));
+    }
+
+    public int size() {
+        return annotations.length;
+    }
+
+    public Iterator<Annotation> filter(final Filter filter) {
+        return new Iterator<Annotation>() {
+
+            private int idx;
+            private Annotation current = findNext();
+
+            @Override 
+            public boolean hasNext() {
+                return current != null;
+            }
+
+            @Override
+            public Annotation next() {
+                if (current == null) {
+                    throw new NoSuchElementException();
+                }
+                Annotation result = current;
+                current = findNext();
+                return result;
+            }
+
+            private Annotation findNext() {
+
+                while (idx < annotations.length) {
+                    Annotation annotation = annotations[idx++];
+                    if (filter.accept(annotation)) {
+                        return annotation;
+                    }
+                }
+
+                return null;
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
+    }
+
+    public static Annotation [] toArray(Iterator<Annotation> iter) {
+
+        ArrayList<Annotation> list = new ArrayList<Annotation>();
+
+        while (iter.hasNext()) {
+            list.add(iter.next());
+        }
+
+        return list.toArray(new Annotation[list.size()]);
+    }
+
+    public Annotation findByKm(double km) {
+        Annotation key = new Annotation(km);
+        int idx = Arrays.binarySearch(annotations, key, KM_CMP);
+        return idx < 0 ? null : annotations[idx];
+    }
+
+    private static SQLQuery createQuery(String query) {
+        Session session = SessionHolder.HOLDER.get();
+
+        return session.createSQLQuery(query)
+            .addScalar("a",         StandardBasicTypes.DOUBLE)
+            .addScalar("b",         StandardBasicTypes.DOUBLE)
+            .addScalar("position",  StandardBasicTypes.STRING)
+            .addScalar("attribute", StandardBasicTypes.STRING)
+            .addScalar("name",      StandardBasicTypes.STRING)
+            .addScalar("top",       StandardBasicTypes.DOUBLE)
+            .addScalar("bottom",    StandardBasicTypes.DOUBLE);
+    }
+
+    private static Annotation [] buildAnnotations(List<Object []> list) {
+        Annotation [] anns = new Annotation[list.size()];
+
+        // Names are often identical because they denote a type
+        // like 'Pegel' or 'Hafen', so reuse one shared instance per name.
+        HashMap<String, String> names = new HashMap<String, String>();
+
+        for (int i = 0; i < anns.length; ++i) {
+            Object [] data   = list.get(i);
+            double a         = ((Double)data[0]);
+            double b         = data[1] != null ? (Double)data[1] : Double.NaN;
+            String position  = (String)data[2];
+            String attribute = (String)data[3];
+            String name      = (String)data[4];
+            double top       = data[5] != null ? (Double)data[5] : Double.NaN;
+            double bottom    = data[6] != null ? (Double)data[6] : Double.NaN;
+
+            if (name != null) {
+                String old = names.get(name);
+                if (old != null) {
+                    name = old;
+                }
+                else {
+                    names.put(name, name);
+                }
+            }
+
+            anns[i] = new Annotation(
+                a, b, position, attribute, name, top, bottom);
+        }
+
+        return anns;
+    }
+
+    public static Annotation [] loadByRiverName(String riverName) {
+
+        SQLQuery query = createQuery(SQL_BY_RIVER_NAME);
+
+        query.setString("river_name", riverName);
+
+        return buildAnnotations(query.list());
+    }
+
+    public static Annotation [] loadByRiverId(int riverId) {
+
+        SQLQuery query = createQuery(SQL_BY_RIVER_ID);
+
+        query.setInteger("river_id", riverId);
+
+        return buildAnnotations(query.list());
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
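Usage sketch (editorial illustration, not part of this changeset): FastAnnotations loads all annotations of a river once and then answers km lookups in memory via binary search with KM_CMP. The fragment assumes a Hibernate session is bound through SessionHolder, that a river named "Mosel" exists in the database, and that the surrounding class imports FastAnnotations.

    FastAnnotations fast = new FastAnnotations("Mosel"); // loads via loadByRiverName()
    System.out.println("annotations: " + fast.size());
    if (fast.findByKm(56.0) != null) {                   // binary search with KM_CMP
        System.out.println("annotation found at km 56.0");
    }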
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FastCrossSectionLine.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+import java.util.Comparator;
+
+import java.io.Serializable;
+
+import java.awt.geom.Point2D;
+
+public class FastCrossSectionLine
+implements   Serializable
+{
+    public static final double EPSILON = 1e-5;
+
+    public static final Comparator<FastCrossSectionLine> KM_CMP =
+        new Comparator<FastCrossSectionLine>() {
+            public int compare(
+                FastCrossSectionLine a,
+                FastCrossSectionLine b
+            ) {
+                double diff = a.km - b.km;
+                if (diff < -EPSILON) return -1;
+                return diff > +EPSILON ? +1 : 0;
+            }
+        };
+
+    protected double km;
+    protected List<Point2D> points;
+
+    public FastCrossSectionLine() {
+    }
+
+    public FastCrossSectionLine(double km) {
+        this.km = km;
+    }
+
+    public FastCrossSectionLine(double km, List<Point2D> points) {
+        this(km);
+        this.points = points;
+    }
+
+    public FastCrossSectionLine(CrossSectionLine csl) {
+        Double kmBD = csl.getKm();
+        km = kmBD != null ? kmBD.doubleValue() : 0d;
+        points = csl.fetchCrossSectionLinesPoints();
+    }
+
+    public double getKm() {
+        return km;
+    }
+
+    public void setKm(double km) {
+        this.km = km;
+    }
+
+    public List<Point2D> getPoints() {
+        return points;
+    }
+
+    public void setPoints(List<Point2D> points) {
+        this.points = points;
+    }
+
+    public double [][] fetchCrossSectionProfile() {
+        return CrossSectionLine.fetchCrossSectionProfile(points);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
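Illustrative fragment (not part of this changeset): KM_CMP makes lists of FastCrossSectionLine searchable by km in memory, analogous to the binary search used by FastAnnotations. The fragment assumes java.util and java.awt.geom.Point2D are imported.

    List<FastCrossSectionLine> lines = new ArrayList<FastCrossSectionLine>();
    lines.add(new FastCrossSectionLine(10.0, new ArrayList<Point2D>()));
    lines.add(new FastCrossSectionLine(12.5, new ArrayList<Point2D>()));
    Collections.sort(lines, FastCrossSectionLine.KM_CMP);
    // The comparator matches within EPSILON, so this lookup returns index 1.
    int idx = Collections.binarySearch(
        lines, new FastCrossSectionLine(12.5), FastCrossSectionLine.KM_CMP);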
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Fixpoint.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,130 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "fixpoints")
+public class Fixpoint
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private Integer    x;
+    private Integer    y;
+    private BigDecimal km;
+    private String     hpgp;
+    private Point      geom;
+
+    public Fixpoint() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "x")
+    public Integer getX() {
+        return x;
+    }
+
+
+    public void setX(Integer x) {
+        this.x = x;
+    }
+
+
+    @Column(name = "y")
+    public Integer getY() {
+        return y;
+    }
+
+
+    public void setY(Integer y) {
+        this.y = y;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "hpgp")
+    public String getHpgp() {
+        return hpgp;
+    }
+
+
+    public void setHpgp(String hpgp) {
+        this.hpgp = hpgp;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Fixpoint> getFixpoints(int riverId) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Fixpoint where river.id =:river_id");
+        query.setParameter("river_id", riverId);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodmaps.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,155 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "floodmaps")
+public class Floodmaps
+implements   Serializable
+{
+    private Integer      id;
+    private River        river;
+    private String       name;
+    private Integer      kind;
+    private Integer      count;
+    private BigDecimal   diff;
+    private BigDecimal   area;
+    private BigDecimal   perimeter;
+    private Geometry     geom;
+
+    public Floodmaps() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "count")
+    public Integer getCount() {
+        return count;
+    }
+
+
+    public void setCount(Integer count) {
+        this.count = count;
+    }
+
+
+    @Column(name = "diff")
+    public BigDecimal getDiff() {
+        return diff;
+    }
+
+
+    public void setDiff(BigDecimal diff) {
+        this.diff = diff;
+    }
+
+
+    @Column(name = "area")
+    public BigDecimal getArea() {
+        return area;
+    }
+
+
+    public void setArea(BigDecimal area) {
+        this.area = area;
+    }
+
+
+    @Column(name = "perimeter")
+    public BigDecimal getPerimeter() {
+        return perimeter;
+    }
+
+
+    public void setPerimeter(BigDecimal perimeter) {
+        this.perimeter = perimeter;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Floodmaps> getFloodmaps(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Floodmaps where river.id =:river_id AND name =:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,81 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Polygon;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "floodplain")
+public class Floodplain
+implements   Serializable
+{
+    private Integer id;
+
+    private River   river;
+
+    private Polygon geom;
+
+
+    public Floodplain() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Polygon getGeom() {
+        return geom;
+    }
+
+    public void setGeom(Polygon geom) {
+        this.geom = geom;
+    }
+
+
+    public static Floodplain getFloodplain(String river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Floodplain where river.name =:river");
+        query.setParameter("river", river);
+
+        List<Floodplain> result = query.list();
+
+        return result.isEmpty() ? null : result.get(0);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
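Illustrative fragment (not part of this changeset): getFloodplain() returns at most one Floodplain per river, or null if none is mapped. The river name "Mosel" is an assumed example; a session must be bound via SessionHolder.

    Floodplain plain = Floodplain.getFloodplain("Mosel");
    if (plain != null) {
        Polygon geom = plain.getGeom();
        System.out.println("floodplain vertices: " + geom.getNumPoints());
    }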
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityMeasurement.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,97 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "flow_velocity_measurements")
+public class FlowVelocityMeasurement
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityMeasurement.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private String description;
+
+    private List<FlowVelocityMeasurementValue> values;
+
+
+    public FlowVelocityMeasurement() {
+    }
+
+
+    public FlowVelocityMeasurement(River river, String description) {
+        this.river       = river;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FV_MEASURE_ID_SEQ",
+        sequenceName   = "FV_MEASURE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FV_MEASURE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "measurements_id")
+    public List<FlowVelocityMeasurementValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<FlowVelocityMeasurementValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(FlowVelocityMeasurementValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityMeasurementValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,146 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "flow_velocity_measure_values")
+public class FlowVelocityMeasurementValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityMeasurementValue.class);
+
+
+    private Integer id;
+
+    private FlowVelocityMeasurement measurement;
+
+    private BigDecimal station;
+    private BigDecimal w;
+    private BigDecimal q;
+    private BigDecimal v;
+
+    private Date datetime;
+
+    private String description;
+
+
+    public FlowVelocityMeasurementValue() {
+    }
+
+
+    public FlowVelocityMeasurementValue(
+        FlowVelocityMeasurement measurement,
+        Date                    datetime,
+        BigDecimal              station,
+        BigDecimal              w,
+        BigDecimal              q,
+        BigDecimal              v,
+        String                  description
+    ) {
+        this.measurement = measurement;
+        this.datetime    = datetime;
+        this.station     = station;
+        this.w           = w;
+        this.q           = q;
+        this.v           = v;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FV_MEASURE_VALUES_ID_SEQ",
+        sequenceName   = "FV_MEASURE_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FV_MEASURE_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "measurements_id")
+    public FlowVelocityMeasurement getMeasurement() {
+        return measurement;
+    }
+
+    public void setMeasurement(FlowVelocityMeasurement measurement) {
+        this.measurement = measurement;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "datetime")
+    public Date getDatetime() {
+        return datetime;
+    }
+
+    public void setDatetime(Date datetime) {
+        this.datetime = datetime;
+    }
+
+    @Column(name = "w")
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "v")
+    public BigDecimal getV() {
+        return v;
+    }
+
+    public void setV(BigDecimal v) {
+        this.v = v;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityModel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,124 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "flow_velocity_model")
+public class FlowVelocityModel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(FlowVelocityModel.class);
+
+
+    private Integer id;
+
+    private River river;
+
+    private DischargeZone dischargeZone;
+
+    private List<FlowVelocityModelValue> values;
+
+    private String description;
+
+
+    public FlowVelocityModel() {
+    }
+
+
+    public FlowVelocityModel(River river, DischargeZone dischargeZone) {
+        this(river, dischargeZone, null);
+    }
+
+
+    public FlowVelocityModel(
+        River         river,
+        DischargeZone dischargeZone,
+        String        description
+    ) {
+        this.river         = river;
+        this.dischargeZone = dischargeZone;
+        this.description   = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FLOW_VELOCITY_MODEL_ID_SEQ",
+        sequenceName   = "FLOW_VELOCITY_MODEL_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FLOW_VELOCITY_MODEL_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "discharge_zone_id")
+    public DischargeZone getDischargeZone() {
+        return dischargeZone;
+    }
+
+    public void setDischargeZone(DischargeZone dischargeZone) {
+        this.dischargeZone = dischargeZone;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+
+    public static List<FlowVelocityModel> getModels(
+        River         river,
+        DischargeZone zone
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from FlowVelocityModel where river=:river and dischargeZone=:zone");
+
+        query.setParameter("river", river);
+        query.setParameter("zone", zone);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/FlowVelocityModelValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,158 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "flow_velocity_model_values")
+public class FlowVelocityModelValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(FlowVelocityModelValue.class);
+
+
+    private Integer id;
+
+    private FlowVelocityModel flowVelocity;
+
+    private BigDecimal station;
+    private BigDecimal q;
+    private BigDecimal totalChannel;
+    private BigDecimal mainChannel;
+    private BigDecimal shearStress;
+
+
+    public FlowVelocityModelValue() {
+    }
+
+
+    public FlowVelocityModelValue(
+        FlowVelocityModel flowVelocity,
+        BigDecimal        station,
+        BigDecimal        q,
+        BigDecimal        totalChannel,
+        BigDecimal        mainChannel,
+        BigDecimal        shearStress
+    ) {
+        this.flowVelocity = flowVelocity;
+        this.station      = station;
+        this.q            = q;
+        this.totalChannel = totalChannel;
+        this.mainChannel  = mainChannel;
+        this.shearStress  = shearStress;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_FLOW_VELOCITY_M_VALUES_ID_SEQ",
+        sequenceName   = "FLOW_VELOCITY_M_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_FLOW_VELOCITY_M_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "flow_velocity_model_id")
+    public FlowVelocityModel getFlowVelocity() {
+        return flowVelocity;
+    }
+
+    public void setFlowVelocity(FlowVelocityModel flowVelocity) {
+        this.flowVelocity = flowVelocity;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "q")
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+
+    @Column(name = "total_channel")
+    public BigDecimal getTotalChannel() {
+        return totalChannel;
+    }
+
+    public void setTotalChannel(BigDecimal totalChannel) {
+        this.totalChannel = totalChannel;
+    }
+
+    @Column(name = "main_channel")
+    public BigDecimal getMainChannel() {
+        return mainChannel;
+    }
+
+    public void setMainChannel(BigDecimal mainChannel) {
+        this.mainChannel = mainChannel;
+    }
+
+    @Column(name = "shear_stress")
+    public BigDecimal getShearStress() {
+        return shearStress;
+    }
+
+    public void setShearStress(BigDecimal shearStress) {
+        this.shearStress = shearStress;
+    }
+
+
+    public static List<FlowVelocityModelValue> getValues(
+        FlowVelocityModel model,
+        double kmLo,
+        double kmHi
+    ) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from FlowVelocityModelValue where " +
+            "   flowVelocity=:model and" +
+            "   station >= :kmLo and " +
+            "   station <= :kmHi");
+
+        query.setParameter("model", model);
+        query.setParameter("kmLo", new BigDecimal(kmLo));
+        query.setParameter("kmHi", new BigDecimal(kmHi));
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
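Illustrative fragment (not part of this changeset): the static finders are intended to be chained, first resolving the models of a river/discharge-zone pair, then fetching each model's values within a km range. The river and zone objects are assumed to have been loaded elsewhere; a session must be bound via SessionHolder.

    List<FlowVelocityModel> models = FlowVelocityModel.getModels(river, zone);
    for (FlowVelocityModel model : models) {
        // Values of this model between km 10.0 and km 20.0.
        List<FlowVelocityModelValue> values =
            FlowVelocityModelValue.getValues(model, 10.0, 20.0);
        System.out.println(model.getDescription() + ": " + values.size() + " values");
    }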
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Gauge.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,246 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+import de.intevation.flys.model.MainValue;
+
+@Entity
+@Table(name = "gauges")
+public class Gauge
+implements   Serializable
+{
+    public static final int DEFAULT_SCALE = 100;
+
+    public static final int MASTER_DISCHARGE_TABLE = 0;
+
+
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private BigDecimal station;
+    private BigDecimal aeo;
+    private BigDecimal datum;
+    private Long       officialNumber;
+    private Range      range;
+
+    private List<DischargeTable> dischargeTables;
+
+    /** MainValues at this Gauge. */
+    protected List<MainValue> mainValues;
+
+    public Gauge() {
+    }
+
+    public Gauge(
+        String     name,
+        River      river,
+        BigDecimal station,
+        BigDecimal aeo,
+        BigDecimal datum,
+        Long       officialNumber,
+        Range      range
+    ) {
+        this.name            = name;
+        this.river           = river;
+        this.station         = station;
+        this.aeo             = aeo;
+        this.datum           = datum;
+        this.officialNumber  = officialNumber;
+        this.range           = range;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_GAUGES_ID_SEQ",
+        sequenceName   = "GAUGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_GAUGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "station") // FIXME: type mapping needed
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "aeo") // FIXME: type mapping needed
+    public BigDecimal getAeo() {
+        return aeo;
+    }
+
+    public void setAeo(BigDecimal aeo) {
+        this.aeo = aeo;
+    }
+
+    @Column(name = "datum") // FIXME: type mapping needed
+    public BigDecimal getDatum() {
+        return datum;
+    }
+
+    public void setDatum(BigDecimal datum) {
+        this.datum = datum;
+    }
+
+    @Column(name = "official_number")
+    public Long getOfficialNumber() {
+        return officialNumber;
+    }
+
+    public void setOfficialNumber(Long officialNumber) {
+        this.officialNumber = officialNumber;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id" )
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "gauge_id")
+    public List<DischargeTable> getDischargeTables() {
+        return dischargeTables;
+    }
+
+    public void setDischargeTables(List<DischargeTable> dischargeTables) {
+        this.dischargeTables = dischargeTables;
+    }
+
+
+    /**
+     * Returns the min and max W values of this gauge, scaled by DEFAULT_SCALE.
+     *
+     * @return the min and max W values of this gauge as [min, max].
+     */
+    public double[] determineMinMaxW() {
+        return determineMinMaxW(DEFAULT_SCALE);
+    }
+
+
+    /**
+     * Returns the min and max W values of this gauge, scaled by the given factor.
+     *
+     * @param scale the factor applied to the raw W values.
+     *
+     * @return the min and max W values of this gauge as [min, max].
+     */
+    public double[] determineMinMaxW(int scale) {
+        Session session = SessionHolder.HOLDER.get();
+
+        List<DischargeTable> tables   = getDischargeTables();
+        DischargeTable dischargeTable = null;
+
+        for (DischargeTable tmp: tables) {
+            if (tmp.getKind() == 0) {
+                dischargeTable = tmp;
+                break;
+            }
+        }
+
+        if (dischargeTable == null) {
+            return null;
+        }
+
+        Query query  = session.createQuery(
+            "select min(w) as min, max(w) as max from DischargeTableValue " +
+            "where table_id =:table");
+        query.setParameter("table", dischargeTable.getId());
+
+        List     results = query.list();
+        Object[] result  = (Object[]) results.get(0);
+
+        return result != null
+            ? new double[] {
+                ((BigDecimal) result[0]).doubleValue() * scale,
+                ((BigDecimal) result[1]).doubleValue() * scale}
+            : null;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "gauge_id")
+    public List<MainValue> getMainValues() {
+        return mainValues;
+    }
+
+    public void setMainValues(List<MainValue> mainValues) {
+        this.mainValues = mainValues;
+    }
+
+
+    public static Gauge getGaugeByOfficialNumber(long number) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Gauge where officialNumber=:number");
+
+        query.setParameter("number", number);
+
+        List<Gauge> results = query.list();
+
+        return results.isEmpty() ? null : results.get(0);
+    }
+
+
+    public DischargeTable fetchMasterDischargeTable() {
+        for (DischargeTable dt: dischargeTables) {
+            if (dt.getKind() == MASTER_DISCHARGE_TABLE) {
+                return dt;
+            }
+        }
+
+        return null;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
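Illustrative fragment (not part of this changeset): looking up a gauge by its official number and reading derived data. The official number is an assumed example; a session must be bound via SessionHolder.

    Gauge gauge = Gauge.getGaugeByOfficialNumber(2310050L);
    if (gauge != null) {
        double[] minMaxW = gauge.determineMinMaxW();          // [min, max] * DEFAULT_SCALE
        DischargeTable master = gauge.fetchMasterDischargeTable();
        if (minMaxW != null && master != null) {
            System.out.println(gauge.getName()
                + ": W range " + minMaxW[0] + " - " + minMaxW[1]);
        }
    }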
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/GaugeLocation.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,105 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "gauge_location")
+public class GaugeLocation
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private String     name;
+    private Point      geom;
+
+
+    public GaugeLocation() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    /**
+     * Returns a list of GaugeLocation objects for a given river and name.
+     *
+     * @param riverid The ID of a river in the database.
+     * @param name    The name of the gauge location.
+     *
+     * @return a list of GaugeLocation objects.
+     */
+    public static List<GaugeLocation> getGaugeLocations(int riverid, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from GaugeLocation where river.id =:riverid and name=:name");
+        query.setParameter("riverid", riverid);
+        query.setParameter("name", name);
+
+        List<GaugeLocation> list = query.list();
+
+        return list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/GrainFraction.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,108 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "grain_fraction")
+public class GrainFraction
+implements   Serializable
+{
+    public static final String TOTAL              = "total";
+    public static final String COARSE             = "coarse";
+    public static final String FINE_MIDDLE        = "fine_middle";
+    public static final String SAND               = "sand";
+    public static final String SUSP_SAND          = "susp_sand";
+    public static final String SUSP_SAND_BED      = "susp_sand_bed";
+    public static final String SUSPENDED_SEDIMENT = "suspended_sediment";
+
+
+    private static Logger logger = Logger.getLogger(GrainFraction.class);
+
+    private Integer id;
+
+    private String name;
+
+    private Double lower;
+    private Double upper;
+
+    private Unit unit;
+
+
+    public GrainFraction() {
+    }
+
+    public GrainFraction(String name, Double lower, Double upper, Unit unit) {
+        this.name  = name;
+        this.lower = lower;
+        this.upper = upper;
+        this.unit  = unit;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_GRAIN_FRACTION_ID_SEQ",
+        sequenceName   = "GRAIN_FRACTION_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_GRAIN_FRACTION_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name" )
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "lower")
+    public Double getLower() {
+        return lower;
+    }
+
+    public void setLower(Double lower) {
+        this.lower = lower;
+    }
+
+    @Column(name = "upper")
+    public Double getUpper() {
+        return upper;
+    }
+
+    public void setUpper(Double upper) {
+        this.upper = upper;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYK.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,85 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyks")
+public class HYK
+implements   Serializable
+{
+    private Integer id;
+    private River   river;
+    private String  description;
+
+    private List<HYKEntry> entries;
+
+    public HYK() {
+    }
+
+    public HYK(River river, String description) {
+        this.river       = river;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYKS_ID_SEQ",
+        sequenceName   = "HYKS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYKS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @OrderBy("km")
+    @JoinColumn(name="hyk_id")
+    public List<HYKEntry> getEntries() {
+        return entries;
+    }
+
+    public void setEntries(List<HYKEntry> entries) {
+        this.entries = entries;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKEntry.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,99 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import java.util.Date;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_entries")
+public class HYKEntry
+implements   Serializable
+{
+    private Integer    id;
+    private HYK        hyk;
+    private BigDecimal km;
+    private Date       measure;
+
+    private List<HYKFormation> formations;
+
+    public HYKEntry() {
+    }
+
+    public HYKEntry(HYK hyk, BigDecimal km, Date measure) {
+        this.hyk     = hyk;
+        this.km      = km;
+        this.measure = measure;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_ENTRIES_ID_SEQ",
+        sequenceName   = "HYK_ENTRIES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_ENTRIES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "hyk_id")
+    public HYK getHYK() {
+        return hyk;
+    }
+
+    public void setHYK(HYK hyk) {
+        this.hyk = hyk;
+    }
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+    @Column(name = "measure")
+    public Date getMeasure() {
+        return measure;
+    }
+
+    public void setMeasure(Date measure) {
+        this.measure = measure;
+    }
+
+    @OneToMany
+    @OrderBy("formationNum")
+    @JoinColumn(name="hyk_entry_id")
+    public List<HYKFormation> getFormations() {
+        return formations;
+    }
+
+    public void setFormations(List<HYKFormation> formations) {
+        this.formations = formations;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFlowZone.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,98 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_flow_zones")
+public class HYKFlowZone
+implements   Serializable
+{
+    private Integer         id;
+    private HYKFormation    formation;
+    private HYKFlowZoneType type;
+    private BigDecimal      a;
+    private BigDecimal      b;
+
+    public HYKFlowZone() {
+    }
+
+    public HYKFlowZone(
+        HYKFormation    formation,
+        HYKFlowZoneType type,
+        BigDecimal      a,
+        BigDecimal      b
+    ) {
+        this.formation = formation;
+        this.type      = type;
+        this.a         = a;
+        this.b         = b;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FLOW_ZONES_ID_SEQ",
+        sequenceName   = "HYK_FLOW_ZONES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FLOW_ZONES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "formation_id")
+    public HYKFormation getFormation() {
+        return formation;
+    }
+
+    public void setFormation(HYKFormation formation) {
+        this.formation = formation;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id")
+    public HYKFlowZoneType getType() {
+        return type;
+    }
+
+    public void setType(HYKFlowZoneType type) {
+        this.type = type;
+    }
+
+    @Column(name = "a")
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    @Column(name = "b")
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFlowZoneType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,69 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "hyk_flow_zone_types")
+public class HYKFlowZoneType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+    private String  description;
+
+    public HYKFlowZoneType() {
+    }
+
+    public HYKFlowZoneType(String name) {
+        this.name = name;
+    }
+
+    public HYKFlowZoneType(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FLOW_ZONE_TYPES_ID_SEQ",
+        sequenceName   = "HYK_FLOW_ZONE_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FLOW_ZONE_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HYKFormation.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,151 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.List;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+import javax.persistence.OrderBy;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "hyk_formations")
+public class HYKFormation
+implements   Serializable
+{
+    private Integer    id;
+    private Integer    formationNum;
+    private HYKEntry   entry;
+    private BigDecimal top;
+    private BigDecimal bottom;
+    private BigDecimal distanceVL;
+    private BigDecimal distanceHF;
+    private BigDecimal distanceVR;
+
+    private List<HYKFlowZone> zones;
+
+    public HYKFormation() {
+    }
+
+    public HYKFormation(
+        Integer    formationNum,
+        HYKEntry   entry,
+        BigDecimal top,
+        BigDecimal bottom,
+        BigDecimal distanceVL,
+        BigDecimal distanceHF,
+        BigDecimal distanceVR
+    ) {
+        this.formationNum = formationNum;
+        this.entry        = entry;
+        this.top          = top;
+        this.bottom       = bottom;
+        this.distanceVL   = distanceVL;
+        this.distanceHF   = distanceHF;
+        this.distanceVR   = distanceVR;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_HYK_FORMATIONS_ID_SEQ",
+        sequenceName   = "HYK_FORMATIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_HYK_FORMATIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "formation_num")
+    public Integer getFormationNum() {
+        return formationNum;
+    }
+
+    public void setFormationNum(Integer formationNum) {
+        this.formationNum = formationNum;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "hyk_entry_id")
+    public HYKEntry getEntry() {
+        return entry;
+    }
+
+    public void setEntry(HYKEntry entry) {
+        this.entry = entry;
+    }
+
+    @Column(name = "top")
+    public BigDecimal getTop() {
+        return top;
+    }
+
+    public void setTop(BigDecimal top) {
+        this.top = top;
+    }
+
+    @Column(name = "bottom")
+    public BigDecimal getBottom() {
+        return bottom;
+    }
+
+    public void setBottom(BigDecimal bottom) {
+        this.bottom = bottom;
+    }
+
+    @Column(name = "distance_vl")
+    public BigDecimal getDistanceVL() {
+        return distanceVL;
+    }
+
+    public void setDistanceVL(BigDecimal distanceVL) {
+        this.distanceVL = distanceVL;
+    }
+
+    @Column(name = "distance_hf")
+    public BigDecimal getDistanceHF() {
+        return distanceHF;
+    }
+
+    public void setDistanceHF(BigDecimal distanceHF) {
+        this.distanceHF = distanceHF;
+    }
+
+    @Column(name = "distance_vr")
+    public BigDecimal getDistanceVR() {
+        return distanceVR;
+    }
+
+    public void setDistanceVR(BigDecimal distanceVR) {
+        this.distanceVR = distanceVR;
+    }
+
+
+    @OneToMany
+    @OrderBy("a")
+    @JoinColumn(name="formation_id")
+    public List<HYKFlowZone> getZones() {
+        return zones;
+    }
+
+    public void setZones(List<HYKFlowZone> zones) {
+        this.zones = zones;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Hws.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,106 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hws")
+public class Hws
+implements   Serializable
+{
+    private Integer    id;
+    private String     facility;
+    private String     type;
+    private River      river;
+    private LineString geom;
+
+    public Hws() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "hws_facility")
+    public String getFacility() {
+        return facility;
+    }
+
+
+    public void setFacility(String facility) {
+        this.facility = facility;
+    }
+
+
+    @Column(name = "type")
+    public String getType() {
+        return type;
+    }
+
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<Hws> getHws(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Hws where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hydr_boundaries")
+public class HydrBoundary
+implements   Serializable
+{
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private LineString geom;
+
+    public HydrBoundary() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<HydrBoundary> getHydrBoundaries(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from HydrBoundary where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "hydr_boundaries_poly")
+public class HydrBoundaryPoly
+implements   Serializable
+{
+    private Integer    id;
+    private String     name;
+    private River      river;
+    private Geometry   geom;
+
+    public HydrBoundaryPoly() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Geometry getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Geometry geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<HydrBoundaryPoly> getHydrBoundaries(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from HydrBoundaryPoly where river.id =:river_id and name=:name");
+        query.setParameter("river_id", riverId);
+        query.setParameter("name", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Line.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,108 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "lines")
+public class Line
+implements   Serializable
+{
+    private Integer    id;
+    private String     kind;
+    private River      river;
+    private LineString geom;
+    private BigDecimal z;
+
+    public Line() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "kind")
+    public String getKind() {
+        return kind;
+    }
+
+
+    public void setKind(String kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    @Column(name = "z")
+    public BigDecimal getZ() {
+        return z;
+    }
+
+
+    public void setZ(BigDecimal z) {
+        this.z = z;
+    }
+
+
+    public static List<Line> getLines(int riverId, String name) {
+        Session session = SessionHolder.HOLDER.get();
+
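+        // Note: Line maps no 'name' property, so the query below filters on
+        // 'kind' instead (assumption: the importer stores the line's name there).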
+        Query query = session.createQuery(
+            "from Line where river.id = :river_id and kind = :kind");
+        query.setParameter("river_id", riverId);
+        query.setParameter("kind", name);
+
+        return query.list();
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/LocationSystem.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+
+@Entity
+@Table(name = "location_system")
+public class LocationSystem implements Serializable {
+
+    protected Integer id;
+
+    protected String name;
+    protected String description;
+
+
+    public LocationSystem() {
+    }
+
+
+    public LocationSystem(String name, String description) {
+        this.name        = name;
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_LOCATION_SYSTEM_ID_SEQ",
+        sequenceName   = "LOCATION_SYSTEM_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_LOCATION_SYSTEM_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MainValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.math.BigDecimal;
+
+@Entity
+@Table(name = "main_values")
+public class MainValue
+implements   Serializable
+{
+    private Integer        id;
+
+    private Gauge          gauge;
+
+    private NamedMainValue mainValue;
+
+    private BigDecimal     value;
+
+    private TimeInterval   timeInterval;
+
+    public MainValue() {
+    }
+
+    public MainValue(
+        Gauge          gauge,
+        NamedMainValue mainValue,
+        BigDecimal     value,
+        TimeInterval   timeInterval
+    ) {
+        this.gauge        = gauge;
+        this.mainValue    = mainValue;
+        this.value        = value;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MAIN_VALUES_ID_SEQ",
+        sequenceName   = "MAIN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MAIN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "gauge_id")
+    public Gauge getGauge() {
+        return gauge;
+    }
+
+    public void setGauge(Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "named_value_id")
+    public NamedMainValue getMainValue() {
+        return mainValue;
+    }
+
+    public void setMainValue(NamedMainValue mainValue) {
+        this.mainValue = mainValue;
+    }
+
+    @Column(name = "value") // FIXME: type mapping needed?
+    public BigDecimal getValue() {
+        return value;
+    }
+
+    public void setValue(BigDecimal value) {
+        this.value = value;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MainValueType.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,54 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "main_value_types")
+public class MainValueType
+implements   Serializable
+{
+    private Integer id;
+    private String  name;
+
+    public MainValueType() {
+    }
+
+    public MainValueType(String name) {
+        this.name = name;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MAIN_VALUE_TYPES_ID_SEQ",
+        sequenceName   = "MAIN_VALUE_TYPES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MAIN_VALUE_TYPES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name") // FIXME: Type conversion needed?
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MorphologicalWidth.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,88 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "morphologic_width")
+public class MorphologicalWidth implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private List<MorphologicalWidthValue> values;
+
+
+    public MorphologicalWidth() {
+    }
+
+
+    public MorphologicalWidth(River river, Unit unit) {
+        this.river = river;
+        this.unit  = unit;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MORPHOLOGIC_WIDTH_ID_SEQ",
+        sequenceName   = "MORPHOLOGIC_WIDTH_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MORPHOLOGIC_WIDTH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "morphologic_width_id")
+    public List<MorphologicalWidthValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<MorphologicalWidthValue> values) {
+        this.values = values;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MorphologicalWidthValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "morphologic_width_values")
+public class MorphologicalWidthValue implements Serializable {
+
+    private Integer id;
+
+    private MorphologicalWidth morphologicalWidth;
+
+    private BigDecimal station;
+    private BigDecimal width;
+
+    private String description;
+
+
+    public MorphologicalWidthValue() {
+    }
+
+
+    public MorphologicalWidthValue(
+        MorphologicalWidth morphologicalWidth,
+        BigDecimal         station,
+        BigDecimal         width,
+        String             description
+    ) {
+        this.morphologicalWidth = morphologicalWidth;
+        this.station            = station;
+        this.width              = width;
+        this.description        = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_MORPH_WIDTH_VALUES_ID_SEQ",
+        sequenceName   = "MORPH_WIDTH_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_MORPH_WIDTH_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "morphologic_width_id")
+    public MorphologicalWidth getMorphologicalWidth() {
+        return morphologicalWidth;
+    }
+
+    public void setMorphologicalWidth(MorphologicalWidth width) {
+        this.morphologicalWidth = width;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "width")
+    public BigDecimal getWidth() {
+        return width;
+    }
+
+    public void setWidth(BigDecimal width) {
+        this.width = width;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/NamedMainValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "named_main_values")
+public class NamedMainValue
+implements   Serializable
+{
+    private Integer       id;
+    private String        name;
+    private MainValueType type;
+
+    public NamedMainValue() {
+    }
+
+    public NamedMainValue(String name, MainValueType type) {
+        this.name = name;
+        this.type = type;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_NAMED_MAIN_VALUES_ID_SEQ",
+        sequenceName   = "NAMED_MAIN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_NAMED_MAIN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "type_id" )
+    public MainValueType getType() {
+        return type;
+    }
+
+    public void setType(MainValueType type) {
+        this.type = type;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Position.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,71 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToMany;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.util.List;
+
+@Entity
+@Table(name = "positions")
+public class Position
+implements   Serializable
+{
+    private Integer id;
+
+    private String  value;
+
+    private List<Annotation> annotations;
+
+    public Position() {
+    }
+
+    public Position(String value) {
+        this.value = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_POSITIONS_ID_SEQ",
+        sequenceName   = "POSITIONS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_POSITIONS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "value")
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    @OneToMany
+    @JoinColumn(name="position_id")
+    public List<Annotation> getAnnotations() {
+        return annotations;
+    }
+
+    public void setAnnotations(List<Annotation> annotations) {
+        this.annotations = annotations;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Range.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,144 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+@Entity
+@Table(name = "ranges")
+public class Range
+implements   Serializable
+{
+    private Integer    id;
+    private BigDecimal a;
+    private BigDecimal b;
+
+    private River      river;
+
+    public Range() {
+    }
+
+    public Range(double a, double b, River river) {
+        this(new BigDecimal(a), new BigDecimal(b), river);
+    }
+
+    public Range(BigDecimal a, BigDecimal b, River river) {
+        this.a     = a;
+        this.b     = b;
+        this.river = river;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_RANGES_ID_SEQ",
+        sequenceName   = "RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "a") // FIXME: type mapping needed?
+    public BigDecimal getA() {
+        return a;
+    }
+
+    public void setA(BigDecimal a) {
+        this.a = a;
+    }
+
+    @Column(name = "b") // FIXME: type mapping needed?
+    public BigDecimal getB() {
+        return b;
+    }
+
+    public void setB(BigDecimal b) {
+        this.b = b;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
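+    /**
+     * Encodes which bounds of this range are set: bit 0 for 'a', bit 1 for 'b'.
+     * 0 = no bound, 1 = only a, 2 = only b, 3 = full range [a, b].
+     */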
+    public int code() {
+        int code = 0;
+        if (a != null) code  = 1;
+        if (b != null) code |= 2;
+        return code;
+    }
+
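+    /** Tests whether c lies within [a, b]; both bounds must be set. */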
+    public boolean intersects(BigDecimal c) {
+        return !(a.compareTo(c) > 0 || b.compareTo(c) < 0);
+    }
+
+    public boolean intersects(Range other) {
+
+        int code  = code();
+        int ocode = other.code();
+
+        if (code == 0 || ocode == 0) {
+            return false;
+        }
+
+        switch (code) {
+            case 1: // has a
+                switch (ocode) {
+                    case 1: // has a
+                        return a.compareTo(other.a) == 0;
+                    case 2: // has b
+                        return a.compareTo(other.b) == 0;
+                    case 3: // has range
+                        return other.intersects(a);
+                }
+                break;
+            case 2: // has b
+                switch (ocode) {
+                    case 1: // has a
+                        return b.compareTo(other.a) == 0;
+                    case 2: // has b
+                        return b.compareTo(other.b) == 0;
+                    case 3: // has range
+                        return other.intersects(b);
+                }
+                break;
+            case 3: // has range
+                switch (ocode) {
+                    case 1: // has a
+                        return intersects(other.a);
+                    case 2: // has b
+                        return intersects(other.b);
+                    case 3: // has range
+                        return !(other.b.compareTo(a) < 0
+                               || other.a.compareTo(b) > 0);
+                }
+                break;
+
+        }
+
+        return false;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/River.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,292 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.math.BigDecimal;
+import java.math.MathContext;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.GenerationType;
+
+import java.util.List;
+import java.util.Comparator;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "rivers")
+public class River
+implements   Serializable
+{
+    public static final MathContext PRECISION = new MathContext(6);
+
+    public static final double EPSILON = 1e-5;
+
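+    /**
+     * Compares km values with a tolerance of EPSILON so that stations
+     * differing only by rounding are treated as equal (used e.g. as key
+     * comparator in queryGaugeDatumsKMs()).
+     */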
+    public static final Comparator<Double> KM_CMP = new Comparator<Double>() {
+        @Override
+        public int compare(Double a, Double b) {
+            double diff = a - b;
+            if (diff < -EPSILON) return -1;
+            if (diff >  EPSILON) return +1;
+            return 0;
+        }
+    };
+
+    private Integer id;
+
+    private String  name;
+
+    private boolean kmUp;
+
+    private List<Gauge> gauges;
+
+    private Unit wstUnit;
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_RIVERS_ID_SEQ",
+        sequenceName   = "RIVERS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_RIVERS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "km_up")
+    public boolean getKmUp() {
+        return kmUp;
+    }
+
+    public void setKmUp(boolean kmUp) {
+        this.kmUp = kmUp;
+    }
+
+    public River() {
+    }
+
+    public River(String name, Unit wstUnit) {
+        this.name    = name;
+        this.wstUnit = wstUnit;
+    }
+
+    @OneToMany
+    @JoinColumn(name="river_id")
+    public List<Gauge> getGauges() {
+        return gauges;
+    }
+
+    public void setGauges(List<Gauge> gauges) {
+        this.gauges = gauges;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "wst_unit_id" )
+    public Unit getWstUnit() {
+        return wstUnit;
+    }
+
+    public void setWstUnit(Unit wstUnit) {
+        this.wstUnit = wstUnit;
+    }
+
+
+
+    public String toString() {
+        return name != null ? name : "";
+    }
+
+
+    /**
+     * This method returns the gauges whose ranges intersect the interval
+     * [<i>a</i>, <i>b</i>].
+     *
+     * @param a A start point.
+     * @param b An end point.
+     *
+     * @return the intersecting gauges.
+     */
+    public List<Gauge> determineGauges(double a, double b) {
+        Session session = SessionHolder.HOLDER.get();
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        Query query = session.createQuery(
+            "from Gauge where river=:river " +
+            "and not (range.a > :b or range.b < :a) order by range.a");
+        query.setParameter("river", this);
+        query.setParameter("a", new BigDecimal(a, PRECISION));
+        query.setParameter("b", new BigDecimal(b, PRECISION));
+
+        return query.list();
+    }
+
+    public Gauge maxOverlap(double a, double b) {
+        List<Gauge> gauges = determineGauges(a, b);
+        if (gauges == null) {
+            return null;
+        }
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        double max = -Double.MAX_VALUE;
+
+        Gauge result = null;
+
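+        // Find the gauge whose range [c, d] overlaps [a, b] the most.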
+        for (Gauge gauge: gauges) {
+            Range  r = gauge.getRange();
+            double c = r.getA().doubleValue();
+            double d = r.getB().doubleValue();
+
+            double start = c >= a ? c : a;
+            double stop  = d <= b ? d : b;
+
+            double length = stop - start;
+
+            if (length > max) {
+                max = length;
+                result = gauge;
+            }
+        }
+
+        return result;
+    }
+
+    public Gauge determineGaugeByName(String name) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from Gauge where river=:river and name=:name");
+        query.setParameter("river", this);
+        query.setParameter("name", name);
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
+    public Gauge determineGaugeByPosition(double p) {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from Gauge g where river=:river "  +
+            "and :p between g.range.a and g.range.b");
+        query.setParameter("river", this);
+        query.setParameter("p", new BigDecimal(p, PRECISION));
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
+    public Gauge determineGaugeByStation(double a, double b) {
+
+        if (a > b) { double t = a; a = b; b = t; }
+
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from Gauge where river.id=:river " +
+            "and station between :a and :b");
+        query.setParameter("river", getId());
+        query.setParameter("a", new BigDecimal(a));
+        query.setParameter("b", new BigDecimal(b));
+
+        List<Gauge> gauges = query.list();
+        return gauges.isEmpty() ? null : gauges.get(0);
+    }
+
+
+    /**
+     * This method returns the first gauge whose range intersects the interval
+     * [<i>a</i>, <i>b</i>].
+     *
+     * @param a A start point.
+     * @param b An end point.
+     *
+     * @return the first intersecting gauge.
+     */
+    public Gauge determineGauge(double a, double b) {
+        List<Gauge> gauges = determineGauges(a, b);
+
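+        // determineGauges() orders its result ascending by km; if the caller
+        // passed a descending range (a > b), take the last gauge so the result
+        // is the one nearest to the original start point.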
+        int idx = a < b ? 0 : gauges.size() - 1;
+
+        return gauges.isEmpty() ? null : gauges.get(idx);
+    }
+
+    /**
+     * Returns the min and max distance of this river. The first position in the
+     * resulting array contains the min distance, the second position the max
+     * distance.
+     *
+     * @return the min and max distance of this river.
+     */
+    public double[] determineMinMaxDistance() {
+        List<Gauge> gauges = getGauges();
+
+        if (gauges == null || gauges.isEmpty()) {
+            return null;
+        }
+
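+        // Start with inverted extremes and narrow them down over all gauge ranges.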
+        double[] minmax = new double[] { Double.MAX_VALUE, -Double.MAX_VALUE };
+
+        for (Gauge g: gauges) {
+            Range r = g.getRange();
+
+            if (r == null) {
+                continue;
+            }
+
+            double a  = r.getA().doubleValue();
+            minmax[0] = minmax[0] < a ? minmax[0] : a;
+
+            BigDecimal bigB = r.getB();
+            if (bigB != null) {
+                double b  = bigB.doubleValue();
+                minmax[1] = minmax[1] > b ? minmax[1] : b;
+            }
+        }
+
+        return minmax;
+    }
+
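+    /**
+     * Maps the station (km) of each gauge of this river to its datum.
+     * Keys are compared with KM_CMP, so stations that differ only by
+     * rounding collapse into a single entry.
+     */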
+    public Map<Double, Double> queryGaugeDatumsKMs() {
+        List<Gauge> gauges = getGauges();
+        Map<Double, Double> result = new TreeMap<Double, Double>(KM_CMP);
+
+        for (Gauge gauge: gauges) {
+            BigDecimal km    = gauge.getStation();
+            BigDecimal datum = gauge.getDatum();
+            if (km != null && datum != null) {
+                result.put(km.doubleValue(), datum.doubleValue());
+            }
+        }
+
+        return result;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,102 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.LineString;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+/**
+ * There is a modeling problem with the RiverAxis. The initial idea was that a
+ * river has a river axis consisting of exactly one geometry. It has since
+ * turned out that a single geometry is not enough for a river axis (arms of a
+ * river, inflows, ...). As a workaround, we currently expect a river to have
+ * just a single river axis.
+ */
+@Entity
+@Table(name = "river_axes")
+public class RiverAxis
+implements   Serializable
+{
+    private Integer    id;
+    private Integer    kind;
+    private River      river;
+    private LineString geom;
+
+    public RiverAxis() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public LineString getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(LineString geom) {
+        this.geom = geom;
+    }
+
+
+    public static List<RiverAxis> getRiverAxis(String river) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from RiverAxis where river.name =:river");
+        query.setParameter("river", river);
+
+        List<RiverAxis> list = query.list();
+
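+        // Return null instead of an empty list if no axis is stored for this river.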
+        return list.isEmpty() ? null : list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxisKm.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,104 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import com.vividsolutions.jts.geom.Point;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "river_axes_km")
+public class RiverAxisKm
+implements   Serializable
+{
+    private Integer    id;
+    private River      river;
+    private BigDecimal km;
+    private Point      geom;
+
+
+    public RiverAxisKm() {
+    }
+
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+
+    @Column(name = "km")
+    public BigDecimal getKm() {
+        return km;
+    }
+
+
+    public void setKm(BigDecimal km) {
+        this.km = km;
+    }
+
+
+    @Column(name = "geom")
+    @Type(type = "org.hibernatespatial.GeometryUserType")
+    public Point getGeom() {
+        return geom;
+    }
+
+
+    public void setGeom(Point geom) {
+        this.geom = geom;
+    }
+
+
+    /**
+     * Returns a list of RiverAxisKm objects for a given river.
+     *
+     * @param riverid The ID of a river in the database.
+     *
+     * @return a list of RiverAxisKm objects.
+     */
+    public static List<RiverAxisKm> getRiverAxisKms(int riverid) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+            "from RiverAxisKm where river.id =:riverid");
+        query.setParameter("riverid", riverid);
+
+        List<RiverAxisKm> list = query.list();
+
+        return list;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensity.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,116 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+
+@Entity
+@Table(name = "sediment_density")
+public class SedimentDensity implements Serializable {
+
+    private Integer id;
+
+    private River river;
+
+    private Depth depth;
+
+    private Unit unit;
+
+    private List<SedimentDensityValue> values;
+
+    private String description;
+
+
+    public SedimentDensity() {
+    }
+
+
+    public SedimentDensity(River river, Depth depth, Unit unit, String desc) {
+        this.river       = river;
+        this.depth       = depth;
+        this.unit        = unit;
+        this.description = desc;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_DENSITY_ID_SEQ",
+        sequenceName   = "SEDIMENT_DENSITY_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_DENSITY_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "depth_id")
+    public Depth getDepth() {
+        return depth;
+    }
+
+    public void setDepth(Depth depth) {
+        this.depth = depth;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name="sediment_density_id")
+    public List<SedimentDensityValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<SedimentDensityValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(SedimentDensityValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensityValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,101 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+
+@Entity
+@Table(name = "sediment_density_values")
+public class SedimentDensityValue implements Serializable {
+
+    private Integer id;
+
+    private SedimentDensity sedimentDensity;
+
+    private BigDecimal station;
+    private BigDecimal density;
+
+    private String description;
+
+
+    public SedimentDensityValue() {
+    }
+
+
+    public SedimentDensityValue(
+        SedimentDensity sedimentDensity,
+        BigDecimal      station,
+        BigDecimal      density,
+        String          desc
+    ) {
+        this.sedimentDensity = sedimentDensity;
+        this.station         = station;
+        this.density         = density;
+        this.description     = desc;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_DENSITY_VALUES_ID_SEQ",
+        sequenceName   = "SEDIMENT_DENSITY_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_DENSITY_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "sediment_density_id")
+    public SedimentDensity getSedimentDensity() {
+        return sedimentDensity;
+    }
+
+    public void setSedimentDensity(SedimentDensity sedimentDensity) {
+        this.sedimentDensity = sedimentDensity;
+    }
+
+    @Column(name = "station")
+    public BigDecimal getStation() {
+        return station;
+    }
+
+    public void setStation(BigDecimal station) {
+        this.station = station;
+    }
+
+    @Column(name = "density")
+    public BigDecimal getDensity() {
+        return density;
+    }
+
+    public void setDensity(BigDecimal density) {
+        this.density = density;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentYield.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,145 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "sediment_yield")
+public class SedimentYield
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(SedimentYield.class);
+
+    private Integer id;
+
+    private River river;
+
+    private GrainFraction grainFraction;
+
+    private Unit unit;
+
+    private TimeInterval timeInterval;
+
+    private String description;
+
+    private List<SedimentYieldValue> values;
+
+
+    public SedimentYield() {
+        this.values = new ArrayList<SedimentYieldValue>();
+    }
+
+    public SedimentYield(River river, Unit unit, TimeInterval timeInterval) {
+        this();
+
+        this.river        = river;
+        this.unit         = unit;
+        this.timeInterval = timeInterval;
+    }
+
+
+    public SedimentYield(
+        River         river,
+        Unit          unit,
+        TimeInterval  timeInterval,
+        GrainFraction grainFraction
+    ) {
+        this(river, unit, timeInterval);
+
+        this.grainFraction = grainFraction;
+    }
+
+
+    public SedimentYield(
+        River         river,
+        Unit          unit,
+        TimeInterval  timeInterval,
+        GrainFraction grainFraction,
+        String        description
+    ) {
+        this(river, unit, timeInterval, grainFraction);
+
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_YIELD_ID_SEQ",
+        sequenceName   = "SEDIMENT_YIELD_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_YIELD_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name="grain_fraction_id")
+    public GrainFraction getGrainFraction() {
+        return grainFraction;
+    }
+
+    public void setGrainFraction(GrainFraction grainFraction) {
+        this.grainFraction = grainFraction;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id")
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentYieldValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,93 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "sediment_yield_values")
+public class SedimentYieldValue
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(SedimentYieldValue.class);
+
+    private Integer id;
+
+    private SedimentYield sedimentYield;
+
+    private Double station;
+    private Double value;
+
+    private Unit unit;
+
+
+    public SedimentYieldValue() {
+    }
+
+    public SedimentYieldValue(
+        SedimentYield sedimentYield,
+        Double        station,
+        Double        value
+    ) {
+        this.sedimentYield = sedimentYield;
+        this.station       = station;
+        this.value         = value;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_SEDIMENT_YIELD_VALUES_ID_SEQ",
+        sequenceName   = "SEDIMENT_YIELD_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_SEDIMENT_YIELD_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "sediment_yield_id" )
+    public SedimentYield getSedimentYield() {
+        return sedimentYield;
+    }
+
+    public void setSedimentYield(SedimentYield sedimentYield) {
+        this.sedimentYield = sedimentYield;
+    }
+
+    @Column(name="station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "value")
+    public Double getValue() {
+        return value;
+    }
+
+    public void setValue(Double value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/TimeInterval.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,67 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+
+@Entity
+@Table(name = "time_intervals")
+public class TimeInterval
+implements   Serializable
+{
+    private Integer id;
+    private Date    startTime;
+    private Date    stopTime;
+
+    public TimeInterval() {
+    }
+
+    public TimeInterval(Date startTime, Date stopTime) {
+        this.startTime = startTime;
+        this.stopTime  = stopTime;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_TIME_INTERVALS_ID_SEQ",
+        sequenceName   = "TIME_INTERVALS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_TIME_INTERVALS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "start_time") // FIXME: type mapping needed?
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    @Column(name = "stop_time") // FIXME: type mapping needed?
+    public Date getStopTime() {
+        return stopTime;
+    }
+
+    public void setStopTime(Date stopTime) {
+        this.stopTime = stopTime;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Unit.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,60 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+@Entity
+@Table(name = "units")
+public class Unit
+implements   Serializable
+{
+    protected Integer id;
+    protected String  name;
+
+
+    public Unit() {
+    }
+
+
+    public Unit(String name) {
+        this.name = name;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_UNITS_ID_SEQ",
+        sequenceName   = "UNITS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_UNITS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Waterlevel.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,113 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel")
+public class Waterlevel
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(Waterlevel.class);
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private String  description;
+
+    private List<WaterlevelQRange> qRanges;
+
+
+    public Waterlevel() {
+    }
+
+    public Waterlevel(River river, Unit unit) {
+        this.river = river;
+        this.unit  = unit;
+    }
+
+    public Waterlevel(River river, Unit unit, String description) {
+        this(river, unit);
+        this.description = description;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_ID_SEQ",
+        sequenceName   = "WATERLEVEL_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name="waterlevel_id")
+    public List<WaterlevelQRange> getQRanges() {
+        return qRanges;
+    }
+
+    public void setQRanges(List<WaterlevelQRange> qRanges) {
+        this.qRanges = qRanges;
+    }
+
+    public void addQRange(WaterlevelQRange qRange) {
+        qRanges.add(qRange);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifference.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,119 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference")
+public class WaterlevelDifference
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelDifference.class);
+
+    private Integer id;
+
+    private River river;
+
+    private Unit unit;
+
+    private List<WaterlevelDifferenceColumn> columns;
+
+    private String description;
+
+
+    public WaterlevelDifference() {
+        columns = new ArrayList<WaterlevelDifferenceColumn>();
+    }
+
+
+    public WaterlevelDifference(River river, Unit unit) {
+        this();
+
+        this.river = river;
+        this.unit  = unit;
+    }
+
+
+    public WaterlevelDifference(River river, Unit unit, String description) {
+        this(river, unit);
+
+        this.description = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFFERENCE_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFFERENCE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "unit_id")
+    public Unit getUnit() {
+        return unit;
+    }
+
+    public void setUnit(Unit unit) {
+        this.unit = unit;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "difference_id")
+    public List<WaterlevelDifferenceColumn> getColumns() {
+        return columns;
+    }
+
+    public void setColumns(List<WaterlevelDifferenceColumn> columns) {
+        this.columns = columns;
+    }
+
+    public void addColumn(WaterlevelDifferenceColumn column) {
+        this.columns.add(column);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceColumn.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,104 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference_column")
+public class WaterlevelDifferenceColumn
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(WaterlevelDifferenceColumn.class);
+
+
+    private Integer id;
+
+    private WaterlevelDifference difference;
+
+    private List<WaterlevelDifferenceValue> values;
+
+    private String description;
+
+
+    public WaterlevelDifferenceColumn() {
+        values = new ArrayList<WaterlevelDifferenceValue>();
+    }
+
+    public WaterlevelDifferenceColumn(
+        WaterlevelDifference difference,
+        String               description
+    ) {
+        this();
+
+        this.difference = difference;
+        this.description = description;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFF_COLUMN_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFF_COLUMN_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "difference_id" )
+    public WaterlevelDifference getDifference() {
+        return difference;
+    }
+
+    public void setDifference(WaterlevelDifference difference) {
+        this.difference = difference;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "column_id")
+    public List<WaterlevelDifferenceValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<WaterlevelDifferenceValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(WaterlevelDifferenceValue value) {
+        this.values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelDifferenceValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,94 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+@Entity
+@Table(name = "waterlevel_difference_values")
+public class WaterlevelDifferenceValue
+implements   Serializable
+{
+    private static Logger logger =
+        Logger.getLogger(WaterlevelDifferenceValue.class);
+
+
+    private Integer id;
+
+    private WaterlevelDifferenceColumn column;
+
+    private Double station;
+    private Double value;
+
+
+    public WaterlevelDifferenceValue() {
+    }
+
+    public WaterlevelDifferenceValue(
+        WaterlevelDifferenceColumn column,
+        Double                     station,
+        Double                     value
+    ) {
+        this.column  = column;
+        this.station = station;
+        this.value   = value;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ",
+        sequenceName   = "WATERLEVEL_DIFF_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_DIFF_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "column_id" )
+    public WaterlevelDifferenceColumn getColumn() {
+        return column;
+    }
+
+    public void setColumn(WaterlevelDifferenceColumn column) {
+        this.column = column;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "value")
+    public Double getValue() {
+        return value;
+    }
+
+    public void setValue(Double value) {
+        this.value = value;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,100 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel_q_range")
+public class WaterlevelQRange
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelQRange.class);
+
+    private Integer id;
+
+    private Waterlevel waterlevel;
+
+    private Double q;
+
+    private List<WaterlevelValue> values;
+
+
+    public WaterlevelQRange() {
+        this.values = new ArrayList<WaterlevelValue>();
+    }
+
+    public WaterlevelQRange(Waterlevel waterlevel, Double q) {
+        this();
+        this.q          = q;
+        this.waterlevel = waterlevel;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ",
+        sequenceName   = "WATERLEVEL_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_Q_RANGE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "waterlevel_id" )
+    public Waterlevel getWaterlevel() {
+        return waterlevel;
+    }
+
+    public void setWaterlevel(Waterlevel waterlevel) {
+        this.waterlevel = waterlevel;
+    }
+
+    @Column(name = "q")
+    public Double getQ() {
+        return q;
+    }
+
+    public void setQ(Double q) {
+        this.q = q;
+    }
+
+    @OneToMany
+    @Column(name = "waterlevel_q_range_id")
+    public List<WaterlevelValue> getValues() {
+        return values;
+    }
+
+    public void setValues(List<WaterlevelValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(WaterlevelValue value) {
+        values.add(value);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WaterlevelValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,90 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+
+import org.apache.log4j.Logger;
+
+
+
+
+@Entity
+@Table(name = "waterlevel_values")
+public class WaterlevelValue
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(WaterlevelValue.class);
+
+    private Integer id;
+
+    private WaterlevelQRange qrange;
+
+    private Double station;
+    private Double w;
+
+
+    public WaterlevelValue() {
+    }
+
+    public WaterlevelValue(WaterlevelQRange qrange, Double station, Double w) {
+        this.qrange  = qrange;
+        this.station = station;
+        this.w       = w;
+    }
+
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ",
+        sequenceName   = "WATERLEVEL_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WATERLEVEL_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "waterlevel_q_range_id" )
+    public WaterlevelQRange getQrange() {
+        return qrange;
+    }
+
+    public void setQrange(WaterlevelQRange qrange) {
+        this.qrange = qrange;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return station;
+    }
+
+    public void setStation(Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "w")
+    public Double getW() {
+        return w;
+    }
+
+    public void setW(Double w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Wst.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,215 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SQLQuery;
+import org.hibernate.Query;
+import org.hibernate.type.StandardBasicTypes;
+
+import de.intevation.flys.backend.SessionHolder;
+
+
+@Entity
+@Table(name = "wsts")
+public class Wst
+implements   Serializable
+{
+    private static Logger logger = Logger.getLogger(Wst.class);
+
+    private Integer id;
+    private River   river;
+    private String  description;
+    private Integer kind;
+
+    private List<WstColumn> columns;
+
+
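+    /**
+     * Native SQL used by determineMinMaxQFree(): selects the minimum and
+     * maximum Q over all wst_q_values rows whose km interval [a, b]
+     * contains the given :km.
+     */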
+    public static final String SQL_SELECT_MINMAX =
+        "select min(q) as minQ, max(q) as maxQ from wst_q_values " +
+        "where wst_id = :wst and not (a > :km or b < :km)";
+
+    public Wst() {
+    }
+
+    public Wst(River river, String description) {
+        this(river, description, 0);
+    }
+
+    public Wst(River river, String description, Integer kind) {
+        this.river       = river;
+        this.description = description;
+        this.kind        = kind;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WSTS_ID_SEQ",
+        sequenceName   = "WSTS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WSTS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id" )
+    public River getRiver() {
+        return river;
+    }
+
+    public void setRiver(River river) {
+        this.river = river;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_id")
+    public List<WstColumn> getColumns() {
+        return columns;
+    }
+
+    public void setColumns(List<WstColumn> columns) {
+        this.columns = columns;
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST. The min value is placed
+     * in the first field of the resulting array - the max value is placed in
+     * the second field.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ() {
+        double[] ab = river.determineMinMaxDistance();
+        return determineMinMaxQ(new Range(ab[0], ab[1], river));
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST in the given range. The
+     * min value is placed in the first field of the resulting array - the max
+     * value is placed in the second field.
+     *
+     * @param range The range used for querying the Q values.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ(Range range) {
+        if (range != null) {
+            return determineMinMaxQ(
+                range.getA().doubleValue(),
+                range.getB().doubleValue());
+        }
+
+        return null;
+    }
+
+
+    /**
+     * Determines the min and max Q values of this WST in the given range. The
+     * min value is placed in the first field of the resulting array - the max
+     * value is placed in the second field.
+     *
+     * @param fromKm the lower km value.
+     * @param toKm the upper km value.
+     *
+     * @return the min and max Q values of this WST.
+     */
+    public double[] determineMinMaxQ(double fromKm, double toKm) {
+        Session session = SessionHolder.HOLDER.get();
+
+        Query query = session.createQuery(
+          "select min(q), max(q) from WstQRange where " +
+          " id in " +
+          "  (select wstQRange.id from WstColumnQRange where " +
+          "    wstColumn.id in (select id from WstColumn where wst.id = :wst)) " +
+          " and range.id in " +
+          "  (select id from Range where not (a > :end or b < :start))");
+
+        query.setParameter("wst",   getId());
+        query.setParameter("start", new BigDecimal(fromKm));
+        query.setParameter("end",   new BigDecimal(toKm));
+
+        List<Object []> results = query.list();
+
+        if (results.isEmpty()) {
+            return null;
+        }
+
+        Object [] result = results.get(0);
+
+        return new double [] {
+            ((BigDecimal)result[0]).doubleValue(),
+            ((BigDecimal)result[1]).doubleValue() };
+    }
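+
+    // Minimal usage sketch (hypothetical caller; the wst variable and the
+    // km bounds are placeholders):
+    //
+    //     double[] minMax = wst.determineMinMaxQ(42.0, 47.5);
+    //     if (minMax != null) {          // null for empty ranges
+    //         double minQ = minMax[0];
+    //         double maxQ = minMax[1];
+    //     }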
+
+
+    public double[] determineMinMaxQFree(double km) {
+        Session session = SessionHolder.HOLDER.get();
+
+        SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_MINMAX)
+            .addScalar("minQ", StandardBasicTypes.DOUBLE)
+            .addScalar("maxQ", StandardBasicTypes.DOUBLE);
+
+        sqlQuery.setInteger("wst", getId());
+        sqlQuery.setDouble("km", km);
+
+        List<Object[]> minmaxQ = sqlQuery.list();
+
+
+        if (minmaxQ.isEmpty()) {
+            return null;
+        }
+
+        Object[] mm = minmaxQ.get(0);
+
+        if (mm[0] == null || mm[1] == null) {
+            logger.warn ("No min/max Q for km " + km + " found.");
+            return null;
+        }
+
+        return new double[] { (Double) mm[0], (Double) mm[1] };
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumn.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,134 @@
+package de.intevation.flys.model;
+
+import java.util.List;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.OneToMany;
+
+@Entity
+@Table(name = "wst_columns")
+public class WstColumn
+implements   Serializable
+{
+    private Integer               id;
+    private Wst                   wst;
+    private String                name;
+    private String                description;
+    private Integer               position;
+    private TimeInterval          timeInterval;
+
+    private List<WstColumnQRange> columnQRanges;
+    private List<WstColumnValue>  columnValues;
+
+    public WstColumn() {
+    }
+
+    public WstColumn(
+        Wst          wst,
+        String       name,
+        String       description,
+        Integer      position,
+        TimeInterval timeInterval
+    ) {
+        this.wst          = wst;
+        this.name         = name;
+        this.description  = description;
+        this.position     = position;
+        this.timeInterval = timeInterval;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_COLUMNS_ID_SEQ",
+        sequenceName   = "WST_COLUMNS_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_COLUMNS_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_id" )
+    public Wst getWst() {
+        return wst;
+    }
+
+    public void setWst(Wst wst) {
+        this.wst = wst;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Column(name = "description")
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    @Column(name = "position")
+    public Integer getPosition() {
+        return position;
+    }
+
+    public void setPosition(Integer position) {
+        this.position = position;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "time_interval_id" )
+    public TimeInterval getTimeInterval() {
+        return timeInterval;
+    }
+
+    public void setTimeInterval(TimeInterval timeInterval) {
+        this.timeInterval = timeInterval;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_column_id")
+    public List<WstColumnQRange> getColumnQRanges() {
+        return columnQRanges;
+    }
+
+    public void setColumnQRanges(List<WstColumnQRange> columnQRanges) {
+        this.columnQRanges = columnQRanges;
+    }
+
+    @OneToMany
+    @JoinColumn(name="wst_column_id")
+    public List<WstColumnValue> getColumnValues() {
+        return columnValues;
+    }
+
+    public void setColumnValues(List<WstColumnValue> columnValues) {
+        this.columnValues = columnValues;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumnQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,73 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "wst_column_q_ranges")
+public class WstColumnQRange
+implements   Serializable
+{
+    private Integer   id;
+    private WstColumn wstColumn;
+    private WstQRange wstQRange;
+
+    public WstColumnQRange() {
+    }
+
+    public WstColumnQRange(
+        WstColumn wstColumn,
+        WstQRange wstQRange
+    ) {
+        this.wstColumn = wstColumn;
+        this.wstQRange = wstQRange;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_Q_RANGES_ID_SEQ",
+        sequenceName   = "WST_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_Q_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_column_id" )
+    public WstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(WstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_q_range_id" )
+    public WstQRange getWstQRange() {
+        return wstQRange;
+    }
+
+    public void setWstQRange(WstQRange wstQRange) {
+        this.wstQRange = wstQRange;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstColumnValue.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,85 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "wst_column_values")
+public class WstColumnValue
+implements   Serializable
+{
+    private Integer    id;
+    private WstColumn  wstColumn;
+    private BigDecimal position;
+    private BigDecimal w;
+
+    public WstColumnValue() {
+    }
+
+    public WstColumnValue(
+        WstColumn  wstColumn,
+        BigDecimal position,
+        BigDecimal w
+    ) {
+        this.wstColumn = wstColumn;
+        this.position  = position;
+        this.w         = w;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_COLUMN_VALUES_ID_SEQ",
+        sequenceName   = "WST_COLUMN_VALUES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_COLUMN_VALUES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_column_id")
+    public WstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(WstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    @Column(name = "position") // FIXME: type mapping needed?
+    public BigDecimal getPosition() {
+        return position;
+    }
+
+    public void setPosition(BigDecimal position) {
+        this.position = position;
+    }
+
+    @Column(name = "w") // FIXME: type mapping needed?
+    public BigDecimal getW() {
+        return w;
+    }
+
+    public void setW(BigDecimal w) {
+        this.w = w;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/WstQRange.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,70 @@
+package de.intevation.flys.model;
+
+import java.math.BigDecimal;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.GenerationType;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+
+@Entity
+@Table(name = "wst_q_ranges")
+public class WstQRange
+implements   Serializable
+{
+    private Integer    id;
+    private Range      range;
+    private BigDecimal q;
+
+    public WstQRange() {
+    }
+
+    public WstQRange(Range range, BigDecimal q) {
+        this.range = range;
+        this.q     = q;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name           = "SEQUENCE_WST_Q_RANGES_ID_SEQ",
+        sequenceName   = "WST_Q_RANGES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy  = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_WST_Q_RANGES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "range_id" )
+    public Range getRange() {
+        return range;
+    }
+
+    public void setRange(Range range) {
+        this.range = range;
+    }
+
+    @Column(name = "q") // FIXME: type mapping needed?!
+    public BigDecimal getQ() {
+        return q;
+    }
+
+    public void setQ(BigDecimal q) {
+        this.q = q;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/DBCPConnectionProvider.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2004 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.hibernate.connection;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Map;
+
+import org.apache.commons.dbcp.BasicDataSource;
+import org.apache.commons.dbcp.BasicDataSourceFactory;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.HibernateException;
+
+import org.hibernate.connection.ConnectionProviderFactory;
+import org.hibernate.connection.ConnectionProvider;
+
+import org.hibernate.cfg.Environment;
+
+/**
+ * <p>A connection provider that uses an Apache commons DBCP connection pool.</p>
+ *
+ * <p>To use this connection provider set:<br>
+ * <code>hibernate.connection.provider_class&nbsp;org.hibernate.connection.DBCPConnectionProvider</code></p>
+ *
+ * <pre>Supported Hibernate properties:
+ *   hibernate.connection.driver_class
+ *   hibernate.connection.url
+ *   hibernate.connection.username
+ *   hibernate.connection.password
+ *   hibernate.connection.isolation
+ *   hibernate.connection.autocommit
+ *   hibernate.connection.pool_size
+ *   hibernate.connection (JDBC driver properties)</pre>
+ * <br>
+ * All DBCP properties are also supported by using the hibernate.dbcp prefix.
+ * A complete list can be found on the DBCP configuration page:
+ * <a href="http://jakarta.apache.org/commons/dbcp/configuration.html">http://jakarta.apache.org/commons/dbcp/configuration.html</a>.
+ * <br>
+ * <pre>Example:
+ *   hibernate.connection.provider_class org.hibernate.connection.DBCPConnectionProvider
+ *   hibernate.connection.driver_class org.hsqldb.jdbcDriver
+ *   hibernate.connection.username sa
+ *   hibernate.connection.password
+ *   hibernate.connection.url jdbc:hsqldb:test
+ *   hibernate.connection.pool_size 20
+ *   hibernate.dbcp.initialSize 10
+ *   hibernate.dbcp.maxWait 3000
+ *   hibernate.dbcp.validationQuery select 1 from dual</pre>
+ *
+ * <p>More information about configuring/using DBCP can be found on the
+ * <a href="http://jakarta.apache.org/commons/dbcp/">DBCP website</a>.
+ * There you will also find the DBCP wiki, mailing lists, issue tracking
+ * and other support facilities</p>
+ *
+ * @see org.hibernate.connection.ConnectionProvider
+ * @author Dirk Verbeeck
+ */
+public class DBCPConnectionProvider
+implements   ConnectionProvider
+{
+    private static Logger log = Logger.getLogger(DBCPConnectionProvider.class);
+
+    private static final String PREFIX = "hibernate.dbcp.";
+
+    private BasicDataSource ds;
+
+    // Old Environment property for backward-compatibility
+    // (property removed in Hibernate3)
+    private static final String DBCP_PS_MAXACTIVE =
+        "hibernate.dbcp.ps.maxActive";
+
+    // Property doesn't exist in Hibernate2
+    private static final String AUTOCOMMIT =
+        "hibernate.connection.autocommit";
+
+    public void configure(Properties props) throws HibernateException {
+        try {
+            log.debug("Configure DBCPConnectionProvider");
+
+            // DBCP properties used to create the BasicDataSource
+            Properties dbcpProperties = new Properties();
+
+            // DriverClass & url
+            String jdbcDriverClass = props.getProperty(Environment.DRIVER);
+            String jdbcUrl = props.getProperty(Environment.URL);
+            dbcpProperties.put("driverClassName", jdbcDriverClass);
+            dbcpProperties.put("url", jdbcUrl);
+
+            // Username / password
+            String username = props.getProperty(Environment.USER);
+            String password = props.getProperty(Environment.PASS);
+            dbcpProperties.put("username", username);
+            dbcpProperties.put("password", password);
+
+            // Isolation level
+            String isolationLevel = props.getProperty(Environment.ISOLATION);
+            if (isolationLevel != null
+            && (isolationLevel = isolationLevel.trim()).length() > 0) {
+                dbcpProperties.put("defaultTransactionIsolation", isolationLevel);
+            }
+
+            // Turn off autocommit (unless autocommit property is set)
+            String autocommit = props.getProperty(AUTOCOMMIT);
+            if (autocommit != null
+            && (autocommit = autocommit.trim()).length() > 0) {
+                dbcpProperties.put("defaultAutoCommit", autocommit);
+            } else {
+                dbcpProperties.put("defaultAutoCommit", String.valueOf(Boolean.FALSE));
+            }
+
+            // Pool size
+            String poolSize = props.getProperty(Environment.POOL_SIZE);
+            if (poolSize != null
+            && (poolSize = poolSize.trim()).length() > 0
+            && Integer.parseInt(poolSize) > 0)  {
+                dbcpProperties.put("maxActive", poolSize);
+            }
+
+            // Copy all "driver" properties into "connectionProperties"
+            Properties driverProps =
+                ConnectionProviderFactory.getConnectionProperties(props);
+
+            if (driverProps.size() > 0) {
+                StringBuilder connectionProperties = new StringBuilder();
+                for (Iterator iter = driverProps.entrySet().iterator();
+                    iter.hasNext();
+                ) {
+                    Map.Entry entry = (Map.Entry)iter.next();
+                    String    key   = (String)entry.getKey();
+                    String    value = (String)entry.getValue();
+                    connectionProperties
+                        .append(key)
+                        .append('=')
+                        .append(value);
+                    if (iter.hasNext()) {
+                        connectionProperties.append(';');
+                    }
+                }
+                dbcpProperties.put(
+                    "connectionProperties", connectionProperties.toString());
+            }
+
+            // Copy all DBCP properties removing the prefix
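+            // e.g. a "hibernate.dbcp.maxActive" entry is passed on to the
+            // BasicDataSource as "maxActive" (illustrative example).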
+            for (Iterator iter = props.entrySet().iterator() ; iter.hasNext() ;) {
+                Map.Entry entry = (Map.Entry)iter.next();
+                String    key   = (String)entry.getKey();
+                if (key.startsWith(PREFIX)) {
+                    String property = key.substring(PREFIX.length());
+                    String value    = (String)entry.getValue();
+                    dbcpProperties.put(property, value);
+                }
+            }
+
+            // Backward-compatibility
+            if (props.getProperty(DBCP_PS_MAXACTIVE) != null) {
+                dbcpProperties.put(
+                    "poolPreparedStatements",
+                    String.valueOf(Boolean.TRUE));
+                dbcpProperties.put(
+                    "maxOpenPreparedStatements",
+                    props.getProperty(DBCP_PS_MAXACTIVE));
+            }
+
+            // Some debug info
+            /* // commented out, because it leaks the password
+            if (log.isDebugEnabled()) {
+                log.debug("Creating a DBCP BasicDataSource" +
+                          " with the following DBCP factory properties:");
+                StringWriter sw = new StringWriter();
+                dbcpProperties.list(new PrintWriter(sw, true));
+                log.debug(sw.toString());
+            }
+            */
+
+            // Let the factory create the pool
+            ds = (BasicDataSource)BasicDataSourceFactory
+                .createDataSource(dbcpProperties);
+
+            // The BasicDataSource has lazy initialization
+            // borrowing a connection will start the DataSource
+            // and make sure it is configured correctly.
+
+            // Connection conn = ds.getConnection();
+            // conn.close();
+        }
+        catch (Exception e) {
+            String message = "Could not create a DBCP pool";
+            log.fatal(message, e);
+            if (ds != null) {
+                BasicDataSource x = ds; ds = null;
+                try {
+                    x.close();
+                }
+                catch (SQLException sqle) {
+                }
+            }
+            throw new HibernateException(message, e);
+        }
+        log.debug("Configure DBCPConnectionProvider complete");
+    }
+
+    public Connection getConnection() throws SQLException {
+        return ds.getConnection();
+    }
+
+    public void closeConnection(Connection conn) throws SQLException {
+        conn.close();
+    }
+
+    public void close() throws HibernateException {
+        try {
+            if (ds != null) {
+                BasicDataSource x = ds; ds = null;
+                x.close();
+            }
+        }
+        catch (SQLException sqle) {
+            throw new HibernateException("Could not close DBCP pool", sqle);
+        }
+    }
+
+    public boolean supportsAggressiveRelease() {
+        return false;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/DateGuesser.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,226 @@
+package de.intevation.flys.utils;
+
+/**
+ * Copyright (c) 2006 by Intevation GmbH
+ *
+ * @author Sascha L. Teichmann (teichmann@intevation.de)
+ *
+ * This program is free software under the LGPL (&gt;=v2.1)
+ * Read the file LGPL coming with FLYS for details.
+ */
+
+import java.util.Date;
+import java.util.Calendar;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+public final class DateGuesser {
+    public static final String [] MONTH = {
+        "jan", "feb", "mrz", "apr", "mai", "jun",
+        "jul", "aug", "sep", "okt", "nov", "dez"
+    };
+
+    public static final int guessMonth(String s) {
+        s = s.toLowerCase();
+        for (int i = 0; i < MONTH.length; ++i)
+            if (MONTH[i].equals(s)) {
+                return i;
+            }
+        return -1;
+    }
+
+    public static final Pattern YYYY_MM_DD =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYY =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})$");
+
+    public static final Pattern MMM_YYYY =
+        Pattern.compile("^(\\d{0,2})\\.?(\\w{3})\\.?(\\d{2,4})$");
+
+    public static final Pattern GARBAGE_YYYY =
+        Pattern.compile("^\\D*(\\d{2,4})$");
+
+    public static final Pattern YYYY_MM_DDThh_mm =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2})$");
+
+    public static final Pattern YYYY_MM_DDThh_mm_ss =
+        Pattern.compile("^(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYYThh_mm =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})T(\\d{1,2}):(\\d{2})$");
+
+    public static final Pattern DD_MM_YYYYThh_mm_ss =
+        Pattern.compile("^(\\d{1,2})\\.(\\d{1,2})\\.(\\d{2,4})T(\\d{1,2}):(\\d{2}):(\\d{2})$");
+
+    private DateGuesser() {
+    }
+
+    public static final int calendarMonth(String month) {
+        return calendarMonth(Integer.parseInt(month));
+    }
+
+    public static final int calendarMonth(int month) {
+        return Math.max(Math.min(month-1, 11), 0);
+    }
+
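+    /**
+     * Guesses a date by trying the patterns above in order. Illustrative
+     * examples: "2012-09-28" and "28.09.2012" both resolve to 28 Sep 2012,
+     * 12:00; a bare year like "2012" falls back to 15 Jun 2012, 12:00.
+     *
+     * @throws IllegalArgumentException if the input is empty or matches
+     *         none of the patterns.
+     */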
+    public static Date guessDate(String s) {
+        if (s == null || (s = s.trim()).length() == 0) {
+            throw new IllegalArgumentException();
+        }
+
+        Matcher m;
+
+        m = YYYY_MM_DD.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year  = m.group(1);
+            String month = m.group(2);
+            String day   = m.group(3);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYY.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year  = m.group(3);
+            String month = m.group(2);
+            String day   = m.group(1);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        m = MMM_YYYY.matcher(s);
+
+        if (m.matches()) {
+            int month = guessMonth(m.group(2));
+            if (month >= 0) {
+                Calendar cal = Calendar.getInstance();
+                String year = m.group(3);
+                String day  = m.group(1);
+                cal.set(
+                    Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                    month,
+                    day.length() == 0 ? 15 : Integer.parseInt(day),
+                    12, 0, 0);
+                return cal.getTime();
+            }
+        }
+
+        m = YYYY_MM_DDThh_mm.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            String month = m.group(2);
+            String day = m.group(3);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                0
+            );
+            return cal.getTime();
+        }
+
+        m = YYYY_MM_DDThh_mm_ss.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            String month = m.group(2);
+            String day = m.group(3);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            String second = m.group(6);
+            cal.set(
+                Integer.parseInt(year),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                Integer.parseInt(second)
+            );
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYYThh_mm.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(3);
+            String month = m.group(2);
+            String day = m.group(1);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                0
+            );
+            return cal.getTime();
+        }
+
+        m = DD_MM_YYYYThh_mm_ss.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(3);
+            String month = m.group(2);
+            String day = m.group(1);
+            String hour = m.group(4);
+            String minute = m.group(5);
+            String second = m.group(6);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                calendarMonth(month),
+                Integer.parseInt(day),
+                Integer.parseInt(hour),
+                Integer.parseInt(minute),
+                Integer.parseInt(second)
+            );
+            return cal.getTime();
+        }
+
+        m = GARBAGE_YYYY.matcher(s);
+
+        if (m.matches()) {
+            Calendar cal = Calendar.getInstance();
+            String year = m.group(1);
+            cal.set(
+                Integer.parseInt(year) + (year.length() == 2 ? 1900 : 0),
+                5,  // month
+                15, // day
+                12, 0, 0);
+            return cal.getTime();
+        }
+
+        throw new IllegalArgumentException();
+    }
+
+    public static void main(String [] args) {
+        for (int i = 0; i < args.length; ++i) {
+            System.out.println(args[i] + ": " + guessDate(args[i]));
+        }
+    }
+}
+// end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/utils/StringUtil.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,823 @@
+package de.intevation.flys.utils;
+
+/**
+ * Copyright (c) 2006 by Intevation GmbH
+ *
+ * @author Sascha L. Teichmann (teichmann@intevation.de)
+ * @author Ludwig Reiter       (ludwig@intevation.de)
+ *
+ * This program is free software under the LGPL (&gt;=v2.1)
+ * Read the file LGPL coming with FLYS for details.
+ */
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Locale;
+
+import java.net.URLEncoder;
+import java.net.URLDecoder;
+
+import java.io.UnsupportedEncodingException;
+import java.io.IOException;
+import java.io.BufferedReader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.PrintWriter;
+
+
+public final class StringUtil {
+    final static String NUMBER_SEPERATOR = ";";
+    final static String LINE_SEPERATOR = ":";
+
+    private StringUtil() {
+    }
+
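+    /**
+     * Serialises a double[][] using ';' between numbers and ':' between
+     * rows; stringToDouble2DArray() reverses the encoding. Illustrative
+     * example: {{1.0, 2.0}, {3.0}} becomes "1.0;2.0:3.0".
+     */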
+    public static final String double2DArrayToString(double[][] values) {
+
+        if (values == null) {
+            throw new IllegalArgumentException("keine double[][]-Werte");
+        }
+
+        StringBuilder strbuf = new StringBuilder();
+
+        for (int i=0; i < values.length; i++) {
+            if (i>0) {
+                strbuf.append(LINE_SEPERATOR);
+            }
+            for (int j=0; j < values[i].length; j++) {
+                if (j > 0) {
+                    strbuf.append(NUMBER_SEPERATOR);
+                }
+                strbuf.append(values[i][j]);
+            }
+        }
+
+        return strbuf.toString();
+    }
+
+    public static final double[][] stringToDouble2DArray(String str) {
+        if (str == null || str.length() == 0) {
+            return null;
+        }
+
+        String[] lineSplit = str.split(LINE_SEPERATOR);
+        double[][] array2D = new double[lineSplit.length][];
+        for (int i=0; i < lineSplit.length; i++) {
+            String[] numberSplit =  lineSplit[i].split(NUMBER_SEPERATOR);
+
+            double[] numbers = new double[numberSplit.length];
+            for (int j=0; j < numberSplit.length; j++) {
+                numbers[j] = Double.valueOf(numberSplit[j]).doubleValue();
+            }
+
+            array2D[i] = numbers;
+        }
+
+        return array2D;
+    }
+
+    /**
+     * Remove first occurrence of "[" and "]" (if both do occur).
+     * @param value String to be stripped of [] (might be null).
+     * @return input string but with [ and ] removed, or input string if no
+     *         brackets were found.
+     */
+    public static final String unbracket(String value) {
+        // null guard
+        if (value == null) return value;
+
+        int start = value.indexOf("[");
+        int end   = value.indexOf("]");
+
+        if (start < 0 || end < 0) {
+            return value;
+        }
+
+        value = value.substring(start + 1, end);
+
+        return value;
+    }
+
+
+    /**
+     * From "Q=1" make "W(Q=1)".
+     * @return original string wrapped in "W()" if it contains a "Q", original
+     *         string otherwise.
+     */
+    public static String wWrap(String wOrQ) {
+        return (wOrQ != null && wOrQ.indexOf("Q") >=0)
+               ? "W(" + wOrQ + ")"
+               : wOrQ;
+    }
+
+
+    public static final String [] splitLines(String s) {
+        if (s == null) {
+            return null;
+        }
+        ArrayList<String> list = new ArrayList<String>();
+
+        BufferedReader in = null;
+
+        try {
+            in =
+                new BufferedReader(
+                new StringReader(s));
+
+            String line;
+
+            while ((line = in.readLine()) != null) {
+                list.add(line);
+            }
+        }
+        catch (IOException ioe) {
+            return null;
+        }
+        finally {
+            if (in != null)
+                try {
+                    in.close();
+                }
+                catch (IOException ioe) {}
+        }
+
+        return list.toArray(new String[list.size()]);
+    }
+
+    public static final String concat(String [] s) {
+        return concat(s, null);
+    }
+
+    public static final String concat(String [] s, String glue) {
+        if (s == null) {
+            return null;
+        }
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < s.length; ++i) {
+            if (i > 0 && glue != null) {
+                sb.append(glue);
+            }
+            sb.append(s[i]);
+        }
+        return sb.toString();
+    }
+
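+    /**
+     * Splits every entry of src into chunks of at most N characters.
+     * Illustrative example: splitAfter(new String[] {"abcdef"}, 2) yields
+     * {"ab", "cd", "ef"}.
+     */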
+    public static final String [] splitAfter(String [] src, int N) {
+        if (src == null) {
+            return null;
+        }
+
+        ArrayList<String> list = new ArrayList<String>(src.length);
+        for (int i = 0; i < src.length; ++i) {
+            String s = src[i];
+            int R;
+            if (s == null || (R = s.length()) == 0) {
+                list.add(s);
+            }
+            else {
+                while (R > N) {
+                    list.add(s.substring(0, N));
+                    s = s.substring(N);
+                    R = s.length();
+                }
+                list.add(s);
+            }
+        }
+        return list.toArray(new String[list.size()]);
+    }
+
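+    /**
+     * Splits a string on whitespace while keeping quoted sections together
+     * and honouring backslash escapes. Illustrative example:
+     * splitQuoted("a \"b c\" d") yields {"a", "b c", "d"}.
+     */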
+    public static final String [] splitQuoted(String s) {
+        return splitQuoted(s, '"');
+    }
+
+    public static final String[] fitArray(String [] src, String [] dst) {
+        if (src == null) {
+            return dst;
+        }
+        if (dst == null) {
+            return src;
+        }
+
+        if (src.length == dst.length) {
+            return src;
+        }
+
+        System.arraycopy(src, 0, dst, 0, Math.min(dst.length, src.length));
+
+        return dst;
+    }
+
+    public static final String [] splitQuoted(String s, char quoteChar) {
+        if (s == null) {
+            return null;
+        }
+        ArrayList<String> l = new ArrayList<String>();
+        int mode = 0, last_mode = 0;
+        StringBuilder sb = new StringBuilder();
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            switch (mode) {
+                case 0: // unquoted mode
+                    if (c == quoteChar) {
+                        mode = 1; // to quoted mode
+                        if (sb.length() > 0) {
+                            l.add(sb.toString());
+                            sb.setLength(0);
+                        }
+                    }
+                    else if (c == '\\') {
+                        last_mode = 0;
+                        mode = 2; // escape mode
+                    }
+                    else if (!Character.isWhitespace(c)) {
+                        sb.append(c);
+                    }
+                    else if (sb.length() > 0) {
+                        l.add(sb.toString());
+                        sb.setLength(0);
+                    }
+                    break;
+                case 1: // quote mode
+                    if (c == '\\') {
+                        last_mode = 1;
+                        mode = 2; // escape mode
+                    }
+                    else if (c == quoteChar) { // leave quote mode
+                        l.add(sb.toString());
+                        sb.setLength(0);
+                        mode = 0; // to unquoted mode
+                    }
+                    else {
+                        sb.append(c);
+                    }
+                    break;
+                case 2: // escape mode
+                    sb.append(c);
+                    mode = last_mode;
+                    break;
+            }
+        }
+        if (sb.length() > 0) {
+            l.add(sb.toString());
+        }
+        return l.toArray(new String[l.size()]);
+    }
+
+    public static final String [] splitUnique(String s) {
+        return splitUnique(s, "[\\s,]+");
+    }
+
+    public static final String [] splitUnique(String s, String sep) {
+        return s != null ? unique(s.split(sep)) : null;
+    }
+
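+    /**
+     * Removes duplicate entries; note that the passed array is sorted in
+     * place as a side effect.
+     */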
+    public static final String [] unique(String [] str) {
+        if (str == null || str.length == 1) {
+            return str;
+        }
+
+        Arrays.sort(str);
+
+        for (int i = 1; i < str.length; ++i)
+            if (str[i].equals(str[i-1])) {
+                ArrayList<String> list = new ArrayList<String>(str.length);
+
+                for (int j = 0; j < i; ++j) {
+                    list.add(str[j]);
+                }
+
+                String last = str[i];
+
+                for (++i; i < str.length; ++i)
+                    if (!last.equals(str[i])) {
+                        list.add(last = str[i]);
+                    }
+
+                return list.toArray(new String[list.size()]);
+            }
+
+        return str;
+    }
+
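+    /**
+     * Ensures the result contains an empty string at index 0: an existing
+     * empty entry is swapped to the front, otherwise "" is prepended.
+     */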
+    public static final String [] ensureEmptyExistence(String [] str) {
+        if (str == null) {
+            return null;
+        }
+
+        for (int i = 0; i < str.length; ++i)
+            if (str[i].length() == 0) {
+                if (i != 0) { // copy to front
+                    String t = str[0];
+                    str[0] = str[i];
+                    str[i] = t;
+                }
+                return str;
+            }
+
+        String [] n = new String[str.length+1];
+        n[0] = "";
+        System.arraycopy(str, 0, n, 1, str.length);
+        return n;
+    }
+
+    public static final String ensureWidthPadLeft(String s, int width, char pad) {
+        int N = s.length();
+        if (N >= width) {
+            return s;
+        }
+        StringBuilder sb = new StringBuilder(width);
+        for (; N < width; ++N) {
+            sb.append(pad);
+        }
+        sb.append(s);
+        return sb.toString();
+    }
+
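+    /**
+     * Splits s on whitespace; within a run of whitespace, every N
+     * consecutive whitespace characters additionally emit one occurrence
+     * of pad. Illustrative example:
+     * splitWhiteSpaceWithNAsPad("a    b", 2, "-") (four blanks between
+     * a and b) yields {"a", "-", "-", "b"}.
+     */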
+    public static final String [] splitWhiteSpaceWithNAsPad(
+        String s,
+        int    N,
+        String pad
+    ) {
+        if (s == null) {
+            return null;
+        }
+
+        boolean copyChars = true;
+        int     count     = 0; // number of WS
+
+        int S = s.length();
+
+        ArrayList<String> parts = new ArrayList<String>();
+
+        StringBuilder part = new StringBuilder(S);
+
+        for (int i = 0; i < S; ++i) {
+            char c = s.charAt(i);
+            if (copyChars) { // char mode
+                if (Character.isWhitespace(c)) {
+                    if (part.length() > 0) {
+                        parts.add(part.toString());
+                        part.setLength(0);
+                    }
+                    count     = 1;
+                    copyChars = false; // to WS mode
+                }
+                else {
+                    part.append(c);
+                }
+            }
+            else { // counting WS
+                if (Character.isWhitespace(c)) {
+                    ++count;
+                }
+                else {
+                    while (count >= N) {// enough to insert pad?
+                        parts.add(pad);
+                        count -= N;
+                    }
+                    part.append(c);
+                    count     = 0;
+                    copyChars = true; // back to char mode
+                }
+            }
+        } // for all chars
+
+        if (copyChars) {
+            if (part.length() > 0) {
+                parts.add(part.toString());
+            }
+        }
+        else {
+            while (count >= N) { // enough to insert pad?
+                parts.add(pad);
+                count -= N;
+            }
+        }
+
+        return parts.toArray(new String[parts.size()]);
+    }
+
+    public static final String encodeURL(String url) {
+        try {
+            return url != null
+                   ? URLEncoder.encode(url, "UTF-8")
+                   : "";
+        }
+        catch (UnsupportedEncodingException usee) {
+            throw new RuntimeException(usee.getLocalizedMessage());
+        }
+    }
+
+    public static final String decodeURL(String url) {
+        try {
+            return url != null
+                   ? URLDecoder.decode(url, "UTF-8")
+                   : "";
+        }
+        catch (UnsupportedEncodingException usee) {
+            throw new RuntimeException(usee.getLocalizedMessage());
+        }
+    }
+
+    public static final boolean isEmpty(String s) {
+        return s == null || s.length() == 0;
+    }
+
+    public static final String empty(String s) {
+        return s == null ? "" : s;
+    }
+
+
+    public static final String trim(String s) {
+        return s != null ? s.trim() : null;
+    }
+
+    public static final String uniqueWhitespaces(String s) {
+        if (s == null) {
+            return null;
+        }
+
+        boolean wasWS = false;
+        StringBuilder sb = new StringBuilder();
+
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            if (Character.isWhitespace(c)) {
+                if (!wasWS) {
+                    sb.append(c);
+                    wasWS = true;
+                }
+            }
+            else {
+                sb.append(c);
+                wasWS = false;
+            }
+        }
+
+        return sb.toString();
+    }
+
+    public static final String replaceNewlines(String s) {
+        return s == null
+               ? null
+               : s.replace('\r', ' ').replace('\n', ' ');
+    }
+
+    /*
+    public static final String quoteReplacement(String s) {
+
+        if (s == null || (s.indexOf('\\') == -1 && s.indexOf('$') == -1))
+            return s;
+
+        StringBuilder sb = new StringBuilder();
+
+        for (int N = s.length(), i = 0; i < N; ++i) {
+            char c = s.charAt(i);
+            if (c == '\\' || c == '$') sb.append('\\');
+            sb.append(c);
+        }
+
+        return sb.toString();
+    }
+    */
+
+    public static final String quoteReplacement(String s) {
+
+        if (s == null) {
+            return null;
+        }
+
+        for (int N = s.length(), i = 0; i < N; ++i) { // plain check loop
+            char c = s.charAt(i);
+            if (c == '$' || c == '\\') { // first special -> StringBuilder
+                StringBuilder sb = new StringBuilder(s.substring(0, i))
+                .append('\\')
+                .append(c);
+                for (++i; i < N; ++i) { // build StringBuilder with rest
+                    if ((c = s.charAt(i)) == '$' || c == '\\') {
+                        sb.append('\\');
+                    }
+                    sb.append(c);
+                }
+                return sb.toString();
+            }
+        }
+
+        return s;
+    }
+
+    public static final String repeat(String what, int times) {
+        return repeat(what, times, new StringBuilder()).toString();
+    }
+
+    public static final StringBuilder repeat(String what, int times, StringBuilder sb) {
+        while (times-- > 0) {
+            sb.append(what);
+        }
+        return sb;
+    }
+
+    /**
+     * Returns the file name in s without its file extension.
+     */
+    public static final String cutExtension(String s) {
+        if (s == null) {
+            return null;
+        }
+        int dot = s.lastIndexOf('.');
+        return dot >= 0
+               ? s.substring(0, dot)
+               : s;
+    }
+
+    public static final String extension(String s) {
+        if (s == null) {
+            return null;
+        }
+        int dot = s.lastIndexOf('.');
+        return dot >= 0
+               ? s.substring(dot+1)
+               : s;
+    }
+
+    public static final String [] splitExtension(String x) {
+        if (x == null) {
+            return null;
+        }
+        int i = x.lastIndexOf('.');
+        return i < 0
+               ? new String[] { x, null }
+               : new String[] { x.substring(0, Math.max(0, i)), x.substring(i+1).toLowerCase() };
+    }
+
+    public static String entityEncode(String s) {
+        if (s == null || s.length() == 0) {
+            return s;
+        }
+
+        StringBuilder sb = new StringBuilder();
+        for (int i=0, N =s.length(); i < N; i++) {
+            char c = s.charAt(i);
+            switch (c) {
+                case '<':
+                    sb.append("&lt;");
+                    break;
+                case '>':
+                    sb.append("&gt;");
+                    break;
+                case '&':
+                    sb.append("&amp;");
+                    break;
+                default:
+                    sb.append(c);
+            }
+        }
+        return sb.toString();
+    }
+
+    public static String entityDecode(String s) {
+        if (s == null || s.length() == 0) {
+            return s;
+        }
+
+        boolean amp = false;
+        StringBuilder sb = new StringBuilder();
+        StringBuilder ampbuf = new StringBuilder();
+        for (int i=0, N =s.length(); i < N; i++) {
+            char c = s.charAt(i);
+            if (amp) {
+                if (c == ';') {
+                    amp = false;
+                    String str = ampbuf.toString();
+                    ampbuf.setLength(0);
+                    if (str.equals("lt")) {
+                        sb.append('<');
+                    }
+                    else if (str.equals("gt")) {
+                        sb.append('>');
+                    }
+                    else if (str.equals("amp")) {
+                        sb.append('&');
+                    }
+                    else {
+                        sb.append('&').append(str).append(';');
+                    }
+                }
+                else {
+                    ampbuf.append(c);
+                }
+            }
+            else if (c=='&') {
+                amp = true;
+            }
+            else {
+                sb.append(c);
+            }
+
+        }
+        return sb.toString();
+    }
+
+    public static final String quote(String s) {
+        return quote(s, '"');
+    }
+
+    public static final String quote(String s, char quoteChar) {
+        if (s == null) {
+            return null;
+        }
+
+        int N = s.length();
+
+        if (N == 0)
+            return new StringBuilder(2)
+                   .append(quoteChar)
+                   .append(quoteChar)
+                   .toString();
+
+        StringBuilder sb = null;
+
+        int i = 0;
+
+        for (; i < N; ++i) {
+            char c = s.charAt(i);
+
+            if (Character.isWhitespace(c)) {
+                sb = new StringBuilder()
+                    .append(quoteChar)
+                    .append(s.substring(0, i+1));
+                break;
+            }
+            else if (c == quoteChar) {
+                sb = new StringBuilder()
+                    .append(quoteChar)
+                    .append(s.substring(0, i))
+                    .append('\\')
+                    .append(quoteChar);
+                break;
+            }
+        }
+
+        if (sb == null) {
+            return s;
+        }
+
+        for (++i; i < N; ++i) {
+            char c = s.charAt(i);
+            if (c == quoteChar || c == '\\') {
+                sb.append('\\');
+            }
+
+            sb.append(c);
+        }
+
+        return sb.append(quoteChar).toString();
+    }
+
+    /*
+    public static String sprintf(String format, Object... args) {
+        return sprintf(null, format, args);
+    }
+    */
+
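+    /**
+     * printf-style formatting into a String, e.g.
+     * sprintf(Locale.US, "%.2f", 1.5) yields "1.50"
+     * (the locale controls the decimal separator).
+     */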
+    public static String sprintf(Locale locale, String format, Object ... args) {
+        StringWriter sw = new StringWriter();
+        PrintWriter pw = new PrintWriter(sw);
+        pw.printf(locale, format, args);
+        pw.flush();
+        return sw.toString();
+    }
+
+
+    public static void testQuote() {
+        System.err.println("testing quote:");
+
+        String cases []  = {
+            "",          "''",
+            "test",      "test",
+            "test test", "'test test'",
+            "  test",    "'  test'",
+            "test   ",   "'test   '",
+            " test ",    "' test '",
+            "'test",     "'\\'test'",
+            "'",         "'\\''",
+            " ' ' ",     "' \\' \\' '",
+            "te'st",     "'te\\'st'"
+        };
+
+        int failed = 0;
+
+        for (int i = 0; i < cases.length; i += 2) {
+            String in  = cases[i];
+            String out = cases[i+1];
+
+            String res = quote(in, '\'');
+            if (!res.equals(out)) {
+                ++failed;
+                System.err.println(
+                    "quote failed on: >" + in +
+                    "< result: >" + res +
+                    "< expected: >" + out + "<");
+            }
+        }
+
+        int T = cases.length/2;
+
+        System.err.println("tests total: "  + T);
+        System.err.println("tests failed: " + failed);
+        System.err.println("tests passed: " + (T - failed));
+    }
+
+    public static void testQuoteReplacement() {
+        System.err.println("testing quoteReplacement:");
+
+        String cases []  = {
+            "",          "",
+            "test",      "test",
+            "$",         "\\$",
+            "\\",        "\\\\",
+            "\\$",       "\\\\\\$",
+            "test\\$",   "test\\\\\\$",
+            "\\test",    "\\\\test",
+            "test$",     "test\\$",
+            "test$test", "test\\$test",
+            "$test$",    "\\$test\\$"
+        };
+
+        int failed = 0;
+
+        for (int i = 0; i < cases.length; i += 2) {
+            String in  = cases[i];
+            String out = cases[i+1];
+
+            String res = quoteReplacement(in);
+            if (!res.equals(out)) {
+                ++failed;
+                System.err.println(
+                    "quoteReplacement failed on: '" + in +
+                    "' result: '" + res +
+                    "' expected: '" + out + "'");
+            }
+        }
+
+        int T = cases.length/2;
+
+        System.err.println("tests total: "  + T);
+        System.err.println("tests failed: " + failed);
+        System.err.println("tests passed: " + (T - failed));
+    }
+
+    public static void testStringArray2D() {
+        int total = 0;
+        int fail = 0;
+        int passed = 0;
+
+        System.err.println("testing StringArray2D:");
+
+        double[][] testarray = {
+            {1.0, 2.0, 3.0},
+            {1.1, 2.1, 3.1},
+            {100.2, 200.2}
+        };
+        String str = double2DArrayToString(testarray);
+
+        total += 1;
+        if (str.equals("1.0;2.0;3.0:1.1;2.1;3.1:100.2;200.2")) {
+            passed += 1;
+        }
+        else {
+            fail += 1;
+            System.err.println("The result string is not correct:");
+            System.err.println(str);
+        }
+
+        double[][] testarray2 = stringToDouble2DArray(str);
+        boolean failed = false;
+
+        total += 1;
+        for (int i = 0; i < testarray.length; i++) {
+            for (int j = 0; j < testarray[i].length; j++) {
+                if (testarray[i][j] != testarray2[i][j]) {
+                    System.err.println("Test fails at i=" + i + " j=" + j);
+                    System.err.println("old value=" + testarray[i][j]
+                        + " new value=" + testarray2[i][j]);
+                    failed = true;
+                }
+            }
+        }
+        if (failed) {
+            fail += 1;
+        }
+        else {
+            passed += 1;
+        }
+        System.err.println("tests total: "+ total);
+        System.err.println("tests failed: "+ fail);
+        System.err.println("tests passed: "+ passed);
+    }
+
+    public static void main(String [] args) {
+
+        testQuoteReplacement();
+        testQuote();
+        testStringArray2D();
+    }
+}
+// end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/test/java/de/intevation/flys/AppTest.java	Fri Sep 28 12:14:31 2012 +0200
@@ -0,0 +1,38 @@
+package de.intevation.flys;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test for simple App.
+ */
+public class AppTest
+    extends TestCase
+{
+    /**
+     * Create the test case
+     *
+     * @param testName name of the test case
+     */
+    public AppTest( String testName )
+    {
+        super( testName );
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite()
+    {
+        return new TestSuite( AppTest.class );
+    }
+
+    /**
+     * Rigorous Test :-)
+     */
+    public void testApp()
+    {
+        assertTrue( true );
+    }
+}
