changeset 8994:2bb38e25091b

Merge
author gernotbelger
date Wed, 11 Apr 2018 14:20:01 +0200 (2018-04-11)
parents 0adc6d04de95 (current diff) d046997281bc (diff)
children 8c3e5682cb60
files artifacts/src/main/resources/messages.properties artifacts/src/main/resources/messages_de.properties
diffstat 63 files changed, 2725 insertions(+), 1349 deletions(-)
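
For context, a minimal sketch (not taken from this repository) of how a template such as sinfo.flowdepth.jrxml below is compiled, filled and exported with the JasperReports API. The "meta:*" and "data:*" field names are the ones declared in the templates in this diff; the file paths, sample values, empty parameter map and the map-based data source are illustrative assumptions, and the application may load the checked-in, pre-compiled *.jasper binaries instead of compiling the .jrxml at runtime.

// Sketch only: fills a flow-depth template with one sample row and writes a PDF.
// Assumes the JasperReports library (plus Groovy, since the templates declare
// language="groovy") is on the classpath; paths and values below are made up.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.sf.jasperreports.engine.JasperCompileManager;
import net.sf.jasperreports.engine.JasperExportManager;
import net.sf.jasperreports.engine.JasperFillManager;
import net.sf.jasperreports.engine.JasperPrint;
import net.sf.jasperreports.engine.JasperReport;
import net.sf.jasperreports.engine.data.JRMapCollectionDataSource;

public class FlowDepthReportSketch {
    public static void main(String[] args) throws Exception {
        // Compile the .jrxml source; the checked-in sinfo.flowdepth.jasper is the
        // pre-compiled equivalent of this step.
        JasperReport report = JasperCompileManager
                .compileReport("artifacts/doc/conf/jasper/templates/sinfo.flowdepth.jrxml");

        // One map per detail row; every field in the template is declared as java.lang.String.
        Map<String, Object> row = new HashMap<>();
        row.put("meta:header", "Flow depths");      // hypothetical label text
        row.put("meta:river", "Example river");     // hypothetical river name
        row.put("meta:station_header", "km");
        row.put("data:0", "100.0");                 // station column
        row.put("data:1", "2.34");                  // flow depth [m]
        // ... remaining meta:* labels and data:2 .. data:10 columns analogously

        List<Map<String, ?>> rows = new ArrayList<>();
        rows.add(row);

        // Fill with an empty parameter map and the map-backed data source, then export.
        JasperPrint print = JasperFillManager.fillReport(
                report, new HashMap<String, Object>(), new JRMapCollectionDataSource(rows));
        JasperExportManager.exportReportToPdfFile(print, "sinfo.flowdepth.pdf");
    }
}
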
line diff
Binary file artifacts/doc/conf/jasper/sinfo.flowdepth.jasper has changed
Binary file artifacts/doc/conf/jasper/sinfo.flowdepthdevelopment.jasper has changed
Binary file artifacts/doc/conf/jasper/sinfo.flowdepthminmax.jasper has changed
Binary file artifacts/doc/conf/jasper/sinfo.tkh.jasper has changed
--- a/artifacts/doc/conf/jasper/templates/sinfo.flowdepth.jrxml	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/doc/conf/jasper/templates/sinfo.flowdepth.jrxml	Wed Apr 11 14:20:01 2018 +0200
@@ -1,11 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!-- Created with Jaspersoft Studio version 6.5.1.final using JasperReports Library version 4.5.0  -->
+<!-- 2018-04-06T11:41:23 -->
 <jasperReport xmlns="http://jasperreports.sourceforge.net/jasperreports" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://jasperreports.sourceforge.net/jasperreports http://jasperreports.sourceforge.net/xsd/jasperreport.xsd" name="flysreport" language="groovy" pageWidth="842" pageHeight="595" columnWidth="515" leftMargin="60" rightMargin="20" topMargin="20" bottomMargin="20">
 	<property name="ireport.zoom" value="1.0"/>
 	<property name="ireport.x" value="0"/>
 	<property name="ireport.y" value="0"/>
-
-    <style name="htmlStyle" markup="html" />
-
+	<style name="htmlStyle" markup="html"/>
 	<field name="meta:header" class="java.lang.String"/>
 	<field name="meta:calcMode" class="java.lang.String"/>
 	<field name="meta:version_label" class="java.lang.String"/>
@@ -18,7 +18,6 @@
 	<field name="meta:river" class="java.lang.String"/>
 	<field name="meta:range_label" class="java.lang.String"/>
 	<field name="meta:range" class="java.lang.String"/>
-
 	<field name="meta:station_header" class="java.lang.String"/>
 	<field name="meta:flowdepth_header" class="java.lang.String"/>
 	<field name="meta:flowdepth_tkh_header" class="java.lang.String"/>
@@ -31,7 +30,6 @@
 	<field name="meta:sounding_name_header" class="java.lang.String"/>
 	<field name="meta:location_header" class="java.lang.String"/>
 	<field name="meta:river_unit" class="java.lang.String"/>
-
 	<field name="data:0" class="java.lang.String"/>
 	<field name="data:1" class="java.lang.String"/>
 	<field name="data:2" class="java.lang.String"/>
@@ -43,22 +41,11 @@
 	<field name="data:8" class="java.lang.String"/>
 	<field name="data:9" class="java.lang.String"/>
 	<field name="data:10" class="java.lang.String"/>
-
-<!--	
-	<field name="data:6" class="java.lang.String"/>
-	<field name="data:7" class="java.lang.String"/>
-	<field name="data:8" class="java.lang.String"/>
-	<field name="data:9" class="java.lang.String"/>
-	<field name="data:10" class="java.lang.String"/>
-	<field name="data:11" class="java.lang.String"/>
--->
-
 	<background>
 		<band splitType="Stretch"/>
 	</background>
 	<title>
-		<band height="182" splitType="Stretch">
-     
+		<band height="170" splitType="Stretch">
 			<textField>
 				<reportElement x="0" y="1" width="515" height="30"/>
 				<textElement>
@@ -66,188 +53,193 @@
 				</textElement>
 				<textFieldExpression><![CDATA[$F{meta:header} + " " + $F{meta:river}]]></textFieldExpression>
 			</textField>
-
 			<textField>
 				<reportElement x="0" y="40" width="165" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:calcMode}]]></textFieldExpression>
 			</textField>
-			
 			<textField>
 				<reportElement x="0" y="70" width="123" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:version_label} + ":"]]></textFieldExpression>
 			</textField>
 			<textField>
 				<reportElement x="123" y="70" width="392" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:version}]]></textFieldExpression>
 			</textField>
-			
 			<textField>
 				<reportElement x="0" y="90" width="123" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:user_label} + ":"]]></textFieldExpression>
 			</textField>
 			<textField>
 				<reportElement x="123" y="90" width="392" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:user}]]></textFieldExpression>
 			</textField>
-			
 			<textField>
 				<reportElement x="0" y="110" width="123" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:date_label} + ":"]]></textFieldExpression>
 			</textField>
 			<textField>
 				<reportElement x="123" y="110" width="392" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:date}]]></textFieldExpression>
 			</textField>
-			
 			<textField>
 				<reportElement x="0" y="130" width="123" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:river_label} + ":"]]></textFieldExpression>
 			</textField>
 			<textField>
 				<reportElement x="123" y="130" width="392" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:river}]]></textFieldExpression>
 			</textField>
-			
 			<textField>
 				<reportElement x="0" y="150" width="123" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:range_label} + ":"]]></textFieldExpression>
 			</textField>
-
-
 			<textField>
 				<reportElement x="123" y="150" width="392" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[$F{meta:range}]]></textFieldExpression>
 			</textField>
 		</band>
 	</title>
 	<columnHeader>
-		<band height="30" splitType="Stretch">
+		<band height="25" splitType="Stretch">
 			<line>
-				<reportElement x="0" y="29" width="762" height="1"/>
+				<reportElement positionType="FixRelativeToBottom" x="0" y="24" width="762" height="1"/>
 			</line>
-			<textField>
-				<reportElement x="0" y="0" width="50" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="0" y="0" width="50" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:station_header}]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="50" y="0" width="50" height="30" style="htmlStyle"/>
-				<textElement />
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="50" y="0" width="55" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:flowdepth_header} + "<br/>[m]"]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="100" y="0" width="85" height="30" style="htmlStyle"/>
-				<textElement/>
-				<textFieldExpression><![CDATA[$F{meta:flowdepth_tkh_header} + "<br/>[m]"]]></textFieldExpression>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="105" y="0" width="54" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepth_tkh_header} + " [m]"]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="185" y="0" width="25" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="159" y="0" width="31" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:tkh_header} + "<br/>[cm]"]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="210" y="0" width="65" height="30" style="htmlStyle"/>
-				<textElement/>
-				<textFieldExpression><![CDATA[$F{meta:waterlevel_header} + "["  + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="190" y="0" width="60" height="24"/>
+				<box padding="5">
+					<pen lineStyle="Solid"/>
+				</box>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterlevel_header} + "<br/>["  + $F{meta:river_unit} + "]"]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="275" y="0" width="40" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="250" y="0" width="40" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:discharge_header} + "<br/>[m³/s]"]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="315" y="0" width="85" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="290" y="0" width="80" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:waterlevel_name_header}]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="400" y="0" width="100" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="370" y="0" width="115" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:gauge_header}]]></textFieldExpression>
 			</textField>
-			<textField>
-				<reportElement x="500" y="0" width="65" height="30" style="htmlStyle"/>
-				<textElement/>
-				<textFieldExpression><![CDATA[$F{meta:bedheight_header} + "[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="485" y="0" width="60" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:bedheight_header} +  "<br/>[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
 			</textField>
-            <textField>
-                <reportElement x="565" y="0" width="85" height="30" style="htmlStyle"/>
-                <textElement/>
-                <textFieldExpression><![CDATA[$F{meta:sounding_name_header}]]></textFieldExpression>
-            </textField>
-			<textField>
-				<reportElement x="650" y="0" width="110" height="30" style="htmlStyle"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="545" y="0" width="85" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:sounding_name_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="630" y="0" width="132" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
 				<textFieldExpression><![CDATA[$F{meta:location_header}]]></textFieldExpression>
 			</textField>
 		</band>
 	</columnHeader>
 	<detail>
-		<band height="14" splitType="Stretch">
-			<textField isBlankWhenNull="true">
-				<reportElement x="0" y="0" width="40" height="14"/>
-				<textElement textAlignment="Right"/>
+		<band height="20" splitType="Prevent">
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="0" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:0}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="50" y="0" width="40" height="14" />
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="50" y="0" width="55" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:1}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="100" y="0" width="75" height="14"/>
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="105" y="0" width="54" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:2}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="185" y="0" width="15" height="14"/>
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="159" y="0" width="31" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:3}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="210" y="0" width="55" height="14"/>
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="190" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:4}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="275" y="0" width="30" height="14"/>
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="250" y="0" width="40" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:5}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="315" y="0" width="85" height="14"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="290" y="0" width="80" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:6}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="400" y="0" width="100" height="14"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="370" y="0" width="115" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:7}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="500" y="0" width="55" height="14"/>
-				<textElement textAlignment="Right"/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="485" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:8}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="565" y="0" width="85" height="14"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="545" y="0" width="85" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:9}]]></textFieldExpression>
 			</textField>
-			<textField isBlankWhenNull="true">
-				<reportElement x="650" y="0" width="110" height="14" stretchType="RelativeToBandHeight"/>
-				<textElement/>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement positionType="Float" stretchType="RelativeToBandHeight" x="630" y="0" width="132" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Top"/>
 				<textFieldExpression><![CDATA[$F{data:10}]]></textFieldExpression>
 			</textField>
 		</band>
@@ -256,7 +248,6 @@
 		<band height="29" splitType="Stretch">
 			<textField evaluationTime="Report">
 				<reportElement x="705" y="9" width="57" height="20"/>
-				<textElement/>
 				<textFieldExpression><![CDATA[" / " + $V{PAGE_NUMBER}]]></textFieldExpression>
 			</textField>
 			<textField>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/doc/conf/jasper/templates/sinfo.flowdepthdevelopment.jrxml	Wed Apr 11 14:20:01 2018 +0200
@@ -0,0 +1,272 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Created with Jaspersoft Studio version 6.5.1.final using JasperReports Library version 4.5.0  -->
+<!-- 2018-04-05T10:22:02 -->
+<jasperReport xmlns="http://jasperreports.sourceforge.net/jasperreports" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://jasperreports.sourceforge.net/jasperreports http://jasperreports.sourceforge.net/xsd/jasperreport.xsd" name="flysreport" language="groovy" pageWidth="842" pageHeight="595" columnWidth="515" leftMargin="60" rightMargin="20" topMargin="20" bottomMargin="20">
+	<property name="ireport.zoom" value="1.0"/>
+	<property name="ireport.x" value="0"/>
+	<property name="ireport.y" value="0"/>
+	<property name="com.jaspersoft.studio.data.defaultdataadapter" value="One Empty Record"/>
+	<style name="htmlStyle" isDefault="true" mode="Transparent" markup="html"/>
+	<field name="meta:header" class="java.lang.String"/>
+	<field name="meta:calcMode" class="java.lang.String"/>
+	<field name="meta:version_label" class="java.lang.String"/>
+	<field name="meta:version" class="java.lang.String"/>
+	<field name="meta:user_label" class="java.lang.String"/>
+	<field name="meta:user" class="java.lang.String"/>
+	<field name="meta:date_label" class="java.lang.String"/>
+	<field name="meta:date" class="java.lang.String"/>
+	<field name="meta:river_label" class="java.lang.String"/>
+	<field name="meta:river" class="java.lang.String"/>
+	<field name="meta:range_label" class="java.lang.String"/>
+	<field name="meta:range" class="java.lang.String"/>
+	<field name="meta:station_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopmentperyear_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header2" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header_label" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header2" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_tkh_header" class="java.lang.String"/>
+	<field name="meta:tkh_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_header" class="java.lang.String"/>
+	<field name="meta:discharge_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_name_header" class="java.lang.String"/>
+	<field name="meta:gauge_header" class="java.lang.String"/>
+	<field name="meta:bedheight_header" class="java.lang.String"/>
+	<field name="meta:sounding_name_header" class="java.lang.String"/>
+	<field name="meta:location_header" class="java.lang.String"/>
+	<field name="meta:river_unit" class="java.lang.String"/>
+	<field name="data:0" class="java.lang.String"/>
+	<field name="data:1" class="java.lang.String"/>
+	<field name="data:2" class="java.lang.String"/>
+	<field name="data:3" class="java.lang.String"/>
+	<field name="data:4" class="java.lang.String"/>
+	<field name="data:5" class="java.lang.String"/>
+	<field name="data:6" class="java.lang.String"/>
+	<field name="data:7" class="java.lang.String"/>
+	<background>
+		<band splitType="Stretch"/>
+	</background>
+	<title>
+		<band height="222" splitType="Stretch">
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="1" width="515" height="30"/>
+				<textElement>
+					<font size="18"/>
+				</textElement>
+				<textFieldExpression><![CDATA[$F{meta:header} + " " + $F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="40" width="165" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:calcMode}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="70" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="70" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="90" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="90" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="110" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="110" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="130" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="130" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="150" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="150" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="170" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthdevelopment_header_label}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="170" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthdevelopment}]]></textFieldExpression>
+			</textField>
+		</band>
+	</title>
+	<columnHeader>
+		<band height="75" splitType="Stretch">
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="0" width="60" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:station_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="60" y="0" width="80" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthdevelopment_header} + "<br/>[cm]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="140" y="0" width="80" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthdevelopmentperyear_header} + "<br/>[cm/a]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="220" y="40" width="85" height="35"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterleveldifference_header2}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="305" y="0" width="85" height="40"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:bedheightdifference_header} + "<br/>[m³/s]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="305" y="40" width="85" height="35"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:bedheightdifference_header2}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="390" y="0" width="85" height="40"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthcurrent_header} + "<br/>[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="390" y="40" width="85" height="35"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthcurrent_header2}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="475" y="0" width="85" height="40"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthhistorical_header}+"<br/>[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="475" y="40" width="85" height="35"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthhistorical_header2}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="560" y="0" width="180" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:location_header}]]></textFieldExpression>
+			</textField>
+			<line>
+				<reportElement style="htmlStyle" x="0" y="74" width="762" height="1"/>
+			</line>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="220" y="0" width="85" height="40"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom">
+					<font pdfEncoding=""/>
+				</textElement>
+				<textFieldExpression><![CDATA[$F{meta:waterleveldifference_header}+"<br/>["+$F{meta:river_unit}+"]"]]></textFieldExpression>
+			</textField>
+		</band>
+	</columnHeader>
+	<detail>
+		<band height="20" splitType="Stretch">
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="475" y="0" width="85" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:6}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="560" y="0" width="180" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textFieldExpression><![CDATA[$F{data:7}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="390" y="0" width="85" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:5}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="0" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:0}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="60" y="0" width="80" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:1}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="140" y="0" width="80" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:2}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="220" y="0" width="85" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:3}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="305" y="0" width="85" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:4}]]></textFieldExpression>
+			</textField>
+		</band>
+	</detail>
+	<pageFooter>
+		<band height="30" splitType="Stretch">
+			<textField evaluationTime="Report">
+				<reportElement style="htmlStyle" x="705" y="9" width="57" height="20"/>
+				<textFieldExpression><![CDATA[" / " + $V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="650" y="9" width="55" height="20"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
+		</band>
+	</pageFooter>
+	<summary>
+		<band height="42" splitType="Stretch"/>
+	</summary>
+</jasperReport>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/doc/conf/jasper/templates/sinfo.flowdepthminmax.jrxml	Wed Apr 11 14:20:01 2018 +0200
@@ -0,0 +1,264 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Created with Jaspersoft Studio version 6.5.1.final using JasperReports Library version 4.5.0  -->
+<!-- 2018-04-05T16:21:40 -->
+<jasperReport xmlns="http://jasperreports.sourceforge.net/jasperreports" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://jasperreports.sourceforge.net/jasperreports http://jasperreports.sourceforge.net/xsd/jasperreport.xsd" name="flysreport" language="groovy" pageWidth="842" pageHeight="595" columnWidth="515" leftMargin="60" rightMargin="20" topMargin="20" bottomMargin="20">
+	<property name="ireport.zoom" value="1.0"/>
+	<property name="ireport.x" value="0"/>
+	<property name="ireport.y" value="0"/>
+	<property name="com.jaspersoft.studio.data.defaultdataadapter" value="One Empty Record"/>
+	<style name="htmlStyle" isDefault="true" mode="Transparent" markup="html"/>
+	<field name="meta:header" class="java.lang.String"/>
+	<field name="meta:calcMode" class="java.lang.String"/>
+	<field name="meta:version_label" class="java.lang.String"/>
+	<field name="meta:version" class="java.lang.String"/>
+	<field name="meta:user_label" class="java.lang.String"/>
+	<field name="meta:user" class="java.lang.String"/>
+	<field name="meta:date_label" class="java.lang.String"/>
+	<field name="meta:date" class="java.lang.String"/>
+	<field name="meta:river_label" class="java.lang.String"/>
+	<field name="meta:river" class="java.lang.String"/>
+	<field name="meta:range_label" class="java.lang.String"/>
+	<field name="meta:range" class="java.lang.String"/>
+	<field name="meta:station_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopmentperyear_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header2" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header_label" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header2" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment" class="java.lang.String"/>
+	<field name="meta:flowdepthmin_header" class="java.lang.String"/>
+	<field name="meta:flowdepthmax_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_tkh_header" class="java.lang.String"/>
+	<field name="meta:tkh_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_header" class="java.lang.String"/>
+	<field name="meta:discharge_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_name_header" class="java.lang.String"/>
+	<field name="meta:gauge_header" class="java.lang.String"/>
+	<field name="meta:bedheight_header" class="java.lang.String"/>
+	<field name="meta:sounding_name_header" class="java.lang.String"/>
+	<field name="meta:location_header" class="java.lang.String"/>
+	<field name="meta:river_unit" class="java.lang.String"/>
+	<field name="data:0" class="java.lang.String"/>
+	<field name="data:1" class="java.lang.String"/>
+	<field name="data:2" class="java.lang.String"/>
+	<field name="data:3" class="java.lang.String"/>
+	<field name="data:4" class="java.lang.String"/>
+	<field name="data:5" class="java.lang.String"/>
+	<field name="data:6" class="java.lang.String"/>
+	<field name="data:7" class="java.lang.String"/>
+	<field name="data:8" class="java.lang.String"/>
+	<field name="data:9" class="java.lang.String"/>
+	<background>
+		<band splitType="Stretch"/>
+	</background>
+	<title>
+		<band height="180" splitType="Stretch">
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="1" width="515" height="30"/>
+				<textElement>
+					<font size="18"/>
+				</textElement>
+				<textFieldExpression><![CDATA[$F{meta:header} + " " + $F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="40" width="165" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:calcMode}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="70" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="70" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="90" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="90" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="110" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="110" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="130" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="130" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="150" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="150" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range}]]></textFieldExpression>
+			</textField>
+		</band>
+	</title>
+	<columnHeader>
+		<band height="75" splitType="Stretch">
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="0" width="50" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:station_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="50" y="0" width="60" height="74"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthmin_header} + "<br/>[m]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="110" y="0" width="60" height="75"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:flowdepthmax_header} + "<br/>[m]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="170" y="0" width="70" height="75"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterlevel_header}+ "<br/>[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement key="" style="htmlStyle" x="240" y="0" width="50" height="75"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:discharge_header}+ "<br/>[m³/s]"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="290" y="0" width="80" height="75"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterlevel_name_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="370" y="0" width="90" height="75"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:gauge_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="641" y="0" width="120" height="74"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:location_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="541" y="0" width="100" height="74"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:sounding_name_header}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="460" y="0" width="81" height="74"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:bedheight_header}+ "<br/>[" + $F{meta:river_unit} + "]"]]></textFieldExpression>
+			</textField>
+			<line>
+				<reportElement style="htmlStyle" x="0" y="74" width="762" height="1"/>
+			</line>
+		</band>
+	</columnHeader>
+	<detail>
+		<band height="20" splitType="Stretch">
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="370" y="0" width="90" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:6}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="460" y="0" width="81" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textFieldExpression><![CDATA[$F{data:7}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="290" y="0" width="80" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:5}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="0" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:0}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="50" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:1}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="110" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:2}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="170" y="0" width="70" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:3}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" x="240" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:4}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="541" y="0" width="100" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textFieldExpression><![CDATA[$F{data:8}]]></textFieldExpression>
+			</textField>
+			<textField isBlankWhenNull="true">
+				<reportElement style="htmlStyle" x="641" y="0" width="120" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5"/>
+				<textFieldExpression><![CDATA[$F{data:9}]]></textFieldExpression>
+			</textField>
+		</band>
+	</detail>
+	<pageFooter>
+		<band height="30" splitType="Stretch">
+			<textField evaluationTime="Report">
+				<reportElement style="htmlStyle" x="705" y="9" width="57" height="20"/>
+				<textFieldExpression><![CDATA[" / " + $V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="650" y="9" width="55" height="20"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
+		</band>
+	</pageFooter>
+	<summary>
+		<band height="42" splitType="Stretch"/>
+	</summary>
+</jasperReport>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/doc/conf/jasper/templates/sinfo.tkh.jrxml	Wed Apr 11 14:20:01 2018 +0200
@@ -0,0 +1,308 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Created with Jaspersoft Studio version 6.5.1.final using JasperReports Library version 4.5.0  -->
+<!-- 2018-04-06T11:26:49 -->
+<jasperReport xmlns="http://jasperreports.sourceforge.net/jasperreports" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://jasperreports.sourceforge.net/jasperreports http://jasperreports.sourceforge.net/xsd/jasperreport.xsd" name="flysreport" language="groovy" pageWidth="595" pageHeight="842" columnWidth="545" leftMargin="30" rightMargin="20" topMargin="20" bottomMargin="20">
+	<property name="ireport.zoom" value="1.0"/>
+	<property name="ireport.x" value="0"/>
+	<property name="ireport.y" value="0"/>
+	<property name="com.jaspersoft.studio.data.defaultdataadapter" value="One Empty Record"/>
+	<style name="htmlStyle" isDefault="true" mode="Transparent" markup="html"/>
+	<field name="meta:header" class="java.lang.String"/>
+	<field name="meta:calcMode" class="java.lang.String"/>
+	<field name="meta:version_label" class="java.lang.String"/>
+	<field name="meta:version" class="java.lang.String"/>
+	<field name="meta:user_label" class="java.lang.String"/>
+	<field name="meta:user" class="java.lang.String"/>
+	<field name="meta:date_label" class="java.lang.String"/>
+	<field name="meta:date" class="java.lang.String"/>
+	<field name="meta:river_label" class="java.lang.String"/>
+	<field name="meta:river" class="java.lang.String"/>
+	<field name="meta:range_label" class="java.lang.String"/>
+	<field name="meta:range" class="java.lang.String"/>
+	<field name="meta:calculation_label" class="java.lang.String"/>
+	<field name="meta:calculation_name" class="java.lang.String"/>
+	<field name="meta:station_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopmentperyear_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header2" class="java.lang.String"/>
+	<field name="meta:waterleveldifference_header_label" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header2" class="java.lang.String"/>
+	<field name="meta:bedheightdifference_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header2" class="java.lang.String"/>
+	<field name="meta:flowdepthcurrent_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepthhistorical_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment" class="java.lang.String"/>
+	<field name="meta:flowdepthmin_header" class="java.lang.String"/>
+	<field name="meta:flowdepthmax_header" class="java.lang.String"/>
+	<field name="meta:flowdepthdevelopment_header_label" class="java.lang.String"/>
+	<field name="meta:flowdepth_tkh_header" class="java.lang.String"/>
+	<field name="meta:tkh_header" class="java.lang.String"/>
+	<field name="meta:tkhkind_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_header" class="java.lang.String"/>
+	<field name="meta:discharge_header" class="java.lang.String"/>
+	<field name="meta:waterlevel_name_header" class="java.lang.String"/>
+	<field name="meta:gauge_header" class="java.lang.String"/>
+	<field name="meta:bedheight_header" class="java.lang.String"/>
+	<field name="meta:sounding_name_header" class="java.lang.String"/>
+	<field name="meta:location_header" class="java.lang.String"/>
+	<field name="meta:river_unit" class="java.lang.String"/>
+	<field name="data:0" class="java.lang.String"/>
+	<field name="data:1" class="java.lang.String"/>
+	<field name="data:2" class="java.lang.String"/>
+	<field name="data:3" class="java.lang.String"/>
+	<field name="data:4" class="java.lang.String"/>
+	<field name="data:5" class="java.lang.String"/>
+	<field name="data:6" class="java.lang.String"/>
+	<field name="data:7" class="java.lang.String"/>
+	<field name="data:8" class="java.lang.String"/>
+	<background>
+		<band splitType="Stretch"/>
+	</background>
+	<title>
+		<band height="189" splitType="Stretch">
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="0" width="515" height="30"/>
+				<textElement>
+					<font size="18"/>
+				</textElement>
+				<textFieldExpression><![CDATA[$F{meta:header} + " " + $F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="39" width="165" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:calcMode}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="69" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="69" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:version}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="89" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="89" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:user}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="109" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="109" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:date}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="129" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="129" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:river}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="149" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="149" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:range}]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="0" y="169" width="123" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:calculation_label} + ":"]]></textFieldExpression>
+			</textField>
+			<textField>
+				<reportElement style="htmlStyle" x="123" y="169" width="392" height="20"/>
+				<textFieldExpression><![CDATA[$F{meta:calculation_name}]]></textFieldExpression>
+			</textField>
+		</band>
+	</title>
+	<columnHeader>
+		<band height="25" splitType="Stretch">
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="0" y="0" width="50" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:station_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="50" y="0" width="50" height="24"/>
+				<box padding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:tkh_header} + "<br/>[cm]"]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="100" y="0" width="60" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:tkhkind_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="160" y="0" width="60" height="24"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterlevel_header}+ "<br/>[NHN + m]"]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="220" y="0" width="50" height="24"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:discharge_header}+ "<br/>[m³/s]"]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="270" y="0" width="70" height="24"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:waterlevel_name_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="340" y="0" width="70" height="24"/>
+				<box topPadding="0" leftPadding="5" bottomPadding="5" rightPadding="5"/>
+				<textElement textAlignment="Right" verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:gauge_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="470" y="0" width="80" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:location_header}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="410" y="0" width="60" height="24"/>
+				<box padding="5"/>
+				<textElement verticalAlignment="Bottom"/>
+				<textFieldExpression><![CDATA[$F{meta:bedheight_header}+ "<br/>[NHN + m]"]]></textFieldExpression>
+			</textField>
+			<line>
+				<reportElement style="htmlStyle" positionType="FixRelativeToBottom" x="0" y="24" width="550" height="1"/>
+			</line>
+		</band>
+	</columnHeader>
+	<detail>
+		<band height="20" splitType="Prevent">
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="340" y="0" width="70" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:6}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="410" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textFieldExpression><![CDATA[$F{data:7}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="270" y="0" width="70" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:5}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="0" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right" verticalAlignment="Top"/>
+				<textFieldExpression><![CDATA[$F{data:0}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="50" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:1}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="100" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Left"/>
+				<textFieldExpression><![CDATA[$F{data:2}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="160" y="0" width="60" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:3}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement key="" style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="220" y="0" width="50" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$F{data:4}]]></textFieldExpression>
+			</textField>
+			<textField isStretchWithOverflow="true" isBlankWhenNull="true">
+				<reportElement style="htmlStyle" positionType="Float" stretchType="RelativeToBandHeight" x="470" y="0" width="80" height="20"/>
+				<box topPadding="5" leftPadding="5" bottomPadding="0" rightPadding="5">
+					<topPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<leftPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<bottomPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+					<rightPen lineWidth="0.0" lineStyle="Solid" lineColor="#000000"/>
+				</box>
+				<textFieldExpression><![CDATA[$F{data:8}]]></textFieldExpression>
+			</textField>
+		</band>
+	</detail>
+	<pageFooter>
+		<band height="30" splitType="Stretch">
+			<textField isStretchWithOverflow="true">
+				<reportElement style="htmlStyle" x="437" y="10" width="55" height="20"/>
+				<textElement textAlignment="Right"/>
+				<textFieldExpression><![CDATA[$V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
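+			<!-- With evaluationTime="Report" PAGE_NUMBER yields the total page count, so together with the field above this prints "page / total". -->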
+			<textField isStretchWithOverflow="true" evaluationTime="Report">
+				<reportElement style="htmlStyle" x="493" y="10" width="57" height="20"/>
+				<textFieldExpression><![CDATA[" / " + $V{PAGE_NUMBER}]]></textFieldExpression>
+			</textField>
+		</band>
+	</pageFooter>
+	<summary>
+		<band height="42" splitType="Stretch"/>
+	</summary>
+</jasperReport>
Binary file artifacts/doc/conf/jasper/waterlevel.jasper has changed
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/SInfoI18NStrings.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/SInfoI18NStrings.java	Wed Apr 11 14:20:01 2018 +0200
@@ -70,6 +70,8 @@
 
     String CSV_FLOWDEPTH_DEVELOPMENT_HEADER = "sinfo.export.csv.header.flowdepth.development";
 
+    String PDF_FLOWDEPTH_DEVELOPMENT_HEADER = "sinfo.export.pdf.header.flowdepth.development";
+
     String CSV_FLOWDEPTH_DEVELOPMENT_PER_YEAR_HEADER = "sinfo.export.csv.header.flowdepth.development.per.year";
 
     String CSV_WATERLEVEL_DIFFERENCE_HEADER = "sinfo.export.csv.header.waterlevel.difference";
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/SInfoResultType.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/SInfoResultType.java	Wed Apr 11 14:20:01 2018 +0200
@@ -43,7 +43,7 @@
         }
     },
 
-    waterlevel(null, SInfoI18NStrings.CSV_WATERLEVEL_HEADER) {
+    waterlevel(null, SInfoI18NStrings.CSV_WATERLEVEL_HEADER, "sinfo.export.flow_depth.pdf.header.waterlevel") {
         @Override
         public String exportValue(final CallContext context, final Object value) {
             final double doubleValue = asDouble(value);
@@ -159,7 +159,7 @@
         }
     },
 
-    soilkind(SInfoI18NStrings.UNIT_NONE, SInfoI18NStrings.CSV_TKHKIND_HEADER) {
+    soilkind(SInfoI18NStrings.UNIT_NONE, SInfoI18NStrings.CSV_TKHKIND_HEADER, "sinfo.export.tkh.pdf.header.tkhkind") {
         @Override
         public String exportValue(final CallContext context, final Object value) {
 
@@ -281,7 +281,8 @@
             return Formatter.getFlowDepth(context);
         }
     },
-    flowdepthDevelopment(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_FLOWDEPTH_DEVELOPMENT_HEADER) {
+
+    flowdepthDevelopment(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_FLOWDEPTH_DEVELOPMENT_HEADER, SInfoI18NStrings.PDF_FLOWDEPTH_DEVELOPMENT_HEADER) {
         @Override
         public String exportValue(final CallContext context, final Object value) {
             final double doubleValue = asDouble(value);
@@ -305,7 +306,7 @@
             return Formatter.getFlowDepthDevelopmentPerYear(context);
         }
     },
-    waterlevelDifference(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_WATERLEVEL_DIFFERENCE_HEADER) {
+    waterlevelDifference(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_WATERLEVEL_DIFFERENCE_HEADER, "sinfo.export.pdf.header.waterlevel.difference") {
         @Override
         public String exportValue(final CallContext context, final Object value) {
             final double doubleValue = asDouble(value);
@@ -317,7 +318,7 @@
             return Formatter.getTkh(context);
         }
     },
-    bedHeightDifference(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_DIFFERENCE_HEADER) {
+    bedHeightDifference(SInfoI18NStrings.UNIT_CM, SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_DIFFERENCE_HEADER, "sinfo.export.pdf.header.mean_bed_height.difference") {
         @Override
         public String exportValue(final CallContext context, final Object value) {
             final double doubleValue = asDouble(value);
@@ -420,6 +421,10 @@
     public final String getCsvHeader() {
         return this.csvHeader;
     }
+    
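+    /** Resolves the localized CSV column header via the i18n resources of the request, falling back to the raw key. */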
+    public final String getCsvHeader(final CallMeta meta) {
+        return Resources.getMsg(meta, this.csvHeader, this.csvHeader);
+    }
 
     public final String getPdfHeader(final CallMeta meta) {
         return Resources.getMsg(meta, this.pdfHeader, this.pdfHeader);
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java	Wed Apr 11 14:20:01 2018 +0200
@@ -40,7 +40,7 @@
     /** The log used in this exporter. */
     private static Logger log = Logger.getLogger(FlowDepthDevelopmentExporter.class);
 
-    private static final String JASPER_FILE = "/jasper/sinfo.flowdepthminmax.jasper";
+    private static final String JASPER_FILE = "/jasper/sinfo.flowdepthdevelopment.jasper";
 
     @Override
     protected Logger getLog() {
@@ -138,23 +138,25 @@
         super.addJRMetaDataDefaults(source, results);
 
         final FlowDepthDevelopmentCalculationResult result = results.getResult();
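+        // Overall result label and the localized flow depth development header, handed to the Jasper template.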
+        source.addMetaData("flowdepthdevelopment", result.getLabel());
+        source.addMetaData("flowdepthdevelopment_header_label", SInfoResultType.flowdepthDevelopment.getCsvHeader(this.context.getMeta()));
 
         /* column headings */
         source.addMetaData("station_header", SInfoResultType.station.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdeptdevelopment_header", SInfoResultType.flowdepthDevelopment.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdeptdevelopmentperyear_header", SInfoResultType.flowdepthDevelopmentPerYear.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("flowdepthdevelopment_header", SInfoResultType.flowdepthDevelopment.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("flowdepthdevelopmentperyear_header", SInfoResultType.flowdepthDevelopmentPerYear.getPdfHeader(this.context.getMeta()));
 
         source.addMetaData("waterleveldifference_header", SInfoResultType.waterlevelDifference.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("waterleveldifference_header_label", result.getWaterlevelDifferenceLabel());
+        source.addMetaData("waterleveldifference_header2", result.getWaterlevelDifferenceLabel());
 
         source.addMetaData("bedheightdifference_header", SInfoResultType.bedHeightDifference.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("bedheightdifference_header_label", result.getBedHeightDifferenceLabel());
+        source.addMetaData("bedheightdifference_header2", result.getBedHeightDifferenceLabel());
 
         source.addMetaData("flowdepthcurrent_header", SInfoResultType.flowdepthCurrent.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthcurrent_header_label", result.getFlowDepthCurrentLabel());
+        source.addMetaData("flowdepthcurrent_header2", result.getFlowDepthCurrentLabel());
 
         source.addMetaData("flowdepthhistorical_header", SInfoResultType.flowdepthHistorical.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthhistorical_header_label", result.getFlowDepthHistoricalLabel());
+        source.addMetaData("flowdepthhistorical_header2", result.getFlowDepthHistoricalLabel());
 
         source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta()));
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/DischargeValuesFinder.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/DischargeValuesFinder.java	Wed Apr 11 14:20:01 2018 +0200
@@ -91,7 +91,6 @@
             return this.qInterpolator.value(station);
         }
         catch (final FunctionEvaluationException e) {
-            e.printStackTrace();
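+            // Swallow the evaluation failure and signal the missing discharge with NaN.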
             return Double.NaN;
         }
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java	Wed Apr 11 14:20:01 2018 +0200
@@ -38,7 +38,7 @@
 
     private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula";
 
-    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
+    private static final String JASPER_FILE = "/jasper/sinfo.tkh.jasper";
 
     @Override
     protected Logger getLog() {
@@ -140,13 +140,17 @@
         /* general metadata */
         super.addJRMetaDataDefaults(source, results);
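+        // Label and name of the applied calculation formula (Gill 1971) for the PDF meta block.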
 
+        source.addMetaData("calculation_label", msg("sinfo.export.flow_depth.pdf.meta.calculation.label"));
+        source.addMetaData("calculation_name", msg("sinfo.export.flow_depth.pdf.meta.calculation.name"));
+
         /* column headings */
         source.addMetaData("station_header", SInfoResultType.station.getPdfHeader(this.context.getMeta()));
         source.addMetaData("tkh_header", SInfoResultType.tkh.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("tkhkind_header", SInfoResultType.soilkind.getPdfHeader(this.context.getMeta()));
         source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta()));
         source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta()));
         source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta()));
-
+
         // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also
         // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates).
         final String descriptionHeader = results.getDescriptionHeader();
--- a/artifacts/src/main/resources/messages.properties	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/resources/messages.properties	Wed Apr 11 14:20:01 2018 +0200
@@ -855,6 +855,8 @@
 sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
 sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
 sinfo.export.flow_depth.csv.meta.range.label = Range (km)
+sinfo.export.flow_depth.pdf.meta.calculation.label = Angewandte Gleichung
+sinfo.export.flow_depth.pdf.meta.calculation.name = Gill (1971)
 sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6hensystem des Flusses: {0} 
 
 sinfo.export.flow_depth.csv.header.km = Fluss-km
@@ -862,6 +864,7 @@
 sinfo.export.flow_depth.csv.header.flowdepthTkh = Flie\u00dftiefe mit TKH
 sinfo.export.flow_depth.csv.header.tkh = TKH
 sinfo.export.flow_depth.csv.header.waterlevel = Wasserstand
+sinfo.export.flow_depth.pdf.header.waterlevel = Wasser-stand
 sinfo.export.flow_depth.csv.header.discharge = Q
 sinfo.export.flow_depth.csv.header.label = Bezeichnung
 sinfo.export.flow_depth.csv.header.gauge = Bezugspegel
@@ -895,6 +898,7 @@
 sinfo.export.tkh.calculation.formula = # Berechnungsgrundlage: Gleichung nach GILL (1971)
 sinfo.export.tkh.csv.header.tkh = Transportk\u00f6rperh\u00f6he
 sinfo.export.tkh.csv.header.tkhkind = Einteilung der Gew\u00e4ssersohle
+sinfo.export.tkh.pdf.header.tkhkind = Einteilung der Gew\u00e4sser-sohle
 sinfo.export.tkh.soilkind.mobil = Mobil
 sinfo.export.tkh.soilkind.starr = Starr
 
@@ -963,9 +967,12 @@
 sinfo.export.csv.meta.header.waterlevel.historical = ##METADATEN WASSERSPIEGELLAGE historisch
 
 sinfo.export.csv.header.flowdepth.development = Flie\u00dftiefenentwicklung
+sinfo.export.pdf.header.flowdepth.development = Flie\u00dftiefen-entwicklung
 sinfo.export.csv.header.flowdepth.development.per.year = Flie\u00dftiefenent-wicklung pro Jahr
 sinfo.export.csv.header.waterlevel.difference = \u0394WSPL
+sinfo.export.pdf.header.waterlevel.difference = dWSPL
 sinfo.export.csv.header.mean_bed_height.difference = \u0394MSH
+sinfo.export.pdf.header.mean_bed_height.difference = dMSH
 sinfo.export.csv.header.flowdepth.current = Flie\u00dftiefe h-aktuell
 sinfo.export.csv.header.flowdepth.historical = Flie\u00dftiefe h-historisch
 
--- a/artifacts/src/main/resources/messages_de.properties	Wed Apr 11 14:09:13 2018 +0200
+++ b/artifacts/src/main/resources/messages_de.properties	Wed Apr 11 14:20:01 2018 +0200
@@ -855,6 +855,8 @@
 sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
 sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
 sinfo.export.flow_depth.csv.meta.range.label = Bereich (km)
+sinfo.export.flow_depth.pdf.meta.calculation.label = Angewandte Gleichung
+sinfo.export.flow_depth.pdf.meta.calculation.name = Gill (1971)
 sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6hensystem des Flusses: {0} 
 
 sinfo.export.flow_depth.csv.header.km = Fluss-km
@@ -862,6 +864,7 @@
 sinfo.export.flow_depth.csv.header.flowdepthTkh = Flie\u00dftiefe mit TKH
 sinfo.export.flow_depth.csv.header.tkh = TKH
 sinfo.export.flow_depth.csv.header.waterlevel = Wasserstand
+sinfo.export.flow_depth.pdf.header.waterlevel = Wasser-stand
 sinfo.export.flow_depth.csv.header.discharge = Q
 sinfo.export.flow_depth.csv.header.label = Bezeichnung
 sinfo.export.flow_depth.csv.header.gauge = Bezugspegel
@@ -895,6 +898,7 @@
 sinfo.export.tkh.calculation.formula = # Berechnungsgrundlage: Gleichung nach GILL (1971)
 sinfo.export.tkh.csv.header.tkh = Transportk\u00f6rperh\u00f6he
 sinfo.export.tkh.csv.header.tkhkind = Einteilung der Gew\u00e4ssersohle
+sinfo.export.tkh.pdf.header.tkhkind = Einteilung der Gew\u00e4sser-sohle
 sinfo.export.tkh.soilkind.mobil = Mobil
 sinfo.export.tkh.soilkind.starr = Starr
 
@@ -962,10 +966,13 @@
 sinfo.export.csv.meta.header.waterlevel.current = ##METADATEN WASSERSPIEGELLAGE aktuell
 sinfo.export.csv.meta.header.waterlevel.historical = ##METADATEN WASSERSPIEGELLAGE historisch
 
+sinfo.export.pdf.header.flowdepth.development = Flie\u00dftiefen-entwicklung
 sinfo.export.csv.header.flowdepth.development = Flie\u00dftiefenentwicklung
 sinfo.export.csv.header.flowdepth.development.per.year = Flie\u00dftiefenent-wicklung pro Jahr
 sinfo.export.csv.header.waterlevel.difference = \u0394WSPL
+sinfo.export.pdf.header.waterlevel.difference = dWSPL
 sinfo.export.csv.header.mean_bed_height.difference = \u0394MSH
+sinfo.export.pdf.header.mean_bed_height.difference = dMSH
 sinfo.export.csv.header.flowdepth.current = Flie\u00dftiefe h-aktuell
 sinfo.export.csv.header.flowdepth.historical = Flie\u00dftiefe h-historisch
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/doc/schema/oracle-sinfo-uinfo.sql	Wed Apr 11 14:20:01 2018 +0200
@@ -0,0 +1,402 @@
+BEGIN;
+
+
+CREATE TABLE bed_mobility (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cBedMobilityRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+COMMENT ON TABLE bed_mobility IS 'Longitudinal section of the bed mobility of a river' ;
+COMMENT ON COLUMN bed_mobility.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN bed_mobility.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN bed_mobility."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE BED_MOBILITY_ID_SEQ ;
+
+
+CREATE TABLE bed_mobility_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    bed_mobility_id  NUMBER(9,0) NOT NULL CONSTRAINT cBedMobilityValuesBedMobility REFERENCES bed_mobility(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    moving  NUMBER(1,0) DEFAULT 0 NOT NULL CHECK(moving IN (0,1))
+);
+COMMENT ON TABLE bed_mobility_values IS 'Bed mobility of a km' ;
+COMMENT ON COLUMN bed_mobility_values.station IS 'River km' ;
+COMMENT ON COLUMN bed_mobility_values.moving IS 'Whether the river bed at the station is moving' ;
+
+CREATE SEQUENCE BED_MOBILITY_VALUES_ID_SEQ ;
+
+
+CREATE TABLE infrastructure (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cInfrastructureRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    annotation_type_id  NUMBER(38,0) NOT NULL CONSTRAINT cInfrastructureAnnotationType REFERENCES annotation_types(id),
+    year  NUMBER(4,0) CHECK((year >= 1700) AND (year <= 2199)),
+    dataprovider  VARCHAR2(256),
+    evaluation_by  VARCHAR2(256),
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+COMMENT ON TABLE infrastructure IS 'Longitudinal section of infrastructures of a river and a type' ;
+COMMENT ON COLUMN infrastructure.year IS 'File header line info "Stand"' ;
+COMMENT ON COLUMN infrastructure.dataprovider IS 'File header line info "Datenherkunft"' ;
+COMMENT ON COLUMN infrastructure.evaluation_by IS 'File header line info "Auswerter"' ;
+COMMENT ON COLUMN infrastructure.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN infrastructure.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN infrastructure."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE INFRASTRUCTURE_ID_SEQ ;
+
+
+CREATE TABLE infrastructure_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    infrastructure_id  NUMBER(9,0) CONSTRAINT cInfrastructureValuesInfrastructure REFERENCES infrastructure(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    attribute_id  NUMBER(38,0) CONSTRAINT cInfrastructureValuesAttributes REFERENCES attributes(id),
+    height  NUMBER(6,2)
+);
+COMMENT ON TABLE infrastructure_values IS 'Infrastructure at a river station' ;
+COMMENT ON COLUMN infrastructure_values.station IS 'River km' ;
+COMMENT ON COLUMN infrastructure_values.height IS 'Geodetic height of the top of the infrastructure in m' ;
+
+CREATE SEQUENCE INFRASTRUCTURE_VALUES_ID_SEQ ;
+
+
+CREATE TABLE channel (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cChannelRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256),
+    year_from  NUMBER(4,0),
+    year_to  NUMBER(4,0)
+);
+COMMENT ON TABLE channel IS 'Longitudinal section of the navigable channel of a river' ;
+COMMENT ON COLUMN channel.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN channel.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN channel."comment" IS 'File header line info "weitere Bemerkungen"' ;
+COMMENT ON COLUMN channel.year_from IS 'Start year of the period for which the channel values are valid, NULL when valid for the whole past';
+COMMENT ON COLUMN channel.year_to IS 'End year of the period for which the channel values are valid, NULL when unlimited';
+
+
+CREATE SEQUENCE CHANNEL_ID_SEQ ;
+
+
+CREATE TABLE channel_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    channel_id  NUMBER(9,0) CONSTRAINT cChannelValuesChannel REFERENCES channel(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    width  NUMBER(6,2),
+    depth  NUMBER(6,2)
+);
+COMMENT ON TABLE channel_values IS 'Nominal size of the navigable channel at a river station' ;
+COMMENT ON COLUMN channel_values.station IS 'River km' ;
+COMMENT ON COLUMN channel_values.width IS 'Nominal width of the channel in m' ;
+COMMENT ON COLUMN channel_values.depth IS 'Nominal depth of the channel in m' ;
+
+CREATE SEQUENCE CHANNEL_VALUES_ID_SEQ ;
+
+
+CREATE TABLE collision_type (
+    id  NUMBER(9,0) PRIMARY KEY,
+    name  VARCHAR2(64) NOT NULL UNIQUE
+);
+COMMENT ON TABLE collision_type IS 'Type of a ship collision' ;
+COMMENT ON COLUMN collision_type.name IS 'Name of the collision type' ;
+
+CREATE SEQUENCE COLLISION_TYPE_ID_SEQ ;
+
+
+CREATE TABLE collision (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cCollisionRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    year  NUMBER(4,0) NOT NULL,
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+COMMENT ON TABLE collision IS 'Longitudinal section of ship collisions with the river bed' ;
+COMMENT ON COLUMN collision.year IS 'File header line info "Jahr"' ;
+COMMENT ON COLUMN collision.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN collision.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN collision."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE COLLISION_ID_SEQ ;
+
+
+CREATE TABLE collision_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    collision_id  NUMBER(9,0) NOT NULL CONSTRAINT cCollisionValueCollision REFERENCES collision(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    event_date  DATE NOT NULL,
+    gauge_w  NUMBER(6,2),
+    gauge_name  VARCHAR2(64),
+    collision_type_id  NUMBER(9,0) NOT NULL CONSTRAINT cCollisionValuesCollisionType REFERENCES collision_type(id)
+);
+COMMENT ON TABLE collision_values IS 'Collision event' ;
+COMMENT ON COLUMN collision_values.station IS 'River km' ;
+COMMENT ON COLUMN collision_values.event_date IS 'Date of the collision' ;
+COMMENT ON COLUMN collision_values.gauge_w IS 'Waterlevel during the collision' ;
+COMMENT ON COLUMN collision_values.gauge_name IS 'Name of the gauge' ;
+
+CREATE SEQUENCE COLLISION_VALUES_ID_SEQ ;
+
+
+CREATE TABLE tkh (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cTkhRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    year  NUMBER(4,0) NOT NULL,
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    sounding_info  VARCHAR2(64),
+    evaluation_by  VARCHAR2(256),
+    "comment"  VARCHAR2(256)
+);
+COMMENT ON TABLE tkh IS 'Longitudinal section of computed Transportkörperhöhe of a river' ;
+COMMENT ON COLUMN tkh.year IS 'File header line info "Bezugsjahr"' ;
+COMMENT ON COLUMN tkh.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN tkh.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN tkh.sounding_info IS 'File header line info "Peilung"' ;
+COMMENT ON COLUMN tkh.evaluation_by IS 'File header line info "Auswerter"' ;
+COMMENT ON COLUMN tkh."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE TKH_ID_SEQ ;
+
+
+CREATE TABLE tkh_column (
+    id  NUMBER(9,0) PRIMARY KEY,
+    tkh_id  NUMBER(9,0) NOT NULL CONSTRAINT cTkhColumnTkh REFERENCES tkh(id) ON DELETE CASCADE,
+    name  VARCHAR2(64) NOT NULL
+);
+COMMENT ON TABLE tkh_column IS 'Longitudinal section of computed Transportkörperhöhe for a waterlevel series' ;
+COMMENT ON COLUMN tkh_column.name IS 'Name of the tkh computation column' ;
+
+CREATE SEQUENCE TKH_COLUMN_ID_SEQ ;
+
+
+CREATE TABLE tkh_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    tkh_column_id  NUMBER(9,0) NOT NULL CONSTRAINT cTkhValuesTkhColumn REFERENCES tkh_column(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    tkheight  NUMBER(7,3)
+);
+COMMENT ON TABLE tkh_values IS 'Transportkörperhöhe of a river station and referenced file column' ;
+COMMENT ON COLUMN tkh_values.station IS 'River km' ;
+COMMENT ON COLUMN tkh_values.tkheight IS 'Transportkörperhöhe of a river station computed for a waterlevel in  m' ;
+
+CREATE SEQUENCE TKH_VALUES_ID_SEQ ;
+
+
+CREATE TABLE flow_depth (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cFlowDepthRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    year  NUMBER(4,0) NOT NULL,
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    sounding_info  VARCHAR2(64),
+    evaluation_by  VARCHAR2(255),
+    "comment"  VARCHAR2(256)
+);
+
+COMMENT ON TABLE flow_depth IS 'Longitudinal section of computed flow depth of a river' ;
+COMMENT ON COLUMN flow_depth.year IS 'File header line info "Bezugsjahr"' ;
+COMMENT ON COLUMN flow_depth.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN flow_depth.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN flow_depth.sounding_info IS 'File header line info "Peilung"' ;
+COMMENT ON COLUMN flow_depth.evaluation_by IS 'File header line info "Auswerter"' ;
+COMMENT ON COLUMN flow_depth."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE FLOW_DEPTH_ID_SEQ ;
+
+
+CREATE TABLE flow_depth_column (
+    id  NUMBER(9,0) PRIMARY KEY,
+    flow_depth_id  NUMBER(9,0) NOT NULL CONSTRAINT cFlowDepthColumnFlowDepth REFERENCES flow_depth(id) ON DELETE CASCADE,
+    name  VARCHAR2(64) NOT NULL
+);
+
+COMMENT ON TABLE flow_depth_column IS 'Longitudinal section of computed flow depth for a waterlevel series' ;
+COMMENT ON COLUMN flow_depth_column.name IS 'Name of the flow depth computation column' ;
+
+CREATE SEQUENCE FLOW_DEPTH_COLUMN_ID_SEQ ;
+
+
+CREATE TABLE flow_depth_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    flow_depth_column_id  NUMBER(9,0) NOT NULL CONSTRAINT cFlowDepthValuesFlowDepthColumn REFERENCES flow_depth_column(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    depth  NUMBER(7,3)
+);
+
+COMMENT ON TABLE flow_depth_values IS 'Flow depth of a river station and referenced file column' ;
+COMMENT ON COLUMN flow_depth_values.station IS 'River km' ;
+COMMENT ON COLUMN flow_depth_values.depth IS 'Flow depth of a river station computed for a waterlevel in  m' ;
+
+CREATE SEQUENCE FLOW_DEPTH_VALUES_ID_SEQ ;
+
+
+CREATE TABLE depth_evolution (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cDepthEvolutionRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    reference_year  NUMBER(4,0) NOT NULL,
+    start_year  NUMBER(4,0) NOT NULL,
+    curr_sounding  VARCHAR2(64) NOT NULL,
+    old_sounding  VARCHAR2(64) NOT NULL,
+    kmrange_info  VARCHAR2(32),
+    curr_glw  VARCHAR2(64) NOT NULL,
+    old_glw  VARCHAR2(64) NOT NULL,
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+COMMENT ON TABLE depth_evolution IS 'Longitudinal section of the evolution of the flow depth of a river' ;
+COMMENT ON COLUMN depth_evolution.reference_year IS 'File header line info "Bezugsjahr" (GlW)' ;
+COMMENT ON COLUMN depth_evolution.start_year IS 'File header line info "Ausgangsjahr" (GlW)' ;
+COMMENT ON COLUMN depth_evolution.curr_sounding IS 'File header line info "Aktuelle Peilung / Epoche"' ;
+COMMENT ON COLUMN depth_evolution.old_sounding IS 'File header line info "Historische Peilung / Epoche"' ;
+COMMENT ON COLUMN depth_evolution.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN depth_evolution.curr_glw IS 'File header line info "Aktuelle Wasserspiegellage"' ;
+COMMENT ON COLUMN depth_evolution.old_glw IS 'File header line info "Historische Wasserspiegellage"' ;
+COMMENT ON COLUMN depth_evolution.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN depth_evolution."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE DEPTH_EVOLUTION_ID_SEQ ;
+
+
+CREATE TABLE depth_evolution_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    depth_evolution_id  NUMBER(9,0) NOT NULL CONSTRAINT cDepthEvolutionValuesDepthEvolution REFERENCES depth_evolution(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    total_change  NUMBER(8,4),
+    change_per_year  NUMBER(8,4)
+);
+COMMENT ON TABLE depth_evolution_values IS 'Evolution of the flow depth of a river station in a referenced period of time' ;
+COMMENT ON COLUMN depth_evolution_values.station IS 'River km' ;
+COMMENT ON COLUMN depth_evolution_values.total_change IS 'Flow depth change of the whole period of time in m' ;
+COMMENT ON COLUMN depth_evolution_values.change_per_year IS 'Average yearly flow depth change in the referenced period of time in m' ;
+
+CREATE SEQUENCE DEPTH_EVOLUTION_VALUES_ID_SEQ ;
+
+
+CREATE TABLE daily_discharge (
+    id  NUMBER(9,0) PRIMARY KEY,
+    gauge_id  NUMBER(38,0) NOT NULL CONSTRAINT cDailyDischargeGauge REFERENCES gauges(id) ON DELETE CASCADE,
+    filename  VARCHAR2(256) NOT NULL
+);
+
+COMMENT ON TABLE daily_discharge IS 'Daily discharge value series' ;
+COMMENT ON COLUMN daily_discharge.filename IS 'Name without type extension of the imported file' ;
+
+CREATE SEQUENCE DAILY_DISCHARGE_ID_SEQ ;
+
+
+CREATE TABLE daily_discharge_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    daily_discharge_id  NUMBER(9,0) NOT NULL CONSTRAINT cDailyDischargeValueDailyDischarge REFERENCES daily_discharge(id) ON DELETE CASCADE,
+    day  DATE NOT NULL,
+    discharge  NUMBER(8,3) NOT NULL
+);
+
+COMMENT ON TABLE daily_discharge_values IS 'Mean daily discharge of a date and a gauge' ;
+COMMENT ON COLUMN daily_discharge_values.day IS 'Date of the discharge value' ;
+COMMENT ON COLUMN daily_discharge_values.discharge IS 'Mean daily discharge in m^3/s' ;
+
+CREATE SEQUENCE DAILY_DISCHARGE_VALUES_ID_SEQ ;
+
+
+CREATE TABLE salix (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cSalixRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    evaluation_by  VARCHAR2(255),
+    kmrange_info  VARCHAR2(32),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+
+COMMENT ON TABLE salix IS 'Longitudinal section of the salix line parameters of a river' ;
+COMMENT ON COLUMN salix.evaluation_by IS 'File header line info "Auswerter"' ;
+COMMENT ON COLUMN salix.kmrange_info IS 'File header line info "Strecke"' ;
+COMMENT ON COLUMN salix.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN salix."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE SALIX_ID_SEQ ;
+
+
+CREATE TABLE salix_values (
+    id  NUMBER(9,0) PRIMARY KEY,
+    salix_id  NUMBER(9,0) NOT NULL CONSTRAINT cSalixValuesSalix REFERENCES salix(id) ON DELETE CASCADE,
+    station  NUMBER(7,3) NOT NULL,
+    factor  NUMBER(6,2) NOT NULL,
+    mnw_mw_diff  NUMBER(6,2)
+);
+
+COMMENT ON COLUMN salix_values.station IS 'River km' ;
+COMMENT ON COLUMN salix_values.factor IS 'Salix "factor" of the station in m' ;
+COMMENT ON COLUMN salix_values.mnw_mw_diff IS 'Difference between MNW and MW in m (less than zero)' ;
+
+CREATE SEQUENCE SALIX_VALUES_ID_SEQ ;
+
+
+CREATE TABLE salix_rank (
+    id  NUMBER(9,0) PRIMARY KEY,
+    min_value  NUMBER(6,2),
+    max_value  NUMBER(6,2),
+    name  VARCHAR2(16) NOT NULL
+);
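+-- Predefined classification ranges for the salix evaluation; NULL marks an open-ended bound.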
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (1, NULL, -0.3, 'invalid');
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (2, -0.3, 0.3, 'very good');
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (3, 0.3, 0.5, 'good');
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (4, 0.5, 1.0, 'moderate');
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (5, 1.0, 1.5, 'bad');
+INSERT INTO salix_rank (id, min_value, max_value, name) VALUES (6, 1.5, NULL, 'very bad');
+
+
+CREATE TABLE vegetation_type (
+    id  NUMBER(9,0) PRIMARY KEY,
+    name  VARCHAR2(256) NOT NULL
+);
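+-- Fixed catalogue of vegetation types referenced by vegetation_zone.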
+INSERT INTO vegetation_type (id, name) VALUES (1, 'zonal forest');
+INSERT INTO vegetation_type (id, name) VALUES (2, 'dry hartwood forest floodplain');
+INSERT INTO vegetation_type (id, name) VALUES (3, 'wet hartwood forest floodplain');
+INSERT INTO vegetation_type (id, name) VALUES (4, 'salix alba forest');
+INSERT INTO vegetation_type (id, name) VALUES (5, 'salix shrubs');
+INSERT INTO vegetation_type (id, name) VALUES (6, 'reed bed');
+INSERT INTO vegetation_type (id, name) VALUES (7, 'bank pioneers');
+INSERT INTO vegetation_type (id, name) VALUES (8, 'no vegetation');
+INSERT INTO vegetation_type (id, name) VALUES (9, 'water');
+
+
+CREATE TABLE vegetation (
+    id  NUMBER(9,0) PRIMARY KEY,
+    river_id  NUMBER(38,0) NOT NULL CONSTRAINT cVegetationRivers REFERENCES rivers(id) ON DELETE CASCADE,
+    name  VARCHAR2(256),
+    filename  VARCHAR2(256) NOT NULL,
+    "comment"  VARCHAR2(256)
+);
+
+COMMENT ON TABLE vegetation IS 'Classification of the vegetation of a river' ;
+COMMENT ON COLUMN vegetation.name IS 'File header line info "Einteilung"' ;
+COMMENT ON COLUMN vegetation.filename IS 'Name without type extension of the imported file' ;
+COMMENT ON COLUMN vegetation."comment" IS 'File header line info "weitere Bemerkungen"' ;
+
+CREATE SEQUENCE VEGETATION_ID_SEQ ;
+
+
+CREATE TABLE vegetation_zone (
+    id  NUMBER(9,0) PRIMARY KEY,
+    vegetation_id  NUMBER(9,0) NOT NULL CONSTRAINT cVegetationZoneVegetation REFERENCES vegetation(id) ON DELETE CASCADE,
+    vegetation_type_id  NUMBER(9,0) NOT NULL CONSTRAINT cVegetationZoneVegetationType REFERENCES vegetation_type(id) ON DELETE CASCADE,
+    min_overflow_days  NUMBER(3,0) NOT NULL,
+    max_overflow_days  NUMBER(3,0) NOT NULL
+);
+
+COMMENT ON TABLE vegetation_zone IS 'Vegetation zone of a river station' ;
+COMMENT ON COLUMN vegetation_zone.min_overflow_days IS 'Minimum number (inclusive) of overflow days in a year for the zone type' ;
+COMMENT ON COLUMN vegetation_zone.max_overflow_days IS 'Maximum number (exclusive) of overflow days in a year for the zone type' ;
+
+CREATE SEQUENCE VEGETATION_ZONE_ID_SEQ ;
+
+
+COMMIT;
--- a/backend/doc/schema/postgresql-alter-bed_height.sql	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/doc/schema/postgresql-alter-bed_height.sql	Wed Apr 11 14:20:01 2018 +0200
@@ -3,7 +3,7 @@
 
 --Add further bed height file header info columns
 
-ALTER TABLE bed_height ADD COLUMN sounding_width_info VARCHAR(32);
+ALTER TABLE bed_height ADD COLUMN sounding_width_info VARCHAR(256);
 COMMENT ON COLUMN bed_height.sounding_width_info IS 'File header line info "ausgewertete Peilbreite"' ;
 
 ALTER TABLE bed_height ADD COLUMN "comment" VARCHAR(256);
--- a/backend/doc/schema/postgresql-sinfo-uinfo.sql	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/doc/schema/postgresql-sinfo-uinfo.sql	Wed Apr 11 14:20:01 2018 +0200
@@ -155,7 +155,7 @@
     evaluation_by  VARCHAR(256),
     "comment"  VARCHAR(256)
 );
-COMMENT ON TABLE tkh IS 'Longitudinal section of computed Transportk�rperh�he of a river' ;
+COMMENT ON TABLE tkh IS 'Longitudinal section of computed Transportkörperhöhe of a river' ;
 COMMENT ON COLUMN tkh.year IS 'File header line info "Bezugsjahr"' ;
 COMMENT ON COLUMN tkh.kmrange_info IS 'File header line info "Strecke"' ;
 COMMENT ON COLUMN tkh.filename IS 'Name without type extension of the imported file' ;
@@ -171,7 +171,7 @@
     tkh_id  NUMERIC(9,0) NOT NULL CONSTRAINT cTkhColumnTkh REFERENCES tkh(id) ON DELETE CASCADE,
     name  VARCHAR(64) NOT NULL
 );
-COMMENT ON TABLE tkh_column IS 'Longitudinal section of computed Transportk�rperh�he for a waterlevel series' ;
+COMMENT ON TABLE tkh_column IS 'Longitudinal section of computed Transportkörperhöhe for a waterlevel series' ;
 COMMENT ON COLUMN tkh_column.name IS 'Name of the tkh computation column' ;
 
 CREATE SEQUENCE TKH_COLUMN_ID_SEQ ;
@@ -183,9 +183,9 @@
     station  NUMERIC(7,3) NOT NULL,
     tkheight  NUMERIC(7,3)
 );
-COMMENT ON TABLE tkh_values IS 'Transportk�rperh�he of a river station and referenced file column' ;
+COMMENT ON TABLE tkh_values IS 'Transportkörperhöhe of a river station and referenced file column' ;
 COMMENT ON COLUMN tkh_values.station IS 'River km' ;
-COMMENT ON COLUMN tkh_values.tkheight IS 'Transportk�rperh�he of a river station computed for a waterlevel in  m' ;
+COMMENT ON COLUMN tkh_values.tkheight IS 'Transportkörperhöhe of a river station computed for a waterlevel in  m' ;
 
 CREATE SEQUENCE TKH_VALUES_ID_SEQ ;
 
@@ -326,7 +326,7 @@
 
 CREATE TABLE salix_values (
     id  NUMERIC(9,0) PRIMARY KEY,
-    salix_id  integer NOT NULL CONSTRAINT cSalixValuesSalix REFERENCES salix(id) ON DELETE CASCADE,
+    salix_id  NUMERIC(9,0) NOT NULL CONSTRAINT cSalixValuesSalix REFERENCES salix(id) ON DELETE CASCADE,
     station  NUMERIC(7,3) NOT NULL,
     factor  NUMERIC(6,2) NOT NULL,
     mnw_mw_diff  NUMERIC(6,2)
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportBedHeight.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportBedHeight.java	Wed Apr 11 14:20:01 2018 +0200
@@ -12,6 +12,7 @@
 import java.util.List;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.BedHeight;
 import org.dive4elements.river.model.BedHeightType;
 import org.dive4elements.river.model.ElevationModel;
@@ -40,12 +41,15 @@
 
     protected List<ImportBedHeightValue> values;
 
+    protected StoreMode storeMode;
+
     protected BedHeight peer;
 
 
     public ImportBedHeight(final String description) {
         this.description = description;
         this.values      = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
 
@@ -178,9 +182,11 @@
                     (this.oldElevationModel != null) ? this.oldElevationModel.getPeer() : null, this.range.getPeer(river),
                             this.evaluationBy, this.description, this.sounding_width_info, this.comment);
             session.save(this.peer);
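+            // Mark as newly inserted so that the value import can skip its per-station duplicate lookups.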
+            this.storeMode = StoreMode.INSERT;
         }
         else {
             this.peer = bedHeights.get(0);
+            this.storeMode = StoreMode.UPDATE;
         }
 
         return this.peer;
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportBedHeightValue.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportBedHeightValue.java	Wed Apr 11 14:20:01 2018 +0200
@@ -11,6 +11,7 @@
 import java.util.List;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.BedHeight;
 import org.dive4elements.river.model.BedHeightValue;
 import org.hibernate.Query;
@@ -61,13 +62,18 @@
         if (this.peer != null)
             return this.peer;
 
+        List<BedHeightValue> values;
         final Session session = ImporterSession.getInstance().getDatabaseSession();
-        final Query query = session.createQuery("FROM BedHeightValue WHERE (bedHeight=:bedHeight)"
-                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
-        query.setParameter("bedHeight", bedHeight);
-        query.setParameter("station", this.station);
-        final List<BedHeightValue> values = query.list();
-        if (values.isEmpty()) {
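+        // For a dataset that was just inserted no values can exist yet, so the duplicate lookup is skipped.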
+        if (this.bedHeight.storeMode == StoreMode.INSERT)
+            values = null;
+        else {
+            final Query query = session.createQuery("FROM BedHeightValue WHERE (bedHeight=:bedHeight)"
+                    + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+            query.setParameter("bedHeight", bedHeight);
+            query.setParameter("station", this.station);
+            values = query.list();
+        }
+        if ((values == null) || values.isEmpty()) {
             this.peer = new BedHeightValue(bedHeight, this.station, this.height, this.uncertainty, this.dataGap, this.soundingWidth,
                     this.minHeight, this.maxHeight);
             session.save(this.peer);
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportCrossSection.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportCrossSection.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,16 +8,15 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.River;
-import org.dive4elements.river.model.CrossSection;
-import org.dive4elements.river.model.TimeInterval;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
 import java.util.List;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.CrossSection;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.TimeInterval;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 /** CrossSection to be imported, holds list of ImportCrossSectionLines. */
 public class ImportCrossSection
@@ -28,6 +27,7 @@
     protected String                       description;
     protected ImportTimeInterval           timeInterval;
     protected List<ImportCrossSectionLine> lines;
+    protected StoreMode storeMode;
 
     protected CrossSection peer;
 
@@ -35,94 +35,97 @@
     }
 
     public ImportCrossSection(
-        ImportRiver                  river,
-        String                       description,
-        ImportTimeInterval           timeInterval,
-        List<ImportCrossSectionLine> lines
-    ) {
+            final ImportRiver                  river,
+            final String                       description,
+            final ImportTimeInterval           timeInterval,
+            final List<ImportCrossSectionLine> lines
+            ) {
         this.river        = river;
         this.description  = description;
         this.timeInterval = timeInterval;
         this.lines        = lines;
+        this.storeMode = StoreMode.NONE;
         wireWithLines();
     }
 
     public void wireWithLines() {
-        for (ImportCrossSectionLine line: lines) {
+        for (final ImportCrossSectionLine line: this.lines) {
             line.setCrossSection(this);
         }
     }
 
     public ImportRiver getRiver() {
-        return river;
+        return this.river;
     }
 
-    public void setRiver(ImportRiver river) {
+    public void setRiver(final ImportRiver river) {
         this.river = river;
     }
 
     public String getDescription() {
-        return description;
+        return this.description;
     }
 
-    public void setDescription(String description) {
+    public void setDescription(final String description) {
         this.description = description;
     }
 
     public ImportTimeInterval getTimeInterval() {
-        return timeInterval;
+        return this.timeInterval;
     }
 
-    public void setTimeInterval(ImportTimeInterval timeInterval) {
+    public void setTimeInterval(final ImportTimeInterval timeInterval) {
         this.timeInterval = timeInterval;
     }
 
     public void storeDependencies() {
 
-        log.info("store cross section '" + description + "'");
+        log.info("store cross section '" + this.description + "'");
 
         getPeer();
 
-        int i = 1, N = lines.size();
+        // int i = 1;
+        // final int N = this.lines.size();
 
-        for (ImportCrossSectionLine line: lines) {
+        for (final ImportCrossSectionLine line: this.lines) {
             line.storeDependencies();
-            log.info("  stored " + i + " lines. remaining: " + (N-i));
-            ++i;
+            // log.info(" stored " + i + " lines. remaining: " + (N-i));
+            // ++i;
         }
+        log.info(" lines stored: " + this.lines.size());
     }
 
     public CrossSection getPeer() {
 
-        if (peer == null) {
-            River r = river.getPeer();
-            TimeInterval t = timeInterval != null
-                ? timeInterval.getPeer()
-                : null;
+        if (this.peer == null) {
+            final River r = this.river.getPeer();
+            final TimeInterval t = (this.timeInterval != null) ? this.timeInterval.getPeer() : null;
 
-            Session session =
-                ImporterSession.getInstance().getDatabaseSession();
+            final Session session =
+                    ImporterSession.getInstance().getDatabaseSession();
 
-            Query query = session.createQuery(
-                "from CrossSection where " +
-                "river=:r and "            +
-                "timeInterval=:t and "     +
-                "description=:d");
+            final Query query = session.createQuery(
+                    "from CrossSection where " +
+                            "river=:r and "            +
+                            "timeInterval=:t and "     +
+                    "description=:d");
 
             query.setParameter("r", r);
             query.setParameter("t", t);
-            query.setParameter("d", description);
+            query.setParameter("d", this.description);
 
-            List<CrossSection> crossSections = query.list();
+            final List<CrossSection> crossSections = query.list();
             if (crossSections.isEmpty()) {
-                peer = new CrossSection(r, t, description);
-                session.save(peer);
+                this.peer = new CrossSection(r, t, this.description);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = crossSections.get(0);
+                this.peer = crossSections.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportCrossSectionLine.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportCrossSectionLine.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,18 +8,18 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.CrossSection;
-import org.dive4elements.river.model.CrossSectionPoint;
-import org.dive4elements.river.model.CrossSectionLine;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import java.util.Comparator;
 import java.util.List;
-import java.util.Comparator;
 import java.util.Map;
 import java.util.TreeMap;
 
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.CrossSection;
+import org.dive4elements.river.model.CrossSectionLine;
+import org.dive4elements.river.model.CrossSectionPoint;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
 /**
  * A CrossSectionLine (containing points) ready to be transformed into a mapped
  * object and written to db (used in importer).
@@ -27,39 +27,42 @@
 public class ImportCrossSectionLine
 {
     public static final Comparator<CrossSectionPoint> INDEX_CMP =
-        new Comparator<CrossSectionPoint>() {
-            public int compare(CrossSectionPoint a, CrossSectionPoint b) {
-                return a.getColPos().compareTo(b.getColPos());
-            }
-        };
+            new Comparator<CrossSectionPoint>() {
+        @Override
+        public int compare(final CrossSectionPoint a, final CrossSectionPoint b) {
+            return a.getColPos().compareTo(b.getColPos());
+        }
+    };
 
     protected Double km;
     protected ImportCrossSection crossSection;
     protected List<XY> points;
+    protected StoreMode storeMode;
 
     protected CrossSectionLine peer;
 
     public ImportCrossSectionLine() {
     }
 
-    public ImportCrossSectionLine(Double km, List<XY> points) {
+    public ImportCrossSectionLine(final Double km, final List<XY> points) {
         this.km     = km;
         this.points = points;
+        this.storeMode = StoreMode.NONE;
     }
 
     public ImportCrossSection getCrossSection() {
-        return crossSection;
+        return this.crossSection;
     }
 
-    public void setCrossSection(ImportCrossSection crossSection) {
+    public void setCrossSection(final ImportCrossSection crossSection) {
         this.crossSection = crossSection;
     }
 
     public Double getKm() {
-        return km;
+        return this.km;
     }
 
-    public void setKm(Double km) {
+    public void setKm(final Double km) {
         this.km = km;
     }
 
@@ -70,35 +73,37 @@
 
     /** Write a line and its points. */
     protected void storePoints() {
-        CrossSectionLine csl = getPeer();
-
-        Map<CrossSectionPoint, CrossSectionPoint> map =
-            new TreeMap<CrossSectionPoint, CrossSectionPoint>(INDEX_CMP);
+        final CrossSectionLine csl = getPeer();
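+        // A newly created cross section line has no persisted points yet; insert them without the collision lookup below.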
+        if (this.storeMode == StoreMode.INSERT) {
+            insertPoints();
+            return;
+        }
+        final Map<CrossSectionPoint, CrossSectionPoint> map =
+                new TreeMap<>(INDEX_CMP);
 
         // Build index for faster (index) collision lookup.
-        List<CrossSectionPoint> ps = csl.getPoints();
+        final List<CrossSectionPoint> ps = csl.getPoints();
         if (ps != null) {
-            for (CrossSectionPoint point: ps) {
+            for (final CrossSectionPoint point: ps) {
                 map.put(point, point);
             }
         }
 
-        Session session =
-            ImporterSession.getInstance().getDatabaseSession();
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
 
-        CrossSectionPoint key = new CrossSectionPoint();
+        final CrossSectionPoint key = new CrossSectionPoint();
 
         // Somehow it looks as if even with the map it is still possible that
         // multiple points with same id enter hibernate (and then violate a
         // constraint). -> TODO
-        for (XY xy: points) {
+        for (final XY xy: this.points) {
             key.setColPos(xy.getIndex());
             CrossSectionPoint csp = map.get(key);
             if (csp == null) { // create new
                 csp = new CrossSectionPoint(
-                    csl, key.getColPos(),
-                    Double.valueOf(xy.getX()),
-                    Double.valueOf(xy.getY()));
+                        csl, key.getColPos(),
+                        Double.valueOf(xy.getX()),
+                        Double.valueOf(xy.getY()));
             }
             else { // update old
                 csp.setX(Double.valueOf(xy.getX()));
@@ -108,29 +113,41 @@
         }
     }
 
+    /**
+     * Insert the points of a newly created line directly, without first querying the database for existing points.
+     */
+    private void insertPoints() {
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        for (final XY xy : this.points) {
+            session.save(new CrossSectionPoint(getPeer(), xy.getIndex(), xy.getX(), xy.getY()));
+        }
+    }
+
     /** Pull database-mapped object from db, or create (and save) one. */
     public CrossSectionLine getPeer() {
-        if (peer == null) {
-            CrossSection cs = crossSection.getPeer();
-
-            Session session =
-                ImporterSession.getInstance().getDatabaseSession();
-
-            Query query = session.createQuery(
-                "from CrossSectionLine where crossSection=:cs and km=:km");
-            query.setParameter("cs", cs);
-            query.setParameter("km", km);
-
-            List<CrossSectionLine> lines = query.list();
-            if (lines.isEmpty()) {
-                peer = new CrossSectionLine(cs, km);
-                session.save(peer);
+        if (this.peer == null) {
+            final CrossSection cs = this.crossSection.getPeer();
+            final Session session = ImporterSession.getInstance().getDatabaseSession();
+            List<CrossSectionLine> lines;
+            if (this.crossSection.storeMode == StoreMode.INSERT)
+                lines = null;
+            else {
+                final Query query = session.createQuery("from CrossSectionLine where crossSection=:cs and km=:km");
+                query.setParameter("cs", cs);
+                query.setParameter("km", this.km);
+                lines = query.list();
+            }
+            if ((lines == null) || lines.isEmpty()) {
+                this.peer = new CrossSectionLine(cs, this.km);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = lines.get(0);
+                this.peer = lines.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
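Note: the added code throughout this changeset imports org.dive4elements.river.importer.common.StoreMode, which is not shown in this excerpt. Judging only from the constants used here (NONE, INSERT, UPDATE), it is presumably a plain enum along these lines (a sketch inferred from usage, not the actual source file):

    package org.dive4elements.river.importer.common;

    /** Sketch of the assumed enum: records how an importer resolved its database peer. */
    public enum StoreMode {
        NONE,   // peer not resolved yet
        INSERT, // peer was newly created and saved during this import run
        UPDATE  // peer already existed in the database
    }

When a parent importer reports INSERT, its child importers can skip the existence query and save their rows directly.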
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportFlowVelocityModel.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportFlowVelocityModel.java	Wed Apr 11 14:20:01 2018 +0200
@@ -12,75 +12,75 @@
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.DischargeZone;
 import org.dive4elements.river.model.FlowVelocityModel;
 import org.dive4elements.river.model.River;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 
 public class ImportFlowVelocityModel {
 
     private static final Logger log = Logger
-        .getLogger(ImportFlowVelocityModel.class);
+            .getLogger(ImportFlowVelocityModel.class);
 
     private String description;
 
     private ImportDischargeZone dischargeZone;
 
-    private List<ImportFlowVelocityModelValue> values;
+    private final List<ImportFlowVelocityModelValue> values;
+
+    protected StoreMode storeMode;
 
     private FlowVelocityModel peer;
 
     public ImportFlowVelocityModel() {
-        values = new ArrayList<ImportFlowVelocityModelValue>();
+        this.values = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
-    public ImportFlowVelocityModel(String description) {
+    public ImportFlowVelocityModel(final String description) {
         this();
-
         this.description = description;
     }
 
-    public ImportFlowVelocityModel(ImportDischargeZone dischargeZone,
-        String description) {
+    public ImportFlowVelocityModel(final ImportDischargeZone dischargeZone,
+            final String description) {
         this();
-
         this.dischargeZone = dischargeZone;
         this.description = description;
     }
 
-    public void setDischargeZone(ImportDischargeZone dischargeZone) {
+    public void setDischargeZone(final ImportDischargeZone dischargeZone) {
         this.dischargeZone = dischargeZone;
     }
 
-    public void setDescription(String description) {
+    public void setDescription(final String description) {
         this.description = description;
     }
 
-    public void addValue(ImportFlowVelocityModelValue value) {
+    public void addValue(final ImportFlowVelocityModelValue value) {
         this.values.add(value);
     }
 
-    public void storeDependencies(River river) {
+    public void storeDependencies(final River river) {
         log.debug("store dependencies");
 
-        if (dischargeZone == null) {
+        if (this.dischargeZone == null) {
             log.warn("skip flow velocity model: No discharge zone specified.");
             return;
         }
 
-        dischargeZone.storeDependencies(river);
+        this.dischargeZone.storeDependencies(river);
 
-        FlowVelocityModel peer = getPeer(river);
+        final FlowVelocityModel peer = getPeer(river);
 
         if (peer != null) {
             int i = 0;
 
-            for (ImportFlowVelocityModelValue value : values) {
-                value.storeDependencies(peer);
+            for (final ImportFlowVelocityModelValue value : this.values) {
+                value.storeDependencies(peer, this.storeMode);
                 i++;
             }
 
@@ -88,30 +88,32 @@
         }
     }
 
-    public FlowVelocityModel getPeer(River river) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
+    public FlowVelocityModel getPeer(final River river) {
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
 
-            DischargeZone zone = dischargeZone.getPeer(river);
+            final DischargeZone zone = this.dischargeZone.getPeer(river);
 
-            Query query = session.createQuery("from FlowVelocityModel where "
-                + "   dischargeZone=:dischargeZone");
+            final Query query = session.createQuery("from FlowVelocityModel where "
+                    + "   dischargeZone=:dischargeZone");
 
             query.setParameter("dischargeZone", zone);
 
-            List<FlowVelocityModel> model = query.list();
+            final List<FlowVelocityModel> model = query.list();
 
             if (model.isEmpty()) {
-                peer = new FlowVelocityModel(zone, description);
-                session.save(peer);
+                this.peer = new FlowVelocityModel(zone, this.description);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = model.get(0);
+                this.peer = model.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportFlowVelocityModelValue.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportFlowVelocityModelValue.java	Wed Apr 11 14:20:01 2018 +0200
@@ -9,34 +9,33 @@
 package org.dive4elements.river.importer;
 
 import java.math.BigDecimal;
-
 import java.util.List;
 
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.FlowVelocityModel;
 import org.dive4elements.river.model.FlowVelocityModelValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 
 public class ImportFlowVelocityModelValue {
 
-    private BigDecimal station;
-    private BigDecimal q;
-    private BigDecimal totalChannel;
-    private BigDecimal mainChannel;
-    private BigDecimal shearStress;
+    private final BigDecimal station;
+    private final BigDecimal q;
+    private final BigDecimal totalChannel;
+    private final BigDecimal mainChannel;
+    private final BigDecimal shearStress;
 
     private FlowVelocityModelValue peer;
 
 
     public ImportFlowVelocityModelValue(
-        BigDecimal station,
-        BigDecimal q,
-        BigDecimal totalChannel,
-        BigDecimal mainChannel,
-        BigDecimal shearStress
-    ) {
+            final BigDecimal station,
+            final BigDecimal q,
+            final BigDecimal totalChannel,
+            final BigDecimal mainChannel,
+            final BigDecimal shearStress
+            ) {
         this.station      = station;
         this.q            = q;
         this.totalChannel = totalChannel;
@@ -45,39 +44,41 @@
     }
 
 
-    public void storeDependencies(FlowVelocityModel model) {
-        getPeer(model);
+    public void storeDependencies(final FlowVelocityModel model, final StoreMode parentStoreMode) {
+        getPeer(model, parentStoreMode);
     }
 
 
-    public FlowVelocityModelValue getPeer(FlowVelocityModel model) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
+    public FlowVelocityModelValue getPeer(final FlowVelocityModel model, final StoreMode parentStoreMode) {
+        if (this.peer == null) {
+            List<FlowVelocityModelValue> values;
+            final Session session = ImporterSession.getInstance().getDatabaseSession();
+            if (parentStoreMode == StoreMode.INSERT)
+                values = null;
+            else {
+                final Query query = session.createQuery(
+                        "from FlowVelocityModelValue where " +
+                                "   flowVelocity=:model and " +
+                                "   station between :station - 0.00001 and :station + 0.00001"
+                        );
 
-            Query query = session.createQuery(
-                "from FlowVelocityModelValue where " +
-                "   flowVelocity=:model and " +
-                "   station between :station - 0.00001 and :station + 0.00001"
-            );
+                query.setParameter("model", model);
+                query.setParameter("station", this.station.doubleValue());
 
-            query.setParameter("model", model);
-            query.setParameter("station", station.doubleValue());
-
-            List<FlowVelocityModelValue> values = query.list();
+                values = query.list();
+            }
+            if ((values == null) || values.isEmpty()) {
+                this.peer = new FlowVelocityModelValue(
+                        model, this.station, this.q, this.totalChannel, this.mainChannel, this.shearStress);
 
-            if (values.isEmpty()) {
-                peer = new FlowVelocityModelValue(
-                    model, station, q, totalChannel, mainChannel, shearStress);
-
-                session.save(peer);
+                session.save(this.peer);
             }
             else {
-                peer = values.get(0);
+                this.peer = values.get(0);
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportHYK.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportHYK.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,16 +8,15 @@
 
 package org.dive4elements.river.importer;
 
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.HYK;
 import org.dive4elements.river.model.River;
-
-import java.util.List;
-import java.util.ArrayList;
-
+import org.hibernate.Query;
 import org.hibernate.Session;
-import org.hibernate.Query;
-
-import org.apache.log4j.Logger;
 
 public class ImportHYK
 {
@@ -28,61 +27,66 @@
 
     protected List<ImportHYKEntry> entries;
 
+    protected StoreMode storeMode;
+
     protected HYK peer;
 
     public ImportHYK() {
-        entries = new ArrayList<ImportHYKEntry>();
+        this.entries = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
-    public ImportHYK(ImportRiver river, String description) {
+    public ImportHYK(final ImportRiver river, final String description) {
         this();
         this.river       = river;
         this.description = description;
     }
 
     public ImportRiver getRiver() {
-        return river;
+        return this.river;
     }
 
-    public void setRiver(ImportRiver river) {
+    public void setRiver(final ImportRiver river) {
         this.river = river;
     }
 
-    public void addEntry(ImportHYKEntry entry) {
-        entries.add(entry);
+    public void addEntry(final ImportHYKEntry entry) {
+        this.entries.add(entry);
         entry.setHYK(this);
     }
 
     public void storeDependencies() {
-        log.info("store HYK '" + description + "'");
+        log.info("store HYK '" + this.description + "'");
         getPeer();
-        for (int i = 0, N = entries.size(); i < N; ++i) {
-            ImportHYKEntry entry = entries.get(i);
+        for (int i = 0, N = this.entries.size(); i < N; ++i) {
+            final ImportHYKEntry entry = this.entries.get(i);
             log.info("  store km " + entry.getKm() +
-                " (" + (i+1) + " of " + N + ")");
+                    " (" + (i+1) + " of " + N + ")");
             entry.storeDependencies();
         }
     }
 
     public HYK getPeer() {
-        if (peer == null) {
-            River r = river.getPeer();
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from HYK where river=:river and description=:description");
+        if (this.peer == null) {
+            final River r = this.river.getPeer();
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            final Query query = session.createQuery(
+                    "from HYK where river=:river and description=:description");
             query.setParameter("river", r);
-            query.setParameter("description", description);
-            List<HYK> hyks = query.list();
+            query.setParameter("description", this.description);
+            final List<HYK> hyks = query.list();
             if (hyks.isEmpty()) {
-                peer = new HYK(r, description);
-                session.save(peer);
+                this.peer = new HYK(r, this.description);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = hyks.get(0);
+                this.peer = hyks.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportHYKEntry.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportHYKEntry.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,17 +8,16 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.HYKEntry;
-import org.dive4elements.river.model.HYK;
-
+import java.math.BigDecimal;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import java.util.ArrayList;
 
-import java.math.BigDecimal;
-
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.HYK;
+import org.dive4elements.river.model.HYKEntry;
+import org.hibernate.Query;
 import org.hibernate.Session;
-import org.hibernate.Query;
 
 public class ImportHYKEntry
 {
@@ -28,17 +27,20 @@
 
     protected List<ImportHYKFormation> formations;
 
+    protected StoreMode storeMode;
+
     protected HYKEntry peer;
 
     public ImportHYKEntry() {
-        formations = new ArrayList<ImportHYKFormation>();
+        this.formations = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
     public ImportHYKEntry(
-        ImportHYK  hyk,
-        BigDecimal km,
-        Date       measure
-    ) {
+            final ImportHYK  hyk,
+            final BigDecimal km,
+            final Date       measure
+            ) {
         this();
         this.hyk     = hyk;
         this.km      = km;
@@ -46,56 +48,63 @@
     }
 
     public ImportHYK getHYK() {
-        return hyk;
+        return this.hyk;
     }
 
-    public void setHYK(ImportHYK hyk) {
+    public void setHYK(final ImportHYK hyk) {
         this.hyk = hyk;
     }
 
     public BigDecimal getKm() {
-        return km;
+        return this.km;
     }
 
-    public void setKm(BigDecimal km) {
+    public void setKm(final BigDecimal km) {
         this.km = km;
     }
 
-    public void addFormation(ImportHYKFormation formation) {
-        int numFormation = formations.size();
-        formations.add(formation);
+    public void addFormation(final ImportHYKFormation formation) {
+        final int numFormation = this.formations.size();
+        this.formations.add(formation);
         formation.setFormationNum(numFormation);
         formation.setEntry(this);
     }
 
     public void storeDependencies() {
         getPeer();
-        for (ImportHYKFormation formation: formations) {
+        for (final ImportHYKFormation formation: this.formations) {
             formation.storeDependencies();
         }
     }
 
     public HYKEntry getPeer() {
-        if (peer == null) {
-            HYK h = hyk.getPeer();
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from HYKEntry where HYK=:hyk " +
-                "and km=:km and measure=:measure");
-            query.setParameter("hyk", h);
-            query.setParameter("km", km);
-            query.setParameter("measure", measure);
-            List<HYKEntry> entries = query.list();
-            if (entries.isEmpty()) {
-                peer = new HYKEntry(h, km, measure);
-                session.save(peer);
+        if (this.peer == null) {
+            final HYK h = this.hyk.getPeer();
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            List<HYKEntry> entries;
+            if (this.hyk.storeMode == StoreMode.INSERT)
+                entries = null;
+            else {
+                final Query query = session.createQuery(
+                        "from HYKEntry where HYK=:hyk " +
+                        "and km=:km and measure=:measure");
+                query.setParameter("hyk", h);
+                query.setParameter("km", this.km);
+                query.setParameter("measure", this.measure);
+                entries = query.list();
+            }
+            if ((entries == null) || entries.isEmpty()) {
+                this.peer = new HYKEntry(h, this.km, this.measure);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = entries.get(0);
+                this.peer = entries.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportHYKFlowZone.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportHYKFlowZone.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,16 +8,15 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.HYKFormation;
+import java.math.BigDecimal;
+import java.util.List;
+
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.HYKFlowZone;
 import org.dive4elements.river.model.HYKFlowZoneType;
-
-import org.hibernate.Session;
+import org.dive4elements.river.model.HYKFormation;
 import org.hibernate.Query;
-
-import java.util.List;
-
-import java.math.BigDecimal;
+import org.hibernate.Session;
 
 public class ImportHYKFlowZone
 {
@@ -32,11 +31,11 @@
     }
 
     public ImportHYKFlowZone(
-        ImportHYKFormation    formation,
-        ImportHYKFlowZoneType type,
-        BigDecimal            a,
-        BigDecimal            b
-    ) {
+            final ImportHYKFormation    formation,
+            final ImportHYKFlowZoneType type,
+            final BigDecimal            a,
+            final BigDecimal            b
+            ) {
         this.formation = formation;
         this.type      = type;
         this.a         = a;
@@ -44,10 +43,10 @@
     }
 
     public ImportHYKFormation getFormation() {
-        return formation;
+        return this.formation;
     }
 
-    public void setFormation(ImportHYKFormation formation) {
+    public void setFormation(final ImportHYKFormation formation) {
         this.formation = formation;
     }
 
@@ -56,29 +55,34 @@
     }
 
     public HYKFlowZone getPeer() {
-        if (peer == null) {
-            HYKFormation    f = formation.getPeer();
-            HYKFlowZoneType t = type.getPeer();
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from HYKFlowZone where formation=:formation " +
-                "and type=:type and a=:a and b=:b");
-            query.setParameter("formation", f);
-            query.setParameter("type", t);
-            query.setParameter("a", a);
-            query.setParameter("b", b);
-            List<HYKFlowZone> zones = query.list();
-            if (zones.isEmpty()) {
-                peer = new HYKFlowZone(f, t, a, b);
-                session.save(peer);
+        if (this.peer == null) {
+            final HYKFormation    f = this.formation.getPeer();
+            final HYKFlowZoneType t = this.type.getPeer();
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            List<HYKFlowZone> zones;
+            if (this.formation.storeMode == StoreMode.INSERT)
+                zones = null;
+            else {
+                final Query query = session.createQuery(
+                        "from HYKFlowZone where formation=:formation " +
+                        "and type=:type and a=:a and b=:b");
+                query.setParameter("formation", f);
+                query.setParameter("type", t);
+                query.setParameter("a", this.a);
+                query.setParameter("b", this.b);
+                zones = query.list();
+            }
+            if ((zones == null) || zones.isEmpty()) {
+                this.peer = new HYKFlowZone(f, t, this.a, this.b);
+                session.save(this.peer);
             }
             else {
-                peer = zones.get(0);
+                this.peer = zones.get(0);
             }
 
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
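The same store-mode-aware lookup recurs in every getPeer() touched by this changeset: if the parent object was freshly inserted, the child skips the Hibernate query and inserts directly; otherwise it queries first and only inserts on a miss. A schematic sketch of the pattern (EntityType, parentEntity, key and buildNewPeer() are placeholders, not identifiers from the d4e-river code base):

    // Schematic only; resolves the Hibernate peer for one importer object.
    EntityType getPeer(final StoreMode parentStoreMode) {
        if (this.peer == null) {
            final Session session = ImporterSession.getInstance().getDatabaseSession();
            List<EntityType> existing = null;
            if (parentStoreMode != StoreMode.INSERT) {
                // The parent existed before this run, so matching child rows may exist, too.
                final Query query = session.createQuery(
                    "from EntityType where parent=:parent and key=:key");
                query.setParameter("parent", this.parentEntity);
                query.setParameter("key", this.key);
                existing = query.list();
            }
            if (existing == null || existing.isEmpty()) {
                this.peer = buildNewPeer();          // create the mapped object
                session.save(this.peer);
                this.storeMode = StoreMode.INSERT;   // lets our own children take the fast path
            }
            else {
                this.peer = existing.get(0);
                this.storeMode = StoreMode.UPDATE;
            }
        }
        return this.peer;
    }

The fast path presumably exists to avoid one SELECT per imported object whenever the parent is already known to be new.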
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportHYKFormation.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportHYKFormation.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,16 +8,15 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.HYKFormation;
-import org.dive4elements.river.model.HYKEntry;
-
+import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.hibernate.Session;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.HYKEntry;
+import org.dive4elements.river.model.HYKFormation;
 import org.hibernate.Query;
-
-import java.math.BigDecimal;
+import org.hibernate.Session;
 
 public class ImportHYKFormation
 {
@@ -31,21 +30,24 @@
 
     protected List<ImportHYKFlowZone> zones;
 
+    protected StoreMode storeMode;
+
     protected HYKFormation peer;
 
     public ImportHYKFormation() {
-        zones = new ArrayList<ImportHYKFlowZone>();
+        this.zones = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
     public ImportHYKFormation(
-        int            formationNum,
-        ImportHYKEntry entry,
-        BigDecimal     top,
-        BigDecimal     bottom,
-        BigDecimal     distanceVL,
-        BigDecimal     distanceHF,
-        BigDecimal     distanceVR
-    ) {
+            final int            formationNum,
+            final ImportHYKEntry entry,
+            final BigDecimal     top,
+            final BigDecimal     bottom,
+            final BigDecimal     distanceVL,
+            final BigDecimal     distanceHF,
+            final BigDecimal     distanceVR
+            ) {
         this();
         this.formationNum = formationNum;
         this.entry        = entry;
@@ -56,103 +58,110 @@
         this.distanceVR   = distanceVR;
     }
 
-    public void addFlowZone(ImportHYKFlowZone zone) {
-        zones.add(zone);
+    public void addFlowZone(final ImportHYKFlowZone zone) {
+        this.zones.add(zone);
         zone.setFormation(this);
     }
 
     public int getFormationNum() {
-        return formationNum;
+        return this.formationNum;
     }
 
-    public void setFormationNum(int formationNum) {
+    public void setFormationNum(final int formationNum) {
         this.formationNum = formationNum;
     }
 
     public ImportHYKEntry getEntry() {
-        return entry;
+        return this.entry;
     }
 
-    public void setEntry(ImportHYKEntry entry) {
+    public void setEntry(final ImportHYKEntry entry) {
         this.entry = entry;
     }
 
     public BigDecimal getTop() {
-        return top;
+        return this.top;
     }
 
-    public void setTop(BigDecimal top) {
+    public void setTop(final BigDecimal top) {
         this.top = top;
     }
 
     public BigDecimal getBottom() {
-        return bottom;
+        return this.bottom;
     }
 
-    public void setBottom(BigDecimal bottom) {
+    public void setBottom(final BigDecimal bottom) {
         this.bottom = bottom;
     }
 
     public BigDecimal getDistanceVL() {
-        return distanceVL;
+        return this.distanceVL;
     }
 
-    public void setDistanceVL(BigDecimal distanceVL) {
+    public void setDistanceVL(final BigDecimal distanceVL) {
         this.distanceVL = distanceVL;
     }
 
     public BigDecimal getDistanceHF() {
-        return distanceHF;
+        return this.distanceHF;
     }
 
-    public void setDistanceHF(BigDecimal distanceHF) {
+    public void setDistanceHF(final BigDecimal distanceHF) {
         this.distanceHF = distanceHF;
     }
 
     public BigDecimal getDistanceVR() {
-        return distanceVR;
+        return this.distanceVR;
     }
 
-    public void setDistanceVR(BigDecimal distanceVR) {
+    public void setDistanceVR(final BigDecimal distanceVR) {
         this.distanceVR = distanceVR;
     }
 
     public void storeDependencies() {
         getPeer();
-        for (ImportHYKFlowZone zone: zones) {
+        for (final ImportHYKFlowZone zone: this.zones) {
             zone.storeDependencies();
         }
     }
 
     public HYKFormation getPeer() {
-        if (peer == null) {
-            HYKEntry e = entry.getPeer();
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from HYKFormation where formationNum=:formationNum " +
-                "and entry=:entry and top=:top and bottom=:bottom " +
-                "and distanceVL=:distanceVL and distanceHF=:distanceHF " +
-                "and distanceVR=:distanceVR");
-            query.setParameter("formationNum", formationNum);
-            query.setParameter("entry", e);
-            query.setParameter("top", top);
-            query.setParameter("bottom", bottom);
-            query.setParameter("distanceVL", distanceVL);
-            query.setParameter("distanceHF", distanceHF);
-            query.setParameter("distanceVR", distanceVR);
-            List<HYKFormation> formations = query.list();
-            if (formations.isEmpty()) {
-                peer = new HYKFormation(
-                    formationNum, e, top, bottom,
-                    distanceVL, distanceHF, distanceVR);
-                session.save(peer);
+        if (this.peer == null) {
+            final HYKEntry e = this.entry.getPeer();
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            List<HYKFormation> formations;
+            if (this.entry.storeMode == StoreMode.INSERT)
+                formations = null;
+            else {
+                final Query query = session.createQuery(
+                        "from HYKFormation where formationNum=:formationNum " +
+                                "and entry=:entry and top=:top and bottom=:bottom " +
+                                "and distanceVL=:distanceVL and distanceHF=:distanceHF " +
+                        "and distanceVR=:distanceVR");
+                query.setParameter("formationNum", this.formationNum);
+                query.setParameter("entry", e);
+                query.setParameter("top", this.top);
+                query.setParameter("bottom", this.bottom);
+                query.setParameter("distanceVL", this.distanceVL);
+                query.setParameter("distanceHF", this.distanceHF);
+                query.setParameter("distanceVR", this.distanceVR);
+                formations = query.list();
+            }
+            if ((formations == null) || formations.isEmpty()) {
+                this.peer = new HYKFormation(
+                        this.formationNum, e, this.top, this.bottom,
+                        this.distanceVL, this.distanceHF, this.distanceVR);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = formations.get(0);
+                this.peer = formations.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportPorosity.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportPorosity.java	Wed Apr 11 14:20:01 2018 +0200
@@ -12,12 +12,12 @@
 import java.util.List;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.Porosity;
+import org.dive4elements.river.model.River;
 import org.hibernate.Query;
 import org.hibernate.Session;
 
-import org.dive4elements.river.model.River;
-import org.dive4elements.river.model.Porosity;
-
 
 public class ImportPorosity {
 
@@ -33,82 +33,88 @@
 
     protected List<ImportPorosityValue> values;
 
-    public ImportPorosity(String description) {
+    protected StoreMode storeMode;
+
+    public ImportPorosity(final String description) {
         this.description = description;
-        this.values = new ArrayList<ImportPorosityValue>();
+        this.values = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
     public String getDescription() {
-        return description;
+        return this.description;
     }
 
-    public void setDepth(ImportDepth depth) {
+    public void setDepth(final ImportDepth depth) {
         this.depth = depth;
     }
 
-    public void setTimeInterval(ImportTimeInterval importTimeInterval) {
+    public void setTimeInterval(final ImportTimeInterval importTimeInterval) {
         this.timeInterval = importTimeInterval;
     }
 
-    public void addValue(ImportPorosityValue value) {
-        values.add(value);
+    public void addValue(final ImportPorosityValue value) {
+        this.values.add(value);
     }
 
-    public void storeDependencies(River river) {
-        log.info("store dependencies");
+    public void storeDependencies(final River river) {
+        log.info("store dependencies for '" + getDescription() + "'");
 
-        if (depth != null) {
-            depth.storeDependencies();
+        if (this.depth != null) {
+            this.depth.storeDependencies();
         }
 
-        Porosity peer = getPeer(river);
+        final Porosity peer = getPeer(river);
 
         if (peer != null) {
-            log.info("store porosity values.");
-            for (ImportPorosityValue value : values) {
-                value.storeDependencies(peer);
+            // log.info("store porosity values.");
+            for (final ImportPorosityValue value : this.values) {
+                value.storeDependencies(peer, this.storeMode);
             }
+            log.info("Porosity values processed: " + this.values.size());
         }
     }
 
-    public Porosity getPeer(River river) {
-        log.info("get peer");
+    public Porosity getPeer(final River river) {
+        // log.info("get peer");
 
-        if (depth == null) {
-            log.warn("cannot store porosity '" + description
-                + "': no depth");
+        if (this.depth == null) {
+            log.warn("cannot store porosity '" + this.description
+                    + "': no depth");
             return null;
         }
 
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
 
-            Query query = session.createQuery("from Porosity where "
-                + "   river=:river and "
-                + "   depth=:depth and "
-                + "   description=:description");
+            final Query query = session.createQuery("from Porosity where "
+                    + "   river=:river and "
+                    + "   depth=:depth and "
+                    + "   description=:description");
 
             query.setParameter("river", river);
-            query.setParameter("depth", depth.getPeer());
-            query.setParameter("description", description);
+            query.setParameter("depth", this.depth.getPeer());
+            query.setParameter("description", this.description);
 
-            List<Porosity> porosity = query.list();
+            final List<Porosity> porosity = query.list();
 
             if (porosity.isEmpty()) {
                 log.debug("Create new Porosity DB instance.");
 
-                peer = new Porosity(river, depth.getPeer(),
-                    description, timeInterval.getPeer());
+                this.peer = new Porosity(river, this.depth.getPeer(),
+                        this.description, this.timeInterval.getPeer());
 
-                session.save(peer);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = porosity.get(0);
+                this.peer = porosity.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportPorosityValue.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportPorosityValue.java	Wed Apr 11 14:20:01 2018 +0200
@@ -9,22 +9,20 @@
 package org.dive4elements.river.importer;
 
 import java.math.BigDecimal;
-
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.Porosity;
 import org.dive4elements.river.model.PorosityValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 
 public class ImportPorosityValue {
 
     private static final Logger log =
-        Logger.getLogger(ImportPorosityValue.class);
+            Logger.getLogger(ImportPorosityValue.class);
 
 
     protected PorosityValue peer;
@@ -39,11 +37,11 @@
 
 
     public ImportPorosityValue(
-        BigDecimal station,
-        BigDecimal shoreOffset,
-        BigDecimal porosity,
-        String     description
-    ) {
+            final BigDecimal station,
+            final BigDecimal shoreOffset,
+            final BigDecimal porosity,
+            final String     description
+            ) {
         this.station     = station;
         this.shoreOffset = shoreOffset;
         this.porosity    = porosity;
@@ -51,53 +49,57 @@
     }
 
 
-    public void storeDependencies(Porosity porosity) {
-        log.info("store dependencies");
+    public void storeDependencies(final Porosity porosity, final StoreMode parentStoreMode) {
+        // log.info("store dependencies");
 
-        getPeer(porosity);
+        getPeer(porosity, parentStoreMode);
     }
 
 
-    public PorosityValue getPeer(Porosity porosity) {
+    public PorosityValue getPeer(final Porosity porosity, final StoreMode parentStoreMode) {
         log.info("get peer");
 
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-
-            Query query = session.createQuery(
-                "from PorosityValue "
-                + "where porosity=:porosity "
-                + "and station between :station - 0.0001f "
-                + "    and :station + 0.0001f "
-                + "and porosityValue between :poros -0.0001f "
-                + "    and :poros + 0.0001f "
-                + "and description=:description");
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            List<PorosityValue> values;
+            if (parentStoreMode == StoreMode.INSERT)
+                values = null;
+            else {
+                final Query query = session.createQuery(
+                        "from PorosityValue "
+                                + "where porosity=:porosity "
+                                + "and station between :station - 0.0001f "
+                                + "    and :station + 0.0001f "
+                                + "and porosityValue between :poros -0.0001f "
+                                + "    and :poros + 0.0001f "
+                                + "and description=:description");
 
-            query.setParameter("porosity", porosity);
-            query.setParameter("station", station.floatValue());
-            query.setParameter("poros", this.porosity.floatValue());
-            query.setParameter("description", description);
+                query.setParameter("porosity", porosity);
+                query.setParameter("station", this.station.floatValue());
+                query.setParameter("poros", this.porosity.floatValue());
+                query.setParameter("description", this.description);
 
-            List<PorosityValue> values = query.list();
-            if (values.isEmpty()) {
-                log.debug("Create new PorosityValue DB instance.");
+                values = query.list();
+            }
+            if ((values == null) || values.isEmpty()) {
+                // log.debug("Create new PorosityValue DB instance.");
 
-                peer = new PorosityValue(
-                    porosity,
-                    station,
-                    shoreOffset,
-                    this.porosity,
-                    description);
+                this.peer = new PorosityValue(
+                        porosity,
+                        this.station,
+                        this.shoreOffset,
+                        this.porosity,
+                        this.description);
 
-                session.save(peer);
+                session.save(this.peer);
             }
             else {
-                peer = values.get(0);
+                this.peer = values.get(0);
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Wed Apr 11 14:20:01 2018 +0200
@@ -391,6 +391,10 @@
         return this.wstFile.getParentFile().getParentFile().getParentFile();
     }
 
+    public AnnotationClassifier getAnnotationClassifier() {
+        return this.annotationClassifier;
+    }
+
     public void parseDependencies() throws IOException {
         log.info("Root dir is '" + getRiverDir() + "'");
         parseGauges();
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoad.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoad.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,9 +8,11 @@
 
 package org.dive4elements.river.importer;
 
+import java.util.ArrayList;
 import java.util.List;
-import java.util.ArrayList;
 
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.GrainFraction;
 import org.dive4elements.river.model.SedimentLoad;
 import org.dive4elements.river.model.TimeInterval;
@@ -19,6 +21,8 @@
 
 public class ImportSedimentLoad
 {
+    private static final Logger log = Logger.getLogger(ImportSedimentLoad.class);
+
     private SedimentLoad peer;
 
     private ImportGrainFraction grainFraction;
@@ -27,92 +31,98 @@
     private String              description;
     private Integer             kind;
 
-    private List<ImportSedimentLoadValue> values;
+    private final List<ImportSedimentLoadValue> values;
+
+    protected StoreMode storeMode;
 
     public ImportSedimentLoad() {
-        this.values = new ArrayList<ImportSedimentLoadValue>();
+        this.values = new ArrayList<>();
     }
 
     public ImportSedimentLoad(
-        ImportGrainFraction grainFraction,
-        ImportTimeInterval  timeInterval,
-        ImportTimeInterval  sqTimeInterval,
-        String              description,
-        Integer             kind
-    ) {
+            final ImportGrainFraction grainFraction,
+            final ImportTimeInterval  timeInterval,
+            final ImportTimeInterval  sqTimeInterval,
+            final String              description,
+            final Integer             kind
+            ) {
         this.grainFraction  = grainFraction;
         this.timeInterval   = timeInterval;
         this.sqTimeInterval = sqTimeInterval;
         this.description    = description;
         this.kind           = kind;
 
-        this.values = new ArrayList<ImportSedimentLoadValue>();
+        this.values = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
-    public void addValue(ImportSedimentLoadValue value) {
-        values.add(value);
+    public void addValue(final ImportSedimentLoadValue value) {
+        this.values.add(value);
     }
 
     public void storeDependencies() {
-        grainFraction.getPeer();
-        timeInterval.getPeer();
+        log.info("store dependencies for '" + this.description + "'");
+        this.grainFraction.getPeer();
+        this.timeInterval.getPeer();
 
-        if (sqTimeInterval != null) {
-            sqTimeInterval.getPeer();
+        if (this.sqTimeInterval != null) {
+            this.sqTimeInterval.getPeer();
         }
 
         getPeer();
 
-        for (ImportSedimentLoadValue value : values) {
-            value.storeDependencies(peer);
+        for (final ImportSedimentLoadValue value : this.values) {
+            value.storeDependencies(this.peer, this.storeMode);
         }
-
+        log.info("Values processed: " + this.values.size());
     }
 
     public SedimentLoad getPeer() {
 
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-
-            String sqtquery = sqTimeInterval == null
-                ? "sq_time_interval_id is null"
-                : "sqTimeInterval = :sqTimeInterval";
-            Query query = session.createQuery(
-                "from SedimentLoad where " +
-                "   grainFraction = :grainFraction and " +
-                "   timeInterval = :timeInterval and " +
-                "   description = :description and " +
-                "   kind = :kind and " +
-                    sqtquery);
-
-            GrainFraction gf = grainFraction.getPeer();
-            TimeInterval  ti = timeInterval.getPeer();
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
 
-            TimeInterval sqti = sqTimeInterval != null
-                ? sqTimeInterval.getPeer()
-                : null;
-
-            query.setParameter("grainFraction", gf);
-            query.setParameter("timeInterval", ti);
+            final String sqtquery = this.sqTimeInterval == null
+                    ? "sq_time_interval_id is null"
+                    : "sqTimeInterval = :sqTimeInterval";
+            final Query query = session.createQuery(
+                    "from SedimentLoad where " +
+                            "   grainFraction = :grainFraction and " +
+                            "   timeInterval = :timeInterval and " +
+                            "   description = :description and " +
+                            "   kind = :kind and " +
+                            sqtquery);
 
-            if (sqti != null) {
-                query.setParameter("sqTimeInterval", sqti);
-            }
-            query.setParameter("description", description);
-            query.setParameter("kind", kind);
+            final GrainFraction gf = this.grainFraction.getPeer();
+            final TimeInterval  ti = this.timeInterval.getPeer();
 
-            List<SedimentLoad> loads = query.list();
-            if (loads.isEmpty()) {
-                peer = new SedimentLoad(gf, ti, sqti, description, kind);
-                session.save(peer);
-            }
-            else {
-                peer = loads.get(0);
-            }
+            final TimeInterval sqti = this.sqTimeInterval != null
+                    ? this.sqTimeInterval.getPeer()
+                    : null;
+
+            query.setParameter("grainFraction", gf);
+            query.setParameter("timeInterval", ti);
+
+            if (sqti != null) {
+                query.setParameter("sqTimeInterval", sqti);
+            }
+            query.setParameter("description", this.description);
+            query.setParameter("kind", this.kind);
+
+            final List<SedimentLoad> loads = query.list();
+            if (loads.isEmpty()) {
+                this.peer = new SedimentLoad(gf, ti, sqti, this.description, this.kind);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
+            }
+            else {
+                this.peer = loads.get(0);
+                this.storeMode = StoreMode.UPDATE;
+            }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLS.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLS.java	Wed Apr 11 14:20:01 2018 +0200
@@ -12,15 +12,14 @@
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.GrainFraction;
 import org.dive4elements.river.model.River;
 import org.dive4elements.river.model.SedimentLoadLS;
 import org.dive4elements.river.model.TimeInterval;
 import org.dive4elements.river.model.Unit;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 
 public class ImportSedimentLoadLS {
@@ -35,53 +34,56 @@
 
     private ImportTimeInterval sqTimeInterval;
 
-    private String description;
+    private final String description;
 
     private Integer kind;
 
-    private List<ImportSedimentLoadLSValue> values;
+    private final List<ImportSedimentLoadLSValue> values;
+
+    protected StoreMode storeMode;
 
     private SedimentLoadLS peer;
 
-    public ImportSedimentLoadLS(String description) {
-        this.values = new ArrayList<ImportSedimentLoadLSValue>();
+    public ImportSedimentLoadLS(final String description) {
+        this.values = new ArrayList<>();
         this.description = description;
+        this.storeMode = StoreMode.NONE;
     }
 
-    public void setTimeInterval(ImportTimeInterval timeInterval) {
+    public void setTimeInterval(final ImportTimeInterval timeInterval) {
         this.timeInterval = timeInterval;
     }
 
-    public void setSQTimeInterval(ImportTimeInterval sqTimeInterval) {
+    public void setSQTimeInterval(final ImportTimeInterval sqTimeInterval) {
         this.sqTimeInterval = sqTimeInterval;
     }
 
-    public void setUnit(ImportUnit unit) {
+    public void setUnit(final ImportUnit unit) {
         this.unit = unit;
     }
 
-    public void setGrainFraction(ImportGrainFraction grainFraction) {
+    public void setGrainFraction(final ImportGrainFraction grainFraction) {
         this.grainFraction = grainFraction;
     }
 
-    public void setKind(Integer kind) {
+    public void setKind(final Integer kind) {
         this.kind = kind;
     }
 
-    public void addValue(ImportSedimentLoadLSValue value) {
+    public void addValue(final ImportSedimentLoadLSValue value) {
         this.values.add(value);
     }
 
-    public void storeDependencies(River river) {
-        log.debug("store dependencies");
+    public void storeDependencies(final River river) {
+        log.info("store dependencies for '" + this.description + "'");
 
-        SedimentLoadLS peer = getPeer(river);
+        final SedimentLoadLS peer = getPeer(river);
 
         if (peer != null) {
             int i = 0;
 
-            for (ImportSedimentLoadLSValue value : values) {
-                value.storeDependencies(peer);
+            for (final ImportSedimentLoadLSValue value : this.values) {
+                value.storeDependencies(peer, this.storeMode);
                 i++;
             }
 
@@ -89,40 +91,38 @@
         }
     }
 
-    public SedimentLoadLS getPeer(River river) {
+    public SedimentLoadLS getPeer(final River river) {
         log.debug("get peer");
 
-        GrainFraction gf = grainFraction != null ? grainFraction.getPeer()
-            : null;
+        final GrainFraction gf = this.grainFraction != null ? this.grainFraction.getPeer()
+                : null;
 
-        Unit u = unit != null ? unit.getPeer() : null;
+        final Unit u = this.unit != null ? this.unit.getPeer() : null;
 
-        TimeInterval ti = timeInterval != null ? timeInterval.getPeer() : null;
-        TimeInterval sqti = sqTimeInterval != null
-                ? sqTimeInterval.getPeer()
-                : null;
+        final TimeInterval ti = (this.timeInterval != null) ? this.timeInterval.getPeer() : null;
+        final TimeInterval sqti = (this.sqTimeInterval != null) ? this.sqTimeInterval.getPeer() : null;
 
         if (ti == null || u == null) {
             log.warn(
-                "Skip invalid SedimentLoadLS: time interval or unit null!");
+                    "Skip invalid SedimentLoadLS: time interval or unit null!");
             return null;
         }
 
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
 
-            String sqtquery = sqTimeInterval == null ?
-                "sq_time_interval_id is null" :
-                "sqTimeInterval = :sqTimeInterval";
-            Query query = session.createQuery("from SedimentLoadLS where "
-                + "   river=:river and "
-                + "   grainFraction=:grainFraction and "
-                + "   unit=:unit and "
-                + "   timeInterval=:timeInterval and "
-                + "   description=:description and "
-                + "   kind = :kind and " +
-                      sqtquery);
+            final String sqtquery = this.sqTimeInterval == null ?
+                    "sq_time_interval_id is null" :
+                        "sqTimeInterval = :sqTimeInterval";
+                    "sqTimeInterval = :sqTimeInterval";
+                    + "   river=:river and "
+                    + "   grainFraction=:grainFraction and "
+                    + "   unit=:unit and "
+                    + "   timeInterval=:timeInterval and "
+                    + "   description=:description and "
+                    + "   kind = :kind and " +
+                    sqtquery);
 
             query.setParameter("river", river);
             query.setParameter("grainFraction", gf);
@@ -131,23 +131,25 @@
             if (sqti != null) {
                 query.setParameter("sqTimeInterval", sqti);
             }
-            query.setParameter("description", description);
-            query.setParameter("kind", kind);
+            query.setParameter("description", this.description);
+            query.setParameter("kind", this.kind);
 
-            List<SedimentLoadLS> loads = query.list();
+            final List<SedimentLoadLS> loads = query.list();
             if (loads.isEmpty()) {
                 log.debug("create new SedimentLoadLS");
 
-                peer = new SedimentLoadLS(river, u, ti, sqti, gf, description);
-                peer.setKind(this.kind);
-                session.save(peer);
+                this.peer = new SedimentLoadLS(river, u, ti, sqti, gf, this.description);
+                this.peer.setKind(this.kind);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = loads.get(0);
+                this.peer = loads.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLSValue.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLSValue.java	Wed Apr 11 14:20:01 2018 +0200
@@ -10,58 +10,60 @@
 
 import java.util.List;
 
-import org.hibernate.Session;
-import org.hibernate.Query;
-
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.SedimentLoadLS;
 import org.dive4elements.river.model.SedimentLoadLSValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 
 public class ImportSedimentLoadLSValue {
 
-    private Double station;
-    private Double value;
+    private final Double station;
+    private final Double value;
 
     private SedimentLoadLSValue peer;
 
 
-    public ImportSedimentLoadLSValue(Double station, Double value) {
+    public ImportSedimentLoadLSValue(final Double station, final Double value) {
         this.station = station;
         this.value   = value;
     }
 
 
-    public void storeDependencies(SedimentLoadLS sedimentLoadLS) {
-        getPeer(sedimentLoadLS);
+    public void storeDependencies(final SedimentLoadLS sedimentLoadLS, final StoreMode parentStoreMode) {
+        getPeer(sedimentLoadLS, parentStoreMode);
     }
 
 
-    public SedimentLoadLSValue getPeer(SedimentLoadLS sedimentLoadLS) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from SedimentLoadLSValue where " +
-                "   sedimentLoadLS=:sedimentLoadLS and " +
-                "   station=:station and " +
-                "   value=:value"
-            );
-
-            query.setParameter("sedimentLoadLS", sedimentLoadLS);
-            query.setParameter("station", station);
-            query.setParameter("value", value);
-
-            List<SedimentLoadLSValue> values = query.list();
-            if (values.isEmpty()) {
-                peer = new SedimentLoadLSValue(sedimentLoadLS, station, value);
-                session.save(peer);
+    public SedimentLoadLSValue getPeer(final SedimentLoadLS sedimentLoadLS, final StoreMode parentStoreMode) {
+        if (this.peer == null) {
+            List<SedimentLoadLSValue> values;
+            final Session session = ImporterSession.getInstance().getDatabaseSession();
+            if (parentStoreMode == StoreMode.INSERT)
+                values = null;
+            else {
+                final Query query = session.createQuery(
+                        "from SedimentLoadLSValue where " +
+                                "   sedimentLoadLS=:sedimentLoadLS and " +
+                                "   station=:station and " +
+                                "   value=:value"
+                        );
+                query.setParameter("sedimentLoadLS", sedimentLoadLS);
+                query.setParameter("station", this.station);
+                query.setParameter("value", this.value);
+                values = query.list();
+            }
+            if ((values == null) || values.isEmpty()) {
+                this.peer = new SedimentLoadLSValue(sedimentLoadLS, this.station, this.value);
+                session.save(this.peer);
             }
             else {
-                peer = values.get(0);
+                this.peer = values.get(0);
             }
         }
 
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
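
The value importers in this changeset (ImportSedimentLoadLSValue here, and ImportSedimentLoadValue, ImportWstColumnQRange and ImportWstColumn below) share one pattern: if the parent record was stored with StoreMode.INSERT, the duplicate lookup for child rows is skipped, because a freshly inserted parent cannot have children in the database yet. The following is a minimal, self-contained sketch of that decision only; the names StoreModeSketch, queryExistingChildren and getOrCreateChild are illustrative stand-ins for the Hibernate query and session.save() calls, not project classes.

    import java.util.Collections;
    import java.util.List;

    class StoreModeSketch {

        enum StoreMode { NONE, INSERT, UPDATE }

        /** Stand-in for the Hibernate duplicate lookup done in getPeer(). */
        static List<String> queryExistingChildren() {
            return Collections.emptyList();
        }

        static String getOrCreateChild(final StoreMode parentStoreMode) {
            // A freshly inserted parent cannot have children yet, so skip the query.
            final List<String> existing = parentStoreMode == StoreMode.INSERT
                    ? null
                    : queryExistingChildren();
            if (existing == null || existing.isEmpty()) {
                return "new child row";      // the importer would session.save(...) here
            }
            return existing.get(0);          // reuse the row already in the database
        }

        public static void main(final String[] args) {
            System.out.println(getOrCreateChild(StoreMode.INSERT));   // new child row
            System.out.println(getOrCreateChild(StoreMode.UPDATE));   // new child row (none found)
        }
    }
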
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadValue.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadValue.java	Wed Apr 11 14:20:01 2018 +0200
@@ -10,6 +10,7 @@
 
 import java.util.List;
 
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.MeasurementStation;
 import org.dive4elements.river.model.SedimentLoad;
 import org.dive4elements.river.model.SedimentLoadValue;
@@ -27,43 +28,48 @@
     }
 
     public ImportSedimentLoadValue(
-        MeasurementStation station,
-        Double             value
-    ) {
+            final MeasurementStation station,
+            final Double             value
+            ) {
         this.station      = station;
         this.value        = value;
     }
 
-    protected SedimentLoadValue getPeer(SedimentLoad sedimentLoad) {
+    protected SedimentLoadValue getPeer(final SedimentLoad sedimentLoad, final StoreMode parentStoreMode) {
 
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from SedimentLoadValue where " +
-                "   measurementStation = :station and " +
-                "   sedimentLoad = :sedimentLoad and " +
-                "   value = :value");
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            List<SedimentLoadValue> values;
+            if (parentStoreMode == StoreMode.INSERT)
+                values = null;
+            else {
+                final Query query = session.createQuery(
+                        "from SedimentLoadValue where " +
+                                "   measurementStation = :station and " +
+                                "   sedimentLoad = :sedimentLoad and " +
+                        "   value = :value");
 
-            query.setParameter("station", station);
-            query.setParameter("sedimentLoad", sedimentLoad);
-            query.setParameter("value", value);
+                query.setParameter("station", this.station);
+                query.setParameter("sedimentLoad", sedimentLoad);
+                query.setParameter("value", this.value);
 
-            List<SedimentLoadValue> values = query.list();
-            if (values.isEmpty()) {
-                peer = new SedimentLoadValue(sedimentLoad, station, value);
-                session.save(peer);
+                values = query.list();
+            }
+            if ((values == null) || values.isEmpty()) {
+                this.peer = new SedimentLoadValue(sedimentLoad, this.station, this.value);
+                session.save(this.peer);
             }
             else {
-                peer = values.get(0);
+                this.peer = values.get(0);
             }
         }
 
-        return peer;
+        return this.peer;
     }
 
-    public void storeDependencies(SedimentLoad sedimentLoad) {
-        getPeer(sedimentLoad);
+    public void storeDependencies(final SedimentLoad sedimentLoad, final StoreMode parentStoreMode) {
+        getPeer(sedimentLoad, parentStoreMode);
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportWst.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportWst.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,16 +8,15 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.Wst;
-import org.dive4elements.river.model.River;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.hibernate.Session;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.Wst;
 import org.hibernate.Query;
-
-import java.util.ArrayList;
-import java.util.List;
+import org.hibernate.Session;
 
 /** Not (yet) db-mapped WST object. */
 public class ImportWst
@@ -29,12 +28,12 @@
     }
 
     public static final ImportWstColumnFactory COLUMN_FACTORY =
-        new ImportWstColumnFactory() {
-            @Override
-            public ImportWstColumn create(ImportWst importWst, int position) {
-                return new ImportWstColumn(importWst, null, null, position);
-            }
-        };
+            new ImportWstColumnFactory() {
+        @Override
+        public ImportWstColumn create(final ImportWst importWst, final int position) {
+            return new ImportWstColumn(importWst, null, null, position);
+        }
+    };
 
     protected String description;
 
@@ -48,6 +47,8 @@
 
     protected boolean kmUp;
 
+    protected StoreMode storeMode;
+
     /** Wst as in db. */
     protected Wst peer;
 
@@ -55,123 +56,126 @@
         this(COLUMN_FACTORY);
     }
 
-    public ImportWst(ImportWstColumnFactory columnFactory) {
+    public ImportWst(final ImportWstColumnFactory columnFactory) {
         this.columnFactory = columnFactory;
-        kind = 0;
-        columns = new ArrayList<ImportWstColumn>();
+        this.kind = 0;
+        this.columns = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
-    public ImportWst(String description) {
+    public ImportWst(final String description) {
         this(description, COLUMN_FACTORY);
     }
 
     public ImportWst(
-        String description,
-        ImportWstColumnFactory columnFactory
-    ) {
+            final String description,
+            final ImportWstColumnFactory columnFactory
+            ) {
         this(columnFactory);
         this.description = description;
     }
 
     public String getDescription() {
-        return description;
+        return this.description;
     }
 
     public Integer getKind() {
-        return kind;
+        return this.kind;
     }
 
-    public void setKind(Integer kind) {
+    public void setKind(final Integer kind) {
         this.kind = kind;
     }
 
     public boolean getKmUp() {
-        return kmUp;
+        return this.kmUp;
     }
 
-    public void setKmUp(boolean kmUp) {
+    public void setKmUp(final boolean kmUp) {
         this.kmUp = kmUp;
     }
 
-    public void setDescription(String description) {
+    public void setDescription(final String description) {
         this.description = description;
     }
 
     /** Create columns that can be accessed with getColumn. */
-    public void setNumberColumns(int numColumns) {
+    public void setNumberColumns(final int numColumns) {
         for (int i = 0; i < numColumns; ++i) {
-            columns.add(columnFactory.create(this, i));
+            this.columns.add(this.columnFactory.create(this, i));
         }
     }
 
     public int getNumberColumns() {
-        return columns.size();
+        return this.columns.size();
     }
 
-    public ImportWstColumn getColumn(int index) {
-        return columns.get(index);
+    public ImportWstColumn getColumn(final int index) {
+        return this.columns.get(index);
     }
 
     public List<ImportWstColumn> getColumns() {
-        return columns;
+        return this.columns;
     }
 
     /** Adds a column. Assumes that columns wst is this instance. */
-    public void addColumn(ImportWstColumn column) {
-        columns.add(column);
+    public void addColumn(final ImportWstColumn column) {
+        this.columns.add(column);
     }
 
     public ImportUnit getUnit() {
-        return unit;
+        return this.unit;
     }
 
-    public void setUnit(ImportUnit unit) {
+    public void setUnit(final ImportUnit unit) {
         this.unit = unit;
     }
 
-    public void storeDependencies(River river) {
+    public void storeDependencies(final River river) {
 
-        log.info("store '" + description + "'");
+        log.info("store '" + this.description + "'");
         getPeer(river);
 
-        for (ImportWstColumn column: columns) {
+        for (final ImportWstColumn column: this.columns) {
             column.storeDependencies(river);
         }
 
-        Session session = ImporterSession.getInstance().getDatabaseSession();
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
         session.flush();
     }
 
     public boolean guessWaterLevelIncreasing() {
         int up = 0;
-        for (ImportWstColumn column: columns) {
+        for (final ImportWstColumn column: this.columns) {
             if (column.guessWaterLevelIncreasing()) ++up;
         }
-        return up > columns.size() - up;
+        return up > this.columns.size() - up;
     }
 
     /** Get corresponding mapped wst (from database). */
-    public Wst getPeer(River river) {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from Wst where " +
-                "river=:river and description=:description and kind=:kind");
+    public Wst getPeer(final River river) {
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            final Query query = session.createQuery(
+                    "from Wst where " +
+                    "river=:river and description=:description and kind=:kind");
             query.setParameter("river",       river);
-            query.setParameter("description", description);
-            query.setParameter("kind",        kind);
-            List<Wst> wsts = query.list();
+            query.setParameter("description", this.description);
+            query.setParameter("kind",        this.kind);
+            final List<Wst> wsts = query.list();
             if (wsts.isEmpty()) {
-                peer = new Wst(river, description, kind);
-                session.save(peer);
+                this.peer = new Wst(river, this.description, this.kind);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = wsts.get(0);
+                this.peer = wsts.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
 
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportWstColumn.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportWstColumn.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,21 +8,19 @@
 
 package org.dive4elements.river.importer;
 
-import org.dive4elements.river.model.Wst;
-import org.dive4elements.river.model.WstColumn;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.StoreMode;
 import org.dive4elements.river.model.River;
 import org.dive4elements.river.model.TimeInterval;
-
-import org.hibernate.Session;
+import org.dive4elements.river.model.Wst;
+import org.dive4elements.river.model.WstColumn;
 import org.hibernate.Query;
-
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Random;
-
-import java.math.BigDecimal;
-
-import org.apache.log4j.Logger;
+import org.hibernate.Session;
 
 
 /** Unmapped column of a WST. */
@@ -41,20 +39,23 @@
     protected List<ImportWstColumnQRange> columnQRanges;
     protected List<ImportWstColumnValue>  columnValues;
 
+    protected StoreMode storeMode;
+
     protected WstColumn peer;
 
     public ImportWstColumn() {
-        columnQRanges = new ArrayList<ImportWstColumnQRange>();
-        columnValues  = new ArrayList<ImportWstColumnValue>();
+        this.columnQRanges = new ArrayList<>();
+        this.columnValues  = new ArrayList<>();
+        this.storeMode = StoreMode.NONE;
     }
 
     public ImportWstColumn(
-        ImportWst wst,
-        String    name,
-        String    description,
-        Integer   position,
-        String    source
-    ) {
+            final ImportWst wst,
+            final String    name,
+            final String    description,
+            final Integer   position,
+            final String    source
+            ) {
         this();
         this.wst         = wst;
         this.name        = name;
@@ -63,114 +64,114 @@
     }
 
     public ImportWstColumn(
-        ImportWst wst,
-        String    name,
-        String    description,
-        Integer   position
-    ) {
+            final ImportWst wst,
+            final String    name,
+            final String    description,
+            final Integer   position
+            ) {
         this(wst, name, description, position, null);
     }
 
     public ImportWst getWst() {
-        return wst;
+        return this.wst;
     }
 
-    public void setWst(ImportWst wst) {
+    public void setWst(final ImportWst wst) {
         this.wst = wst;
     }
 
     public String getName() {
-        return name;
+        return this.name;
     }
 
-    public void setName(String name) {
+    public void setName(final String name) {
         this.name = name;
     }
 
     public String getDescription() {
-        return description;
+        return this.description;
     }
 
-    public void setDescription(String description) {
+    public void setDescription(final String description) {
         this.description = description;
     }
 
     public Integer getPosition() {
-        return position;
+        return this.position;
     }
 
-    public void setPosition(Integer position) {
+    public void setPosition(final Integer position) {
         this.position = position;
     }
 
     public String getSource() {
-        return source;
+        return this.source;
     }
 
-    public void setSource(String source) {
+    public void setSource(final String source) {
         this.source = source;
     }
 
-    public void addColumnValue(BigDecimal position, BigDecimal w) {
-        columnValues.add(
-            new ImportWstColumnValue(this, position, w));
+    public void addColumnValue(final BigDecimal position, final BigDecimal w) {
+        this.columnValues.add(
+                new ImportWstColumnValue(this, position, w));
     }
 
-    public void addColumnQRange(ImportWstQRange columnQRange) {
-        columnQRanges.add(
-            new ImportWstColumnQRange(this, columnQRange));
+    public void addColumnQRange(final ImportWstQRange columnQRange) {
+        this.columnQRanges.add(
+                new ImportWstColumnQRange(this, columnQRange));
     }
 
 
     /** Get the Column Values stored in this column. */
     public List<ImportWstColumnValue> getColumnValues() {
-        return columnValues;
+        return this.columnValues;
     }
 
 
-    public void storeDependencies(River river) {
-        log.info("store column '" + name + "'");
+    public void storeDependencies(final River river) {
+        log.info("store column '" + this.name + "'");
         getPeer(river);
 
-        for (ImportWstColumnQRange columnQRange: columnQRanges) {
+        for (final ImportWstColumnQRange columnQRange: this.columnQRanges) {
             columnQRange.getPeer(river);
         }
 
-        for (ImportWstColumnValue columnValue: columnValues) {
+        for (final ImportWstColumnValue columnValue: this.columnValues) {
             columnValue.getPeer(river);
         }
     }
 
     public ImportTimeInterval getTimeInterval() {
-        return timeInterval;
+        return this.timeInterval;
     }
 
-    public void setTimeInterval(ImportTimeInterval timeInterval) {
+    public void setTimeInterval(final ImportTimeInterval timeInterval) {
         this.timeInterval = timeInterval;
     }
 
     public boolean guessWaterLevelIncreasing() {
 
-        int N = columnValues.size();
+        final int N = this.columnValues.size();
 
         if (N < 2) {
             return true;
         }
 
-        Random r = new Random();
+        final Random r = new Random();
         int up = 0;
 
-        int S = N < 50 ? N : (int)(0.1f * N)+1;
+        final int S = N < 50 ? N : (int)(0.1f * N)+1;
         for (int s = 0; s < S; ++s) {
             int i1, i2;
             do {
                 i1 = r.nextInt(N-1);
                 i2 = r.nextInt(N-1);
             } while (i1 == i2);
-            ImportWstColumnValue b = columnValues.get(i1);
-            ImportWstColumnValue a = columnValues.get(i2);
+            ImportWstColumnValue b = this.columnValues.get(i1);
+            ImportWstColumnValue a = this.columnValues.get(i2);
             if (b.getPosition().compareTo(a.getPosition()) < 0) {
-                ImportWstColumnValue t = a; a = b; b = t;
+                final ImportWstColumnValue t = a; a = b; b = t;
             }
 
             if (a.getW().compareTo(b.getW()) < 0) ++up;
@@ -180,37 +181,42 @@
     }
 
     /** Get corresponding mapped wst-column (from database). */
-    public WstColumn getPeer(River river) {
-        if (peer == null) {
-            Wst w = wst.getPeer(river);
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from WstColumn where" +
-                " wst=:wst and name=:name" +
-                " and source=:source" +
-                " and position=:position");
-            query.setParameter("wst",      w);
-            query.setParameter("name",     name);
-            query.setParameter("position", position);
-            query.setParameter("source",   source);
+    public WstColumn getPeer(final River river) {
+        if (this.peer == null) {
+            final Wst w = this.wst.getPeer(river);
+            List<WstColumn> columns;
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            if (this.wst.storeMode == StoreMode.INSERT)
+                columns = null;
+            else {
+                final Query query = session.createQuery(
+                        "from WstColumn where" +
+                                " wst=:wst and name=:name" +
+                                " and source=:source" +
+                        " and position=:position");
+                query.setParameter("wst",      w);
+                query.setParameter("name",     this.name);
+                query.setParameter("position", this.position);
+                query.setParameter("source",   this.source);
+                columns = query.list();
+            }
 
-            TimeInterval ti = timeInterval != null
-                ? timeInterval.getPeer()
-                : null;
+            final TimeInterval ti = (this.timeInterval != null) ? this.timeInterval.getPeer() : null;
 
-            List<WstColumn> columns = query.list();
-            if (columns.isEmpty()) {
-                log.debug("source: " + source);
-                peer = new WstColumn(
-                    w, name, description, source, position, ti);
-                session.save(peer);
+            if ((columns == null) || columns.isEmpty()) {
+                log.debug("source: " + this.source);
+                this.peer = new WstColumn(
+                        w, this.name, this.description, this.source, this.position, ti);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = columns.get(0);
+                this.peer = columns.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 
 }
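
For reference, the guessWaterLevelIncreasing() heuristic kept largely unchanged above samples random pairs of column values, orders each pair by station and counts how often W grows with km; ImportWst then takes a majority vote over its columns (up > columns.size() - up). The standalone sketch below illustrates the idea under simplifying assumptions: plain double[] pairs instead of ImportWstColumnValue, and a minimum of three points so the pair sampling always terminates.

    import java.util.Arrays;
    import java.util.List;
    import java.util.Random;

    class WaterLevelTrendSketch {

        /** values holds {station, w} pairs in file order. */
        static boolean guessIncreasing(final List<double[]> values) {
            final int n = values.size();
            if (n < 3) {
                return true;                      // too few points to vote on
            }
            final Random r = new Random();
            // Sample every point for small columns, roughly 10 % otherwise.
            final int samples = n < 50 ? n : (int) (0.1f * n) + 1;
            int up = 0;
            for (int s = 0; s < samples; s++) {
                int i1, i2;
                do {
                    i1 = r.nextInt(n);
                    i2 = r.nextInt(n);
                } while (i1 == i2);
                double[] a = values.get(i1);
                double[] b = values.get(i2);
                if (b[0] < a[0]) {                // order the pair by station
                    final double[] t = a;
                    a = b;
                    b = t;
                }
                if (a[1] < b[1]) {                // W grows with km in this pair
                    up++;
                }
            }
            return up > samples - up;             // simple majority of sampled pairs
        }

        public static void main(final String[] args) {
            final List<double[]> column = Arrays.asList(
                    new double[] { 0.0, 10.0 },
                    new double[] { 1.0, 10.5 },
                    new double[] { 2.0, 11.0 },
                    new double[] { 3.0, 11.2 });
            System.out.println(guessIncreasing(column));  // prints true
        }
    }
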
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportWstColumnQRange.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportWstColumnQRange.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,71 +8,81 @@
 
 package org.dive4elements.river.importer;
 
+import java.util.List;
+
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.WstColumn;
 import org.dive4elements.river.model.WstColumnQRange;
 import org.dive4elements.river.model.WstQRange;
-import org.dive4elements.river.model.WstColumn;
-import org.dive4elements.river.model.River;
-
+import org.hibernate.Query;
 import org.hibernate.Session;
-import org.hibernate.Query;
-
-import java.util.List;
 
 public class ImportWstColumnQRange
 {
     protected ImportWstColumn wstColumn;
     protected ImportWstQRange qRange;
 
+    protected StoreMode storeMode;
+
     protected WstColumnQRange peer;
 
     public ImportWstColumnQRange() {
     }
 
     public ImportWstColumnQRange(
-        ImportWstColumn wstColumn,
-        ImportWstQRange qRange
-    ) {
+            final ImportWstColumn wstColumn,
+            final ImportWstQRange qRange
+            ) {
         this.wstColumn = wstColumn;
         this.qRange    = qRange;
+        this.storeMode = StoreMode.NONE;
     }
 
     public ImportWstColumn getWstColumn() {
-        return wstColumn;
+        return this.wstColumn;
     }
 
-    public void setWstColumn(ImportWstColumn wstColumn) {
+    public void setWstColumn(final ImportWstColumn wstColumn) {
         this.wstColumn = wstColumn;
     }
 
     public ImportWstQRange getQRange() {
-        return qRange;
+        return this.qRange;
     }
 
-    public void setQRange(ImportWstQRange qRange) {
+    public void setQRange(final ImportWstQRange qRange) {
         this.qRange = qRange;
     }
 
-    public WstColumnQRange getPeer(River river) {
-        if (peer == null) {
-            WstColumn c = wstColumn.getPeer(river);
-            WstQRange q = qRange.getPeer(river);
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery(
-                "from WstColumnQRange where " +
-                "wstColumn=:c and wstQRange=:q");
-            query.setParameter("c", c);
-            query.setParameter("q", q);
-            List<WstColumnQRange> cols = query.list();
-            if (cols.isEmpty()) {
-                peer = new WstColumnQRange(c, q);
-                session.save(peer);
+    public WstColumnQRange getPeer(final River river) {
+        if (this.peer == null) {
+            final WstColumn c = this.wstColumn.getPeer(river);
+            final WstQRange q = this.qRange.getPeer(river);
+            List<WstColumnQRange> cols;
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            if (this.wstColumn.storeMode == StoreMode.INSERT)
+                cols = null;
+            else {
+                final Query query = session.createQuery(
+                        "from WstColumnQRange where " +
+                        "wstColumn=:c and wstQRange=:q");
+                query.setParameter("c", c);
+                query.setParameter("q", q);
+                cols = query.list();
+            }
+            if ((cols == null) || cols.isEmpty()) {
+                this.peer = new WstColumnQRange(c, q);
+                session.save(this.peer);
+                this.storeMode = StoreMode.INSERT;
             }
             else {
-                peer = cols.get(0);
+                this.peer = cols.get(0);
+                this.storeMode = StoreMode.UPDATE;
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/common/AbstractKmLineImport.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractKmLineImport.java	Wed Apr 11 14:20:01 2018 +0200
@@ -29,10 +29,12 @@
 
     protected StoreMode storeMode;
 
+
     /***** CONSTRUCTOR *****/
 
     public AbstractKmLineImport(final double km) {
         this.station = km;
+        this.storeMode = StoreMode.NONE;
     }
 
 
@@ -52,10 +54,8 @@
      * it
      */
     protected KMTUPLE getPeer(final SERIES parent, final StoreMode parentStoreMode) {
-        if (this.peer != null) {
-            this.storeMode = StoreMode.NONE;
+        if (this.peer != null)
             return this.peer;
-        }
         final Session session = ImporterSession.getInstance().getDatabaseSession();
         KMTUPLE value = null;
         if (parentStoreMode != StoreMode.INSERT) {
--- a/backend/src/main/java/org/dive4elements/river/importer/common/AbstractParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -16,6 +16,8 @@
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.LineNumberReader;
+import java.math.BigDecimal;
+import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -72,6 +74,13 @@
 
     private static NumberFormat numberFormat = NumberFormat.getInstance(Locale.ROOT);
 
+    private static DecimalFormat bigDecimalFormat;
+
+    static {
+        bigDecimalFormat = (DecimalFormat) NumberFormat.getInstance(Locale.ROOT);
+        bigDecimalFormat.setParseBigDecimal(true);
+    }
+
     /**
      * Path of the file or directory to import from
      */
@@ -262,6 +271,13 @@
     }
 
     /**
+     * Parses a number string as a BigDecimal, replacing a comma with a dot first
+     */
+    public static BigDecimal parseDecimal(final String text) throws ParseException {
+        return (BigDecimal) bigDecimalFormat.parse(text.replace(',', '.'));
+    }
+
+    /**
      * Gets the class's logger
      */
     protected abstract Logger getLog();
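
The new parseDecimal() helper normalises a decimal comma to a dot and parses with a DecimalFormat configured via setParseBigDecimal(true), so values stay exact BigDecimals instead of going through doubleValue(). A small standalone sketch of the same behaviour; ParseDecimalSketch is illustrative only, the real method lives in AbstractParser.

    import java.math.BigDecimal;
    import java.text.DecimalFormat;
    import java.text.NumberFormat;
    import java.text.ParseException;
    import java.util.Locale;

    class ParseDecimalSketch {

        private static final DecimalFormat FORMAT;

        static {
            FORMAT = (DecimalFormat) NumberFormat.getInstance(Locale.ROOT);
            FORMAT.setParseBigDecimal(true);      // parse() now returns BigDecimal
        }

        static BigDecimal parseDecimal(final String text) throws ParseException {
            // Decimal commas (as in German data files) are normalised to dots first.
            return (BigDecimal) FORMAT.parse(text.replace(',', '.'));
        }

        public static void main(final String[] args) throws ParseException {
            System.out.println(parseDecimal("12,345"));   // 12.345
            System.out.println(parseDecimal("7.5"));      // 7.5
        }
    }
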
--- a/backend/src/main/java/org/dive4elements/river/importer/common/AbstractSeriesImport.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractSeriesImport.java	Wed Apr 11 14:20:01 2018 +0200
@@ -45,7 +45,7 @@
 
     protected Map<StoreMode, Integer> valueStoreCount;
 
-    protected StoreMode seriesStoreMode;
+    protected StoreMode storeMode;
 
 
     /***** CONSTRUCTORS *****/
@@ -55,6 +55,7 @@
         this.valueStoreCount = new EnumMap<>(StoreMode.class);
         for (final StoreMode mode : StoreMode.values())
             this.valueStoreCount.put(mode, Integer.valueOf(0));
+        this.storeMode = StoreMode.NONE;
     }
 
     public AbstractSeriesImport(final String filename) {
@@ -118,11 +119,11 @@
         final SERIES peer = getPeer(river);
         if (peer != null) {
             for (final KMLINE value : this.values) {
-                incrementValueStoreCount(value.store(peer, this.seriesStoreMode));
+                incrementValueStoreCount(value.store(peer, this.storeMode));
             }
         }
         ImporterSession.getInstance().getDatabaseSession().flush();
-        return this.seriesStoreMode;
+        return this.storeMode;
     }
 
     /**
@@ -139,21 +140,19 @@
     /**
      * Gets the model object of the data series, inserting it into the database if not already existing
      */
-    public SERIES getPeer(final River river) {
-        if (this.peer != null) {
-            this.seriesStoreMode = StoreMode.NONE;
+    protected SERIES getPeer(final River river) {
+        if (this.peer != null)
             return this.peer;
-        }
         final Session session = ImporterSession.getInstance().getDatabaseSession();
         final List<SERIES> rows = querySeriesItem(session, river);
         if (rows.isEmpty()) {
             getLog().info("Create new database instance");
             this.peer = createSeriesItem(river);
             session.save(this.peer);
-            this.seriesStoreMode = StoreMode.INSERT;
+            this.storeMode = StoreMode.INSERT;
         } else {
             this.peer = rows.get(0);
-            this.seriesStoreMode = StoreMode.UPDATE;
+            this.storeMode = StoreMode.UPDATE;
         }
         return this.peer;
     }
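
AbstractSeriesImport also keeps per-StoreMode counters (valueStoreCount, an EnumMap initialised to zero for every mode and bumped via incrementValueStoreCount for each stored child value). Only the call sites are visible in this hunk, so the sketch below assumes the method shapes; it shows the bookkeeping idiom, not the project's actual class.

    import java.util.EnumMap;
    import java.util.Map;

    class StoreCountSketch {

        enum StoreMode { NONE, INSERT, UPDATE }

        private final Map<StoreMode, Integer> valueStoreCount = new EnumMap<>(StoreMode.class);

        StoreCountSketch() {
            // Start every counter at zero so a summary can always report all modes.
            for (final StoreMode mode : StoreMode.values()) {
                this.valueStoreCount.put(mode, Integer.valueOf(0));
            }
        }

        void incrementValueStoreCount(final StoreMode mode) {
            this.valueStoreCount.put(mode,
                    Integer.valueOf(this.valueStoreCount.get(mode).intValue() + 1));
        }

        public static void main(final String[] args) {
            final StoreCountSketch counts = new StoreCountSketch();
            counts.incrementValueStoreCount(StoreMode.INSERT);
            counts.incrementValueStoreCount(StoreMode.UPDATE);
            System.out.println(counts.valueStoreCount);   // {NONE=0, INSERT=1, UPDATE=1}
        }
    }
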
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/BedHeightParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/BedHeightParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -37,6 +37,7 @@
 import org.dive4elements.river.importer.ImportTimeInterval;
 import org.dive4elements.river.importer.ImportUnit;
 import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.importer.common.AbstractParser;
 import org.dive4elements.river.model.BedHeightType;
 
 public class BedHeightParser {
@@ -284,12 +285,12 @@
             final String a = m.group(1).replace(";", "").trim();
             final String b = m.group(2).replace(";", "").trim();
             try {
-                final BigDecimal lower = new BigDecimal(nf.parse(a).doubleValue());
-                final BigDecimal upper = new BigDecimal(nf.parse(b).doubleValue());
+                final BigDecimal lower = AbstractParser.parseDecimal(a);
+                final BigDecimal upper = AbstractParser.parseDecimal(b);
                 obj.setRange(new ImportRange(lower, upper));
                 return true;
             }
-            catch (final ParseException e) {
+            catch (final Exception e) {
                 log.warn("BHP: could not parse range", e);
             }
         }
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/FlowVelocityMeasurementParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/FlowVelocityMeasurementParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,61 +8,58 @@
 
 package org.dive4elements.river.importer.parsers;
 
-import org.dive4elements.river.importer.ImportFlowVelocityMeasurement;
-import org.dive4elements.river.importer.ImportFlowVelocityMeasurementValue;
-
 import java.math.BigDecimal;
-
 import java.text.DateFormat;
 import java.text.NumberFormat;
-import java.text.ParseException;
 import java.text.SimpleDateFormat;
-
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportFlowVelocityMeasurement;
+import org.dive4elements.river.importer.ImportFlowVelocityMeasurementValue;
+import org.dive4elements.river.importer.common.AbstractParser;
 public class FlowVelocityMeasurementParser extends LineParser {
 
     private static final Logger log =
-        Logger.getLogger(FlowVelocityMeasurementParser.class);
+            Logger.getLogger(FlowVelocityMeasurementParser.class);
 
     private static final NumberFormat nf =
-        NumberFormat.getInstance(DEFAULT_LOCALE);
+            NumberFormat.getInstance(DEFAULT_LOCALE);
 
     private static final DateFormat df =
-        new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
+            new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
 
 
-    private List<ImportFlowVelocityMeasurement> measurements;
+    private final List<ImportFlowVelocityMeasurement> measurements;
 
     private ImportFlowVelocityMeasurement current;
 
 
     public FlowVelocityMeasurementParser() {
-        measurements = new ArrayList<ImportFlowVelocityMeasurement>();
+        this.measurements = new ArrayList<>();
     }
 
 
     public List<ImportFlowVelocityMeasurement> getMeasurements() {
-        return measurements;
+        return this.measurements;
     }
 
     @Override
     protected void reset() {
-        current = new ImportFlowVelocityMeasurement();
+        this.current = new ImportFlowVelocityMeasurement();
     }
 
 
     @Override
     protected void finish() {
-        current.setDescription(fileName);
-        measurements.add(current);
+        this.current.setDescription(this.fileName);
+        this.measurements.add(this.current);
     }
 
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
         }
@@ -72,12 +69,12 @@
     }
 
 
-    public void handleMetaLine(String line) {
+    public void handleMetaLine(final String line) {
     }
 
 
-    public void handleDataLine(String line) {
-        String[] cols = line.split(SEPERATOR_CHAR);
+    public void handleDataLine(final String line) {
+        final String[] cols = line.split(SEPERATOR_CHAR);
 
         if (cols.length < 8) {
             log.warn("skip invalid data line: '" + line + "'");
@@ -85,24 +82,17 @@
         }
 
         try {
-            double km     = nf.parse(cols[1]).doubleValue();
-            double w      = nf.parse(cols[5]).doubleValue();
-            double q      = nf.parse(cols[6]).doubleValue();
-            double v      = nf.parse(cols[7]).doubleValue();
-
-            String timestr     = cols[3] + " " + cols[4];
-            String description = cols.length > 8 ? cols[8] : null;
+            final BigDecimal km = AbstractParser.parseDecimal(cols[1]);
+            final BigDecimal w = AbstractParser.parseDecimal(cols[5]);
+            final BigDecimal q = AbstractParser.parseDecimal(cols[6]);
+            final BigDecimal v = AbstractParser.parseDecimal(cols[7]);
 
-            current.addValue(new ImportFlowVelocityMeasurementValue(
-                df.parse(timestr),
-                new BigDecimal(km),
-                new BigDecimal(w),
-                new BigDecimal(q),
-                new BigDecimal(v),
-                description
-            ));
+            final String timestr     = cols[3] + " " + cols[4];
+            final String description = cols.length > 8 ? cols[8] : null;
+
+            this.current.addValue(new ImportFlowVelocityMeasurementValue(df.parse(timestr), km, w, q, v, description));
         }
-        catch (ParseException pe) {
+        catch (final Exception pe) {
             log.warn("Unparseable flow velocity values:", pe);
         }
     }
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/FlowVelocityModelParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/FlowVelocityModelParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -10,7 +10,6 @@
 
 import java.io.File;
 import java.io.IOException;
-
 import java.math.BigDecimal;
 import java.text.NumberFormat;
 import java.text.ParseException;
@@ -21,48 +20,48 @@
 import java.util.regex.Pattern;
 
 import org.apache.log4j.Logger;
-
+import org.dive4elements.river.backend.utils.EpsilonComparator;
 import org.dive4elements.river.importer.ImportDischargeZone;
 import org.dive4elements.river.importer.ImportFlowVelocityModel;
 import org.dive4elements.river.importer.ImportFlowVelocityModelValue;
-import org.dive4elements.river.backend.utils.EpsilonComparator;
+import org.dive4elements.river.importer.common.AbstractParser;
 
 
 public class FlowVelocityModelParser extends LineParser {
 
     private static final Logger log =
-        Logger.getLogger(FlowVelocityModelParser.class);
+            Logger.getLogger(FlowVelocityModelParser.class);
 
     private static final Pattern META_REGEX =
-        Pattern.compile(".*Rechnung [unter ]*(.*) \\(Pegel (.*)\\).*");
+            Pattern.compile(".*Rechnung [unter ]*(.*) \\(Pegel (.*)\\).*");
 
     private static final Pattern META_GAUGE =
-        Pattern.compile("(.*) Q=(\\w*)m3/s");
+            Pattern.compile("(.*) Q=(\\w*)m3/s");
 
     private static final Pattern META_MAINVALUE_A =
-        Pattern.compile("([a-zA-Z]+)+(\\d+)*[\\w()]*");
+            Pattern.compile("([a-zA-Z]+)+(\\d+)*[\\w()]*");
 
     private static final Pattern META_MAINVALUE_B =
-        Pattern.compile(
-            "(([a-zA-Z]+)+(\\d+)*)\\s*-\\s*(([a-zA-Z]+)+(\\d+)*\\S*)");
+            Pattern.compile(
+                    "(([a-zA-Z]+)+(\\d+)*)\\s*-\\s*(([a-zA-Z]+)+(\\d+)*\\S*)");
 
     private static final Pattern META_MAINVALUE_C =
-        Pattern.compile("([0-9]++)\\s?(\\S*)|([0-9]++,[0-9]++)\\s?(\\S*)");
+            Pattern.compile("([0-9]++)\\s?(\\S*)|([0-9]++,[0-9]++)\\s?(\\S*)");
 
     private static final Pattern META_MAINVALUE_D =
-        Pattern.compile(
-            "(([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*))\\s*"
-            + "bis (([0-9]*)\\s?(\\S*)|([0-9]++,[0-9]++)\\s?(\\S*))");
+            Pattern.compile(
+                    "(([0-9]*)\\s?(\\w*)|([0-9]++,[0-9]++)\\s?(\\w*))\\s*"
+                            + "bis (([0-9]*)\\s?(\\S*)|([0-9]++,[0-9]++)\\s?(\\S*))");
 
     private static final Pattern META_MAINVALUE_E =
-        Pattern.compile(
-            "(([a-zA-Z]+)+(\\d+)*)\\s*bis (([a-zA-Z]+)+(\\d+)*\\S*)");
+            Pattern.compile(
+                    "(([a-zA-Z]+)+(\\d+)*)\\s*bis (([a-zA-Z]+)+(\\d+)*\\S*)");
 
     private static final NumberFormat nf =
-        NumberFormat.getInstance(DEFAULT_LOCALE);
+            NumberFormat.getInstance(DEFAULT_LOCALE);
 
 
-    private List<ImportFlowVelocityModel> models;
+    private final List<ImportFlowVelocityModel> models;
 
     private ImportFlowVelocityModel current;
 
@@ -72,38 +71,38 @@
 
 
     public FlowVelocityModelParser() {
-        models = new ArrayList<ImportFlowVelocityModel>();
-        kmExists = new TreeSet<Double>(EpsilonComparator.CMP);
+        this.models = new ArrayList<>();
+        this.kmExists = new TreeSet<>(EpsilonComparator.CMP);
     }
 
 
     public List<ImportFlowVelocityModel> getModels() {
-        return models;
+        return this.models;
     }
 
     @Override
-    public void parse(File file) throws IOException {
-        description = file.getName();
+    public void parse(final File file) throws IOException {
+        this.description = file.getName();
 
         super.parse(file);
     }
 
     @Override
     protected void reset() {
-        current = new ImportFlowVelocityModel(description);
-        kmExists.clear();
+        this.current = new ImportFlowVelocityModel(this.description);
+        this.kmExists.clear();
     }
 
 
     @Override
     protected void finish() {
-        models.add(current);
+        this.models.add(this.current);
         // description = null;
     }
 
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
         }
@@ -113,16 +112,16 @@
     }
 
 
-    protected void handleMetaLine(String line) {
-        Matcher m = META_REGEX.matcher(line);
+    protected void handleMetaLine(final String line) {
+        final Matcher m = META_REGEX.matcher(line);
 
         if (m.matches()) {
-            String mainValueStr = m.group(1);
+            final String mainValueStr = m.group(1);
             log.debug("mainValueStr = '" + mainValueStr + "'");
-            String gaugeStr     = m.group(2);
+            final String gaugeStr     = m.group(2);
 
-            Object[] valueData = handleMainValueString(mainValueStr);
-            Object[] gaugeData = handleGaugeString(gaugeStr);
+            final Object[] valueData = handleMainValueString(mainValueStr);
+            final Object[] gaugeData = handleGaugeString(gaugeStr);
 
             if (valueData == null || valueData.length < 2) {
                 log.warn("skip invalid MainValue part in '" + line + "'");
@@ -142,102 +141,102 @@
                 log.debug("   upper: " + valueData[1]);
             }
 
-            current.setDischargeZone(new ImportDischargeZone(
-                (String) gaugeData[0],
-                (BigDecimal) gaugeData[1],
-                (String) valueData[0],
-                (String) valueData[1]
-            ));
+            this.current.setDischargeZone(new ImportDischargeZone(
+                    (String) gaugeData[0],
+                    (BigDecimal) gaugeData[1],
+                    (String) valueData[0],
+                    (String) valueData[1]
+                    ));
         }
     }
 
 
-    protected Object[] handleMainValueString(String mainValueStr) {
-        Matcher mA = META_MAINVALUE_A.matcher(mainValueStr.trim());
+    protected Object[] handleMainValueString(final String mainValueStr) {
+        final Matcher mA = META_MAINVALUE_A.matcher(mainValueStr.trim());
         if (mA.matches()) {
             log.debug("mainValueStr matches META_MAINVALUE_A");
-            String name = mA.group(0);
+            final String name = mA.group(0);
 
             return new Object[] { name, name };
         }
 
-        Matcher mB = META_MAINVALUE_B.matcher(mainValueStr.trim());
+        final Matcher mB = META_MAINVALUE_B.matcher(mainValueStr.trim());
         if (mB.matches()) {
             log.debug("mainValueStr matches META_MAINVALUE_B");
-            String lower = mB.group(1);
-            String upper = mB.group(4);
+            final String lower = mB.group(1);
+            final String upper = mB.group(4);
 
             return new Object[] { lower, upper };
         }
 
-        Matcher mC = META_MAINVALUE_C.matcher(mainValueStr.trim());
+        final Matcher mC = META_MAINVALUE_C.matcher(mainValueStr.trim());
         if (mC.matches()) {
             log.debug("mainValueStr matches META_MAINVALUE_C");
-            String facA  = mC.group(1);
-            String nameA = mC.group(2);
-            String facB  = mC.group(3);
-            String nameB = mC.group(4);
+            final String facA  = mC.group(1);
+            final String nameA = mC.group(2);
+            final String facB  = mC.group(3);
+            final String nameB = mC.group(4);
 
-            String fac  = facA  != null ? facA  : facB;
-            String name = nameA != null ? nameA : nameB;
+            final String fac  = facA  != null ? facA  : facB;
+            final String name = nameA != null ? nameA : nameB;
 
-            String mainValue = fac + " " + name;
+            final String mainValue = fac + " " + name;
 
             return new Object[] { mainValue, mainValue };
         }
 
-        Matcher mD = META_MAINVALUE_D.matcher(mainValueStr.trim());
+        final Matcher mD = META_MAINVALUE_D.matcher(mainValueStr.trim());
         if (mD.matches()) {
             log.debug("mainValueStr matches META_MAINVALUE_D");
-            String loFacA  = mD.group(2);
-            String loNameA = mD.group(3);
-            String loFacB  = mD.group(4);
-            String loNameB = mD.group(5);
+            final String loFacA  = mD.group(2);
+            final String loNameA = mD.group(3);
+            final String loFacB  = mD.group(4);
+            final String loNameB = mD.group(5);
 
-            String upFacA  = mD.group(7);
-            String upNameA = mD.group(8);
-            String upFacB  = mD.group(9);
-            String upNameB = mD.group(10);
+            final String upFacA  = mD.group(7);
+            final String upNameA = mD.group(8);
+            final String upFacB  = mD.group(9);
+            final String upNameB = mD.group(10);
 
-            String loFac  = loFacA  != null ? loFacA  : loFacB;
-            String loName = loNameA != null ? loNameA : loNameB;
+            final String loFac  = loFacA  != null ? loFacA  : loFacB;
+            final String loName = loNameA != null ? loNameA : loNameB;
 
-            String upFac  = upFacA  != null ? upFacA  : upFacB;
-            String upName = upNameA != null ? upNameA : upNameB;
+            final String upFac  = upFacA  != null ? upFacA  : upFacB;
+            final String upName = upNameA != null ? upNameA : upNameB;
 
-            String loMainValue = loFac + " " + loName;
-            String upMainValue = upFac + " " + upName;
+            final String loMainValue = loFac + " " + loName;
+            final String upMainValue = upFac + " " + upName;
 
             return new Object[] { loMainValue, upMainValue };
         }
 
-        Matcher mE = META_MAINVALUE_E.matcher(mainValueStr.trim());
+        final Matcher mE = META_MAINVALUE_E.matcher(mainValueStr.trim());
         if (mE.matches()) {
             log.debug("mainValueStr matches META_MAINVALUE_E");
-            String lower = mE.group(1);
-            String upper = mE.group(4);
+            final String lower = mE.group(1);
+            final String upper = mE.group(4);
 
             return new Object[] { lower, upper };
         }
 
-    log.debug("mainValueStr not matched");
+        log.debug("mainValueStr not matched");
         return null;
     }
 
 
-    protected Object[] handleGaugeString(String gaugeStr) {
-        Matcher m = META_GAUGE.matcher(gaugeStr);
+    protected Object[] handleGaugeString(final String gaugeStr) {
+        final Matcher m = META_GAUGE.matcher(gaugeStr);
 
         if (m.matches()) {
-            String name = m.group(1);
-            String qStr = m.group(2);
+            final String name = m.group(1);
+            final String qStr = m.group(2);
 
             try {
                 return new Object[] {
-                    name,
-                    new BigDecimal(nf.parse(qStr).doubleValue()) };
+                        name,
+                        AbstractParser.parseDecimal(qStr) };
             }
-            catch (ParseException pe) {
+            catch (final ParseException pe) {
                 log.warn("Could not parse Q value: '" + qStr + "'");
             }
         }
@@ -246,8 +245,8 @@
     }
 
 
-    protected void handleDataLine(String line) {
-        String[] cols = line.split(SEPERATOR_CHAR);
+    protected void handleDataLine(final String line) {
+        final String[] cols = line.split(SEPERATOR_CHAR);
 
         if (cols.length < 5) {
             log.warn("skip invalid data line: '" + line + "'");
@@ -255,31 +254,25 @@
         }
 
         try {
-            double km = nf.parse(cols[0]).doubleValue();
+            final BigDecimal km = AbstractParser.parseDecimal(cols[0]);
 
-            Double key = Double.valueOf(km);
+            final Double key = Double.valueOf(km.doubleValue());
 
-            if (kmExists.contains(key)) {
+            if (this.kmExists.contains(key)) {
                 log.warn("duplicate station '" + km + "': -> ignored");
                 return;
             }
 
-            double q      = nf.parse(cols[1]).doubleValue();
-            double total  = nf.parse(cols[2]).doubleValue();
-            double main   = nf.parse(cols[3]).doubleValue();
-            double stress = nf.parse(cols[4]).doubleValue();
+            final BigDecimal q = AbstractParser.parseDecimal(cols[1]);
+            final BigDecimal total = AbstractParser.parseDecimal(cols[2]);
+            final BigDecimal main = AbstractParser.parseDecimal(cols[3]);
+            final BigDecimal stress = AbstractParser.parseDecimal(cols[4]);
 
-            current.addValue(new ImportFlowVelocityModelValue(
-                new BigDecimal(km),
-                new BigDecimal(q),
-                new BigDecimal(total),
-                new BigDecimal(main),
-                new BigDecimal(stress)
-            ));
+            this.current.addValue(new ImportFlowVelocityModelValue(km, q, total, main, stress));
 
-            kmExists.add(key);
+            this.kmExists.add(key);
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("Unparseable flow velocity values:", pe);
         }
     }
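
FlowVelocityModelParser guards against duplicate stations with a TreeSet ordered by EpsilonComparator.CMP, so two km values that differ only by rounding count as the same station and the later data line is ignored. A simplified sketch of such an epsilon-tolerant guard follows; the tolerance EPS and the accept() method are assumptions, and the project's EpsilonComparator may behave differently.

    import java.util.Comparator;
    import java.util.TreeSet;

    class DuplicateKmGuardSketch {

        private static final double EPS = 1e-4;   // assumed tolerance

        private static final Comparator<Double> CMP =
                (a, b) -> Math.abs(a - b) < EPS ? 0 : Double.compare(a, b);

        private final TreeSet<Double> kmSeen = new TreeSet<>(CMP);

        /** Returns false if a station within EPS of km was already accepted. */
        boolean accept(final double km) {
            return this.kmSeen.add(Double.valueOf(km));
        }

        public static void main(final String[] args) {
            final DuplicateKmGuardSketch guard = new DuplicateKmGuardSketch();
            System.out.println(guard.accept(12.345));     // true
            System.out.println(guard.accept(12.34501));   // false: treated as duplicate
            System.out.println(guard.accept(13.0));       // true
        }
    }
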
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/MeasurementStationsParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/MeasurementStationsParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,19 +8,17 @@
 
 package org.dive4elements.river.importer.parsers;
 
-import java.math.BigDecimal;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.dive4elements.river.model.MeasurementStation;
-
 import org.dive4elements.river.importer.ImportMeasurementStation;
 import org.dive4elements.river.importer.ImportRange;
 import org.dive4elements.river.importer.ImportTimeInterval;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.model.MeasurementStation;
 
 
 public class MeasurementStationsParser extends LineParser {
@@ -29,7 +27,7 @@
 
         private static final long serialVersionUID = 1L;
 
-        public MeasurementStationParserException(String msg) {
+        public MeasurementStationParserException(final String msg) {
             super(msg);
         }
     }
@@ -39,14 +37,14 @@
     public static final int MAX_COMMENT_LENGTH = 512;
 
     private static final Logger log = Logger
-        .getLogger(MeasurementStationsParser.class);
+            .getLogger(MeasurementStationsParser.class);
 
     private List<ImportMeasurementStation> measurementStations;
     private ImportMeasurementStation current;
 
     @Override
     protected void reset() {
-        this.measurementStations = new ArrayList<ImportMeasurementStation>();
+        this.measurementStations = new ArrayList<>();
     }
 
     @Override
@@ -54,114 +52,109 @@
     }
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line == null || line.startsWith(START_META_CHAR)) {
             log.info("skip meta information at line " + lineNum);
             return;
         }
 
         try {
-            current = new ImportMeasurementStation();
+            this.current = new ImportMeasurementStation();
             handleDataLine(lineNum, line);
-            measurementStations.add(current);
+            this.measurementStations.add(this.current);
         }
-        catch (MeasurementStationParserException e) {
+        catch (final MeasurementStationParserException e) {
             log.warn("Problem in line " + lineNum + ": " + e.getMessage());
         }
     }
 
     public List<ImportMeasurementStation> getMeasurementStations() {
-        return measurementStations;
+        return this.measurementStations;
     }
 
-    protected void handleDataLine(int lineNum, String line)
-        throws MeasurementStationParserException {
-        String[] cols = line.split(SEPERATOR_CHAR);
+    protected void handleDataLine(final int lineNum, final String line)
+            throws MeasurementStationParserException {
+        final String[] cols = line.split(SEPERATOR_CHAR);
 
         if (cols == null || cols.length < MIN_COLUMNS) {
-            int num = cols != null ? cols.length : 0;
+            final int num = cols != null ? cols.length : 0;
             throw new MeasurementStationParserException("Not enough columns: "
-                + num);
+                    + num);
         }
 
-        current.name = getName(cols, lineNum);
-        current.range = getRange(cols, lineNum);
-        current.measurementType = getMeasurementType(cols, lineNum);
-        current.riverside = getRiverside(cols, lineNum);
-        current.gauge = getGauge(cols, lineNum);
-        current.observationTimerange = getObservationTimerange(cols, lineNum);
-        current.operator = getOperator(cols, lineNum);
-        current.comment = getComment(cols, lineNum);
+        this.current.name = getName(cols, lineNum);
+        this.current.range = getRange(cols, lineNum);
+        this.current.measurementType = getMeasurementType(cols, lineNum);
+        this.current.riverside = getRiverside(cols, lineNum);
+        this.current.gauge = getGauge(cols, lineNum);
+        this.current.observationTimerange = getObservationTimerange(cols, lineNum);
+        this.current.operator = getOperator(cols, lineNum);
+        this.current.comment = getComment(cols, lineNum);
     }
 
-    protected String getName(String[] cols, int lineNum)
-        throws MeasurementStationParserException {
+    protected String getName(final String[] cols, final int lineNum)
+            throws MeasurementStationParserException {
         if (cols[0] == null || cols[0].length() == 0) {
             throw new MeasurementStationParserException("invalid name in line "
-                + lineNum);
+                    + lineNum);
         }
 
         return cols[0];
     }
 
-    protected ImportRange getRange(String[] cols, int lineNum) {
-        String from = cols[1];
-        String to   = cols[4];
+    protected ImportRange getRange(final String[] cols, final int lineNum) {
+        final String from = cols[1];
+        final String to   = cols[4];
         if (from == null || from.length() == 0) {
             log.error("No station found in line" + lineNum);
             return null;
         }
 
         try {
-            double lower = getDouble(from);
-
             if (to == null || to.length() == 0) {
                 log.warn("No end km found in line " + lineNum);
-                return new ImportRange(new BigDecimal(lower));
+                return new ImportRange(AbstractParser.parseDecimal(from));
             }
 
             try {
-                double upper = getDouble(to);
-
-                return new ImportRange(new BigDecimal(lower),
-                    new BigDecimal(upper));
+                return new ImportRange(AbstractParser.parseDecimal(from), AbstractParser.parseDecimal(to));
             }
-            catch (ParseException e) {
+            catch (final NumberFormatException e) {
                 log.warn("Unparseable end km in line " + lineNum +
-                    ". Error: " + e.getMessage());
-                return new ImportRange(new BigDecimal(lower));
+                        ". Error: " + e.getMessage());
+                return new ImportRange(AbstractParser.parseDecimal(from));
             }
 
         }
-        catch (ParseException e) {
+        catch (final ParseException e) {
             log.error("Unparseable station in line " + lineNum +
                     ". Error: " + e.getMessage());
             return null;
         }
     }
 
-    protected String getMeasurementType(String[] cols, int lineNum)
-        throws MeasurementStationParserException {
-        String mtype = cols[2].trim();
+    protected String getMeasurementType(final String[] cols, final int lineNum)
+            throws MeasurementStationParserException {
+        final String mtype = cols[2].trim();
         if (!(MeasurementStation.MEASUREMENT_TYPE_BEDLOAD.equals(mtype) ||
                 MeasurementStation.MEASUREMENT_TYPE_SUSP.equals(mtype))) {
             throw new MeasurementStationParserException(
-                "invalid measurement type in line " + lineNum);
+                    "invalid measurement type in line " + lineNum);
         }
 
         return mtype;
     }
 
-    protected String getRiverside(String[] cols, int lineNum) {
-        String col = cols[3];
+    protected String getRiverside(final String[] cols, final int lineNum) {
+        final String col = cols[3];
         if (col == null || col.length() == 0) {
             log.warn("No river side given in line " + lineNum);
         }
         return col;
     }
 
-    protected String getGauge(String[] cols, int lineNum) {
-        String col = cols[5];
+    protected String getGauge(final String[] cols, final int lineNum) {
+        final String col = cols[5];
         if (col == null || col.length() == 0) {
             log.warn("Invalid gauge found in line " + lineNum);
         }
@@ -169,45 +162,45 @@
     }
 
     protected ImportTimeInterval getObservationTimerange(
-        String[] cols,
-        int lineNum
-    ) {
-        String col = cols[7];
+            final String[] cols,
+            final int lineNum
+            ) {
+        final String col = cols[7];
         if (col == null || col.length() == 0) {
             log.warn("Observation time invalid in line " + lineNum);
             return null;
         }
 
         try {
-            Date date = getDate(col);
+            final Date date = getDate(col);
 
             if (date != null) {
                 return new ImportTimeInterval(date);
             }
             log.warn("Observation time invalid in line " + lineNum);
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("Unparseable observation time '" + col +
-                "' in line " + lineNum);
+                    "' in line " + lineNum);
         }
         return null;
     }
 
-    protected String getOperator(String[] cols, int lineNum) {
-        String col = cols[8];
+    protected String getOperator(final String[] cols, final int lineNum) {
+        final String col = cols[8];
         if (col == null || col.length() == 0) {
             log.warn("No operator given in line " + lineNum);
         }
         return col;
     }
 
-    protected String getComment(String[] cols, int lineNum) {
+    protected String getComment(final String[] cols, final int lineNum) {
         if (cols.length > MIN_COLUMNS) {
-            String col = cols[9];
+            final String col = cols[9];
             if (col.length() > MAX_COMMENT_LENGTH) {
                 log.warn("Comment in line " + lineNum +
-                    " longer than allowed " + MAX_COMMENT_LENGTH +
-                    " characters. Truncated.");
+                        " longer than allowed " + MAX_COMMENT_LENGTH +
+                        " characters. Truncated.");
                 return col.substring(0, MAX_COMMENT_LENGTH);
             }
             return col;
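
Throughout this changeset the parsers replace the old idiom new BigDecimal(nf.parse(value).doubleValue()) with calls to AbstractParser.parseDecimal(value). The helper itself lives in org.dive4elements.river.importer.common and is not shown in this diff; the following is only a minimal sketch of what such a method could look like, assuming a German-locale DecimalFormat configured to return BigDecimal directly, which avoids the lossy detour through double (the class name DecimalParseSketch and the main() demo are invented for illustration):

    import java.math.BigDecimal;
    import java.text.DecimalFormat;
    import java.text.NumberFormat;
    import java.text.ParseException;
    import java.util.Locale;

    public final class DecimalParseSketch {

        // German number format: decimal comma, e.g. "12,9".
        private static final NumberFormat NF = NumberFormat.getInstance(Locale.GERMANY);

        static {
            // Let parse() hand back a BigDecimal instead of a Double.
            ((DecimalFormat) NF).setParseBigDecimal(true);
        }

        public static BigDecimal parseDecimal(final String value) throws ParseException {
            return (BigDecimal) NF.parse(value.trim());
        }

        public static void main(final String[] args) throws ParseException {
            System.out.println(parseDecimal("12,9"));    // 12.9
            System.out.println(parseDecimal("0,00001")); // 0.00001
        }
    }

NumberFormat instances are not thread-safe; a per-thread or synchronized instance would be needed if the importer ever parsed files concurrently.
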
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/MorphologicalWidthParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/MorphologicalWidthParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -9,32 +9,30 @@
 package org.dive4elements.river.importer.parsers;
 
 import java.math.BigDecimal;
-
 import java.text.NumberFormat;
 import java.text.ParseException;
-
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.log4j.Logger;
-
 import org.dive4elements.river.importer.ImportMorphWidth;
 import org.dive4elements.river.importer.ImportMorphWidthValue;
 import org.dive4elements.river.importer.ImportUnit;
+import org.dive4elements.river.importer.common.AbstractParser;
 
 
 public class MorphologicalWidthParser extends LineParser {
 
     private static final Logger log =
-        Logger.getLogger(MorphologicalWidthParser.class);
+            Logger.getLogger(MorphologicalWidthParser.class);
 
     public static final NumberFormat nf = NumberFormat.getInstance(
-        DEFAULT_LOCALE);
+            DEFAULT_LOCALE);
 
     public static final Pattern META_UNIT =
-        Pattern.compile("^Einheit: \\[(.*)\\].*");
+            Pattern.compile("^Einheit: \\[(.*)\\].*");
 
     protected List<ImportMorphWidth> morphWidths;
 
@@ -42,26 +40,26 @@
 
 
     public MorphologicalWidthParser() {
-        morphWidths = new ArrayList<ImportMorphWidth>();
+        this.morphWidths = new ArrayList<>();
     }
 
 
     @Override
     protected void reset() {
-        current = new ImportMorphWidth();
+        this.current = new ImportMorphWidth();
     }
 
 
     @Override
     protected void finish() {
-        if (current != null) {
-            morphWidths.add(current);
+        if (this.current != null) {
+            this.morphWidths.add(this.current);
         }
     }
 
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
         }
@@ -71,7 +69,7 @@
     }
 
 
-    protected void handleMetaLine(String line) {
+    protected void handleMetaLine(final String line) {
         if (handleMetaUnit(line)) {
             return;
         }
@@ -81,13 +79,13 @@
     }
 
 
-    protected boolean handleMetaUnit(String line) {
-        Matcher m = META_UNIT.matcher(line);
+    protected boolean handleMetaUnit(final String line) {
+        final Matcher m = META_UNIT.matcher(line);
 
         if (m.matches()) {
-            String unit = m.group(1);
+            final String unit = m.group(1);
 
-            current.setUnit(new ImportUnit(unit));
+            this.current.setUnit(new ImportUnit(unit));
 
             return true;
         }
@@ -96,8 +94,8 @@
     }
 
 
-    protected void handleDataLine(String line) {
-        String[] vals = line.split(SEPERATOR_CHAR);
+    protected void handleDataLine(final String line) {
+        final String[] vals = line.split(SEPERATOR_CHAR);
 
         if (vals == null || vals.length < 2) {
             log.warn("MWP: skip invalid data line: '" + line + "'");
@@ -105,25 +103,25 @@
         }
 
         try {
-            BigDecimal km    = new BigDecimal(nf.parse(vals[0]).doubleValue());
-            BigDecimal width = new BigDecimal(nf.parse(vals[1]).doubleValue());
-
-            String desc = vals.length > 2 ? vals[2] : null;
+            final BigDecimal km = AbstractParser.parseDecimal(vals[0]);
+            final BigDecimal width = AbstractParser.parseDecimal(vals[1]);
 
-            current.addValue(new ImportMorphWidthValue(
-                km,
-                width,
-                desc
-            ));
+            final String desc = vals.length > 2 ? vals[2] : null;
+
+            this.current.addValue(new ImportMorphWidthValue(
+                    km,
+                    width,
+                    desc
+                    ));
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("MWP: unparseable number in data row: " + line);
         }
     }
 
 
     public List<ImportMorphWidth> getMorphologicalWidths() {
-        return morphWidths;
+        return this.morphWidths;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
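
The META_UNIT pattern above extracts the unit from a metadata line once stripMetaLine() has removed the leading meta character. A small, self-contained illustration of that match (the sample line "Einheit: [m] Morphologische Breite" is invented; the pattern is the one from the parser):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class MetaUnitDemo {

        private static final Pattern META_UNIT =
                Pattern.compile("^Einheit: \\[(.*)\\].*");

        public static void main(final String[] args) {
            // A meta line as it looks after the leading meta character was stripped.
            final Matcher m = META_UNIT.matcher("Einheit: [m] Morphologische Breite");
            if (m.matches()) {
                System.out.println("unit = " + m.group(1)); // unit = m
            }
        }
    }
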
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentDensityParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentDensityParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -8,39 +8,35 @@
 
 package org.dive4elements.river.importer.parsers;
 
-import org.dive4elements.river.importer.ImportDepth;
-import org.dive4elements.river.importer.ImportSedimentDensity;
-import org.dive4elements.river.importer.ImportSedimentDensityValue;
-
 import java.io.File;
 import java.io.IOException;
-
 import java.math.BigDecimal;
-
 import java.text.NumberFormat;
 import java.text.ParseException;
-
 import java.util.ArrayList;
 import java.util.List;
-
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportDepth;
+import org.dive4elements.river.importer.ImportSedimentDensity;
+import org.dive4elements.river.importer.ImportSedimentDensityValue;
+import org.dive4elements.river.importer.common.AbstractParser;
 
 public class SedimentDensityParser extends LineParser {
 
     private static final Logger log =
-        Logger.getLogger(SedimentDensityParser.class);
+            Logger.getLogger(SedimentDensityParser.class);
 
     public static final NumberFormat nf =
-        NumberFormat.getInstance(DEFAULT_LOCALE);
+            NumberFormat.getInstance(DEFAULT_LOCALE);
 
     public static final Pattern META_DEPTH =
-        Pattern.compile("^Tiefe: (\\d++)-(\\d++).*");
+            Pattern.compile("^Tiefe: (\\d++)-(\\d++).*");
 
     public static final Pattern META_YEAR =
-        Pattern.compile("^Jahr: (\\d{4}).*");
+            Pattern.compile("^Jahr: (\\d{4}).*");
 
     protected List<ImportSedimentDensity> sedimentDensities;
 
@@ -51,13 +47,13 @@
     protected String yearString;
 
     public SedimentDensityParser() {
-        sedimentDensities = new ArrayList<ImportSedimentDensity>();
+        this.sedimentDensities = new ArrayList<>();
     }
 
 
     @Override
-    public void parse(File file) throws IOException {
-        currentDescription = file.getName();
+    public void parse(final File file) throws IOException {
+        this.currentDescription = file.getName();
 
         super.parse(file);
     }
@@ -65,20 +61,20 @@
 
     @Override
     protected void reset() {
-        current = new ImportSedimentDensity(currentDescription);
+        this.current = new ImportSedimentDensity(this.currentDescription);
     }
 
 
     @Override
     protected void finish() {
-        if (current != null) {
-            sedimentDensities.add(current);
+        if (this.current != null) {
+            this.sedimentDensities.add(this.current);
         }
     }
 
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
         }
@@ -88,7 +84,7 @@
     }
 
 
-    protected void handleMetaLine(String line) {
+    protected void handleMetaLine(final String line) {
         if (handleMetaDepth(line)) {
             return;
         }
@@ -99,27 +95,26 @@
     }
 
 
-    protected boolean handleMetaDepth(String line) {
-        Matcher m = META_DEPTH.matcher(line);
+    protected boolean handleMetaDepth(final String line) {
+        final Matcher m = META_DEPTH.matcher(line);
 
         if (m.matches()) {
-            String lo   = m.group(1);
-            String up   = m.group(2);
+            final String lo   = m.group(1);
+            final String up   = m.group(2);
 
             log.info("Found sediment density depth: "
-                + lo + " - " + up + " cm");
+                    + lo + " - " + up + " cm");
 
             try {
-                ImportDepth depth = new ImportDepth(
-                    new BigDecimal(nf.parse(lo).doubleValue()),
-                    new BigDecimal(nf.parse(up).doubleValue())
-                );
+                final ImportDepth depth = new ImportDepth(
+                        AbstractParser.parseDecimal(lo),
+                        AbstractParser.parseDecimal(up));
 
-                current.setDepth(depth);
+                this.current.setDepth(depth);
 
                 return true;
             }
-            catch (ParseException pe) {
+            catch (final ParseException pe) {
                 log.warn("Unparseable numbers in: '" + line + "'");
             }
         }
@@ -130,13 +125,13 @@
         return false;
     }
 
-    protected boolean handleMetaYear(String line) {
-        Matcher m = META_YEAR.matcher(line);
+    protected boolean handleMetaYear(final String line) {
+        final Matcher m = META_YEAR.matcher(line);
 
         if (m.matches()) {
-            yearString = m.group(1);
+            this.yearString = m.group(1);
 
-            log.info("Found sediment density year: " + yearString);
+            log.info("Found sediment density year: " + this.yearString);
 
             return true;
         }
@@ -147,8 +142,8 @@
     }
 
 
-    protected void handleDataLine(String line) {
-        String[] vals = line.split(SEPERATOR_CHAR);
+    protected void handleDataLine(final String line) {
+        final String[] vals = line.split(SEPERATOR_CHAR);
 
         if (vals == null || vals.length < 3) {
             log.warn("skip invalid data line: '" + line + "'");
@@ -159,13 +154,13 @@
         BigDecimal shoreOffset = null;
         BigDecimal density = null;
         try {
-            km          = new BigDecimal(nf.parse(vals[0]).doubleValue());
-            density     = new BigDecimal(nf.parse(vals[2]).doubleValue());
+            km = AbstractParser.parseDecimal(vals[0]);
+            density = AbstractParser.parseDecimal(vals[2]);
             if (!vals[1].isEmpty()) {
-                shoreOffset = new BigDecimal(nf.parse(vals[1]).doubleValue());
+                shoreOffset = AbstractParser.parseDecimal(vals[1]);
             }
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("Unparseable numbers in '" + line + "'");
         }
 
@@ -175,26 +170,26 @@
         }
 
         BigDecimal year = null;
-        if (yearString != null) {
+        if (this.yearString != null) {
             try {
-                year = new BigDecimal(nf.parse(yearString).doubleValue());
+                year = AbstractParser.parseDecimal(this.yearString);
             }
-            catch (ParseException pe) {
+            catch (final ParseException pe) {
                 log.warn("Unparseable year string");
             }
         }
 
-        current.addValue(new ImportSedimentDensityValue(
-            km,
-            shoreOffset,
-            density,
-            year,
-            currentDescription));
+        this.current.addValue(new ImportSedimentDensityValue(
+                km,
+                shoreOffset,
+                density,
+                year,
+                this.currentDescription));
     }
 
 
     public List<ImportSedimentDensity> getSedimentDensities() {
-        return sedimentDensities;
+        return this.sedimentDensities;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -19,15 +19,14 @@
 import java.util.regex.Pattern;
 
 import org.apache.log4j.Logger;
-
+import org.dive4elements.river.backend.utils.DateUtil;
 import org.dive4elements.river.importer.ImportTimeInterval;
 import org.dive4elements.river.importer.ImportUnit;
-
 import org.dive4elements.river.importer.ImportWst;
-import org.dive4elements.river.importer.ImportWstQRange;
 import org.dive4elements.river.importer.ImportWstColumn;
 import org.dive4elements.river.importer.ImportWstColumnValue;
-import org.dive4elements.river.backend.utils.DateUtil;
+import org.dive4elements.river.importer.ImportWstQRange;
+import org.dive4elements.river.importer.common.AbstractParser;
 
 
 /**
@@ -36,21 +35,21 @@
 public class WaterlevelDifferencesParser extends LineParser {
 
     private static final Logger log =
-        Logger.getLogger(WaterlevelDifferencesParser.class);
+            Logger.getLogger(WaterlevelDifferencesParser.class);
 
     private static final NumberFormat nf =
-        NumberFormat.getInstance(DEFAULT_LOCALE);
+            NumberFormat.getInstance(DEFAULT_LOCALE);
 
     public static final Pattern META_UNIT =
-        Pattern.compile("^Einheit: \\[(.*)\\].*");
+            Pattern.compile("^Einheit: \\[(.*)\\].*");
 
     public static final Pattern YEARS_IN_COLUMN =
-        Pattern.compile(".*(\\d{4})-(\\d{4})$");
+            Pattern.compile(".*(\\d{4})-(\\d{4})$");
 
-    public static final double INTERVAL_GAP = 0.00001d;
+    public static final BigDecimal INTERVAL_GAP = new BigDecimal("0.00001");
 
     /** List of parsed differences as ImportWst objects. */
-    private List<ImportWst> differences;
+    private final List<ImportWst> differences;
 
     private ImportWstColumn[] columns;
 
@@ -59,13 +58,13 @@
 
 
     public WaterlevelDifferencesParser() {
-        differences = new ArrayList<ImportWst>();
+        this.differences = new ArrayList<>();
     }
 
 
     /** Get the differences as wst parsed so far. */
     public List<ImportWst> getDifferences() {
-        return differences;
+        return this.differences;
     }
 
 
@@ -74,9 +73,9 @@
      * from it.
      */
     @Override
-    public void parse(File file) throws IOException {
-        current = new ImportWst(file.getName());
-        current.setKind(6);
+    public void parse(final File file) throws IOException {
+        this.current = new ImportWst(file.getName());
+        this.current.setKind(6);
 
         super.parse(file);
     }
@@ -90,14 +89,14 @@
 
     @Override
     protected void finish() {
-        if (columns != null && current != null) {
+        if (this.columns != null && this.current != null) {
             // TODO figure out if it's needed, as the columns
             //      are registered at their construction time.
-            for (ImportWstColumn col: columns) {
+            for (final ImportWstColumn col: this.columns) {
                 // TODO place a current.addColumn(col); here?
             }
 
-            differences.add(current);
+            this.differences.add(this.current);
         }
 
         // For all differences columns, add a single Q-Range with
@@ -106,27 +105,27 @@
         // TODO: should otherwise be extended to
         // (first station of next range - INTERVAL_GAP),
         // assuming always ascending stations
-        for (ImportWstColumn column: columns) {
-            List<ImportWstColumnValue> cValues = column.getColumnValues();
-            BigDecimal a = cValues.get(0).getPosition();
+        for (final ImportWstColumn column: this.columns) {
+            final List<ImportWstColumnValue> cValues = column.getColumnValues();
+            final BigDecimal a = cValues.get(0).getPosition();
             BigDecimal b = cValues.get(cValues.size() - 1).getPosition();
             if (a.compareTo(b) == 0) {
-                b = new BigDecimal(b.doubleValue() + INTERVAL_GAP);
+                b = b.add(INTERVAL_GAP);
             }
             column.addColumnQRange(
-                new ImportWstQRange(
-                    a,
-                    b,
-                    new BigDecimal(-1d))
-                );
+                    new ImportWstQRange(
+                            a,
+                            b,
+                            new BigDecimal(-1d))
+                    );
         }
-        current = null;
-        columns = null;
+        this.current = null;
+        this.columns = null;
     }
 
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
         }
@@ -136,7 +135,7 @@
     }
 
 
-    private void handleMetaLine(String meta) {
+    private void handleMetaLine(final String meta) {
         if (handleMetaUnit(meta)) {
             return;
         }
@@ -146,14 +145,14 @@
     }
 
 
-    private boolean handleMetaUnit(String meta) {
-        Matcher m = META_UNIT.matcher(meta);
+    private boolean handleMetaUnit(final String meta) {
+        final Matcher m = META_UNIT.matcher(meta);
 
         if (m.matches()) {
-            String unit = m.group(1);
+            final String unit = m.group(1);
             log.debug("Found unit: '" + unit + "'");
 
-            current.setUnit(new ImportUnit(unit));
+            this.current.setUnit(new ImportUnit(unit));
 
             return true;
         }
@@ -162,13 +161,13 @@
     }
 
 
-    private boolean handleMetaColumnNames(String meta) {
-        Pattern META_COLUMN_NAMES = Pattern.compile("Fluss-km;(.*)");
-        Matcher m = META_COLUMN_NAMES.matcher(meta);
+    private boolean handleMetaColumnNames(final String meta) {
+        final Pattern META_COLUMN_NAMES = Pattern.compile("Fluss-km;(.*)");
+        final Matcher m = META_COLUMN_NAMES.matcher(meta);
 
         if (m.matches()) {
-            String colStr = m.group(1);
-            String[] cols = colStr.split(SEPERATOR_CHAR);
+            final String colStr = m.group(1);
+            final String[] cols = colStr.split(SEPERATOR_CHAR);
 
             log.debug("Found " + cols.length + " columns.");
 
@@ -182,28 +181,28 @@
 
 
     /** Setup column structures with name, description and time interval. */
-    private void initColumns(String[] cols) {
-        current.setNumberColumns(cols.length);
-        columns = current.getColumns().toArray(
-            new ImportWstColumn[cols.length]);
+    private void initColumns(final String[] cols) {
+        this.current.setNumberColumns(cols.length);
+        this.columns = this.current.getColumns().toArray(
+                new ImportWstColumn[cols.length]);
 
         for (int i = 0; i < cols.length; i++) {
-            String name = cols[i].replace("\"", "");
+            final String name = cols[i].replace("\"", "");
 
             log.debug("Create new column '" + name + "'");
-            ImportWstColumn column = current.getColumn(i);
+            final ImportWstColumn column = this.current.getColumn(i);
             column.setName(name);
             column.setDescription(name);
 
-            Matcher m = YEARS_IN_COLUMN.matcher(name);
+            final Matcher m = YEARS_IN_COLUMN.matcher(name);
 
             if (m.matches()) {
-                int startYear = Integer.parseInt(m.group(1));
-                int endYear   = Integer.parseInt(m.group(2));
-                ImportTimeInterval time = new ImportTimeInterval(
-                    DateUtil.getStartDateFromYear(startYear),
-                    DateUtil.getEndDateFromYear(endYear)
-                );
+                final int startYear = Integer.parseInt(m.group(1));
+                final int endYear   = Integer.parseInt(m.group(2));
+                final ImportTimeInterval time = new ImportTimeInterval(
+                        DateUtil.getStartDateFromYear(startYear),
+                        DateUtil.getEndDateFromYear(endYear)
+                        );
                 column.setTimeInterval(time);
             } else {
                 log.debug("No time interval in column header found: " + name);
@@ -215,9 +214,9 @@
     /** Handle one line of data, add one value for all columns.
      * @param line the line to parse
      */
-    private void handleDataLine(String line) {
+    private void handleDataLine(final String line) {
         // Split by separator, do not exclude trailing empty string.
-        String[] cols = line.split(SEPERATOR_CHAR, -1);
+        final String[] cols = line.split(SEPERATOR_CHAR, -1);
 
         if (cols == null || cols.length < 2) {
             log.warn("skip invalid waterlevel-diff line: '" + line + "'");
@@ -227,31 +226,31 @@
         try {
             // The first value in a line like 12,9;4,3;4,5 is the station;
             // the remaining columns hold the actual values.
-            Double station = nf.parse(cols[0]).doubleValue();
+            final BigDecimal station = AbstractParser.parseDecimal(cols[0]);
 
-            for (int i = 0; i < columns.length; i++) {
-                int idx = i+1;
+            for (int i = 0; i < this.columns.length; i++) {
+                final int idx = i+1;
 
                 if (idx >= cols.length) {
                     log.warn("Insufficient column numbers: " + line);
                     continue;
                 }
 
-                String value = cols[idx];
+                final String value = cols[idx];
 
                 if (value != null && !value.equals("")) {
                     try {
-                        columns[i].addColumnValue(
-                            new BigDecimal(station),
-                            new BigDecimal(nf.parse(value).doubleValue()));
+                        this.columns[i].addColumnValue(
+                                station,
+                                AbstractParser.parseDecimal(value));
                     }
-                    catch (ParseException pe) {
+                    catch (final ParseException pe) {
                         log.warn("Could not parse value: '" + value + "'");
                     }
                 }
             }
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("Could not parse station: '" + line + "'");
         }
     }
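
INTERVAL_GAP is now a BigDecimal constant, and a zero-length range is widened with b.add(INTERVAL_GAP) instead of new BigDecimal(b.doubleValue() + INTERVAL_GAP). The point of the change is exactness: the detour through double re-introduces binary rounding noise, while pure decimal arithmetic keeps the station values clean. A small comparison with an arbitrarily chosen station value:

    import java.math.BigDecimal;

    public class IntervalGapDemo {

        private static final BigDecimal INTERVAL_GAP = new BigDecimal("0.00001");

        public static void main(final String[] args) {
            final BigDecimal b = new BigDecimal("92.3");

            // Old style: the double sum is not exactly representable in binary,
            // so its exact decimal expansion is a long, noisy value.
            System.out.println(new BigDecimal(b.doubleValue() + 0.00001d));

            // New style: exact decimal arithmetic.
            System.out.println(b.add(INTERVAL_GAP)); // 92.30001
        }
    }
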
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -19,16 +19,15 @@
 import java.util.regex.Pattern;
 
 import org.apache.log4j.Logger;
-
+import org.dive4elements.river.backend.utils.DateUtil;
+import org.dive4elements.river.importer.ImportRange;
 import org.dive4elements.river.importer.ImportTimeInterval;
 import org.dive4elements.river.importer.ImportUnit;
-
-import org.dive4elements.river.importer.ImportRange;
 import org.dive4elements.river.importer.ImportWst;
 import org.dive4elements.river.importer.ImportWstColumn;
 import org.dive4elements.river.importer.ImportWstColumnValue;
 import org.dive4elements.river.importer.ImportWstQRange;
-import org.dive4elements.river.backend.utils.DateUtil;
+import org.dive4elements.river.importer.common.AbstractParser;
 
 
 /**
@@ -43,17 +42,17 @@
     private static final Logger log = Logger.getLogger(WaterlevelParser.class);
 
     private static final NumberFormat nf =
-        NumberFormat.getInstance(DEFAULT_LOCALE);
+            NumberFormat.getInstance(DEFAULT_LOCALE);
 
     private static final Pattern META_Q_RANGE =
-        Pattern.compile("Abfluss\\s\\[(.*)\\];(.*)");
+            Pattern.compile("Abfluss\\s\\[(.*)\\];(.*)");
 
     public static final Pattern META_UNIT =
-        Pattern.compile("^Einheit: \\[(.*)\\].*");
+            Pattern.compile("^Einheit: \\[(.*)\\].*");
 
-    public static final double INTERVAL_GAP = 0.00001d;
+    public static final BigDecimal INTERVAL_GAP = new BigDecimal("0.00001");
 
-    private List<ImportWst> waterlevels;
+    private final List<ImportWst> waterlevels;
 
     private ImportWst current;
 
@@ -70,18 +69,18 @@
 
 
     public WaterlevelParser() {
-        waterlevels = new ArrayList<ImportWst>();
+        this.waterlevels = new ArrayList<>();
     }
 
 
     public List<ImportWst> getWaterlevels() {
-        return waterlevels;
+        return this.waterlevels;
     }
 
 
     @Override
-    public void parse(File file) throws IOException {
-        currentDescription = file.getName();
+    public void parse(final File file) throws IOException {
+        this.currentDescription = file.getName();
 
         super.parse(file);
     }
@@ -89,64 +88,63 @@
 
     @Override
     protected void reset() {
-        currentQRange = null;
-        current       = new ImportWst(currentDescription);
-        current.setNumberColumns(1);
-        column        = current.getColumn(0);
-        column.setName(currentDescription);
-        column.setDescription(currentDescription);
+        this.currentQRange = null;
+        this.current       = new ImportWst(this.currentDescription);
+        this.current.setNumberColumns(1);
+        this.column        = this.current.getColumn(0);
+        this.column.setName(this.currentDescription);
+        this.column.setDescription(this.currentDescription);
 
         // Try to extract and set the TimeInterval.
-        Matcher m = WaterlevelDifferencesParser.YEARS_IN_COLUMN.matcher(
-            currentDescription);
+        final Matcher m = WaterlevelDifferencesParser.YEARS_IN_COLUMN.matcher(
+                this.currentDescription);
 
         if (m.matches()) {
-            int startYear = Integer.parseInt(m.group(1));
-            int endYear   = Integer.parseInt(m.group(2));
-            ImportTimeInterval time = new ImportTimeInterval(
-                DateUtil.getStartDateFromYear(startYear),
-                DateUtil.getEndDateFromYear(endYear)
-            );
-            column.setTimeInterval(time);
+            final int startYear = Integer.parseInt(m.group(1));
+            final int endYear   = Integer.parseInt(m.group(2));
+            final ImportTimeInterval time = new ImportTimeInterval(
+                    DateUtil.getStartDateFromYear(startYear),
+                    DateUtil.getEndDateFromYear(endYear)
+                    );
+            this.column.setTimeInterval(time);
         } else {
             log.debug("No time interval in column header found: "
-                + currentDescription);
+                    + this.currentDescription);
         }
 
-        current.setKind(7);
+        this.current.setKind(7);
     }
 
 
     @Override
     protected void finish() {
-        if (current != null) {
-            if (currentQRange != null) {
-                List<ImportWstColumnValue> cValues = column.getColumnValues();
+        if (this.current != null) {
+            if (this.currentQRange != null) {
+                final List<ImportWstColumnValue> cValues = this.column.getColumnValues();
                 // Set end of range to last station
                 // or expand range to minimal length in case it would be 0
                 // TODO: should otherwise be extended to
                 // (first station of next range - INTERVAL_GAP),
                 // assuming always ascending stations
-                BigDecimal lastStation = cValues.get(cValues.size() -1)
-                    .getPosition();
-                if (lastStation.compareTo(currentRange.getA()) == 0) {
-                    currentRange.setB(new BigDecimal(lastStation.doubleValue()
-                        + INTERVAL_GAP));
+                final BigDecimal lastStation = cValues.get(cValues.size() -1)
+                        .getPosition();
+                if (lastStation.compareTo(this.currentRange.getA()) == 0) {
+                    this.currentRange.setB(lastStation.add(INTERVAL_GAP));
                 }
                 else {
-                    currentRange.setB(lastStation);
+                    this.currentRange.setB(lastStation);
                 }
 
-                currentQRange.setRange(currentRange);
-                column.addColumnQRange(currentQRange);
+                this.currentQRange.setRange(this.currentRange);
+                this.column.addColumnQRange(this.currentQRange);
             }
 
-            waterlevels.add(current);
+            this.waterlevels.add(this.current);
         }
     }
 
     @Override
-    protected void handleLine(int lineNum, String line) {
+    protected void handleLine(final int lineNum, final String line) {
         if (line.startsWith(START_META_CHAR)) {
             handleMetaLine(stripMetaLine(line));
             return;
@@ -161,44 +159,44 @@
     }
 
 
-    private void handleMetaLine(String meta) {
-        Matcher m = META_UNIT.matcher(meta);
+    private void handleMetaLine(final String meta) {
+        final Matcher m = META_UNIT.matcher(meta);
 
         if (m.matches()) {
-            String unit = m.group(1);
+            final String unit = m.group(1);
             log.debug("Found unit: '" + unit + "'");
 
-            current.setUnit(new ImportUnit(unit));
+            this.current.setUnit(new ImportUnit(unit));
         }
     }
 
 
-    private boolean handleQRange(String line) {
-        Matcher m = META_Q_RANGE.matcher(line);
+    private boolean handleQRange(final String line) {
+        final Matcher m = META_Q_RANGE.matcher(line);
 
         if (m.matches()) {
-            String unitStr  = m.group(1);
-            String valueStr = m.group(2);
+            final String unitStr  = m.group(1);
+            final String valueStr = m.group(2);
             try {
-                if (currentQRange != null) {
+                if (this.currentQRange != null) {
                     // Finish off the last one.
-                    List<ImportWstColumnValue> cValues = column
-                        .getColumnValues();
+                    final List<ImportWstColumnValue> cValues = this.column
+                            .getColumnValues();
                     // Set end of range to last station.
-                    currentRange.setB(cValues.get(cValues.size() -1)
-                        .getPosition());
-                    currentQRange.setRange(currentRange);
-                    column.addColumnQRange(currentQRange);
+                    this.currentRange.setB(cValues.get(cValues.size() -1)
+                            .getPosition());
+                    this.currentQRange.setRange(this.currentRange);
+                    this.column.addColumnQRange(this.currentQRange);
                 }
-                currentQRange = new ImportWstQRange(null,
-                    new BigDecimal(nf.parse(valueStr).doubleValue()));
-                currentRange = new ImportRange();
+                this.currentQRange = new ImportWstQRange(null,
+                        AbstractParser.parseDecimal(valueStr));
+                this.currentRange = new ImportRange();
 
                 log.debug("Found new Q range: Q=" + valueStr);
 
                 return true;
             }
-            catch (ParseException pe) {
+            catch (final ParseException pe) {
                 log.warn("Unparseable Q range: '" + line + "'");
             }
         }
@@ -207,8 +205,8 @@
     }
 
 
-    private void handleDataLine(String line) {
-        String[] cols = line.split(SEPERATOR_CHAR);
+    private void handleDataLine(final String line) {
+        final String[] cols = line.split(SEPERATOR_CHAR);
 
         if (cols == null || cols.length < 2) {
             log.warn("skip invalid waterlevel line: '" + line + "'");
@@ -217,18 +215,16 @@
 
         try {
             // Store the value and remember the position for QRange, if needed.
-            Double station = nf.parse(cols[0]).doubleValue();
-            Double value   = nf.parse(cols[1]).doubleValue();
+            final BigDecimal station = AbstractParser.parseDecimal(cols[0]);
+            final BigDecimal value = AbstractParser.parseDecimal(cols[1]);
 
-            BigDecimal stationBD = new BigDecimal(station);
+            this.column.addColumnValue(station, value);
 
-            column.addColumnValue(stationBD, new BigDecimal(value));
-
-            if (currentRange.getA() == null) {
-                currentRange.setA(stationBD);
+            if (this.currentRange.getA() == null) {
+                this.currentRange.setA(station);
             }
         }
-        catch (ParseException pe) {
+        catch (final ParseException pe) {
             log.warn("Unparseable number in data row: " + line);
         }
     }
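
Both waterlevel parsers reuse WaterlevelDifferencesParser.YEARS_IN_COLUMN to pull a year range out of a column header or file name and turn it into an ImportTimeInterval via DateUtil. The sketch below only shows that extraction; the sample name "Differenz 2002-2012" is invented, and the GregorianCalendar lines merely approximate what DateUtil.getStartDateFromYear() and getEndDateFromYear() are assumed to return (first and last day of the respective year):

    import java.util.Calendar;
    import java.util.Date;
    import java.util.GregorianCalendar;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ColumnYearsDemo {

        private static final Pattern YEARS_IN_COLUMN =
                Pattern.compile(".*(\\d{4})-(\\d{4})$");

        public static void main(final String[] args) {
            final Matcher m = YEARS_IN_COLUMN.matcher("Differenz 2002-2012");
            if (m.matches()) {
                final int startYear = Integer.parseInt(m.group(1));
                final int endYear   = Integer.parseInt(m.group(2));

                // Stand-ins for DateUtil.getStartDateFromYear()/getEndDateFromYear().
                final Date start = new GregorianCalendar(startYear, Calendar.JANUARY, 1).getTime();
                final Date end   = new GregorianCalendar(endYear, Calendar.DECEMBER, 31).getTime();

                System.out.println(start + " .. " + end);
            }
        }
    }
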
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -63,6 +63,8 @@
     protected ImportWst wst;
 
     protected ImportRange lastRange;
+    protected Double lastA;
+    protected Double lastB;
 
     public WstParser() {
     }
@@ -450,9 +452,8 @@
 
     protected void addInterval(
             final BigDecimal from,
-            final BigDecimal to,
-            final BigDecimal[] values
-            ) {
+            BigDecimal to,
+            final BigDecimal[] values) {
         log.debug("addInterval: " + from + " " + to);
 
         if (values == null || from == MAX_RANGE || from == MIN_RANGE) {
@@ -460,28 +461,38 @@
         }
 
         // expand a single-line, i.e. zero-length, Q-range to minimal length
-        final ImportRange range = new ImportRange(from, to);
         if (from == to) {
-            if ((this.lastRange != null) && (this.lastRange.difference() < 0.0))
-                range.setB(from.subtract(INTERVAL_GAP));
-            else
-                range.setB(from.add(INTERVAL_GAP));
+            if (this.lastRange != null && this.lastA > this.lastB) {
+                to = from.subtract(INTERVAL_GAP);
+            }
+            else {
+                to = from.add(INTERVAL_GAP);
+            }
         }
 
+        final ImportRange range = new ImportRange(from, to);
+
         // little workaround to make the q ranges tightly fit.
         // Leave a very small gap to ensure that the range queries
         // still work.
 
         if (this.lastRange != null) {
-            if (this.lastRange.difference() > 0.0)
+            if (this.lastA < this.lastB) {
                 this.lastRange.setB(range.getA().subtract(INTERVAL_GAP));
-            else // lastA >= lastB
+            }
+            else { // lastA >= lastB
                 this.lastRange.setA(range.getB().add(INTERVAL_GAP));
+            }
         }
 
-        for (int i = 0; i < values.length; ++i)
-            this.wst.getColumn(i).addColumnQRange(new ImportWstQRange(range, values[i]));
+        for (int i = 0; i < values.length; ++i) {
+            final ImportWstColumn column = this.wst.getColumn(i);
+            final ImportWstQRange wstQRange = new ImportWstQRange(range, values[i]);
+            column.addColumnQRange(wstQRange);
+        }
 
+        this.lastA = from.doubleValue();
+        this.lastB = to.doubleValue();
         this.lastRange = range;
     }
 
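
addInterval() now remembers lastA/lastB and trims the previously stored range so that consecutive Q-ranges never overlap: with ascending stations the previous upper bound is pulled just below the start of the new range, with descending stations the previous lower bound is pushed just above its end. Isolated from the importer classes, the adjustment looks like this (Range is a stand-in for ImportRange; the station values are arbitrary):

    import java.math.BigDecimal;

    public class QRangeGapDemo {

        private static final BigDecimal INTERVAL_GAP = new BigDecimal("0.00001");

        /** Minimal stand-in for ImportRange. */
        static class Range {
            BigDecimal a;
            BigDecimal b;
            Range(final BigDecimal a, final BigDecimal b) { this.a = a; this.b = b; }
            @Override
            public String toString() { return "[" + this.a + ", " + this.b + "]"; }
        }

        public static void main(final String[] args) {
            // Previous Q-range and the one following it (ascending stations).
            final Range last = new Range(new BigDecimal("10.0"), new BigDecimal("20.0"));
            final Range next = new Range(new BigDecimal("20.0"), new BigDecimal("30.0"));

            // Same adjustment as in addInterval(): leave a tiny gap so a
            // station query never hits two Q-ranges at once.
            if (last.a.compareTo(last.b) < 0) {
                last.b = next.a.subtract(INTERVAL_GAP);
            } else {
                last.a = next.b.add(INTERVAL_GAP);
            }

            System.out.println(last); // [10.0, 19.99999]
            System.out.println(next); // [20.0, 30.0]
        }
    }
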
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/SInfoImporter.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/SInfoImporter.java	Wed Apr 11 14:20:01 2018 +0200
@@ -48,9 +48,8 @@
         BASICS("Basisdaten"), //
         SELECTED_ADDITIONAL("Selektierte_Zusaetzliche_Laengsschnitte"), //
         INFRASTRUCTURE("Infrastrukturen_BWaStr"), //
-        CHANNEL("Fahrrinnenverhaeltnisse"), //
+        CHANNEL("Zu_gewaehrleistende_Fahrrinnenverhaeltnisse"), //
         COLLISION_EINZEL("Grundberuehrungen" + File.separator + "Einzeljahre"), //
-        COLLISION_EPOCHE("Grundberuehrungen" + File.separator + "Epoche"), //
         TKH_EINZEL("Transportkoerperhoehen" + File.separator + "Einzeljahre"), //
         TKH_EPOCHE("Transportkoerperhoehen" + File.separator + "Epochen"), //
         TKH_MODELL("Transportkoerperhoehen" + File.separator + "Modellergebnisse"), //
@@ -136,8 +135,6 @@
             boolean added = false;
             added = this.parsers.addAll(CollisionParser.createParsers(SInfoDirName.COLLISION_EINZEL.buildPath(this.rootDir),
                     SInfoDirName.COLLISION_EINZEL.getFile(), river));
-            added |= this.parsers.addAll(CollisionParser.createParsers(SInfoDirName.COLLISION_EPOCHE.buildPath(this.rootDir),
-                    SInfoDirName.COLLISION_EPOCHE.getFile(), river));
             if (!added)
                 log.info("Collision: no files found");
         }
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/BedMobilityParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/BedMobilityParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -66,7 +66,9 @@
      */
     public static List<BedMobilityParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<BedMobilityParser> parsers = new ArrayList<>();
-        parsers.add(new BedMobilityParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        final File importFile = new File(importDir, IMPORT_FILENAME);
+        if (importFile.exists())
+            parsers.add(new BedMobilityParser(importFile, new File(relativeDir, IMPORT_FILENAME), river));
         return parsers;
     }
 
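
Several createParsers() factories in this changeset, starting with BedMobilityParser above, gain the same guard: a parser is only registered when its input file or directory actually exists, so rivers without the optional S-INFO files no longer produce parsers that fail on missing input. Reduced to a generic sketch (ParserFactory and createIfPresent are hypothetical helpers, not part of the importer API):

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;

    public class OptionalImportFiles {

        /** Hypothetical callback standing in for the various parser constructors. */
        public interface ParserFactory<P> {
            P create(File importFile);
        }

        /**
         * Build a single parser if the import file exists, otherwise return an
         * empty list; this mirrors the guard added to the factories in this changeset.
         */
        public static <P> List<P> createIfPresent(final File importDir, final String fileName,
                final ParserFactory<P> factory) {
            final List<P> parsers = new ArrayList<>();
            final File importFile = new File(importDir, fileName);
            if (importFile.exists()) {
                parsers.add(factory.create(importFile));
            }
            return parsers;
        }
    }
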
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/ChannelParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/ChannelParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -79,7 +79,9 @@
      */
     public static List<ChannelParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<ChannelParser> parsers = new ArrayList<>();
-        parsers.add(new ChannelParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        final File importFile = new File(importDir, IMPORT_FILENAME);
+        if (importFile.exists())
+            parsers.add(new ChannelParser(importFile, new File(relativeDir, IMPORT_FILENAME), river));
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/CollisionParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/CollisionParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -100,8 +100,10 @@
      */
     public static List<CollisionParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<CollisionParser> parsers = new ArrayList<>();
-        for (final File file : listFiles(importDir, ".csv"))
-            parsers.add(new CollisionParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists()) {
+            for (final File file : listFiles(importDir, ".csv"))
+                parsers.add(new CollisionParser(file, new File(relativeDir, file.getName()), river));
+        }
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DailyDischargeParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DailyDischargeParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -79,14 +79,16 @@
      */
     public static List<DailyDischargeParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<DailyDischargeParser> parsers = new ArrayList<>();
-        final File[] files = importDir.listFiles(new FilenameFilter() {
-            @Override
-            public boolean accept(final File dir, final String name) {
-                return IMPORT_FILENAME.matcher(name).matches();
-            }
-        });
-        for (final File file : files)
-            parsers.add(new DailyDischargeParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists()) {
+            final File[] files = importDir.listFiles(new FilenameFilter() {
+                @Override
+                public boolean accept(final File dir, final String name) {
+                    return IMPORT_FILENAME.matcher(name).matches();
+                }
+            });
+            for (final File file : files)
+                parsers.add(new DailyDischargeParser(file, new File(relativeDir, file.getName()), river));
+        }
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DepthEvolutionParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DepthEvolutionParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -77,8 +77,9 @@
      */
     public static List<DepthEvolutionParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<DepthEvolutionParser> parsers = new ArrayList<>();
-        for (final File file : listFiles(importDir, ".csv"))
-            parsers.add(new DepthEvolutionParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists())
+            for (final File file : listFiles(importDir, ".csv"))
+                parsers.add(new DepthEvolutionParser(file, new File(relativeDir, file.getName()), river));
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -87,8 +87,9 @@
      */
     public static List<FlowDepthParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<FlowDepthParser> parsers = new ArrayList<>();
-        for (final File file : listFiles(importDir, ".csv"))
-            parsers.add(new FlowDepthParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists())
+            for (final File file : listFiles(importDir, ".csv"))
+                parsers.add(new FlowDepthParser(file, new File(relativeDir, file.getName()), river));
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/InfrastructureParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/InfrastructureParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -26,7 +26,6 @@
 import org.dive4elements.river.importer.common.ParsingState;
 import org.dive4elements.river.importer.sinfo.importitem.InfrastructureKmLineImport;
 import org.dive4elements.river.importer.sinfo.importitem.InfrastructureSeriesImport;
-import org.dive4elements.river.model.AnnotationType;
 import org.dive4elements.river.model.sinfo.Infrastructure;
 import org.dive4elements.river.model.sinfo.InfrastructureValue;
 
@@ -66,8 +65,6 @@
 
     private final HashMap<String, ImportAttribute> bankAttributes;
 
-    private final HashMap<String, ImportAnnotationType> types;
-
 
     /***** CONSTRUCTORS *****/
 
@@ -79,9 +76,6 @@
         this.bankAttributes.put("links", new ImportAttribute(DB_BANK_LEFT));
         this.bankAttributes.put("rechts", new ImportAttribute(DB_BANK_RIGHT));
         this.bankAttributes.put("", new ImportAttribute(DB_BANK_NULL));
-        this.types = new HashMap<>();
-        for (final AnnotationType type : AnnotationType.getTypes())
-            this.types.put(type.getName().trim().toLowerCase(), new ImportAnnotationType(type.getName()));
     }
 
 
@@ -104,8 +98,9 @@
      */
     public static List<InfrastructureParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<InfrastructureParser> parsers = new ArrayList<>();
-        for (final File file : listFiles(importDir, ".csv"))
-            parsers.add(new InfrastructureParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists())
+            for (final File file : listFiles(importDir, ".csv"))
+                parsers.add(new InfrastructureParser(file, new File(relativeDir, file.getName()), river));
         return parsers;
     }
 
@@ -137,12 +132,15 @@
         final Matcher m = META_TYPE.matcher(this.currentLine);
         if (m.matches()) {
             this.metaPatternsMatched.add(META_TYPE);
-            if (this.types.containsKey(m.group(1).trim().toLowerCase()))
-                this.seriesHeader.setType(this.types.get(m.group(1).trim().toLowerCase()));
+            if (this.river.getAnnotationClassifier() != null) {
+                final ImportAnnotationType type = this.river.getAnnotationClassifier().classifyDescription(m.group(1).trim(),
+                        this.river.getAnnotationClassifier().getDefaultType());
+                this.seriesHeader.setType(type);
+                log.info(String.format("Type name in file: '%s', will be assigned to database name '%s'", m.group(1).trim(), type.getName()));
+            }
             else {
-                final ImportAnnotationType type = new ImportAnnotationType(m.group(1).trim());
-                this.types.put(m.group(1).trim().toLowerCase(), type);
-                this.seriesHeader.setType(type);
+                log.error("No annotation types file configured, cannot process type '" + m.group(1).trim() + "'");
+                this.headerParsingState = ParsingState.STOP;
             }
             return true;
         }
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/SelectedAdditionalParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/SelectedAdditionalParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -17,6 +17,7 @@
 import java.io.LineNumberReader;
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.log4j.Logger;
@@ -43,9 +44,7 @@
 
     private static final Logger log = Logger.getLogger(SelectedAdditionalParser.class);
 
-    private static final String IMPORT_Q_FILENAME = "Mit_Abflussdaten.txt";
-
-    private static final String IMPORT_W_FILENAME = "Ohne_Abflussdaten.txt";
+    private static final String IMPORT_FILENAME = "Zus_Laengsschnitte.txt";
 
     private enum SelectionType {
         WITH_Q("Q", "with discharge"), //
@@ -66,6 +65,13 @@
         public String getLogText() {
             return this.logText;
         }
+
+        public static SelectionType parse(final String path) {
+            if (path.toLowerCase().endsWith(".wst"))
+                return WITH_Q;
+            else
+                return WITHOUT_Q;
+        }
     }
 
     private final File importPath;
@@ -74,19 +80,16 @@
 
     private final ImportRiver river;
 
-    private final SelectionType selectionType;
-
-    private final List<String> links;
+    private final HashMap<String, SelectionType> links;
 
 
     /***** CONSTRUCTORS *****/
 
-    public SelectedAdditionalParser(final File importPath, final File rootRelativePath, final ImportRiver river, final SelectionType selectionType) {
+    public SelectedAdditionalParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
         this.importPath = importPath;
         this.rootRelativePath = rootRelativePath;
         this.river = river;
-        this.selectionType = selectionType;
-        this.links = new ArrayList<>();
+        this.links = new HashMap<>();
     }
 
 
@@ -104,10 +107,9 @@
      */
     public static List<SelectedAdditionalParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<SelectedAdditionalParser> parsers = new ArrayList<>();
-        parsers.add(new SelectedAdditionalParser(new File(importDir, IMPORT_Q_FILENAME), new File(relativeDir, IMPORT_Q_FILENAME),
-                river, SelectionType.WITH_Q));
-        parsers.add(new SelectedAdditionalParser(new File(importDir, IMPORT_W_FILENAME), new File(relativeDir, IMPORT_W_FILENAME),
-                river, SelectionType.WITHOUT_Q));
+        final File importFile = new File(importDir, IMPORT_FILENAME);
+        if (importFile.exists())
+            parsers.add(new SelectedAdditionalParser(importFile, new File(relativeDir, IMPORT_FILENAME), river));
         return parsers;
     }
 
@@ -123,8 +125,8 @@
                 line = in.readLine();
                 if (line == null)
                     break;
-                if (!line.trim().isEmpty() && !line.trim().startsWith(AbstractParser.START_META_CHAR))
-                    this.links.add(line.trim());
+                if (!line.trim().isEmpty() && !line.trim().startsWith(AbstractParser.START_META_CHAR) && !this.links.containsKey(line.trim()))
+                    this.links.put(line.trim(), SelectionType.parse(line.trim()));
             }
             log.info("Number of file links found: " + this.links.size());
         }
@@ -138,17 +140,16 @@
     public void store() {
         final Session session = ImporterSession.getInstance().getDatabaseSession();
         final SQLQuery reset = session.createSQLQuery("UPDATE wsts SET sinfo_selection = NULL WHERE (river_id=:river_id) AND (kind=1)"
-                + " AND (sinfo_selection=:seltype)");
+                + " AND (sinfo_selection IS NOT NULL)");
         reset.setParameter("river_id", this.river.getPeer().getId());
-        reset.setParameter("seltype", this.selectionType.getKey());
         reset.executeUpdate();
         final Query query = session.createQuery("FROM Wst WHERE (river=:river) AND (kind=1) AND (lower(description) LIKE :path)");
         query.setParameter("river", this.river.getPeer());
         int count = 0;
-        for (final String wstfile : this.links) {
-            count += updateWst(session, query, this.river.getPeer(), wstfile, this.selectionType);
+        for (final String wstfile : this.links.keySet()) {
+            count += updateWst(session, query, this.river.getPeer(), wstfile, this.links.get(wstfile));
         }
-        log.info("Updated " + count + " wsts for selected additionals " + this.selectionType.getLogText());
+        log.info("Updated " + count + " wsts for selected additionals");
     }
 
     private int updateWst(final Session session, final Query query, final River river, final String path, final SelectionType selectionType) {
--- a/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -87,8 +87,9 @@
      */
     public static List<TkhParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<TkhParser> parsers = new ArrayList<>();
-        for (final File file : listFiles(importDir, ".csv"))
-            parsers.add(new TkhParser(file, new File(relativeDir, file.getName()), river));
+        if (importDir.exists())
+            for (final File file : listFiles(importDir, ".csv"))
+                parsers.add(new TkhParser(file, new File(relativeDir, file.getName()), river));
         return parsers;
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/uinfo/UInfoImporter.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/UInfoImporter.java	Wed Apr 11 14:20:01 2018 +0200
@@ -38,7 +38,7 @@
 
     private enum UInfoDirName {
         BASICS("Basisdaten"), //
-        SALIX("Salix-Linie_Fluss-Aue-Konnektivitaet" + File.separator + "Salix-Linie");
+        SALIX("Salix-Linie" + File.separator + "Salix-Linie");
 
         private final String dirname;
 
--- a/backend/src/main/java/org/dive4elements/river/importer/uinfo/parsers/SalixParser.java	Wed Apr 11 14:09:13 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/parsers/SalixParser.java	Wed Apr 11 14:20:01 2018 +0200
@@ -87,7 +87,9 @@
      */
     public static List<SalixParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
         final List<SalixParser> parsers = new ArrayList<>();
-        parsers.add(new SalixParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        final File importFile = new File(importDir, IMPORT_FILENAME);
+        if (importFile.exists())
+            parsers.add(new SalixParser(importFile, new File(relativeDir, IMPORT_FILENAME), river));
         return parsers;
     }
 
