changeset:   6730:c14e17dabf39
branch:      double-precision
user:        Tom Gottfried <tom@intevation.de>
date:        Wed, 31 Jul 2013 11:31:23 +0200
summary:     merged changes from default into double-precision branch
parents:     5da894daef32 4a12fd5c9e29
children:    d2fddd344d7c
files:       backend/doc/schema/oracle-minfo.sql backend/doc/schema/postgresql-minfo.sql backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java
diffstat:    65 files changed, 902 insertions(+), 347 deletions(-)
--- a/.hgtags	Tue Jul 30 18:54:53 2013 +0200
+++ b/.hgtags	Wed Jul 31 11:31:23 2013 +0200
@@ -65,3 +65,4 @@
 0000000000000000000000000000000000000000 3.0.12
 0000000000000000000000000000000000000000 3.0.12
 da197a9236fde564d45379c0826510c69a5709ce 3.0.12
+71da3d4ffb4a46a2f8de7e6a9e1e4a32657802aa 3.0.13
--- a/artifacts/doc/conf/artifacts/chart.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/chart.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -93,6 +93,13 @@
                         <facet name="longitudinal_section.w"/>
                         <facet name="longitudinal_section.q"/>
                         <facet name="longitudinal_section.manualpoints"/>
+                        <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+                        <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
                     </facets>
                 </outputmode>
             </outputmodes>
--- a/artifacts/doc/conf/artifacts/fixanalysis.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/fixanalysis.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -171,8 +171,8 @@
                         <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
                         <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
                         <facet name="fix_wq_curve.manualpoints" description="Manual points"/>
-                        <facet name="mainvalues.w" description="facet.fix_wq.mainvalues.w"/>
-                        <facet name="mainvalues.q" description="facet.fix_wq.mainvalues.q"/>
+                        <facet name="mainvalues.q" description="mainvalues.q"/>
+                        <facet name="mainvalues.w" description="mainvalues.w"/>
                     </facets>
                 </outputmode>
                 <outputmode name="fix_deltawt_curve" description="output.fix_deltawt_curve" mine-type="image/png" type="chart">
--- a/artifacts/doc/conf/artifacts/gaugedischarge.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischarge.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -8,8 +8,15 @@
         <outputmode name="computed_discharge_curve" description="computed_discharge_curve" mime-type="image/png" type="chart">
           <facets>
             <facet name="discharge_curve.curve" description="facet.discharge_curve.curve" />
-            <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-            <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+            <facet name="computed_discharge_curve.mainvalues.q" description="mainvalues.q"/>
+            <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+            <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+            <facet name="heightmarks_points" description="facet.other.wqkms"/>
+            <facet name="other.wqkms" description="facet.other.wqkms"/>
+            <facet name="other.wq"    description="Point-like data like fixations"/>
+            <facet name="other.wkms"  description="Point-like data like fixations"/>
+            <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+            <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
           </facets>
         </outputmode>
       </outputmodes>
--- a/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -6,11 +6,18 @@
             <data name="reference_gauge" type="Long" />
             <data name="gauge_name" type="String" />
             <outputmodes>
-                <outputmode name="discharge_curve" description="output.discharge_curve" type="chart">
+                <outputmode name="discharge_curve" description="output.discharge_curve" mime-type="image/png" type="chart">
                     <facets>
                         <facet name="gauge_discharge_curve"/>
-                        <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-                        <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+                        <facet name="mainvalues.q" description="mainvalues.q"/>
+                        <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+                        <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+                        <facet name="heightmarks_points" description="facet.other.wqkms"/>
+                        <facet name="other.wqkms" description="facet.other.wqkms"/>
+                        <facet name="other.wq"    description="Point-like data like fixations"/>
+                        <facet name="other.wkms"  description="Point-like data like fixations"/>
+                        <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+                        <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
                     </facets>
                 </outputmode>
                 <outputmode name="computed_dischargecurve_at_export" description="output.computed_dischargecurve_at_export" mime-type="text/plain" type="export">
--- a/artifacts/doc/conf/artifacts/minfo.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/minfo.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -524,6 +524,14 @@
                         <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
                     </facets>
                 </outputmode>
+                <outputmode name="sedimentload_ls_export" description="output.sedimentload_ls_export" mime-type="text/plain" type="export">
+                    <facets>
+                        <facet name="csv" description="facet.sedimentload_ls_export.csv" />
+                        <!--
+                        <facet name="pdf" description=".pdf" />
+                        -->
+                    </facets>
+                </outputmode>
                 <outputmode name="sedimentload_ls_report" description="output.sedimentload_ls_report" mime-type="text/xml" type="report">
                     <facets>
                         <facet name="report" description="facet.sedimentload_ls_export.report" />
--- a/artifacts/doc/conf/artifacts/winfo.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/artifacts/winfo.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -641,8 +641,8 @@
                         <facet name="historical_discharge.wq.q"/>
                         <facet name="historical_discharge.wq.w"/>
                         <facet name="historical_discharge.wq.curve"/>
-                        <facet name="historical_discharge.mainvalues.q"/>
-                        <facet name="historical_discharge.mainvalues.w"/>
+                        <facet name="mainvalues.q"/>
+                        <facet name="mainvalues.w"/>
                         <facet name="historical_discharge_wq.manualpoints"/>
                     </facets>
                 </outputmode>
--- a/artifacts/doc/conf/conf.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/conf.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -363,6 +363,7 @@
         <output-generator name="fix_vollmer_wq_curve">org.dive4elements.river.exports.fixings.FixWQCurveGenerator</output-generator>
         <output-generator name="fix_vollmer_wq_curve_chartinfo">org.dive4elements.river.exports.fixings.FixWQCurveInfoGenerator</output-generator>
         <output-generator name="sedimentload_ls">org.dive4elements.river.exports.minfo.SedimentLoadLSGenerator</output-generator>
+        <output-generator name="sedimentload_ls_export">org.dive4elements.river.exports.minfo.SedimentLoadExporter</output-generator>
         <output-generator name="sedimentload_ls_chartinfo">org.dive4elements.river.exports.minfo.SedimentLoadLSInfoGenerator</output-generator>
         <!-- Error report generators. -->
         <output-generator name="discharge_longitudinal_section_report">org.dive4elements.river.exports.ReportGenerator</output-generator>
--- a/artifacts/doc/conf/meta-data.xml	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/doc/conf/meta-data.xml	Wed Jul 31 11:31:23 2013 +0200
@@ -36,6 +36,7 @@
     </dc:macro>
 
     <dc:macro name="generate-system-content">
+      <dc:message>Generate system content with variables: {dc:dump-variables()}</dc:message>
       <dc:call-macro name="artifact-range">
         <dc:call-macro name="km-filtered-wsts">
           <dc:choose>
@@ -44,6 +45,7 @@
                 Recommendations (client shall load immediately).
               </dc:comment>
               <dc:iterate var="out" container="artifact-outs">
+                <dc:message>Rec out iteration for: {$out}</dc:message>
                 <dc:choose>
                   <dc:when test="$out = 'w_differences'">
                     <dc:call-macro name="annotations"/>
@@ -122,6 +124,7 @@
                 Non - Recommendations.
               </dc:comment>
               <dc:iterate var="out" container="artifact-outs">
+                <dc:message>Non Rec out iteration for: {$out}</dc:message>
                 <dc:choose>
                   <dc:when test="$out = 'cross_section'">
                     <dc:call-macro name="basedata_0"/>
@@ -143,8 +146,29 @@
                   <dc:when test="$out = 'discharge_longitudinal_section'">
                     <dc:call-macro name="longitudinal-section-prototype"/>
                   </dc:when>
+                  <dc:when test="$out = 'historical_discharge_wq'">
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
+                    <dc:call-macro name="basedata_2_fixations_wqkms"/>
+                    <dc:call-macro name="basedata_5_flood-protections"/>
+                    <dc:call-macro name="basedata_0"/>
+                    <dc:call-macro name="basedata_1_additionals"/>
+                    <dc:call-macro name="basedata_4_heightmarks-points"/>
+                    <computed_discharge_curve>
+                      <dc:call-macro name="mainvalues"/>
+                    </computed_discharge_curve>
+                  </dc:when>
                   <dc:when test="$out = 'discharge_curve'">
-                    <dc:call-macro name="mainvalues"/>
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
+                    <dc:call-macro name="basedata_2_fixations_wqkms"/>
+                    <dc:call-macro name="basedata_5_flood-protections"/>
+                    <dc:call-macro name="basedata_0"/>
+                    <dc:call-macro name="basedata_1_additionals"/>
+                    <dc:call-macro name="basedata_4_heightmarks-points"/>
+                    <computed_discharge_curve>
+                      <dc:call-macro name="mainvalues"/>
+                    </computed_discharge_curve>
                   </dc:when>
                   <dc:when test="$out = 'duration_curve'">
                     <dc:call-macro name="mainvalues"/>
@@ -161,14 +185,14 @@
                     <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
                   </dc:when>
                   <dc:when test="$out = 'fix_wq_curve'">
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
                     <dc:call-macro name="basedata_0_wq"/>
                     <dc:call-macro name="basedata_1_additionals_marks"/>
                     <dc:call-macro name="basedata_2_fixations_wqkms"/>
                     <dc:call-macro name="basedata_3_officials"/>
                     <dc:call-macro name="basedata_4_heightmarks-points"/>
                     <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
-                    <dc:call-macro name="discharge_table_gauge"/>
-                    <dc:call-macro name="discharge_fix_wq"/>
                   </dc:when>
                   <dc:when test="$out = 'fix_longitudinal_section_curve'">
                     <dc:call-macro name="longitudinal-section-prototype"/>
@@ -254,7 +278,8 @@
                     </dc:choose>
                   </dc:when>
                   <dc:when test="$out = 'computed_discharge_curve'">
-                    <dc:call-macro name="discharge_computed"/>
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
                     <dc:call-macro name="basedata_2_fixations_wqkms"/>
                     <dc:call-macro name="basedata_5_flood-protections"/>
                     <dc:call-macro name="basedata_0"/>
@@ -341,9 +366,6 @@
                     <dc:when test="$out = 'reference_curve'">
                       <dc:call-macro name="reference-curves"/>
                     </dc:when>
-                    <dc:when test="$out = 'computed_discharge_curve'">
-                      <dc:call-macro name="computed-discharge-curve"/>
-                    </dc:when>
                     <dc:when test="$out = 'cross_section'">
                       <dc:call-macro name="waterlevels"/>
                     </dc:when>
@@ -355,6 +377,7 @@
                     </dc:when>
                     <dc:when test="$out = 'fix_wq_curve'">
                       <dc:call-macro name="fix-wq-curve"/>
+                      <dc:call-macro name="waterlevels-fix"/>
                     </dc:when>
                     <dc:when test="$out = 'duration_curve'">
                       <dc:call-macro name="duration-curve"/>
@@ -366,9 +389,6 @@
                     <dc:when test="$out = 'waterlevels'">
                       <dc:call-macro name="waterlevels-fix"/>
                     </dc:when>
-                    <dc:when test="$out = 'fix_wq_curve'">
-                      <dc:call-macro name="waterlevels-fix"/>
-                    </dc:when>
                     <dc:when test="$out = 'floodmap'">
                       <dc:call-macro name="flood-map"/>
                     </dc:when>
@@ -377,7 +397,7 @@
                     </dc:when>
                     <dc:when test="$out = 'bedheight_middle'">
                       <dc:call-macro name="waterlevels-discharge"/>
-                      <dc:call-macro name="waterlevels-fix"/>
+                      <dc:call-macro name="differenceable-fix"/>
                     </dc:when>
                     <dc:when test="$out = 'floodmap-hws'">
                       <dc:call-macro name="floodmap-hws-user"/>
@@ -460,31 +480,52 @@
       </dc:filter>
     </dc:macro>
 
-    <dc:macro name="computed-discharge-curve">
-      <dc:filter expr="$facet_name = 'computed_discharge_curve.q'">
+
+    <dc:macro name="historical_discharge_curve">
+      <dc:context>
+        <dc:statement>
+          SELECT g.id   AS gauge_id,
+                 g.name AS gauge_name,
+                 dt.id AS dt_id,
+                 t.start_time AS start_time,
+                 t.stop_time AS stop_time,
+                 dt.description AS desc,
+                 dt.bfg_id AS bfg_id
+          FROM gauges g
+          JOIN discharge_tables dt ON g.id = dt.gauge_id
+          LEFT JOIN time_intervals t ON dt.time_interval_id = t.id
+          WHERE g.river_id = ${river_id}
+          AND dt.kind &lt;&gt; 0
+          AND g.station = ${fromkm}
+          AND g.station = ${tokm}
+          ORDER BY start_time
+        </dc:statement>
         <dc:if test="dc:has-result()">
-          <computed_discharge_curves>
-            <dc:for-each>
-              <dc:element name="${facet_name}">
-                <dc:attribute name="description" value="${facet_description}"/>
-                <dc:attribute name="factory" value="winfo"/>
-                <dc:attribute name="target_out" value="${out}"/>
-                <dc:attribute name="artifact-id" value="${a_id}"/>
-                <dc:attribute name="ids" value="${a_id}"/>
-                <dc:attribute name="out" value="computed_discharge_curve"/>
-              </dc:element>
-            </dc:for-each>
-          </computed_discharge_curves>
+          <historical_discharge_curves>
+            <dc:group expr="$gauge_name">
+              <dc:for-each>
+                <dc:variable name="combined_desc" expr="concat($bfg_id, ' ', dc:date-format('dd.MM.yyyy', $start_time), ' - ', dc:date-format('dd.MM.yyyy', $stop_time))"/>
+                <dc:message>
+                  Hallo ANDRE23 {dc:dump-variables()}
+                </dc:message>
+                <histdis name="{$combined_desc}"
+                  description="{$combined_desc}"
+                  factory="gaugedischarge" target_out="{$out}"
+                  ids="{$gauge_name};{$dt_id};{$combined_desc}"/>
+              </dc:for-each>
+            </dc:group>
+          </historical_discharge_curves>
         </dc:if>
-      </dc:filter>
+      </dc:context>
     </dc:macro>
 
+
     <dc:macro name="flood-map">
       <dc:filter expr="$facet_name = 'floodmap.wsplgen'">
         <dc:if test="dc:has-result()">
           <floodmap>
             <dc:for-each>
-              <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"/>
+              <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"/>
               <dc:element name="${facet_name}">
                 <dc:attribute name="description" value="${combined_desc}"/>
                 <dc:attribute name="factory" value="winfo"/>
@@ -503,7 +544,7 @@
       <dc:filter expr="$out_name = 'cross_section'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))">
               <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment>
               <longitudinal_section_columns description="{dc:group-key()}">
                 <dc:for-each>
@@ -527,7 +568,7 @@
       <dc:filter expr="$out_name = 'longitudinal_section'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))">
               <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment>
               <longitudinal_section_columns description="{dc:group-key()}">
                 <dc:for-each>
@@ -555,7 +596,7 @@
         starts-with($facet_name, 'fix_reference_events_ls'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -583,7 +624,7 @@
         starts-with($facet_name, 'fix_sector_average_dwt'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -608,7 +649,7 @@
         starts-with($facet_name, 'fix_deviation_dwt'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -631,7 +672,7 @@
       <dc:filter expr="$out_name = 'fix_derivate_curve' and $facet_name = 'fix_derivate_curve'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -658,7 +699,7 @@
         $facet_name = 'fix_wq_curve')">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -702,7 +743,7 @@
         (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:choose>
@@ -739,11 +780,10 @@
         </beddifferences>
       </dc:comment>
       <dc:comment comment="Vollmer curves need own factory"/>
-      <dc:filter expr="$a_state = 'state.fix.vollmer.compute' and (($out_name = 'longitudinal_section' and $facet_name = 'longitudinal_section.w') and
-        (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location')))">
+      <dc:filter expr="$a_state = 'state.fix.vollmer.compute' and $facet_name = 'longitudinal_section.w'">
         <dc:if test="dc:has-result()">
           <vollmer_waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:choose>
@@ -773,7 +813,7 @@
         (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location')))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
                 <waterlevels description="{dc:group-key()}">
                   <dc:for-each>
                     <dc:choose>
@@ -826,7 +866,7 @@
         $facet_name = 'bed_longitudinal_section.bed_diameter_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_bed>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <quality-bed description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -850,7 +890,7 @@
         $facet_name = 'bed_longitudinal_section.bedload_diameter'">
         <dc:if test="dc:has-result()">
           <bed_quality_load>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <quality-load description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -875,7 +915,7 @@
          $facet_name = 'bed_longitudinal_section.sediment_density_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_density>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <density description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -900,7 +940,7 @@
         $facet_name = 'bed_longitudinal_section.porosity_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_porosity>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <porosity description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -927,7 +967,7 @@
          $facet_name = 'flow_velocity.mainchannel.filtered')">
         <dc:if test="dc:has-result()">
           <flow-velocity>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <flow description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -950,7 +990,7 @@
       <dc:filter expr="$out_name = 'sedimentload_ls' and starts-with($facet_name, 'sedimentload')">
         <dc:if test="dc:has-result()">
           <sediment-load>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <load description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -974,7 +1014,7 @@
         (starts-with($facet_name, 'bedheight_difference.year') or starts-with($facet_name, 'bedheight_difference.epoch'))">
         <dc:if test="dc:has-result()">
           <bedheight_differences>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <difference description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -994,10 +1034,10 @@
     </dc:macro>
 
     <dc:macro name="waterlevels-discharge">
-      <dc:filter expr="$out_name = 'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w">
+      <dc:filter expr="$out_name = 'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w'">
         <dc:if test="dc:has-result()">
           <waterlevels-discharge>
-            <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <discharge description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -1285,36 +1325,30 @@
     <dc:macro name="discharge_table_gauge">
       <dc:context>
         <dc:statement>
-          SELECT id   AS gauge_id,
-                 name AS gauge_name
-          FROM gauges WHERE river_id = ${river_id}
+          SELECT g.id   AS gauge_id,
+                 g.name AS gauge_name,
+                 t.start_time AS start_time,
+                 t.stop_time AS stop_time
+          FROM gauges g
+          JOIN discharge_tables dt ON g.id = dt.gauge_id
+          LEFT JOIN time_intervals t ON dt.time_interval_id = t.id
+          WHERE g.river_id = ${river_id}
+          AND dt.kind = 0
+          AND g.station = ${fromkm}
+          AND g.station = ${tokm}
         </dc:statement>
         <dc:if test="dc:has-result()">
-          <discharge_table_nn>
-            <discharge_table_gauge>
-              <dc:for-each>
-                <gauge name="{$gauge_name}"
-                  factory="gaugedischarge" target_out="{$out}"
-                  from="{$g_start}"
-                  to="{$g_stop}"
-                  ids="{$gauge_name}"/>
-              </dc:for-each>
-            </discharge_table_gauge>
-          </discharge_table_nn>
+          <current_gauge>
+            <dc:for-each>
+              <gauge name="{$gauge_name} ({dc:date-format('dd.MM.yyyy', $start_time)})"
+                factory="gaugedischarge" target_out="{$out}"
+                ids="{$gauge_name}"/>
+            </dc:for-each>
+          </current_gauge>
         </dc:if>
       </dc:context>
     </dc:macro>
 
-    <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment>
-    <dc:macro name="discharge_computed">
-      <dc:call-macro name="discharge_table_gauge"/>
-    </dc:macro>
-
-    <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment>
-    <dc:macro name="discharge_fix_wq">
-      <dc:call-macro name="discharge_table_gauge"/>
-    </dc:macro>
-
     <dc:macro name="qsectors">
       <qsector factory="qsectors" ids="{$river_id}" target_out="{$out}" />
     </dc:macro>
@@ -1937,7 +1971,6 @@
           FROM floodplain fp
             JOIN floodplain_kinds flk on fp.kind_id = flk.id
           WHERE river_id = ${river_id}
-            AND kind_id=1
         </dc:statement>
         <dc:if test="dc:has-result()">
           <floodplain>
@@ -2459,17 +2492,21 @@
 
     <dc:macro name="all-user-artifacts">
         <dc:context connection="user">
-        <dc:comment>Select collections and masterartifacts.</dc:comment>
+          <dc:comment>Select collections and masterartifacts.
+            XXX: The cast is a quick hack because the ld_* values are
+            now TEXT fields. To properly fix / assess the problems here
+            an SLT evaluation is needed.
+          </dc:comment>
         <dc:statement>
           SELECT c.name                     AS collection_name,
                  ma.id                      AS a_id,
                  ma.state                   AS a_state,
                  ma.gid                     AS a_gid,
                  ma.creation                AS a_creation,
-                 COALESCE(ma.ld_mode, '')      AS ld_m,
-                 COALESCE(ma.ld_locations, '') AS ld_l,
-                 COALESCE(ma.ld_from, '')      AS ld_f,
-                 COALESCE(ma.ld_to, '')        AS ld_t,
+                 CAST(COALESCE(ma.ld_mode, '') AS VARCHAR(255)) AS ld_m,
+                 CAST(COALESCE(ma.ld_locations, '') AS VARCHAR(255)) AS ld_l,
+                 CAST(COALESCE(ma.ld_from, '') AS VARCHAR(255)) AS ld_f,
+                 CAST(COALESCE(ma.ld_to, '') AS VARCHAR(255)) AS ld_t,
                  o.name                        AS out_name,
                  o.id                          AS out_id,
                  f.name                        AS facet_name,
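
The recurring 'H:m:s' -> 'H:mm:ss' changes in this file matter because the formatted creation date is part of each dc:group key: unpadded minutes and seconds yield keys like "9:5:7" that are ambiguous and hard to read. A plain-Java illustration, assuming dc:date-format follows java.text.SimpleDateFormat pattern semantics:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.GregorianCalendar;

    public class DateKeySketch {
        public static void main(String[] args) {
            // 31 Jul 2013, 09:05:07 (month is 0-based in GregorianCalendar)
            Date d = new GregorianCalendar(2013, 6, 31, 9, 5, 7).getTime();
            System.out.println(
                new SimpleDateFormat("dd.MM.yyyy - H:m:s").format(d));
            // -> 31.07.2013 - 9:5:7
            System.out.println(
                new SimpleDateFormat("dd.MM.yyyy - H:mm:ss").format(d));
            // -> 31.07.2013 - 9:05:07
        }
    }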
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Wed Jul 31 11:31:23 2013 +0200
@@ -12,6 +12,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.HashMap;
 
 import org.apache.log4j.Logger;
 
@@ -35,6 +36,7 @@
 
 import org.dive4elements.river.model.Gauge;
 import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.DischargeTable;
 
 import org.dive4elements.river.utils.RiverUtils;
 
@@ -52,6 +54,8 @@
     /** The name of the artifact. */
     public static final String ARTIFACT_NAME = "gaugedischarge";
 
+    /** The name the facet should get in the UI (wish name from the datacage). */
+    protected String facetWishName;
 
     /**
      * Trivial Constructor.
@@ -79,6 +83,13 @@
         String ids = StaticD4EArtifact.getDatacageIDValue(data);
         addStringData("ids", ids);
         logger.debug("id for gaugedischarge: " + ids);
+        String[] splitIds = ids.split(";");
+        /* We assume that if an ids string containing a ';' is given,
+         * the format is <gauge_name>;<discharge_table_id>;<facet_desc>,
+         * so that a specific discharge table can be selected. */
+        if (splitIds.length > 2) {
+            facetWishName = splitIds[2];
+        }
         super.setup(identifier, factory, context, callMeta, data);
     }
 
@@ -121,7 +132,10 @@
 
     /** Get the Gauges name which came with datacage data-document. */
     public String getGaugeName() {
-        return this.getDataAsString("ids");
+        if (getDataAsString("ids") == null) {
+            return null;
+        }
+        return getDataAsString("ids").split(";")[0];
     }
 
 
@@ -166,13 +180,34 @@
         }
         */
 
-        DischargeTables dt = new DischargeTables(river.getName(), getDataAsString("ids"));
+        Map<String, double [][]> map;
 
-        Map<String, double [][]> map = dt.getValues();
+        String[] ids = getDataAsString("ids").split(";");
+        if (ids.length > 1) {
+           /* We assume that if an ids string containing a ';' is given,
+            * the format is <gauge_name>;<discharge_table_id>;<facet_desc>,
+            * so that a specific discharge table can be selected. */
+            int tableId = 0;
+            try {
+                tableId = Integer.parseInt(ids[1]);
+            } catch (NumberFormatException e) {
+                logger.error("Discharge tables ids string is wrong." +
+                        " Fromat is <gauge_name>;<discharge_table_id>;<facet_desc>" +
+                        " Fix your Datacage!");
+                // Let's rather break down completly then show the wrong data.
+                return null;
+            }
+            DischargeTable table = DischargeTable.getDischargeTableById(tableId);
+            map = new HashMap<String, double [][]>();
+            map.put(getGaugeName(), DischargeTables.loadDischargeTableValues(table));
+        } else {
+            DischargeTables dt = new DischargeTables(river.getName(), getGaugeName());
+            map = dt.getValues();
+        }
 
         ArrayList<WQKms> res = new ArrayList<WQKms>();
 
-        Gauge gauge = river.determineGaugeByName(this.getDataAsString("ids"));
+        Gauge gauge = river.determineGaugeByName(getGaugeName());
 
         String name = getGaugeName();
         double [][] values = map.get(name);
@@ -191,5 +226,13 @@
             res.toArray(new WQKms[res.size()]),
             new Calculation());
     }
+
+    /** Gets the facet wish name.
+     *
+     * This is a hack to enable setting the name of the facet / theme in the
+     * UI from the datacage setting. */
+    public String getFacetWishName() {
+        return facetWishName;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
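
The ';'-separated ids convention introduced above lets the datacage hand a specific historical discharge table and a display name to the artifact, while a bare gauge name keeps the old behaviour. A runnable sketch of the convention (class name and sample values are illustrative, not project API):

    public class GaugeIdsSketch {
        public static void main(String[] args) {
            // Three-part form emitted by the historical_discharge_curve macro:
            // <gauge_name>;<discharge_table_id>;<facet_desc>
            String ids = "Example Gauge;42;4711 01.11.1970 - 01.11.1980";
            String[] parts = ids.split(";");
            String gaugeName = parts[0];
            Integer tableId = parts.length > 1
                ? Integer.valueOf(parts[1])  // NumberFormatException is treated
                : null;                      // as fatal in the artifact
            String facetWishName = parts.length > 2 ? parts[2] : null;
            System.out.println(
                gaugeName + " / " + tableId + " / " + facetWishName);
        }
    }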
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java	Wed Jul 31 11:31:23 2013 +0200
@@ -81,7 +81,10 @@
                     String   outputName
                 ) {
                     return outputName.equals("computed_discharge_curve")
-                        || outputName.equals("duration_curve");
+                        || outputName.equals("duration_curve")
+                        || outputName.equals("discharge_curve")
+                        || outputName.equals("fix_wq_curve")
+                        || outputName.equals("historical_discharge_wq");
                 }
             });
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java	Wed Jul 31 11:31:23 2013 +0200
@@ -91,6 +91,10 @@
             super(artifact);
         }
 
+        private boolean isUnofficial() {
+            return getIdPart(2) != null && !getIdPart(2).equals("1");
+        }
+
         @Override
         protected String getFacetType() {
             return FLOODMAP_RIVERAXIS;
@@ -98,12 +102,19 @@
 
         @Override
         protected String getLayer() {
+            if (isUnofficial()) {
+                return super.getLayer();
+            }
             return RiverFactory.getRiver(getRiverId()).getName();
         }
 
         @Override
         protected String getUrl() {
-            return RiverUtils.getRiverWMSUrl();
+            if (isUnofficial()) {
+                return RiverUtils.getUserWMSUrl(artifact.identifier());
+            } else {
+                return RiverUtils.getRiverWMSUrl();
+            }
         }
 
         @Override
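
The new isUnofficial() check routes river axes whose third ids part is not "1" to the per-user WMS instead of the static river WMS. A runnable sketch of that decision; the stub URLs stand in for RiverUtils.getUserWMSUrl() and RiverUtils.getRiverWMSUrl():

    public class AxisUrlSketch {
        static String userWms(String artifactId) {               // stub
            return "http://example.invalid/wms/user/" + artifactId;
        }
        static String riverWms() {                               // stub
            return "http://example.invalid/wms/rivers";
        }

        static String urlFor(String kindPart, String artifactId) {
            // "1" (or a missing kind part) means the official axis.
            boolean unofficial = kindPart != null && !kindPart.equals("1");
            return unofficial ? userWms(artifactId) : riverWms();
        }

        public static void main(String[] args) {
            System.out.println(urlFor("1", "abc"));   // official -> river WMS
            System.out.println(urlFor("2", "abc"));   // unofficial -> user WMS
            System.out.println(urlFor(null, "abc"));  // no kind part -> river WMS
        }
    }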
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java	Wed Jul 31 11:31:23 2013 +0200
@@ -91,6 +91,7 @@
         return null;
     }
 
+    /** Returns the selected unit (t/a or m3/a). */
     public String getUnit () {
         if (unit == null) {
             unit = getString("unit");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java	Wed Jul 31 11:31:23 2013 +0200
@@ -70,6 +70,7 @@
     private String SQL_ARTIFACT_ID_NEXTVAL    = "artifact.id.nextval";
     private String SQL_INSERT_ARTIFACT        = "insert.artifact";
     private String SQL_ARTIFACT_DATA_ID_NEXTVAL = "artifact.data.id.nextval";
+    private String SQL_UPDATE_ARTIFACT_STATE  = "update.artifact.state";
     private String SQL_INSERT_ARTIFACT_DATA   = "insert.artifact.data";
     private String SQL_OUT_ID_NEXTVALUE       = "out.id.nextval";
     private String SQL_INSERT_OUT             = "insert.out";
@@ -382,6 +383,7 @@
         SQL_INSERT_ARTIFACT     = sql.get(SQL_INSERT_ARTIFACT);
         SQL_ARTIFACT_DATA_ID_NEXTVAL = sql.get(SQL_ARTIFACT_DATA_ID_NEXTVAL);
         SQL_INSERT_ARTIFACT_DATA = sql.get(SQL_INSERT_ARTIFACT_DATA);
+        SQL_UPDATE_ARTIFACT_STATE = sql.get(SQL_UPDATE_ARTIFACT_STATE);
         SQL_OUT_ID_NEXTVALUE     = sql.get(SQL_OUT_ID_NEXTVALUE);
         SQL_INSERT_OUT           = sql.get(SQL_INSERT_OUT);
         SQL_FACET_ID_NEXTVAL     = sql.get(SQL_FACET_ID_NEXTVAL);
@@ -401,6 +403,7 @@
             sql.get(SQL_DELETE_ARTIFACT_BY_GID);
     }
 
+    /** Sum over facets in outs. */
     protected static final int numFacets(List<Output> outs) {
         int sum = 0;
         for (Output out: outs) {
@@ -608,6 +611,7 @@
         // write new data
         storeData(res[0], flys);
         storeOuts(res[0], flys, context);
+        storeState(res[0], flys);
     }
 
     public void createdUser(
@@ -884,9 +888,30 @@
         }
     }
 
+    /** Update state of artifact. */
+    protected void storeState(
+        final int         artifactId,
+        final D4EArtifact artifact) {
+        SQLExecutor.Instance exec = sqlExecutor.new Instance() {
+            @Override
+            public boolean doIt() throws SQLException {
+                prepareStatement(SQL_UPDATE_ARTIFACT_STATE);
+                stmnt.setInt(1, artifactId);
+                stmnt.setString(2, artifact.getCurrentStateId());
+                stmnt.execute();
+                conn.commit();
+                return true;
+            }
+        };
+
+        if (!exec.runWrite()) {
+            log.error("storing state of artifact failed ("+artifactId+","+artifact.getCurrentStateId()+")");
+        }
+    }
+
     protected void storeData(
-        final int     artifactId,
-        D4EArtifact  artifact
+        final int   artifactId,
+        D4EArtifact artifact
     ) {
         final Collection<StateData> data = artifact.getAllData();
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java	Wed Jul 31 11:31:23 2013 +0200
@@ -61,6 +61,7 @@
         }
     }
 
+    /** Stores the artifact in artifact-db, if any. */
     @Override
     public void storedArtifact(Artifact artifact, Backend backend) {
         log.debug("storedArtifact");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java	Wed Jul 31 11:31:23 2013 +0200
@@ -10,9 +10,13 @@
 
 import java.util.List;
 
-import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
+import org.apache.log4j.Logger;
 
-import org.apache.log4j.Logger;
+/* XXX:
+ * Warning: This class is called StdDevOutlier because it implements what
+ * the BFG calls the "Standard Deviation" method for outlier removal.
+ * But the actual calculation used to remove the outliers computes
+ * the Standard Error and not the Standard Deviation! */
 
 public class StdDevOutlier
 {
@@ -30,12 +34,12 @@
     public static Integer findOutlier(
         List<Double> values,
         double       factor,
-        double []    stdDevResult
+        double []    stdErrResult
     ) {
         boolean debug = log.isDebugEnabled();
 
         if (debug) {
-            log.debug("factor for std dev: " + factor);
+            log.debug("factor for std dev test (that calculates std err): " + factor);
         }
 
         int N = values.size();
@@ -48,31 +52,34 @@
             return null;
         }
 
-        StandardDeviation stdDev = new StandardDeviation();
-
         double maxValue = -Double.MAX_VALUE;
         int    maxIndex = -1;
+
+        double squareSumResiduals = 0;
+        for (Double db: values) {
+            squareSumResiduals += Math.pow(db, 2);
+        }
+
+        double stdErr = Math.sqrt(squareSumResiduals / (N - 2));
+
+        double accepted = factor * stdErr;
+
         for (int i = N-1; i >= 0; --i) {
             double value = Math.abs(values.get(i));
-            stdDev.increment(value);
             if (value > maxValue) {
                 maxValue = value;
                 maxIndex = i;
             }
         }
 
-        double sd = stdDev.getResult();
-
-        double accepted = factor * sd;
-
         if (debug) {
-            log.debug("std dev: " + stdDev);
+            log.debug("std err: " + stdErr);
             log.debug("accepted: " + accepted);
             log.debug("max value: " + maxValue);
         }
 
-        if (stdDevResult != null) {
-            stdDevResult[0] = sd;
+        if (stdErrResult != null) {
+            stdErrResult[0] = stdErr;
         }
 
         return maxValue > accepted ? maxIndex : null;
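
As the rewritten class comment warns, the new test thresholds on the standard error sqrt(sum(r_i^2) / (N - 2)) of the residuals instead of their standard deviation. A self-contained sketch of the same logic with a worked example (names are illustrative, not the project class):

    import java.util.Arrays;
    import java.util.List;

    public class StdErrOutlierSketch {
        /** Index of the largest |residual| if it exceeds factor * stdErr,
         *  else null. */
        static Integer findOutlier(List<Double> residuals, double factor) {
            int n = residuals.size();
            if (n < 3) {
                return null;              // need N - 2 > 0
            }
            double squareSum = 0d;
            for (double r : residuals) {
                squareSum += r * r;
            }
            double stdErr   = Math.sqrt(squareSum / (n - 2));
            double accepted = factor * stdErr;

            double maxValue = -Double.MAX_VALUE;
            int    maxIndex = -1;
            for (int i = n - 1; i >= 0; --i) {
                double v = Math.abs(residuals.get(i));
                if (v > maxValue) {
                    maxValue = v;
                    maxIndex = i;
                }
            }
            return maxValue > accepted ? maxIndex : null;
        }

        public static void main(String[] args) {
            // stdErr = sqrt(0.85 / 3) ~ 0.53; accepted = 1.5 * 0.53 ~ 0.80,
            // so the residual 0.9 at index 2 is flagged as an outlier.
            List<Double> rs = Arrays.asList(0.1, -0.1, 0.9, -0.1, 0.1);
            System.out.println(findOutlier(rs, 1.5));  // -> 2
        }
    }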
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java	Wed Jul 31 11:31:23 2013 +0200
@@ -313,7 +313,7 @@
     String SEDIMENT_LOAD_SUSP_SEDIMENT = "sedimentload.susp_sediment";
     String SEDIMENT_LOAD_TOTAL         = "sedimentload.total";
     String SEDIMENT_LOAD_TOTAL_LOAD    = "sedimentload.total_load";
-    String SEDIMENT_LOAD_UNKOWN        = "sedimentload.unknown";
+    String SEDIMENT_LOAD_UNKNOWN       = "sedimentload.unknown";
 
     String SQ_OVERVIEW       = "sq_overview";
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java	Wed Jul 31 11:31:23 2013 +0200
@@ -128,6 +128,10 @@
         StickyAxisAnnotation annotation = null;
         if (this.name.equals(DURATION_MAINVALUES_Q)) {
             for (NamedDouble q: qs) {
+                if (Double.isNaN(q.getValue())) {
+                    logger.warn("NaN MainValue " + q.getName());
+                    continue;
+                }
                 annotation =
                     new StickyAxisAnnotation(
                         q.getName(),
@@ -142,6 +146,10 @@
         }
         else {
             for (NamedDouble q: qs) {
+                if (Double.isNaN(q.getValue())) {
+                    logger.warn("NaN MainValue " + q.getName());
+                    continue;
+                }
                 annotation =
                     new StickyAxisAnnotation(
                         q.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java	Wed Jul 31 11:31:23 2013 +0200
@@ -110,7 +110,7 @@
             if (context.getContextValue(FixChartGenerator.CURRENT_KM) != null) {
                 Double ckm = (Double) context.getContextValue(FixChartGenerator.CURRENT_KM);
                 // Return linearly interpolated values, in m if not at gauge,
-                // in cm if at gauge.
+                // in cm over datum if at gauge.
                 ws = mvArtifact.getMainValuesW(new double[] {ckm});
             }
         }
@@ -122,6 +122,11 @@
         }
 
         for (NamedDouble w: ws) {
+            logger.debug("W Annotation at " + w.getValue() + " ("+w.getName()+")"+ wqdays);
+            if (Double.isNaN(w.getValue())) {
+                logger.warn("NaN MainValue " + w.getName());
+                continue;
+            }
             StickyAxisAnnotation annotation =
                 new StickyAxisAnnotation(
                     w.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java	Wed Jul 31 11:31:23 2013 +0200
@@ -850,6 +850,12 @@
             q, referenceKm, kms, ws, qs, 0, kms.length, errors);
     }
 
+    /**
+     * Interpolate Q at given positions.
+     * @param kms positions for which to calculate qs and ws
+     * @param ws [out] calculated ws for kms
+     * @param qs [out] looked up qs for kms.
+     */
     public QPosition interpolate(
         double      q,
         double      referenceKm,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java	Wed Jul 31 11:31:23 2013 +0200
@@ -42,6 +42,7 @@
     public SedimentLoadCalculation() {
     }
 
+    /** Returns CalculationResult with array of SedimentLoadResults. */
     public CalculationResult calculate(SedimentLoadAccess access) {
         logger.info("SedimentLoadCalculation.calculate");
 
@@ -87,6 +88,7 @@
         return new CalculationResult();
     }
 
+    /** Returns CalculationResult with array of SedimentLoadResults. */
     private CalculationResult internalCalculate() {
         logger.debug("internalCalulate; mode:" + yearEpoch);
         if (yearEpoch.equals("year")) {
@@ -259,8 +261,11 @@
         return result;
     }
 
-    /** Fetch loads for a single year, calculate total and
-     * return the result containing both. */
+    /**
+     * Fetch loads for a single year, calculate total and
+     * return the result containing both.
+     * @param y year, e.g. 1980
+     */
     private SedimentLoadResult calculateYear(int y) {
         SedimentLoad load = SedimentLoadFactory.getLoadWithData(
             this.river,
@@ -346,12 +351,15 @@
 
     /**
      * Set total values in load.
-     * Therefore, run over the kms and find ranges where either all
+     *
+     * Therefore, run over the sorted kms and find ranges where either all
      * or all Geschiebe or just the Schwebstoff fractions are set.
      * Merge these ranges and add (maybe new) respective fractions to
-     * load.
+     * load. In the process, remember any 'unfinished' ends from measurements
+     * where the km-ranges did not completely match.
+     *
      * @param load SedimentLoad to add total values (and ranges) to.
-     * @return input param load.
+     * @return input param load, with total values set.
      */
     private SedimentLoad partialTotal(SedimentLoad load) {
         SedimentLoad fairLoad = load;
@@ -501,6 +509,11 @@
     }
 
 
+    /**
+     * Transform values in load.
+     * @param load The load of which values should be transformed.
+     * @return parameter load with transformed values.
+     */
     private SedimentLoad calculateUnit(SedimentLoad load, int year) {
         SedimentDensity density =
             SedimentDensityFactory.getSedimentDensity(river, kmLow, kmUp, year);
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java	Wed Jul 31 11:31:23 2013 +0200
@@ -55,6 +55,7 @@
         super(idx, name, description, type, hash, stateId);
     }
 
+    @Override
     public Object getData(Artifact artifact, CallContext context) {
         logger.debug("Get data for sediment load at index: " + index);
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java	Wed Jul 31 11:31:23 2013 +0200
@@ -35,10 +35,12 @@
     /** Private logger to use here. */
     private static Logger log = Logger.getLogger(SedimentLoadFactory.class);
 
+    // Cache name/keys
     public static final String LOADS_CACHE_NAME = "sedimentloads";
     public static final String LOAD_DATA_CACHE_NAME = "sedimentload-data";
 
-    /** Query to get km and ws for wst_id and column_pos. */
+    /** Query to get description and start year of single-type
+     * sediment yields. */
     public static final String SQL_SELECT_SINGLES =
         "SELECT DISTINCT " +
         "       sy.description AS description, " +
@@ -51,7 +53,23 @@
         "       AND ti.stop_time IS NULL " +
         "       AND syv.station BETWEEN :startKm AND :endKm";
 
-    /** Query to get name for wst_id and column_pos. */
+    /** Query to get description, name and time range for official
+     * epoch-type sediment yields. */
+    public static final String SQL_SELECT_OFFEPOCHS =
+        "SELECT DISTINCT " +
+        "       ti.start_time AS startYear, " +
+        "       ti.stop_time AS end " +
+        "   FROM     sediment_yield sy " +
+        "       JOIN rivers r ON sy.river_id = r.id " +
+        "       JOIN sediment_yield_values syv ON sy.id = syv.sediment_yield_id " +
+        "       JOIN time_intervals ti ON sy.time_interval_id = ti.id " +
+        "   WHERE   r.name = :name " +
+        "       AND ti.stop_time IS NOT NULL " +
+        "       AND syv.station BETWEEN :startKm AND :endKm " +
+        "       AND sy.kind = 1";
+
+    /** Query to get description, name and time range for epoch-type
+     * sediment yields. */
     public static final String SQL_SELECT_EPOCHS =
         "SELECT DISTINCT " +
         "       sy.description AS description, " +
@@ -115,8 +133,10 @@
         "    JOIN rivers r ON sy.river_id = r.id " +
         "    JOIN time_intervals ti ON sy.time_interval_id = ti.id " +
         "    JOIN grain_fraction gf ON sy.grain_fraction_id = gf.id " +
-        "WHERE r.name = :river" +
-        "    AND gf.name = 'unknown'";
+        "    JOIN units u ON sy.unit_id = u.id " +
+        "WHERE r.name = :river " +
+        "    AND gf.name = 'unknown' " +
+        "    AND u.name = :unit";
 
     public static final String SQL_SELECT_EPOCHS_DATA =
         "SELECT" +
@@ -143,7 +163,8 @@
     }
 
     /**
-     *
+     * @param river name of river.
+     * @param type
      */
     public static SedimentLoad[] getLoads(
         String river,
@@ -287,6 +308,30 @@
             }
             return loads;
         }
+        else if (type.equals("off_epoch")) {
+            sqlQuery = session.createSQLQuery(SQL_SELECT_OFFEPOCHS)
+                .addScalar("startYear", StandardBasicTypes.DATE)
+                .addScalar("end", StandardBasicTypes.DATE);
+            sqlQuery.setString("name", river);
+            sqlQuery.setDouble("startKm", startKm);
+            sqlQuery.setDouble("endKm", endKm);
+            List<Object []> results = sqlQuery.list();
+
+            SedimentLoad[] loads = new SedimentLoad[results.size()];
+            for (int i = 0; i < results.size(); i++) {
+                Object[] row = results.get(i);
+                loads[i] = new SedimentLoad(
+                    ((Date) row[0]).toString() + (Date) row[1],
+                    (Date) row[0],
+                    (Date) row[1],
+                    true,
+                    "");
+            }
+            return loads;
+        }
+        else {
+            log.warn("getSedimentLoadsUncached does not understand type " + type);
+        }
         return new SedimentLoad[0];
     }
 
@@ -550,13 +595,18 @@
         return load;
     }
 
-    public static SedimentLoad[] getSedimentLoadUnknown(String river) {
+    /**
+     * Return sediment loads with 'unknown' fraction type.
+     * @param unit Restrict result set to those of given unit.
+     */
+    public static SedimentLoad[] getSedimentLoadUnknown(String river, String unit) {
         Session session = SessionHolder.HOLDER.get();
         SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_UNKNOWN)
             .addScalar("description", StandardBasicTypes.STRING)
             .addScalar("startYear", StandardBasicTypes.DATE)
             .addScalar("end", StandardBasicTypes.DATE);
         sqlQuery.setString("river", river);
+        sqlQuery.setString("unit", unit);
         List<Object[]> results = sqlQuery.list();
         SedimentLoad[] loads = new SedimentLoad[results.size()];
         int counter = 0;
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java	Wed Jul 31 11:31:23 2013 +0200
@@ -14,7 +14,7 @@
 extends DataFacet
 {
     /** Very own logger. */
-    private static Logger logger = Logger.getLogger(SedimentLoadFacet.class);
+    private static Logger logger = Logger.getLogger(SedimentLoadUnknownFacet.class);
 
     public SedimentLoadUnknownFacet() {
     }
@@ -24,6 +24,7 @@
         super(idx, name, description, type, hash, stateId);
     }
 
+    /** Get data from SedimentLoad with unknown type, from factory. */
     @Override
     public Object getData(Artifact artifact, CallContext context) {
         logger.debug("Get data for sediment load at index: " + index);
@@ -32,25 +33,12 @@
 
         SedimentLoadAccess access = new SedimentLoadAccess(flys);
         String river = access.getRiver();
+        String unit  = access.getUnit();
         SedimentLoad[] unknown =
-            SedimentLoadFactory.getSedimentLoadUnknown(river);
+            SedimentLoadFactory.getSedimentLoadUnknown(
+                river, unit.replace("_per_", "/"));
 
         SedimentLoad load = SedimentLoadFactory.getLoadUnknown(
             river, unknown[index].getDescription());
-        if (access.getUnit().equals("t/a") && load.getUnit().equals("m3/a")) {
-            for (Double km: load.getKms()) {
-                SedimentLoadFraction fraction = load.getFraction(km);
-                fraction.setUnknown(fraction.getUnknown() / 1.8);
-                load.addKm(km, fraction);
-            }
-        }
-        else if (access.getUnit().equals("m3/a") && load.getUnit().equals("t/a")) {
-            for (Double km: load.getKms()) {
-                SedimentLoadFraction fraction = load.getFraction(km);
-                fraction.setUnknown(fraction.getUnknown() * 1.8);
-                load.addKm(km, fraction);
-            }
-        }
         return load;
     }
 
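The unit handed to the factory has to match the name stored in the units table ("t/a" or "m3/a", as the removed conversion branches above show). A minimal sketch of the mapping, assuming the access may also return Datacage-style ids with "_per_" as separator:

    String unit   = access.getUnit();            // e.g. "t_per_a", or already "t/a"
    String dbUnit = unit.replace("_per_", "/");  // -> "t/a"; no-op if already "t/a"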
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java	Wed Jul 31 11:31:23 2013 +0200
@@ -22,6 +22,7 @@
 import org.dive4elements.river.artifacts.model.minfo.SedimentLoadFactory;
 
 
+/** Service delivering info about sediment loads. */
 public class SedimentLoadInfoService
 extends D4EService
 {
@@ -33,6 +34,10 @@
     public static final String FROM_XPATH = "/art:river/art:location/art:from/text()";
     public static final String TO_XPATH = "/art:river/art:location/art:to/text()";
 
+    /**
+     * Create document with sedimentload infos,
+     * constrained by contents in data.
+     */
     @Override
     protected Document doProcess(
         Document data,
@@ -54,17 +59,18 @@
             data,
             TO_XPATH,
             ArtifactNamespaceContext.INSTANCE);
-        double f, t;
+        double fromD, toD;
         try {
-            f = Double.parseDouble(from);
-            t = Double.parseDouble(to);
+            fromD = Double.parseDouble(from);
+            toD = Double.parseDouble(to);
         }
         catch (NumberFormatException nfe) {
             logger.warn("Invalid locations. Cannot return sediment loads.");
             return XMLUtils.newDocument();
         }
 
-        SedimentLoad[] loads = SedimentLoadFactory.getLoads(river, type, f, t);
+        SedimentLoad[] loads =
+            SedimentLoadFactory.getLoads(river, type, fromD, toD);
         return buildDocument(loads);
     }
 
@@ -98,3 +104,4 @@
         return result;
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java	Wed Jul 31 11:31:23 2013 +0200
@@ -8,55 +8,35 @@
 
 package org.dive4elements.river.artifacts.states;
 
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.Polygon;
+
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
 import org.apache.velocity.Template;
-
-import org.geotools.feature.FeatureCollection;
-import org.geotools.feature.FeatureCollections;
-
-import org.geotools.feature.simple.SimpleFeatureBuilder;
-
-import org.hibernate.HibernateException;
-
-import org.opengis.feature.simple.SimpleFeature;
-import org.opengis.feature.simple.SimpleFeatureType;
-
-import com.vividsolutions.jts.geom.Coordinate;
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.geom.LineString;
-import com.vividsolutions.jts.geom.Polygon;
-
 import org.dive4elements.artifactdatabase.state.Facet;
-
 import org.dive4elements.artifacts.Artifact;
 import org.dive4elements.artifacts.CallContext;
 import org.dive4elements.artifacts.CallMeta;
 import org.dive4elements.artifacts.GlobalContext;
-
 import org.dive4elements.artifacts.common.utils.FileTools;
-
 import org.dive4elements.river.artifacts.D4EArtifact;
-
 import org.dive4elements.river.artifacts.access.RangeAccess;
-
 import org.dive4elements.river.artifacts.context.RiverContext;
-
 import org.dive4elements.river.artifacts.model.CalculationMessage;
 import org.dive4elements.river.artifacts.model.CalculationResult;
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.LayerInfo;
 import org.dive4elements.river.artifacts.model.WQKms;
-
 import org.dive4elements.river.artifacts.model.map.HWS;
 import org.dive4elements.river.artifacts.model.map.HWSContainer;
 import org.dive4elements.river.artifacts.model.map.HWSFactory;
@@ -64,24 +44,25 @@
 import org.dive4elements.river.artifacts.model.map.WSPLGENCalculation;
 import org.dive4elements.river.artifacts.model.map.WSPLGENJob;
 import org.dive4elements.river.artifacts.model.map.WSPLGENReportFacet;
-
 import org.dive4elements.river.artifacts.resources.Resources;
-
 import org.dive4elements.river.exports.WstWriter;
-
 import org.dive4elements.river.model.CrossSectionTrack;
 import org.dive4elements.river.model.DGM;
 import org.dive4elements.river.model.Floodplain;
 import org.dive4elements.river.model.RiverAxis;
-
 import org.dive4elements.river.utils.ArtifactMapfileGenerator;
-import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.utils.GeometryUtils;
 import org.dive4elements.river.utils.MapfileGenerator;
-
+import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.wsplgen.FacetCreator;
 import org.dive4elements.river.wsplgen.JobObserver;
 import org.dive4elements.river.wsplgen.Scheduler;
+import org.geotools.feature.FeatureCollection;
+import org.geotools.feature.FeatureCollections;
+import org.geotools.feature.simple.SimpleFeatureBuilder;
+import org.hibernate.HibernateException;
+import org.opengis.feature.simple.SimpleFeature;
+import org.opengis.feature.simple.SimpleFeatureType;
 
 public class FloodMapState
 extends      DefaultState
@@ -109,9 +90,9 @@
     public static final String WSPLGEN_FLOODPLAIN     = "talaue.shp";
     public static final String WSPLGEN_WSP_FILE       = "waterlevel.wst";
     public static final String WSPLGEN_OUTPUT_FILE    = "wsplgen.shp";
-    public static final String WSPLGEN_USER_SHAPE     = "user-rgd.shp";
-    public static final String WSPLGEN_USER_ZIP       = "user-rgd.zip";
-    public static final String WSPLGEN_USER_FILENAME  = "user-rgd";
+    public static final String WSPLGEN_USER_RGD_SHAPE = "user-rgd.shp";
+    public static final String WSPLGEN_USER_RGD_ZIP   = "user-rgd.zip";
+    public static final String WSPLGEN_USER_RGD       = "user-rgd";
 
     public static final String WSPLGEN_QPS_NAME = "qps";
 
@@ -428,7 +409,7 @@
             createMapfile(
                 artifact,
                 artifactDir,
-                MapfileGenerator.MS_LAYER_PREFIX + "hws-lines",
+                MapfileGenerator.MS_LAYER_PREFIX + HWS_LINES,
                 HWS_LINES_SHAPE,
                 "LINE",
                 "31467",
@@ -441,7 +422,7 @@
     }
 
 
-    private void createMapfile(
+    public static void createMapfile(
         D4EArtifact artifact,
         File artifactDir,
         String name,
@@ -650,13 +631,13 @@
         File         dir,
         WSPLGENJob   job
     ) {
-        File archive = new File(dir, WSPLGEN_USER_SHAPE);
+        File archive = new File(dir, WSPLGEN_USER_RGD_SHAPE);
         boolean exists = archive.exists();
         logger.debug("shp file exists: " + exists);
         if (exists) {
-            job.addLin(dir + "/" + WSPLGEN_USER_SHAPE);
+            job.addLin(dir + "/" + WSPLGEN_USER_RGD_SHAPE);
             facetCreator.createShapeFacet(FacetCreator.I18N_USERSHAPE,
-                MapfileGenerator.MS_LAYER_PREFIX + "user-rgd",
+                MapfileGenerator.MS_LAYER_PREFIX + WSPLGEN_USER_RGD,
                 FLOODMAP_USERSHAPE,
                 4);
         }
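With createMapfile() now public and static, other states can reuse it instead of keeping a private copy; a call from outside the class looks like the hws-lines one above (argument values as used in this file):

    FloodMapState.createMapfile(
        artifact,
        artifactDir,
        MapfileGenerator.MS_LAYER_PREFIX + HWS_LINES,
        HWS_LINES_SHAPE,
        "LINE",
        "31467",
        "hws");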
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java	Wed Jul 31 11:31:23 2013 +0200
@@ -57,6 +57,12 @@
     protected String createFacetName(GaugeDischargeArtifact artifact,
         CallMeta meta) {
 
+        if (artifact.getFacetWishName() != null) {
+            /* We let the Artifact overwrite our name as this allows
+             * injecting the facet name from the Datacage */
+            return artifact.getFacetWishName();
+        }
+
         Gauge gauge = artifact.getGauge();
         Object[] args = new Object[] {
             gauge.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java	Wed Jul 31 11:31:23 2013 +0200
@@ -9,51 +9,29 @@
 package org.dive4elements.river.artifacts.states;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.apache.velocity.Template;
-
-import org.geotools.data.shapefile.ShapefileDataStore;
-
-import org.geotools.feature.FeatureCollection;
-import org.geotools.feature.FeatureCollections;
-
-import org.opengis.feature.simple.SimpleFeatureType;
-
-import org.opengis.feature.type.GeometryDescriptor;
-
-import org.w3c.dom.Element;
-
 import org.dive4elements.artifactdatabase.state.Facet;
-
 import org.dive4elements.artifacts.Artifact;
 import org.dive4elements.artifacts.CallContext;
-
 import org.dive4elements.artifacts.common.utils.FileTools;
-
 import org.dive4elements.artifacts.common.utils.XMLUtils.ElementCreator;
-
 import org.dive4elements.river.artifacts.D4EArtifact;
-
 import org.dive4elements.river.artifacts.access.MapAccess;
-
-import org.dive4elements.river.artifacts.model.LayerInfo;
-
 import org.dive4elements.river.artifacts.model.map.HWS;
 import org.dive4elements.river.artifacts.model.map.HWSContainer;
 import org.dive4elements.river.artifacts.model.map.HWSFactory;
-
-import org.dive4elements.river.utils.ArtifactMapfileGenerator;
-import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.utils.GeometryUtils;
 import org.dive4elements.river.utils.MapfileGenerator;
+import org.dive4elements.river.utils.RiverUtils;
+import org.geotools.data.shapefile.ShapefileDataStore;
+import org.geotools.feature.FeatureCollection;
+import org.geotools.feature.FeatureCollections;
+import org.opengis.feature.simple.SimpleFeatureType;
+import org.opengis.feature.type.GeometryDescriptor;
+import org.w3c.dom.Element;
 
 public class HWSBarriersState
 extends DefaultState
@@ -63,9 +41,8 @@
     private static Logger logger = Logger.getLogger(HWSBarriersState.class);
     private static final String HWS_SHAPEFILE_LINES = "hws-lines.shp";
     private static final String HWS_SHAPEFILE_POINTS = "hws-points.shp";
-    private static final String USER_RGD_SHAPE     = "user-rgd.shp";
-    private static final String USER_RGD_ZIP       = "user-rgd.zip";
-    private static final String USER_RGD_FILENAME  = "user-rgd";
+
+
     @Override
     protected String getUIProvider() {
         return "map_digitize";
@@ -146,7 +123,7 @@
         }
 
         if (successLines) {
-            createMapfile(
+            FloodMapState.createMapfile(
                 artifact,
                 artifactDir,
                 MapfileGenerator.MS_LAYER_PREFIX + "hws-lines",
@@ -156,7 +133,7 @@
                 "hws");
         }
         if (successPoints) {
-            createMapfile(
+            FloodMapState.createMapfile(
                 artifact,
                 artifactDir,
                 MapfileGenerator.MS_LAYER_PREFIX + "hws-points",
@@ -172,7 +149,7 @@
                 try {
                     ShapefileDataStore store = new ShapefileDataStore(
                     new File(artifactDir.getCanonicalPath() +
-                        "/" + USER_RGD_SHAPE)
+                        "/" + FloodMapState.WSPLGEN_USER_RGD_SHAPE)
                             .toURI().toURL());
                     GeometryDescriptor desc =
                         store.getSchema().getGeometryDescriptor();
@@ -201,14 +178,14 @@
                     else {
                         type = "POINT";
                     }
-                    createMapfile(
+                    FloodMapState.createMapfile(
                         artifact,
                         artifactDir,
-                        MapfileGenerator.MS_LAYER_PREFIX + USER_RGD_FILENAME,
-                        USER_RGD_SHAPE,
+                        MapfileGenerator.MS_LAYER_PREFIX + FloodMapState.WSPLGEN_USER_RGD,
+                        FloodMapState.WSPLGEN_USER_RGD_SHAPE,
                         type,
                         epsg,
-                        "user-rgd");
+                        FloodMapState.WSPLGEN_USER_RGD);
                 }
                 catch (IOException e) {
                     logger.warn("No mapfile for user-rgd created!");
@@ -219,7 +196,7 @@
     }
 
     private boolean extractUserShp(File dir) {
-        File archive = new File(dir, USER_RGD_ZIP);
+        File archive = new File(dir, FloodMapState.WSPLGEN_USER_RGD_ZIP);
         boolean exists = archive.exists();
         logger.debug("Zip file exists: " + exists);
         if (exists) {
@@ -231,7 +208,7 @@
             }
             catch (IOException ioe) {
                 logger.warn("Zip archive " + dir + "/"
-                    + USER_RGD_ZIP + " could not be extracted.");
+                    + FloodMapState.WSPLGEN_USER_RGD_ZIP + " could not be extracted.");
                 return false;
             }
         }
@@ -261,7 +238,7 @@
                         return true;
                     }
                     try {
-                        FileTools.copyFile(file, new File(target, USER_RGD_FILENAME + "." + suffix));
+                        FileTools.copyFile(file, new File(target, FloodMapState.WSPLGEN_USER_RGD + "." + suffix));
                     }
                     catch (IOException ioe) {
                         logger.warn ("Error while copying file " + file.getName());
@@ -275,41 +252,6 @@
         FileTools.deleteRecursive(source);
     }
 
-    private void createMapfile(
-        D4EArtifact artifact,
-        File artifactDir,
-        String name,
-        String hwsShapefile,
-        String type,
-        String srid,
-        String group
-    ) {
-        LayerInfo info = new LayerInfo();
-        info.setName(name + artifact.identifier());
-        info.setType(type);
-        info.setDirectory(artifact.identifier());
-        info.setTitle(name);
-        info.setData(hwsShapefile);
-        info.setSrid(srid);
-        info.setGroupTitle(group);
-        MapfileGenerator generator = new ArtifactMapfileGenerator();
-        Template tpl = generator.getTemplateByName(MapfileGenerator.SHP_LAYER_TEMPLATE);
-        try {
-            File layer = new File(artifactDir.getCanonicalPath() + "/" + name);
-            generator.writeLayer(info, layer, tpl);
-            List<String> layers = new ArrayList<String>();
-            layers.add(layer.getAbsolutePath());
-            generator.generate();
-        }
-        catch(FileNotFoundException fnfe) {
-            logger.warn("Could not find mapfile for hws layer");
-        }
-        catch (Exception ioe) {
-            logger.warn("Could not create mapfile for hws layer");
-            logger.warn(Arrays.toString(ioe.getStackTrace()));
-        }
-    }
-
 
     @Override
     public void endOfLife(Artifact artifact, Object callContext) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java	Wed Jul 31 11:31:23 2013 +0200
@@ -69,7 +69,7 @@
     @Override
     public Object computeAdvance(D4EArtifact artifact, String hash,
         CallContext context, List<Facet> facets, Object old) {
-        logger.debug("BedQualityState.computeAdvance");
+        logger.debug("DifferencesState.computeAdvance");
 
         List<Facet> newFacets = new ArrayList<Facet>();
 
@@ -100,7 +100,7 @@
 
     protected void generateFacets(CallContext context, List<Facet> newFacets,
         BedDifferencesResult[] results, String stateId, String hash) {
-        logger.debug("BedQualityState.generateFacets");
+        logger.debug("DifferencesState.generateFacets");
 
         CallMeta meta = context.getMeta();
 
@@ -413,3 +413,4 @@
             });
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java	Wed Jul 31 11:31:23 2013 +0200
@@ -21,6 +21,7 @@
 import org.dive4elements.river.artifacts.D4EArtifact;
 import org.dive4elements.river.artifacts.access.SedimentLoadAccess;
 import org.dive4elements.river.artifacts.model.CalculationResult;
+import org.dive4elements.river.artifacts.model.DataFacet;
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.ReportFacet;
 import org.dive4elements.river.artifacts.model.minfo.SedimentLoad;
@@ -33,7 +34,7 @@
 import org.dive4elements.river.artifacts.states.DefaultState;
 import org.dive4elements.river.utils.DateGuesser;
 
-
+/** State in which Sediment Load(s) are calculated/retrieved. */
 public class SedimentLoadCalculate
 extends DefaultState
 implements FacetTypes
@@ -73,7 +74,7 @@
                         name.equals(SEDIMENT_LOAD_SUSP_SAND_BED)){
                         return Boolean.FALSE;
                     }
-                    else if (name.equals(SEDIMENT_LOAD_UNKOWN)) {
+                    else if (name.equals(SEDIMENT_LOAD_UNKNOWN)) {
                         D4EArtifact d4e = (D4EArtifact)artifact;
                         SedimentLoadUnknownFacet f =
                             (SedimentLoadUnknownFacet)
@@ -141,7 +142,7 @@
 
         String river = access.getRiver();
         SedimentLoad[] unknown =
-            SedimentLoadFactory.getSedimentLoadUnknown(river);
+            SedimentLoadFactory.getSedimentLoadUnknown(
+                river, access.getUnit().replace("_per_", "/"));
 
         String type = access.getYearEpoch();
         if (type.equals("year")) {
@@ -161,12 +162,16 @@
         for (int i = 0; i < unknown.length; i++) {
             newFacets.add(new SedimentLoadUnknownFacet(
                 i,
-                SEDIMENT_LOAD_UNKOWN,
+                SEDIMENT_LOAD_UNKNOWN,
                 unknown[i].getDescription(),
                 ComputeType.ADVANCE,
                 getID(),
                 hash));
         }
+
+        newFacets.add(
+            new DataFacet(CSV, "CSV data", ComputeType.ADVANCE, hash, id));
+
         facets.addAll(newFacets);
 
         return res;
@@ -520,3 +525,4 @@
         }
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java	Wed Jul 31 11:31:23 2013 +0200
@@ -12,7 +12,7 @@
 
 import org.dive4elements.river.artifacts.states.DefaultState;
 
-
+/** State in which official epoch is selected to calculate sediment load. */
 public class SedimentLoadOffEpochSelect
 extends DefaultState
 {
--- a/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -46,7 +46,7 @@
     protected List<Facet>         newFacets;
 
     /**
-     * "Compatibility matrix", mapws list of facet names to output names.
+     * "Compatibility matrix", maps list of facet names to output names.
      * Any facet that is not found in the list for a specific output will
      * not be added to the resulting document.
      */
--- a/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java	Wed Jul 31 11:31:23 2013 +0200
@@ -134,11 +134,10 @@
             doPoints(artifactFacet.getData(context), artifactFacet, theme,
                 visible, YAXIS.W.idx);
         }
-        else if (HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name)) {
-            doAnnotations((RiverAnnotation)
-                artifactFacet.getData(context), artifactFacet, theme, visible);
-        }
-        else if (HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) {
+        else if (name.equals(MAINVALUES_W) ||
+                 name.equals(MAINVALUES_Q) ||
+                 HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name) ||
+                 HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) {
             doAnnotations((RiverAnnotation)
                 artifactFacet.getData(context), artifactFacet, theme, visible);
         }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java	Wed Jul 31 11:31:23 2013 +0200
@@ -16,7 +16,13 @@
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.WKms;
 import org.dive4elements.river.artifacts.model.WQKms;
+
+import org.dive4elements.river.exports.process.Processor;
+import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
+import org.dive4elements.river.exports.process.BedDiffYearProcessor;
+import org.dive4elements.river.exports.process.BedheightProcessor;
 import org.dive4elements.river.exports.process.WOutProcessor;
+
 import org.dive4elements.river.jfree.RiverAnnotation;
 import org.dive4elements.river.jfree.StyledAreaSeriesCollection;
 import org.dive4elements.river.jfree.StyledXYSeries;
@@ -333,9 +339,22 @@
             return;
         }
 
-        WOutProcessor processor = new WOutProcessor();
-        if (processor.canHandle(name)) {
-            processor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        WOutProcessor wProcessor = new WOutProcessor();
+        Processor bedp = new BedheightProcessor();
+        Processor bdyProcessor = new BedDiffYearProcessor();
+        Processor bdhyProcessor = new BedDiffHeightYearProcessor();
+
+        if (wProcessor.canHandle(name)) {
+            wProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bedp.canHandle(name)) {
+            bedp.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdyProcessor.canHandle(name)) {
+            bdyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdhyProcessor.canHandle(name)) {
+            bdhyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
         }
         else if (name.equals(LONGITUDINAL_Q)) {
             doQOut(
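As more processors join this chain, the if/else cascade could also be driven from an array; a sketch using the same Processor interface (canHandle()/doOut()) as above:

    Processor[] processors = {
        new WOutProcessor(),
        new BedheightProcessor(),
        new BedDiffYearProcessor(),
        new BedDiffHeightYearProcessor()
    };
    boolean handled = false;
    for (Processor p: processors) {
        if (p.canHandle(name)) {
            p.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
            handled = true;
            break;
        }
    }
    // if (!handled), continue with the name.equals(...) branches below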
--- a/artifacts/src/main/java/org/dive4elements/river/exports/WaterlevelExporter.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/WaterlevelExporter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -637,7 +637,9 @@
                 colDesc = RiverUtils.getNamedMainValue(winfo, wqkms.getRawValue());
                 // For 'W am Pegel' s
                 if (colDesc == null) {
-                    colDesc = ((D4EArtifact)master).getDataAsString("wq_single");
+                    Double value = RiverUtils.getValueFromWQ(wqkms);
+                    colDesc = (value != null) ?
+                        Formatter.getWaterlevelW(context).format(value) : null;
                 }
             }
         }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -115,7 +115,7 @@
 
         double wMin = minW(invInst, wMax, qMax[0]);
 
-        double wMinCM = wMin * 100d;
+        double wMinCM = wMin * 100d - subtractPNP;
         double wMaxCM = wMax * 100d;
 
         int wRow = ((int)wMinCM / 10) * 10;
@@ -124,7 +124,7 @@
             wMinCM = (int)wMinCM + 1d;
         }
 
-        double w = wMinCM / 100.0;
+        double w = (wMinCM + subtractPNP) / 100.0;
 
         int wcm = ((int)wMinCM) % 10;
 
@@ -132,9 +132,10 @@
             log.debug("wMinCM: " + wMinCM);
             log.debug("wMaxCM: " + wMaxCM);
             log.debug("wcm: " + wcm);
+            log.debug("subtractPNP: " + subtractPNP);
         }
 
-        out.printf(Locale.US, "%8d", wRow - subtractPNP);
+        out.printf(Locale.US, "%8d", wRow);
 
         for (int i = 0; i < wcm; i++) {
             out.print(ATWriter.EMPTY);
@@ -158,7 +159,7 @@
             if (w > wMax) {
                 break;
             }
-            out.printf(Locale.US, "%8d", (wRow += 10) - subtractPNP);
+            out.printf(Locale.US, "%8d", (wRow += 10));
             wcm = 0;
         }
 
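The PNP offset is now applied before the 10 cm row grid is computed, so rows are aligned in the gauge datum while interpolation keeps using absolute values. A worked example, assuming a gauge with PNP at 25.00 m (subtractPNP = 2500) and wMin = 26.43 m:

    wMinCM = 26.43 * 100 - 2500    // = 143 (cm above PNP)
    wRow   = (143 / 10) * 10       // = 140, first printed row
    w      = (143 + 2500) / 100.0  // = 26.43 m, absolute again
    wcm    = 143 % 10              // = 3 leading empty cells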
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixWQCurveGenerator.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixWQCurveGenerator.java	Wed Jul 31 11:31:23 2013 +0200
@@ -174,24 +174,6 @@
         }
     }
 
-    /** Translate River annotations if not at gauge. */
-    public void translateRiverAnnotation(RiverAnnotation riverAnnotation) {
-        if (getCurrentGaugeDatum() == 0d) {
-            return;
-        }
-        logger.debug("Translate some river annotation.");
-        double translate = getCurrentGaugeDatum();
-        double factor    = 100d;
-        for (StickyAxisAnnotation annotation: riverAnnotation.getAxisTextAnnotations()){
-            if (!annotation.atX()) {
-                annotation.setPos((annotation.getPos() - translate)*factor);
-            }
-        }
-        for (XYTextAnnotation annotation: riverAnnotation.getTextAnnotations()) {
-            annotation.setY((annotation.getY() - translate)*factor);
-        }
-    }
-
     @Override
     public void doOut(ArtifactAndFacet aaf, Document doc, boolean visible) {
         logger.debug("doOut: " + aaf.getFacetName());
@@ -256,7 +238,6 @@
         }
         else if (name.equals(MAINVALUES_W) || name.equals(MAINVALUES_Q)) {
             RiverAnnotation mainValues = (RiverAnnotation) aaf.getData(context);
-            translateRiverAnnotation(mainValues);
             doAnnotations(
                 mainValues,
                 aaf,
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceExporter.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceExporter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -1,3 +1,11 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
 package org.dive4elements.river.exports.minfo;
 
 import java.io.IOException;
@@ -75,8 +83,7 @@
 
         NumberFormat kmf = Formatter.getCalculationKm(context.getMeta());
         NumberFormat mf = Formatter.getMeterFormat(context);
-        for (int i = 0; i < results.length; i++) {
-            BedDifferencesResult result = results[i];
+        for (BedDifferencesResult result : results) {
             double[][] kms = result.getDifferencesData();
             for (int j = 0; j < kms[0].length; j++) {
                 writer.writeNext(new String[] {
@@ -184,3 +191,4 @@
     }
 
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadExporter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -0,0 +1,165 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.exports.minfo;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeSet;
+
+import java.io.OutputStream;
+import java.io.IOException;
+
+import java.text.NumberFormat;
+
+import org.w3c.dom.Document;
+
+import org.apache.log4j.Logger;
+
+import org.dive4elements.artifacts.CallContext;
+
+import org.dive4elements.river.artifacts.model.CalculationResult;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoad;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoadFraction;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoadResult;
+
+import org.dive4elements.river.exports.AbstractExporter;
+
+import org.dive4elements.river.utils.Formatter;
+
+import au.com.bytecode.opencsv.CSVWriter;
+
+
+/** Do CSV export for sediment load calculations (will also be shown in
+ * client). */
+public class SedimentLoadExporter
+extends      AbstractExporter
+{
+    /** Private logger. */
+    private static Logger logger = Logger.getLogger(SedimentLoadExporter.class);
+
+    // i18n keys.
+    public static final String CSV_KM =
+        "export.sedimentload_ls.csv.header.km";
+
+    public static final String CSV_YEAR =
+        "export.sedimentload_ls.csv.header.year";
+
+    public static final String CSV_COARSE =
+        "export.sedimentload_ls.csv.header.coarse";
+
+    public static final String CSV_FINEMIDDLE =
+        "export.sedimentload_ls.csv.header.finemiddle";
+
+    public static final String CSV_SAND =
+        "export.sedimentload_ls.csv.header.sand";
+
+    public static final String CSV_SUSP_SAND =
+        "export.sedimentload_ls.csv.header.suspsand";
+
+    public static final String CSV_SUSP_SEDIMENT =
+        "export.sedimentload_ls.csv.header.suspsediment";
+
+    public static final String CSV_TOTAL =
+        "export.sedimentload_ls.csv.header.total";
+
+
+    /** Collected results. */
+    private SedimentLoadResult[] results;
+
+    /** Empty constructor. */
+    public SedimentLoadExporter() {
+    }
+
+    /** Trivial init. */
+    @Override
+    public void init(Document request, OutputStream out, CallContext context) {
+        super.init(request, out, context);
+        logger.debug("init");
+        results = new SedimentLoadResult[0];
+    }
+
+
+    /** Process all stored data and write csv. */
+    @Override
+    protected void writeCSVData(CSVWriter writer) throws IOException {
+        writeCSVHeader(writer);
+
+        for (SedimentLoadResult result: results) {
+            SedimentLoad load = result.getLoad();
+            // TODO: Include load.getName() and load.getDescription() somewhere?
+            for (double km: new TreeSet<Double>(load.getKms())) {
+                SedimentLoadFraction fraction = load.getFraction(km);
+                writeRecord(writer, km, result.getStartYear(), result.getEndYear(), fraction);
+            }
+        }
+    }
+
+    /** Write a line. */
+    private void writeRecord(CSVWriter writer, double km, int fromYear, int toYear, SedimentLoadFraction fraction) {
+        // km, years, total, sand, fine/middle, coarse, susp. sand, susp. sediment
+        String years = (toYear == 0) ? Integer.toString(fromYear) : fromYear + "-" + toYear;
+        NumberFormat kmf = Formatter.getCalculationKm(context.getMeta());
+        NumberFormat valf = Formatter.getFormatter(context.getMeta(), 0, 2);
+        writer.writeNext(new String[] {
+            kmf.format(km),
+            years,
+            valf.format(fraction.getTotal()),
+            valf.format(fraction.getSand()),
+            valf.format(fraction.getFineMiddle()),
+            valf.format(fraction.getCoarse()),
+            valf.format(fraction.getSuspSand()),
+            //valf.format(fraction.getSuspSandBed()),
+            valf.format(fraction.getSuspSediment())
+        });
+    }
+
+    /** Writes i18ned header for csv file/stream. */
+    protected void writeCSVHeader(CSVWriter writer) {
+        logger.debug("writeCSVHeader()");
+
+        List<String> header = new LinkedList<String>();
+        if (results != null) {
+            header.add(msg(CSV_KM, "km"));
+            header.add(msg(CSV_YEAR, "Jahr"));
+            header.add(msg(CSV_TOTAL, "Gesamt"));
+            header.add(msg(CSV_SAND, "Sand"));
+            header.add(msg(CSV_FINEMIDDLE, "Fein"));
+            header.add(msg(CSV_COARSE, "Grob"));
+            header.add(msg(CSV_SUSP_SAND, "Su.Sand"));
+            header.add(msg(CSV_SUSP_SEDIMENT, "Schwebst."));
+            //header.add("Susp.Sand Bett");
+        }
+        writer.writeNext(header.toArray(new String[header.size()]));
+    }
+
+    /** Store data internally, accepting only SedimentLoadResults[] in
+     * calculationresults data. */
+    @Override
+    protected void addData(Object data) {
+        if (!(data instanceof CalculationResult)) {
+            logger.warn("Invalid data type.");
+            return;
+        }
+        Object d = ((CalculationResult)data).getData();
+
+        if (!(d instanceof SedimentLoadResult[])) {
+            logger.warn("Invalid result object.");
+            return;
+        }
+        logger.debug("addData: Data added.");
+        results = (SedimentLoadResult[])d;
+    }
+
+    /** Write PDF to outputstream (not implemented yet). */
+    @Override
+    protected void writePDF(OutputStream out) {
+        logger.warn("Not implemented.");
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java	Wed Jul 31 11:31:23 2013 +0200
@@ -36,10 +36,12 @@
 import org.dive4elements.river.utils.DataUtil;
 
 
+/** Generator for Longitudinal Sections of SedimentLoad-Calculations. */
 public class SedimentLoadLSGenerator
 extends XYChartGenerator
 implements FacetTypes
 {
+    /** Y-Axis enum defining the four possible axes. */
     public enum YAXIS {
         L(0),
         D(1),
@@ -73,6 +75,7 @@
     public static final String I18N_YAXIS_D_LABEL_DEFAULT = "delta S [m]";
     public static final String I18N_YAXIS_V_LABEL_DEFAULT = "Geschwindigkeit v [m/s]";
 
+    /** Enumerator over y-axes. */
     @Override
     protected YAxisWalker getYAxisWalker() {
         return new YAxisWalker() {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/process/WOutProcessor.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/process/WOutProcessor.java	Wed Jul 31 11:31:23 2013 +0200
@@ -69,7 +69,13 @@
             generator.addAreaSeries(area, index, visible);
         }
 
-        invertAxis(generator, wkms);
+        if (aaf.getFacetName().equals(FacetTypes.LONGITUDINAL_W) ||
+            aaf.getFacetName().equals(FacetTypes.DISCHARGE_LONGITUDINAL_W) ||
+            aaf.getFacetName().equals(FacetTypes.STATIC_WQKMS_W)) {
+            /* Only use W values to check if the diagram should be inverted
+             * see flys/issue1290 for details */
+            invertAxis(generator, wkms);
+        }
     }
 
     /**
@@ -104,9 +110,8 @@
     public void invertAxis(XYChartGenerator generator, WKms wkms) {
         boolean wsUp = wkms.guessWaterIncreasing();
         boolean kmUp = DataUtil.guessWaterIncreasing(wkms.allKms());
-        boolean inv = (wsUp && kmUp) || (!wsUp && !kmUp);
-
         int size = wkms.size();
+        boolean inv = ((wsUp && kmUp) || (!wsUp && !kmUp)) && size > 1;
 
         if (logger.isDebugEnabled()) {
             logger.debug("(Wkms)Values  : " + size);
@@ -116,6 +121,9 @@
             }
             logger.debug("wsUp: " + wsUp);
             logger.debug("kmUp: " + kmUp);
+            if (size == 1) {
+                logger.debug("InvertAxis not inverting because we have just one km");
+            }
             logger.debug("inv:  " + inv);
         }
         generator.setInverted(inv);
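Since (wsUp && kmUp) || (!wsUp && !kmUp) is plain equality of the two flags, the condition reads: invert when water level and kilometrage increase in the same direction, and only if there is more than one station. An equivalent formulation:

    boolean inv = (wsUp == kmUp) && size > 1;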
--- a/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java	Wed Jul 31 11:31:23 2013 +0200
@@ -135,7 +135,7 @@
     }
 
     /**
-     * Returns a formatter in engineering notation
+     * Returns a formatter in engineering notation.
      */
     public static NumberFormat getEngFormatter(CallContext c) {
         NumberFormat nf = getRawFormatter(c);
@@ -147,7 +147,7 @@
     }
 
     /**
-     * Returns a number formatter that uses an exponent after max digits
+     * Returns a number formatter that uses an exponent after max digits.
      */
     public static NumberFormat getScientificFormater(CallContext c, int min, int max) {
         NumberFormat nf = getRawFormatter(c);
--- a/artifacts/src/main/resources/datacage-sql/org-h2-driver.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/resources/datacage-sql/org-h2-driver.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -17,6 +17,7 @@
 insert.out = INSERT INTO outs (id, artifact_id, name, description, out_type) VALUES (?, ?, ?, ?, ?)
 facet.id.nextval = SELECT NEXTVAL('FACETS_ID_SEQ')
 insert.facet = INSERT INTO facets (id, out_id, name, num, state, description) VALUES (?, ?, ?, ?, ?, ?)
+update.artifact.state = UPDATE artifacts SET state = ? WHERE gid = ?
 
 update.collection.name = UPDATE collections SET name = ? WHERE gid = ?
 delete.artifact.from.collection = DELETE FROM collection_items WHERE collection_id = ? AND artifact_id = ?
--- a/artifacts/src/main/resources/messages.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/resources/messages.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Selection of discharge state and channel
 state.minfo.sq.location=Location
 state.minfo.sq.period=Periods
-state.minfo.sq.outliers=Outliers
+state.minfo.sq.outliers=Tolerance
 state.minfo.sq.outlier-method=Outliertest
 state.minfo.bed.year_epoch=Year/Epoch
 state.minfo.bed.difference_select=Differences
@@ -384,6 +384,14 @@
 export.bedheight_middle.csv.header.soundingwidth = Sounding Width [m]
 export.bedheight_middle.csv.header.width = morphological active width [m]
 export.bedheight_middle.csv.header.locations = Location
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = year
+export.sedimentload_ls.csv.header.coarse = coarse
+export.sedimentload_ls.csv.header.finemiddle = finemiddle
+export.sedimentload_ls.csv.header.sand = sand
+export.sedimentload_ls.csv.header.suspsand = susp. sand
+export.sedimentload_ls.csv.header.suspsediment = susp. sediment
+export.sedimentload_ls.csv.header.total = total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = River-Km
--- a/artifacts/src/main/resources/messages_de.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/resources/messages_de.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Abflusszustand und Gerinne
 state.minfo.sq.location=Ort
 state.minfo.sq.period=Zeitraum
-state.minfo.sq.outliers=Ausrei\u00dfer
+state.minfo.sq.outliers=Toleranz
 state.minfo.sq.outlier-method=Ausrei\u00dfertest
 state.minfo.bed.year_epoch=Jahr/Epoche
 state.minfo.bed.difference_select=Differenzen
@@ -384,6 +384,14 @@
 export.bedheight_middle.csv.header.soundingwidth = Peilbreite [m]
 export.bedheight_middle.csv.header.width = morphologisch aktive Breite [m]
 export.bedheight_middle.csv.header.locations = Streckendaten
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = Jahr
+export.sedimentload_ls.csv.header.coarse = Grob
+export.sedimentload_ls.csv.header.finemiddle = Fein
+export.sedimentload_ls.csv.header.sand = Sand
+export.sedimentload_ls.csv.header.suspsand = susp. Sand
+export.sedimentload_ls.csv.header.suspsediment = susp. Sediment
+export.sedimentload_ls.csv.header.total = Total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = Fluss-Km
--- a/artifacts/src/main/resources/messages_de_DE.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/resources/messages_de_DE.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Abflusszustand und Gerinne
 state.minfo.sq.location=Ort
 state.minfo.sq.period=Zeitraum
-state.minfo.sq.outliers=Ausrei\u00dfer
+state.minfo.sq.outliers=Toleranz
 state.minfo.sq.outlier-method=Ausrei\u00dfertest
 state.minfo.bed.year_epoch=Jahr/Epoche
 state.minfo.bed.difference_select=Differenzen
@@ -381,6 +381,14 @@
 export.bedheight_middle.csv.header.soundingwidth = Peilbreite [m]
 export.bedheight_middle.csv.header.width = morphologisch aktive Breite [m]
 export.bedheight_middle.csv.header.locations = Streckendaten
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = Jahr
+export.sedimentload_ls.csv.header.coarse = Grob
+export.sedimentload_ls.csv.header.finemiddle = Fein
+export.sedimentload_ls.csv.header.sand = Sand
+export.sedimentload_ls.csv.header.suspsand = susp. Sand
+export.sedimentload_ls.csv.header.suspsediment = susp. Sediment
+export.sedimentload_ls.csv.header.total = Total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = Fluss-Km
--- a/artifacts/src/main/resources/messages_en.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/artifacts/src/main/resources/messages_en.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Selection of discharge state and channel
 state.minfo.sq.location=Location
 state.minfo.sq.period=Periods
-state.minfo.sq.outliers=Outliers
+state.minfo.sq.outliers=Tolerance
 state.minfo.sq.outlier-method=Outliertest
 state.minfo.bed.year_epoch=Year/Epoch
 state.minfo.bed.difference_select=Differences
@@ -386,6 +386,14 @@
 export.bedheight_middle.csv.header.soundingwidth = Sounding Width [m]
 export.bedheight_middle.csv.header.width = morphological active width [m]
 export.bedheight_middle.csv.header.locations = Location
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = year
+export.sedimentload_ls.csv.header.coarse = coarse
+export.sedimentload_ls.csv.header.finemiddle = finemiddle
+export.sedimentload_ls.csv.header.sand = sand
+export.sedimentload_ls.csv.header.suspsand = susp. sand
+export.sedimentload_ls.csv.header.suspsediment = susp. sediment
+export.sedimentload_ls.csv.header.total = total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = River-Km
--- a/backend/doc/schema/oracle-minfo.sql	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/doc/schema/oracle-minfo.sql	Wed Jul 31 11:31:23 2013 +0200
@@ -260,6 +260,7 @@
     unit_id             NUMBER(38,0) NOT NULL,
     time_interval_id    NUMBER(38,0) NOT NULL,
     description         VARCHAR(256),
+    kind                NUMBER(38,0),
     PRIMARY KEY (id),
     CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
--- a/backend/doc/schema/postgresql-minfo.sql	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/doc/schema/postgresql-minfo.sql	Wed Jul 31 11:31:23 2013 +0200
@@ -260,6 +260,7 @@
     unit_id             int NOT NULL,
     time_interval_id    int NOT NULL,
     description         VARCHAR(256),
+    kind                int,
     PRIMARY KEY (id),
     CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
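The schema files only apply to fresh installations; existing databases need the column added by hand, for example (not part of this changeset):

    ALTER TABLE sediment_yield ADD kind NUMBER(38,0);  -- Oracle
    ALTER TABLE sediment_yield ADD COLUMN kind int;    -- PostgreSQL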
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Wed Jul 31 11:31:23 2013 +0200
@@ -109,6 +109,8 @@
 
     public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";
 
+    public static final String SEDIMENT_YIELD_OFF_EPOCH_DIR = "amtliche Epochen";
+
     public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";
 
     public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";
@@ -502,6 +504,22 @@
     }
 
 
+    private void parseSedimentYieldDir(
+        File[] files,
+        SedimentYieldParser parser
+    ) throws IOException {
+        for (File file: files) {
+            if (file.isDirectory()) {
+                for (File child: file.listFiles()) {
+                    parser.parse(child);
+                }
+            }
+            else {
+                parser.parse(file);
+            }
+        }
+    }
+
     protected void parseSedimentYield() throws IOException {
         if (Config.INSTANCE.skipSedimentYield()) {
             log.info("skip parsing sediment yield data");
@@ -513,11 +531,13 @@
         File minfoDir         = getMinfoDir();
         File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);
 
-        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
-        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+        File singleDir   = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
+        File epochDir    = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+        File offEpochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_OFF_EPOCH_DIR);
 
-        File[] singles = singleDir.listFiles();
-        File[] epochs  = epochDir.listFiles();
+        File[] singles   = singleDir.listFiles();
+        File[] epochs    = epochDir.listFiles();
+        File[] offEpochs = offEpochDir.listFiles();
 
         SedimentYieldParser parser = new SedimentYieldParser();
 
@@ -525,32 +545,21 @@
             log.warn("Cannot read directory '" + singleDir + "'");
         }
         else {
-            for (File file: singles) {
-                if (file.isDirectory()) {
-                    for (File child: file.listFiles()) {
-                        parser.parse(child);
-                    }
-                }
-                else {
-                    parser.parse(file);
-                }
-            }
+            parseSedimentYieldDir(singles, parser);
         }
 
         if (epochs == null || epochs.length == 0) {
             log.warn("Cannot read directory '" + epochDir + "'");
         }
         else {
-            for (File file: epochs) {
-                if (file.isDirectory()) {
-                    for (File child: file.listFiles()) {
-                        parser.parse(child);
-                    }
-                }
-                else {
-                    parser.parse(file);
-                }
-            }
+            parseSedimentYieldDir(epochs, parser);
+        }
+
+        if (offEpochs == null || offEpochs.length == 0) {
+            log.warn("Cannot read directory '" + offEpochDir + "'");
+        }
+        else {
+            parseSedimentYieldDir(offEpochs, parser);
         }
 
         sedimentYields = parser.getSedimentYields();
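Relative to the MINFO sediment yield directory, the importer now scans three subdirectories, each containing files directly or one level of subdirectories (see parseSedimentYieldDir() above); the single-value directory name is defined elsewhere in this class:

    <SEDIMENT_YIELD_DIR>/<SEDIMENT_YIELD_SINGLE_DIR>/
    <SEDIMENT_YIELD_DIR>/Epochen/
    <SEDIMENT_YIELD_DIR>/amtliche Epochen/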
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentYield.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentYield.java	Wed Jul 31 11:31:23 2013 +0200
@@ -35,6 +35,8 @@
 
     private String description;
 
+    private Integer kind;
+
     private List<ImportSedimentYieldValue> values;
 
     private SedimentYield peer;
@@ -56,6 +58,10 @@
         this.grainFraction = grainFraction;
     }
 
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
     public void addValue(ImportSedimentYieldValue value) {
         this.values.add(value);
     }
@@ -116,6 +122,7 @@
                 log.debug("create new SedimentYield");
 
                 peer = new SedimentYield(river, u, ti, gf, description);
+                peer.setKind(this.kind);
                 session.save(peer);
             }
             else {
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentYieldParser.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentYieldParser.java	Wed Jul 31 11:31:23 2013 +0200
@@ -29,6 +29,7 @@
 import org.dive4elements.river.model.GrainFraction;
 
 
+/** Parses Sediment Yield files. */
 public class SedimentYieldParser extends LineParser {
 
     private static final Logger log =
@@ -241,15 +242,29 @@
     }
 
 
+    /** Initialize SedimentYields from columns, setting the kind
+     * according to the file location (official epoch or not). */
     private void initializeSedimentYields() {
         // skip first column (Fluss-km) and last column (Hinweise)
         current = new ImportSedimentYield[columnNames.length-2];
 
+        Integer kind;
+
+        if (inputFile.getAbsolutePath().contains("amtliche Epochen")) {
+            log.debug("File is an official epoch.");
+            kind = Integer.valueOf(1);
+        }
+        else {
+            log.debug("File is not an official epoch.");
+            kind = Integer.valueOf(0);
+        }
+
         for (int i = 0, n = columnNames.length; i < n-2; i++) {
             current[i] = new ImportSedimentYield(this.description);
             current[i].setTimeInterval(getTimeInterval(columnNames[i+1]));
             current[i].setUnit(unit);
             current[i].setGrainFraction(grainFraction);
+            current[i].setKind(kind);
         }
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Wed Jul 31 11:31:23 2013 +0200
@@ -85,6 +85,7 @@
         this.wst = wst;
     }
 
+    /** Returns a new ImportTimeInterval with a date guessed from string. */
     public static ImportTimeInterval guessDate(String string) {
         try {
             Matcher m = YEAR_INTERVAL.matcher(string);
--- a/backend/src/main/java/org/dive4elements/river/model/DischargeTable.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/DischargeTable.java	Wed Jul 31 11:31:23 2013 +0200
@@ -24,6 +24,10 @@
 import javax.persistence.SequenceGenerator;
 import javax.persistence.Table;
 
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.dive4elements.river.backend.SessionHolder;
+
 @Entity
 @Table(name = "discharge_tables")
 public class DischargeTable
@@ -197,5 +201,16 @@
             return 0;
         }
     }
+
+    public static DischargeTable getDischargeTableById(int dtId)
+    {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from DischargeTable where id =:dtId");
+        query.setParameter("dtId", dtId);
+
+        List<DischargeTable> list = query.list();
+        return list.isEmpty() ? null : list.get(0);
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
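The list-then-first pattern returns null instead of throwing when the id does not exist. Because id is the primary key, Hibernate's uniqueResult() would be an equivalent, shorter alternative (sketch):

    return (DischargeTable) session.createQuery(
        "from DischargeTable where id = :dtId")
        .setParameter("dtId", dtId)
        .uniqueResult();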
--- a/backend/src/main/java/org/dive4elements/river/model/SedimentYield.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/SedimentYield.java	Wed Jul 31 11:31:23 2013 +0200
@@ -25,6 +25,7 @@
 import org.apache.log4j.Logger;
 
 
+/** SedimentYield of a certain Fraction with possibly many values. */
 @Entity
 @Table(name = "sediment_yield")
 public class SedimentYield
@@ -46,6 +47,8 @@
 
     private List<SedimentYieldValue> values;
 
+    private Integer kind;
+
 
     public SedimentYield() {
         this.values = new ArrayList<SedimentYieldValue>();
@@ -149,5 +152,15 @@
     public void setDescription(String description) {
         this.description = description;
     }
+
+    /** kind == 0: "normal", kind == 1: "official epoch". */
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer newKind) {
+        this.kind = newKind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java	Wed Jul 31 11:31:23 2013 +0200
@@ -1309,5 +1309,9 @@
     String FEDSTATE_KM();
 
     String official_regulation();
+
+    String historical_discharge_curves();
+
+    String current_gauge();
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -220,6 +220,7 @@
 computed_discharge_curves = Discharge Curves
 longitudinal_section = Longitudinal Section Curve
 duration_curve = Duration Curve
+historical_discharge_curves = Historical Discharge Curve
 discharge_longitudinal_section = Discharge Longitudinal Section
 floodmap = Floodmap
 historical_discharge = Time-Chart
@@ -277,6 +278,7 @@
 pdf = PDF
 computed_dischargecurve_at_export = Discharge Curve Export
 gauge_discharge_curve_at_export = Gauge Discharge Curve
+current_gauge = Current Gauge Discharge Curve
 gauge_class = Gauge Class
 eventselect = Eventselection
 events = Events
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -216,8 +216,10 @@
 discharge_curve = Abflusskurve am Pegel
 discharge_curve_gaugeless = Abflusskurve
 gauge_discharge_curve = Abflusstafel am Pegel
+current_gauge = aktuelle Abflusstafel am Pegel
 computed_discharge_curve = Abflusskurve
 computed_discharge_curves = Abflusskurven
+historical_discharge_curves = Historische Abflusskurven
 longitudinal_section = L\u00e4ngsschnitt
 duration_curve = Dauerlinie
 discharge_longitudinal_section = W f\u00fcr benutzerdefinierten Abflussl\u00e4ngsschnitt
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_en.properties	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_en.properties	Wed Jul 31 11:31:23 2013 +0200
@@ -213,6 +213,7 @@
 static_sqrelation = SQ relation
 discharge_curve = Discharge Curve at Gauge
 discharge_curve_gaugeless = Discharge Curve
+current_gauge = Current Gauge Discharge Curve
 gauge_discharge_curve = Discharge Table at Gauge
 computed_discharge_curve = Discharge Curve
 computed_discharge_curves = Discharge Curves
@@ -221,6 +222,7 @@
 discharge_longitudinal_section = Discharge Longitudinal Section
 floodmap = Floodmap
 historical_discharge = Time Chart
+historical_discharge_curves = Historical Discharge Curve
 historical_discharge_wq = W/Q Chart
 flow_velocity = Flow Velocity
 flow_velocity_export = Flow Velocity Export
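The property hunks above and the new methods in FLYSConstants.java follow
GWT's static i18n pattern: each interface method is bound at compile time
to the .properties key of the same name, and the _de/_en bundles are
selected by the client locale. A sketch of typical call-site usage (the
wrapping class is illustrative only):

    import com.google.gwt.core.client.GWT;

    public class ChartTitleExample {

        // GWT.create() returns a locale-specific implementation
        // generated from the .properties bundles at compile time.
        private static final FLYSConstants MSG =
            GWT.create(FLYSConstants.class);

        public static String historicalTitle() {
            // Resolves to "Historical Discharge Curve" in the English
            // bundle and "Historische Abflusskurven" in the German one.
            return MSG.historical_discharge_curves();
        }
    }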
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQAdaptedInputPanel.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQAdaptedInputPanel.java	Wed Jul 31 11:31:23 2013 +0200
@@ -140,6 +140,7 @@
     }
 
 
+    /** Creates labels, canvases, and layouts. */
     @Override
     public Canvas create(DataList data) {
         readGaugeRanges(data);
@@ -196,7 +197,7 @@
             qdTables.add(qdTable);
 
             qdTable.showSelect();
-            //wTable.showSelect();
+            wTable.showSelect();
             wTab.setPane(wTable);
             qTab.setPane(qdTable);
 
@@ -267,6 +268,57 @@
             qdTable.addCellClickHandler(handler);
             i++;
         }
+
+        i = 0;
+        for (WTable wTable: wTables) {
+            // Register listener such that values are filled in on click.
+            final WTable table = wTable;
+            final int fi = i;
+            CellClickHandler handler = new CellClickHandler() {
+                @Override
+                public void onCellClick(CellClickEvent e) {
+                    if (!isWMode() /*|| table.isLocked()*/) {
+                        return;
+                    }
+
+                    Record r   = e.getRecord();
+                    double val = r.getAttributeAsDouble("value");
+
+                    doubleArrayPanels.get(fi).setValues(new double[]{val});
+                    // If a named value is chosen for the first gauge, try to
+                    // find and set the corresponding values in the other panels, too.
+                    if (fi == 0) {
+                        String valueName = r.getAttribute("name");
+                        int oi = 0;
+                        // TODO instead of oi use random access.
+                        for (WTable otherWTable: wTables) {
+                            if (oi == 0) {
+                                oi++;
+                                continue;
+                            }
+                            Double value = otherWTable.findRecordValue(valueName);
+                            if (value == null) {
+                                // TODO: UI behaves erratically afterwards.
+                                SC.warn(MSG.noMainValueAtGauge());
+                            }
+                            else {
+                                doubleArrayPanels.get(oi).setValues(new double[]{value});
+                            }
+                            oi++;
+                        }
+                    }
+                    else {
+                        // Focus next.
+                        if (fi != doubleArrayPanels.size()-1) {
+                            doubleArrayPanels.get(fi+1).focusInItem(1);
+                        }
+                    }
+                }
+            };
+
+            wTable.addCellClickHandler(handler);
+            i++;
+        }
     }
 
 
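The inner loop in the new handler walks all wTables only to skip index 0,
which is what the "use random access" TODO is about. A sketch of that
variant, assuming wTables is a List<WTable> (the enhanced for loop by
itself only implies Iterable):

    // Sketch only: start at index 1 instead of iterating and
    // skipping the first table.
    for (int oi = 1; oi < wTables.size(); oi++) {
        Double value = wTables.get(oi).findRecordValue(valueName);
        if (value == null) {
            // The TODO about the follow-up warning still applies.
            SC.warn(MSG.noMainValueAtGauge());
        }
        else {
            doubleArrayPanels.get(oi).setValues(new double[]{value});
        }
    }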
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/minfo/SedLoadOffEpochPanel.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/minfo/SedLoadOffEpochPanel.java	Wed Jul 31 11:31:23 2013 +0200
@@ -38,7 +38,7 @@
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoObject;
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoRecord;
 
-
+/** Show input to select an official epoch. */
 public class SedLoadOffEpochPanel
 extends PeriodPanel
 {
@@ -47,6 +47,7 @@
 
     private ListGrid sedLoadTable;
 
+    /** Creates layout with title. */
     public Canvas createWidget(DataList data) {
         VLayout root = new VLayout();
 
@@ -58,6 +59,7 @@
         return root;
     }
 
+    /** Create layout for data entered previously. */
     @Override
     public Canvas createOld(DataList dataList) {
         HLayout layout = new HLayout();
@@ -104,6 +106,7 @@
         return layout;
     }
 
+    /** Creates the helper grid in which official epochs can be selected. */
     protected Canvas createHelper() {
         sedLoadTable = new ListGrid();
         sedLoadTable.setShowHeaderContextMenu(false);
@@ -154,8 +157,8 @@
     }
 
     protected void fetchSedimentLoadData() {
-        Config config    = Config.getInstance();
-        String locale    = config.getLocale ();
+        Config config = Config.getInstance();
+        String locale = config.getLocale ();
 
         ArtifactDescription adescr = artifact.getArtifactDescription();
         DataList[] data = adescr.getOldData();
@@ -163,16 +166,16 @@
         double[] km = artifact.getArtifactDescription().getKMRange();
         String river = artifact.getArtifactDescription().getRiver();
 
-        sedLoadInfoService.getSedimentLoadInfo(locale, river, "epoch", km[0], km[1],
+        sedLoadInfoService.getSedimentLoadInfo(locale, river, "off_epoch", km[0], km[1],
             new AsyncCallback<SedimentLoadInfoObject[]>() {
                 public void onFailure(Throwable caught) {
-                    GWT.log("Could not recieve sediment load informations.");
+                    GWT.log("Could not receive sediment load informations.");
                     SC.warn(caught.getMessage());
                 }
 
                 public void onSuccess(SedimentLoadInfoObject[] sedLoad) {
                     int num = sedLoad != null ? sedLoad.length :0;
-                    GWT.log("Recieved " + num + " sediment load informations.");
+                    GWT.log("Received " + num + " sediment load informations.");
 
                     if (num == 0) {
                         return;
@@ -191,5 +194,5 @@
             sedLoadTable.addData(rec);
         }
     }
-
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/QDTable.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/QDTable.java	Wed Jul 31 11:31:23 2013 +0200
@@ -22,7 +22,12 @@
 
 
 /**
- * Table showing Q and D main values, allowing for selection.
+ * Table showing Q and D main values, allowing for selection if
+ * showSelect() is called. In that case, a CellClickHandler should
+ * be registered.
+ *
+ * TODO has become very similar to WTable. Probably mergeable.
+ *
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  */
 public class QDTable extends ListGrid {
@@ -119,6 +124,7 @@
         hideField("min");
     }
 
+    /** Whether or not the table can be clicked on. */
     public boolean isLocked() {
         return lockClick;
     }
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/WTable.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/WTable.java	Wed Jul 31 11:31:23 2013 +0200
@@ -21,6 +21,8 @@
 import org.dive4elements.river.client.client.FLYSConstants;
 
 /**
+ * Table showing W main values.
+ * TODO has become very similar to QDTable. Probably mergeable.
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  */
 public class WTable extends ListGrid {
@@ -30,6 +32,8 @@
 
 
     public WTable() {
+        String baseUrl = GWT.getHostPageBaseURL();
+
         setWidth100();
         setHeight100();
         setSelectionType(SelectionStyle.NONE);
@@ -68,7 +72,31 @@
             }
         });
 
-        setFields(name, type, value);
+        ListGridField select = new ListGridField("select", MESSAGE.selection());
+        select.setType(ListGridFieldType.ICON);
+        select.setWidth(70);
+        select.setCellIcon(baseUrl + MESSAGE.markerGreen());
+
+        setFields(select, name, type, value);
+        hideField("select");
+    }
+
+    public void showSelect() {
+        showField("select");
+    }
+
+
+    /**
+     * Search all records for one whose "name" attribute equals the given name.
+     * @return the matching record's value, or null if none found.
+     */
+    public Double findRecordValue(String name) {
+        for (ListGridRecord record : getRecords()) {
+            if (record.getAttribute("name").equals(name)) {
+                return record.getAttributeAsDouble("value");
+            }
+        }
+        return null;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
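Both TODOs above note that QDTable and WTable now share the select-column
mechanics. A hypothetical common base class (not part of this changeset)
could host the shared parts; subclasses would still register their own
columns via setFields(...) and then call hideField("select"):

    import com.google.gwt.core.client.GWT;
    import com.smartgwt.client.types.ListGridFieldType;
    import com.smartgwt.client.widgets.grid.ListGrid;
    import com.smartgwt.client.widgets.grid.ListGridField;
    import com.smartgwt.client.widgets.grid.ListGridRecord;

    /** Hypothetical shared base class for QDTable and WTable. */
    public abstract class MainValueTable extends ListGrid {

        /** Builds the initially hidden icon column used for selection. */
        protected ListGridField createSelectField(String title, String icon) {
            ListGridField select = new ListGridField("select", title);
            select.setType(ListGridFieldType.ICON);
            select.setWidth(70);
            select.setCellIcon(GWT.getHostPageBaseURL() + icon);
            return select;
        }

        public void showSelect() {
            showField("select");
        }

        /**
         * Search all records for one whose "name" attribute equals the
         * given name.
         * @return the matching record's value, or null if none found.
         */
        public Double findRecordValue(String name) {
            for (ListGridRecord record: getRecords()) {
                if (record.getAttribute("name").equals(name)) {
                    return record.getAttributeAsDouble("value");
                }
            }
            return null;
        }
    }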
--- a/gwt-client/src/main/java/org/dive4elements/river/client/server/MapPrintServiceImpl.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/server/MapPrintServiceImpl.java	Wed Jul 31 11:31:23 2013 +0200
@@ -212,7 +212,7 @@
             Map<String, Object> legend = new LinkedHashMap<String, Object>();
             List<Object> classes = new ArrayList<Object>(1);
             Map<String, Object> clazz = new LinkedHashMap<String, Object>();
-            String lgu = encode(MapUtils.getLegendGraphicUrl(layer.url, layer.layers, dpi));
+            String lgu = MapUtils.getLegendGraphicUrl(layer.url, encode(layer.layers), dpi);
             clazz.put("icon", lgu);
             clazz.put("name", layer.description);
             classes.add(clazz);
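The one-line fix above narrows the encode() call: encoding the complete
legend-graphic URL also escapes the scheme and the '?' and '=' separators,
yielding an unusable link, whereas encoding only the layers value keeps
the URL intact. A standalone sketch of the difference with
java.net.URLEncoder (the project's encode() helper is assumed to behave
similarly):

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;

    public class EncodeExample {

        public static void main(String[] args)
        throws UnsupportedEncodingException {
            String url    = "http://example.org/wms"; // hypothetical WMS
            String layers = "river km & axis";        // value to escape

            // Wrong: encoding the finished URL escapes "://", "?", "=".
            String whole = URLEncoder.encode(
                url + "?REQUEST=GetLegendGraphic&LAYER=" + layers, "UTF-8");

            // Right: encode only the parameter value.
            String lgu = url + "?REQUEST=GetLegendGraphic&LAYER="
                + URLEncoder.encode(layers, "UTF-8");

            System.out.println(whole); // http%3A%2F%2Fexample.org%2F...
            System.out.println(lgu);   // http://example.org/wms?REQUEST=...
        }
    }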
--- a/gwt-client/src/main/java/org/dive4elements/river/client/server/SedimentLoadInfoServiceImpl.java	Tue Jul 30 18:54:53 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/server/SedimentLoadInfoServiceImpl.java	Wed Jul 31 11:31:23 2013 +0200
@@ -29,9 +29,10 @@
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoObjectImpl;
 
 
+/** Service to fetch info about sediment load. */
 public class SedimentLoadInfoServiceImpl
-extends RemoteServiceServlet
-implements SedimentLoadInfoService
+extends      RemoteServiceServlet
+implements   SedimentLoadInfoService
 {
     private static final Logger logger =
         Logger.getLogger(SedimentLoadInfoServiceImpl.class);
@@ -50,7 +51,7 @@
     {
         logger.info("SedimentLoadInfoServiceImpl.getSedimentLoadInfo");
 
-        String url  = getServletContext().getInitParameter("server-url");
+        String url = getServletContext().getInitParameter("server-url");
 
         Document doc = XMLUtils.newDocument();
 
@@ -158,3 +159,4 @@
         return null;
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
