changeset 6791:dd5355775ce1 longitudinal-symmetry

merge changes from default into longitudinal-symmetry branch and make it build
author Tom Gottfried <tom@intevation.de>
date Wed, 07 Aug 2013 18:53:15 +0200
parents 1a7f5d09b9d8 (current diff) 9479cb7c8cd5 (diff)
children 962f6b805b48
files artifacts/doc/conf/artifacts/minfo.xml artifacts/doc/conf/artifacts/winfo.xml artifacts/doc/conf/conf.xml artifacts/doc/conf/meta-data.xml artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDiffBaseGenerator.java artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDiffHeightYearGenerator.java artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceYearGenerator.java artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_en.properties
diffstat 124 files changed, 2334 insertions(+), 770 deletions(-)
line diff
--- a/.hgtags	Fri Jun 28 21:08:23 2013 +0200
+++ b/.hgtags	Wed Aug 07 18:53:15 2013 +0200
@@ -54,3 +54,15 @@
 5733d7f27196c5a8cf18231fbf187738f8fea560 3.0.7
 eec895f6ec801a7faaed96e9f01721e1143e7bb8 3.0.8
 41152c3a532d63a25e9fa348ca4286bdd76069ef 3.0.9
+5f4893db41e4fed644ddbd91bf4cdf0ac7952161 3.0.10
+5f4893db41e4fed644ddbd91bf4cdf0ac7952161 3.0.10
+0000000000000000000000000000000000000000 3.0.10
+0000000000000000000000000000000000000000 3.0.10
+bfcb513c1fdace07ce39616bc9fda3899e8ee914 3.0.10
+63baa1873b1f86a78ab10d19ae4221ec01cc75a2 3.0.11
+3999162f474fb5a6bced33521f81c9ccf274c4e7 3.0.12
+3999162f474fb5a6bced33521f81c9ccf274c4e7 3.0.12
+0000000000000000000000000000000000000000 3.0.12
+0000000000000000000000000000000000000000 3.0.12
+da197a9236fde564d45379c0826510c69a5709ce 3.0.12
+71da3d4ffb4a46a2f8de7e6a9e1e4a32657802aa 3.0.13
--- a/artifacts/doc/conf/artifacts/chart.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/chart.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -58,8 +58,18 @@
                         <facet name="w_differences" description="facet.w_differences"/>
                         <facet name="other.wkms" description="facet.other.wkms"/>
                         <facet name="other.wqkms" description="facet.other.wqkms"/>
+                        <facet name="other.wqkms.w"          description="W-Type of data" />
+                        <facet name="other.wqkms.q"          description="Q-Type of data" />
                         <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
+                        <facet name="w_differences.manualpoints" description="Manuelle Punkte"/>
                         <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
+                        <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+                        <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
                     </facets>
                 </outputmode>
             </outputmodes>
@@ -83,6 +93,13 @@
                         <facet name="longitudinal_section.w"/>
                         <facet name="longitudinal_section.q"/>
                         <facet name="longitudinal_section.manualpoints"/>
+                        <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+                        <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
                     </facets>
                 </outputmode>
             </outputmodes>
--- a/artifacts/doc/conf/artifacts/fixanalysis.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/fixanalysis.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -171,8 +171,8 @@
                         <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
                         <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
                         <facet name="fix_wq_curve.manualpoints" description="Manual points"/>
-                        <facet name="mainvalues.w" description="facet.fix_wq.mainvalues.w"/>
-                        <facet name="mainvalues.q" description="facet.fix_wq.mainvalues.q"/>
+                        <facet name="mainvalues.q" description="mainvalues.q"/>
+                        <facet name="mainvalues.w" description="mainvalues.w"/>
                     </facets>
                 </outputmode>
                 <outputmode name="fix_deltawt_curve" description="output.fix_deltawt_curve" mime-type="image/png" type="chart">
--- a/artifacts/doc/conf/artifacts/gaugedischarge.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischarge.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -8,8 +8,15 @@
         <outputmode name="computed_discharge_curve" description="computed_discharge_curve" mime-type="image/png" type="chart">
           <facets>
             <facet name="discharge_curve.curve" description="facet.discharge_curve.curve" />
-            <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-            <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+            <facet name="computed_discharge_curve.mainvalues.q" description="mainvalues.q"/>
+            <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+            <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+            <facet name="heightmarks_points" description="facet.other.wqkms"/>
+            <facet name="other.wqkms" description="facet.other.wqkms"/>
+            <facet name="other.wq"    description="Point-like data like fixations"/>
+            <facet name="other.wkms"  description="Point-like data like fixations"/>
+            <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+            <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
           </facets>
         </outputmode>
       </outputmodes>
--- a/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -6,11 +6,18 @@
             <data name="reference_gauge" type="Long" />
             <data name="gauge_name" type="String" />
             <outputmodes>
-                <outputmode name="discharge_curve" description="output.discharge_curve" type="chart">
+                <outputmode name="discharge_curve" description="output.discharge_curve" mime-type="image/png" type="chart">
                     <facets>
                         <facet name="gauge_discharge_curve"/>
-                        <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-                        <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+                        <facet name="mainvalues.q" description="mainvalues.q"/>
+                        <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+                        <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+                        <facet name="heightmarks_points" description="facet.other.wqkms"/>
+                        <facet name="other.wqkms" description="facet.other.wqkms"/>
+                        <facet name="other.wq"    description="Point-like data like fixations"/>
+                        <facet name="other.wkms"  description="Point-like data like fixations"/>
+                        <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+                        <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
                     </facets>
                 </outputmode>
                 <outputmode name="computed_dischargecurve_at_export" description="output.computed_dischargecurve_at_export" mime-type="text/plain" type="export">
--- a/artifacts/doc/conf/artifacts/minfo.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/minfo.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -524,6 +524,14 @@
                         <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
                     </facets>
                 </outputmode>
+                <outputmode name="sedimentload_ls_export" description="output.sedimentload_ls_export" mime-type="text/plain" type="export">
+                    <facets>
+                        <facet name="csv" description="facet.sedimentload_ls_export.csv" />
+                        <!--
+                        <facet name="pdf" description=".pdf" />
+                        -->
+                    </facets>
+                </outputmode>
                 <outputmode name="sedimentload_ls_report" description="output.sedimentload_ls_report" mime-type="text/xml" type="report">
                     <facets>
                         <facet name="report" description="facet.sedimentload_ls_export.report" />
--- a/artifacts/doc/conf/artifacts/winfo.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/artifacts/winfo.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -271,6 +271,14 @@
                         <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
                         <facet name="w_differences.manualpoints" description="Manuelle Punkte"/>
                         <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
+                        <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+                        <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+                        <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+                        <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
+                        <facet name="morph-width" description="morphologic width, not sounding width!"/>
                     </facets>
                 </outputmode>
                 <outputmode name="w_differences_export" description="output.w_differences.export" mime-type="text/plain" type="export">
@@ -444,10 +452,12 @@
             </outputmodes>
         </state>
 
+        <!-- The bridge to UeSF
         <transition transition="org.dive4elements.river.artifacts.transitions.DefaultTransition">
             <from state="state.winfo.waterlevel"/>
             <to state="state.winfo.uesk.dgm"/>
         </transition>
+        -->
 
         <state id="state.winfo.uesk.wsp" description="state.winfo.uesk.wsp" state="org.dive4elements.river.artifacts.states.WaterlevelSelectState" helpText="help.state.winfo.uesk.wsp">
             <data name="wsp" type="String" />
@@ -631,8 +641,8 @@
                         <facet name="historical_discharge.wq.q"/>
                         <facet name="historical_discharge.wq.w"/>
                         <facet name="historical_discharge.wq.curve"/>
-                        <facet name="historical_discharge.mainvalues.q"/>
-                        <facet name="historical_discharge.mainvalues.w"/>
+                        <facet name="mainvalues.q"/>
+                        <facet name="mainvalues.w"/>
                         <facet name="historical_discharge_wq.manualpoints"/>
                     </facets>
                 </outputmode>
--- a/artifacts/doc/conf/datacage.sql	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/datacage.sql	Wed Aug 07 18:53:15 2013 +0200
@@ -41,7 +41,7 @@
     artifact_id INT             NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
     kind        VARCHAR(256)    NOT NULL,
     k           VARCHAR(256)    NOT NULL,
-    v           VARCHAR(256),   -- Maybe too short
+    v           TEXT,
     UNIQUE (artifact_id, k)
 );
 
--- a/artifacts/doc/conf/meta-data.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/meta-data.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -36,6 +36,7 @@
     </dc:macro>
 
     <dc:macro name="generate-system-content">
+      <dc:message>Generate system content with variables: {dc:dump-variables()}</dc:message>
       <dc:call-macro name="artifact-range">
         <dc:call-macro name="km-filtered-wsts">
           <dc:choose>
@@ -44,6 +45,7 @@
                 Recommendations (client shall load immediately).
               </dc:comment>
               <dc:iterate var="out" container="artifact-outs">
+                <dc:message>Rec out iteration for: {$out}</dc:message>
                 <dc:choose>
                   <dc:when test="dc:contains($out, 'longitudinal_section')">
                     <dc:call-macro name="annotations"/>
@@ -82,14 +84,20 @@
                   <dc:when test="$out = 'fix_longitudinal_section_curve'">
                     <dc:call-macro name="annotations"/>
                   </dc:when>
+                  <dc:when test="$out = 'bedheight_middle'">
+                    <dc:call-macro name="annotations"/>
+                  </dc:when>
                   <dc:when test="$out = 'bed_difference_epoch'">
                     <dc:call-macro name="annotations"/>
+                    <dc:call-macro name="basedata_6_delta_w"/>
                   </dc:when>
                   <dc:when test="$out = 'bed_difference_year'">
                     <dc:call-macro name="annotations"/>
+                    <dc:call-macro name="basedata_6_delta_w"/>
                   </dc:when>
                   <dc:when test="$out = 'bed_difference_height_year'">
                     <dc:call-macro name="annotations"/>
+                    <dc:call-macro name="basedata_6_delta_w"/>
                   </dc:when>
                   <dc:when test="$out = 'floodmap'">
                     <dc:call-macro name="flood-map-recommended"/>
@@ -113,6 +121,7 @@
                 Non - Recommendations.
               </dc:comment>
               <dc:iterate var="out" container="artifact-outs">
+                <dc:message>Non Rec out iteration for: {$out}</dc:message>
                 <dc:choose>
                   <dc:when test="$out = 'cross_section'">
                     <dc:call-macro name="basedata_0"/>
@@ -129,12 +138,34 @@
                   </dc:when>
                   <dc:when test="$out = 'w_differences_longitudinal_section'">
                     <dc:call-macro name="longitudinal-section-prototype"/>
+                    <dc:call-macro name="bedheight_differences"/>
                   </dc:when>
                   <dc:when test="$out = 'discharge_longitudinal_section'">
                     <dc:call-macro name="longitudinal-section-prototype"/>
                   </dc:when>
+                  <dc:when test="$out = 'historical_discharge_wq'">
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
+                    <dc:call-macro name="basedata_2_fixations_wqkms"/>
+                    <dc:call-macro name="basedata_5_flood-protections"/>
+                    <dc:call-macro name="basedata_0"/>
+                    <dc:call-macro name="basedata_1_additionals"/>
+                    <dc:call-macro name="basedata_4_heightmarks-points"/>
+                    <computed_discharge_curve>
+                      <dc:call-macro name="mainvalues"/>
+                    </computed_discharge_curve>
+                  </dc:when>
                   <dc:when test="$out = 'discharge_curve'">
-                    <dc:call-macro name="mainvalues"/>
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
+                    <dc:call-macro name="basedata_2_fixations_wqkms"/>
+                    <dc:call-macro name="basedata_5_flood-protections"/>
+                    <dc:call-macro name="basedata_0"/>
+                    <dc:call-macro name="basedata_1_additionals"/>
+                    <dc:call-macro name="basedata_4_heightmarks-points"/>
+                    <computed_discharge_curve>
+                      <dc:call-macro name="mainvalues"/>
+                    </computed_discharge_curve>
                   </dc:when>
                   <dc:when test="$out = 'duration_curve'">
                     <dc:call-macro name="mainvalues"/>
@@ -151,14 +182,14 @@
                     <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
                   </dc:when>
                   <dc:when test="$out = 'fix_wq_curve'">
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
                     <dc:call-macro name="basedata_0_wq"/>
                     <dc:call-macro name="basedata_1_additionals_marks"/>
                     <dc:call-macro name="basedata_2_fixations_wqkms"/>
                     <dc:call-macro name="basedata_3_officials"/>
                     <dc:call-macro name="basedata_4_heightmarks-points"/>
                     <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
-                    <dc:call-macro name="discharge_table_gauge"/>
-                    <dc:call-macro name="discharge_fix_wq"/>
                   </dc:when>
                   <dc:when test="$out = 'fix_longitudinal_section_curve'">
                     <dc:call-macro name="longitudinal-section-prototype"/>
@@ -244,7 +275,8 @@
                     </dc:choose>
                   </dc:when>
                   <dc:when test="$out = 'computed_discharge_curve'">
-                    <dc:call-macro name="discharge_computed"/>
+                    <dc:call-macro name="historical_discharge_curve"/>
+                    <dc:call-macro name="discharge_table_gauge"/>
                     <dc:call-macro name="basedata_2_fixations_wqkms"/>
                     <dc:call-macro name="basedata_5_flood-protections"/>
                     <dc:call-macro name="basedata_0"/>
@@ -262,6 +294,16 @@
                       <dc:call-macro name="bed-heights-epoch"/>
                     </bedheights>
                   </dc:when>
+                  <dc:when test="$out = 'differenceable'">
+                    <dc:if test="$current-state-id != 'state.winfo.uesk.wsp'">
+                      <dc:call-macro name="basedata_0"/>
+                      <dc:call-macro name="basedata_1_additionals"/>
+                      <dc:call-macro name="basedata_2_fixations"/>
+                      <dc:call-macro name="basedata_4_heightmarks-points"/>
+                      <dc:call-macro name="basedata_5_flood-protections"/>
+                      <dc:call-macro name="minfo-heights"/>
+                    </dc:if>
+                  </dc:when>
                   <dc:when test="$out = 'waterlevels'">
                     <dc:if test="$current-state-id != 'state.winfo.uesk.wsp'">
                       <dc:call-macro name="basedata_0"/>
@@ -313,6 +355,7 @@
                     <dc:when test="$out = 'w_differences_longitudinal_section'">
                       <dc:call-macro name="longitudinal"/>
                       <dc:call-macro name="differences"/>
+                      <dc:call-macro name="bedheight_differences"/>
                     </dc:when>
                     <dc:when test="$out = 'fix_deltawt_curve'">
                       <dc:call-macro name="delta-wt"/>
@@ -320,9 +363,6 @@
                     <dc:when test="$out = 'reference_curve'">
                       <dc:call-macro name="reference-curves"/>
                     </dc:when>
-                    <dc:when test="$out = 'computed_discharge_curve'">
-                      <dc:call-macro name="computed-discharge-curve"/>
-                    </dc:when>
                     <dc:when test="$out = 'cross_section'">
                       <dc:call-macro name="waterlevels"/>
                     </dc:when>
@@ -334,16 +374,18 @@
                     </dc:when>
                     <dc:when test="$out = 'fix_wq_curve'">
                       <dc:call-macro name="fix-wq-curve"/>
+                      <dc:call-macro name="waterlevels-fix"/>
                     </dc:when>
                     <dc:when test="$out = 'duration_curve'">
                       <dc:call-macro name="duration-curve"/>
                     </dc:when>
+                    <dc:when test="$out = 'differenceable'">
+                      <dc:call-macro name="differenceable-fix"/>
+                      <dc:call-macro name="differences"/>
+                    </dc:when>
                     <dc:when test="$out = 'waterlevels'">
                       <dc:call-macro name="waterlevels-fix"/>
                     </dc:when>
-                    <dc:when test="$out = 'fix_wq_curve'">
-                      <dc:call-macro name="waterlevels-fix"/>
-                    </dc:when>
                     <dc:when test="$out = 'floodmap'">
                       <dc:call-macro name="flood-map"/>
                     </dc:when>
@@ -352,7 +394,7 @@
                     </dc:when>
                     <dc:when test="$out = 'bedheight_middle_longitudinal_section'">
                       <dc:call-macro name="waterlevels-discharge"/>
-                      <dc:call-macro name="waterlevels-fix"/>
+                      <dc:call-macro name="differenceable-fix"/>
                     </dc:when>
                     <dc:when test="$out = 'floodmap-hws'">
                       <dc:call-macro name="floodmap-hws-user"/>
@@ -369,20 +411,20 @@
                     </dc:when>
                     <dc:when test="$out = 'sedimentload_longitudinal_section'">
                       <dc:call-macro name="differences"/>
-                      <dc:call-macro name="bedheight-differences"/>
+                      <dc:call-macro name="bedheight_differences"/>
                       <dc:call-macro name="flow-velocity"/>
                       <dc:call-macro name="sediment-load"/>
                     </dc:when>
                     <dc:when test="$out = 'bed_difference_year_longitudinal_section'">
                       <dc:call-macro name="waterlevels-discharge"/>
-                      <dc:call-macro name="bedheight-differences"/>
+                      <dc:call-macro name="bedheight_differences"/>
                       <dc:call-macro name="differences"/>
                       <dc:call-macro name="waterlevels-fix"/>
                       <dc:call-macro name="delta-wt-ls"/>
                     </dc:when>
                     <dc:when test="$out = 'bed_difference_epoch_longitudinal_section'">
                       <dc:call-macro name="waterlevels-discharge"/>
-                      <dc:call-macro name="bedheight-differences"/>
+                      <dc:call-macro name="bedheight_differences"/>
                       <dc:call-macro name="differences"/>
                       <dc:call-macro name="waterlevels-fix"/>
                       <dc:call-macro name="delta-wt-ls"/>
@@ -405,9 +447,10 @@
             <dc:for-each>
               <w_differences
                 description="{$facet_description}"
-                factory="winfo" target_out="{$out}"
-                artifact-id="{$aid}"
-                ids="{$aid}"
+                factory="winfo"
+                target_out="{$out}"
+                artifact-id="{$a_gid}"
+                ids="{$facet_num}"
                 out="w_differences_longitudinal_section"/>
             </dc:for-each>
           </differences>
@@ -434,31 +477,52 @@
       </dc:filter>
     </dc:macro>
 
-    <dc:macro name="computed-discharge-curve">
-      <dc:filter expr="$facet_name = 'computed_discharge_curve.q'">
+
+    <dc:macro name="historical_discharge_curve">
+      <dc:context>
+        <dc:statement>
+          SELECT g.id   AS gauge_id,
+                 g.name AS gauge_name,
+                 dt.id AS dt_id,
+                 t.start_time AS start_time,
+                 t.stop_time AS stop_time,
+                 dt.description AS desc,
+                 dt.bfg_id AS bfg_id
+          FROM gauges g
+          JOIN discharge_tables dt ON g.id = dt.gauge_id
+          LEFT JOIN time_intervals t ON dt.time_interval_id = t.id
+          WHERE g.river_id = ${river_id}
+          AND dt.kind &lt;&gt; 0
+          AND g.station = ${fromkm}
+          AND g.station = ${tokm}
+          ORDER BY start_time
+        </dc:statement>
         <dc:if test="dc:has-result()">
-          <computed_discharge_curves>
-            <dc:for-each>
-              <dc:element name="${facet_name}">
-                <dc:attribute name="description" value="${facet_description}"/>
-                <dc:attribute name="factory" value="winfo"/>
-                <dc:attribute name="target_out" value="${out}"/>
-                <dc:attribute name="artifact-id" value="${a_id}"/>
-                <dc:attribute name="ids" value="${a_id}"/>
-                <dc:attribute name="out" value="computed_discharge_curve"/>
-              </dc:element>
-            </dc:for-each>
-          </computed_discharge_curves>
+          <historical_discharge_curves>
+            <dc:group expr="$gauge_name">
+              <dc:for-each>
+                <dc:variable name="combined_desc" expr="concat($bfg_id, ' ', dc:date-format('dd.MM.yyyy', $start_time), ' - ', dc:date-format('dd.MM.yyyy', $stop_time))"/>
+                <dc:message>
+                  Hallo ANDRE23 {dc:dump-variables()}
+                </dc:message>
+                <histdis name="{$combined_desc}"
+                  description="{$combined_desc}"
+                  factory="gaugedischarge" target_out="{$out}"
+                  ids="{$gauge_name};{$dt_id};{$combined_desc}"/>
+              </dc:for-each>
+            </dc:group>
+          </historical_discharge_curves>
         </dc:if>
-      </dc:filter>
+      </dc:context>
     </dc:macro>
 
+
     <dc:macro name="flood-map">
       <dc:filter expr="$facet_name = 'floodmap.wsplgen'">
         <dc:if test="dc:has-result()">
           <floodmap>
             <dc:for-each>
-              <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"/>
+              <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"/>
               <dc:element name="${facet_name}">
                 <dc:attribute name="description" value="${combined_desc}"/>
                 <dc:attribute name="factory" value="winfo"/>
@@ -477,7 +541,7 @@
       <dc:filter expr="$out_name = 'cross_section'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))">
               <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment>
               <longitudinal_section_columns description="{dc:group-key()}">
                 <dc:for-each>
@@ -501,7 +565,7 @@
       <dc:filter expr="$out_name = 'longitudinal_section'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))">
               <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment>
               <longitudinal_section_columns description="{dc:group-key()}">
                 <dc:for-each>
@@ -529,7 +593,7 @@
         starts-with($facet_name, 'fix_reference_events_ls'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -557,7 +621,7 @@
         starts-with($facet_name, 'fix_sector_average_dwt'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -582,7 +646,7 @@
         starts-with($facet_name, 'fix_deviation_dwt'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -605,7 +669,7 @@
       <dc:filter expr="$out_name = 'fix_derivate_curve' and $facet_name = 'fix_derivate_curve'">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -632,7 +696,7 @@
         $facet_name = 'fix_wq_curve')">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -676,7 +740,7 @@
         (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location'))">
         <dc:if test="dc:has-result()">
           <waterlevels>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <waterlevels description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:choose>
@@ -704,6 +768,77 @@
       </dc:filter>
     </dc:macro>
 
+    <dc:comment comment="For building differences." />
+    <dc:macro name="differenceable-fix">
+      <dc:comment>
+        No diffs between beddiffs and others, for now.
+        <beddifferences>
+           <dc:call-macro name="bedheight_differences"/>
+        </beddifferences>
+      </dc:comment>
+      <dc:comment comment="Vollmer curves need own factory"/>
+      <dc:filter expr="$a_state = 'state.fix.vollmer.compute' and $facet_name = 'longitudinal_section.w'">
+        <dc:if test="dc:has-result()">
+          <vollmer_waterlevels>
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
+              <waterlevels description="{dc:group-key()}">
+                <dc:for-each>
+                  <dc:choose>
+                    <dc:when test="$ld_m = 'location'">
+                      <dc:variable name="combined_desc" expr="concat($facet_description, ' an KM ', $deffrom)"/>
+                    </dc:when>
+                    <dc:otherwise>
+                      <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ',
+                        $deffrom, ' bis KM ', $defto)"/>
+                    </dc:otherwise>
+                 </dc:choose>
+                  <dc:element name="${facet_name}">
+                    <dc:attribute name="description" value="${combined_desc}"/>
+                    <dc:attribute name="ids" value="${facet_num}"/>
+                    <dc:attribute name="factory" value="fixanalysis"/>
+                    <dc:attribute name="target_out" value="${out}"/>
+                    <dc:attribute name="artifact-id" value="${a_gid}"/>
+                    <dc:attribute name="out" value="longitudinal_section"/>
+                  </dc:element>
+                </dc:for-each>
+              </waterlevels>
+            </dc:group>
+          </vollmer_waterlevels>
+        </dc:if>
+      </dc:filter>
+      <dc:filter expr="not($a_state = 'state.fix.vollmer.compute') and (($out_name = 'longitudinal_section' and $facet_name = 'longitudinal_section.w') and
+        (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location')))">
+        <dc:if test="dc:has-result()">
+          <waterlevels>
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
+                <waterlevels description="{dc:group-key()}">
+                  <dc:for-each>
+                    <dc:choose>
+                      <dc:when test="$ld_m = 'location'">
+                        <dc:variable name="combined_desc" expr="concat($facet_description, ' an KM ', $deffrom)"/>
+                      </dc:when>
+                      <dc:otherwise>
+                        <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ',
+                          $deffrom, ' bis KM ', $defto)"/>
+                      </dc:otherwise>
+                    </dc:choose>
+                    <dc:element name="${facet_name}">
+                      <dc:attribute name="description" value="${combined_desc}"/>
+                      <dc:attribute name="ids" value="${facet_num}"/>
+                      <dc:attribute name="factory" value="winfo"/>
+                      <dc:attribute name="target_out" value="${out}"/>
+                      <dc:attribute name="artifact-id" value="${a_gid}"/>
+                      <dc:attribute name="out" value="longitudinal_section"/>
+                    </dc:element>
+                  </dc:for-each>
+                </waterlevels>
+            </dc:group>
+          </waterlevels>
+        </dc:if>
+      </dc:filter>
+    </dc:macro>
+
+
     <dc:macro name="floodmap-hws-user">
       <dc:comment>No grouping in this?</dc:comment>
       <dc:filter expr="$out_name = 'floodmap' and $facet_name = 'floodmap.usershape'">
@@ -728,7 +863,7 @@
         $facet_name = 'bed_longitudinal_section.bed_diameter_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_bed>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <quality-bed description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -752,7 +887,7 @@
         $facet_name = 'bed_longitudinal_section.bedload_diameter'">
         <dc:if test="dc:has-result()">
           <bed_quality_load>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <quality-load description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -777,7 +912,7 @@
          $facet_name = 'bed_longitudinal_section.sediment_density_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_density>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <density description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -802,7 +937,7 @@
         $facet_name = 'bed_longitudinal_section.porosity_sublayer')">
         <dc:if test="dc:has-result()">
           <bed_quality_porosity>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <porosity description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -829,7 +964,7 @@
          $facet_name = 'flow_velocity.mainchannel.filtered')">
         <dc:if test="dc:has-result()">
           <flow-velocity>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <flow description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -852,7 +987,7 @@
       <dc:filter expr="$out_name = 'sedimentload_longitudinal_section' and starts-with($facet_name, 'sedimentload')">
         <dc:if test="dc:has-result()">
           <sediment-load>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <load description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -875,31 +1010,31 @@
       <dc:filter expr="($out_name = 'bed_difference_year_longitudinal_section' or $out_name = 'bed_difference_epoch_longitudinal_section') and
         (starts-with($facet_name, 'bedheight_difference.year') or starts-with($facet_name, 'bedheight_difference.epoch'))">
         <dc:if test="dc:has-result()">
-          <bedheight-differences>
-            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+          <bedheight_differences>
+            <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <difference description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
                     <dc:attribute name="factory" value="minfo"/>
                     <dc:attribute name="target_out" value="${out}"/>
                     <dc:attribute name="description" value="${facet_description}"/>
-                    <dc:attribute name="ids" value="${facet_num}-${facet_name}"/>
-                    <dc:attribute name="artifact-id" value="${a_id}"/>
-                    <dc:attribute name="out" value="${out}"/>
+                    <dc:attribute name="ids" value="${facet_num}"/>
+                    <dc:attribute name="artifact-id" value="${a_gid}"/>
+                    <dc:attribute name="out" value="${out_name}"/>
                   </dc:element>
                 </dc:for-each>
               </difference>
             </dc:group>
-          </bedheight-differences>
+          </bedheight_differences>
         </dc:if>
       </dc:filter>
     </dc:macro>
 
     <dc:macro name="waterlevels-discharge">
-      <dc:filter expr="$out_name = 'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w">
+      <dc:filter expr="$out_name = 'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w'">
         <dc:if test="dc:has-result()">
           <waterlevels-discharge>
-            <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)">
+            <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)">
               <discharge description="{dc:group-key()}">
                 <dc:for-each>
                   <dc:element name="${facet_name}">
@@ -1187,36 +1322,30 @@
     <dc:macro name="discharge_table_gauge">
       <dc:context>
         <dc:statement>
-          SELECT id   AS gauge_id,
-                 name AS gauge_name
-          FROM gauges WHERE river_id = ${river_id}
+          SELECT g.id   AS gauge_id,
+                 g.name AS gauge_name,
+                 t.start_time AS start_time,
+                 t.stop_time AS stop_time
+          FROM gauges g
+          JOIN discharge_tables dt ON g.id = dt.gauge_id
+          LEFT JOIN time_intervals t ON dt.time_interval_id = t.id
+          WHERE g.river_id = ${river_id}
+          AND dt.kind = 0
+          AND g.station = ${fromkm}
+          AND g.station = ${tokm}
         </dc:statement>
         <dc:if test="dc:has-result()">
-          <discharge_table_nn>
-            <discharge_table_gauge>
-              <dc:for-each>
-                <gauge name="{$gauge_name}"
-                  factory="gaugedischarge" target_out="{$out}"
-                  from="{$g_start}"
-                  to="{$g_stop}"
-                  ids="{$gauge_name}"/>
-              </dc:for-each>
-            </discharge_table_gauge>
-          </discharge_table_nn>
+          <current_gauge>
+            <dc:for-each>
+              <gauge name="{$gauge_name} ({dc:date-format('dd.MM.yyyy', $start_time)})"
+                factory="gaugedischarge" target_out="{$out}"
+                ids="{$gauge_name}"/>
+            </dc:for-each>
+          </current_gauge>
         </dc:if>
       </dc:context>
     </dc:macro>
 
-    <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment>
-    <dc:macro name="discharge_computed">
-      <dc:call-macro name="discharge_table_gauge"/>
-    </dc:macro>
-
-    <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment>
-    <dc:macro name="discharge_fix_wq">
-      <dc:call-macro name="discharge_table_gauge"/>
-    </dc:macro>
-
     <dc:macro name="qsectors">
       <qsector factory="qsectors" ids="{$river_id}" target_out="{$out}" />
     </dc:macro>
@@ -1839,7 +1968,6 @@
           FROM floodplain fp
             JOIN floodplain_kinds flk on fp.kind_id = flk.id
           WHERE river_id = ${river_id}
-            AND kind_id=1
         </dc:statement>
         <dc:if test="dc:has-result()">
           <floodplain>
@@ -2326,8 +2454,8 @@
             <officiallines>
               <dc:for-each>
                 <column name="{$olname}"
-                      ids="additionals-wstv-{$wstcolpos}-{$wstid}"
-                      factory="staticwqkms" target_out="{$out}"/>
+                  ids="official-wstv-{$wstcolpos}-{$wstid}"
+                  factory="staticwqkms" target_out="{$out}"/>
               </dc:for-each>
             </officiallines>
           </dc:if>
@@ -2361,17 +2489,21 @@
 
     <dc:macro name="all-user-artifacts">
         <dc:context connection="user">
-        <dc:comment>Select collections and masterartifacts.</dc:comment>
+          <dc:comment>Select collections and masterartifacts.
+            XXX: The cast is a quick hack because the ld_* values are
+            now TEXT fields. To properly fix / assess the problems here
+            an SLT evaluation is needed.
+          </dc:comment>
         <dc:statement>
           SELECT c.name                     AS collection_name,
                  ma.id                      AS a_id,
                  ma.state                   AS a_state,
                  ma.gid                     AS a_gid,
                  ma.creation                AS a_creation,
-                 COALESCE(ma.ld_mode, '')      AS ld_m,
-                 COALESCE(ma.ld_locations, '') AS ld_l,
-                 COALESCE(ma.ld_from, '')      AS ld_f,
-                 COALESCE(ma.ld_to, '')        AS ld_t,
+                 CAST(COALESCE(ma.ld_mode, '') AS VARCHAR(255)) AS ld_m,
+                 CAST(COALESCE(ma.ld_locations, '') AS VARCHAR(255)) AS ld_l,
+                 CAST(COALESCE(ma.ld_from, '') AS VARCHAR(255)) AS ld_f,
+                 CAST(COALESCE(ma.ld_to, '') AS VARCHAR(255)) AS ld_t,
                  o.name                        AS out_name,
                  o.id                          AS out_id,
                  f.name                        AS facet_name,
--- a/artifacts/doc/conf/themes/default.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/themes/default.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -834,11 +834,26 @@
     <!-- MIDDLE BED HEIGHT -->
     <theme name="MiddleBedHeightSingle">
         <inherits>
-            <inherit from="LongitudinalSection" />
+            <inherit from="Lines" />
+            <inherit from="Points" />
+            <inherit from="MinMaxPoints" />
+            <inherit from="Label" />
         </inherits>
         <fields>
+            <field name="showlines" type="boolean" display="Linie anzeigen"
+                default="true" />
+            <field name="linesize" type="int" display="Liniendicke"
+                default="1" />
+            <field name="linetype" type="Dash" display="Linienart"
+                default="10" />
             <field name="linecolor" type="Color" display="Linienfarbe"
-                default="204, 204, 204" />
+                />
+            <field name="showlinelabel" type="boolean"
+                display="Beschriftung anzeigen" default="false" />
+            <field name="showpoints" type="boolean" display="Punkte anzeigen"
+                default="false" />
+            <field name="showpointlabel" type="boolean"
+                display="Punktbeschriftung anzeigen" default="false" hints="hidden" />
         </fields>
     </theme>
 
@@ -1124,7 +1139,7 @@
                 display="Beschriftung anzeigen" default="false" hints="hidden" />
         </fields>
     </theme>
-     <theme name="SedimentLoadFineTotalLoad">
+     <theme name="SedimentLoadTotalLoad">
         <inherits>
             <inherit from="LongitudinalSection" />
         </inherits>
@@ -1399,12 +1414,14 @@
 
     <theme name="FixPoints">
         <inherits>
-            <inherit from="Points" />
+            <inherit from="ColorfulPoints" />
             <inherit from="Label" />
         </inherits>
         <fields>
             <field name="pointsize" type="int" display="Punktdicke"
                 default="3" />
+            <field name="showpointlabel" type="boolean"
+                display="Beschriftung anzeigen" default="true" hints="hidden" />
         </fields>
     </theme>
 
--- a/artifacts/doc/conf/themes/second.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/themes/second.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -834,11 +834,26 @@
     <!-- MIDDLE BED HEIGHT -->
     <theme name="MiddleBedHeightSingle">
         <inherits>
-            <inherit from="LongitudinalSection" />
+            <inherit from="Lines" />
+            <inherit from="Points" />
+            <inherit from="MinMaxPoints" />
+            <inherit from="Label" />
         </inherits>
         <fields>
+            <field name="showlines" type="boolean" display="Linie anzeigen"
+                default="true" />
+            <field name="linesize" type="int" display="Liniendicke"
+                default="2" />
+            <field name="linetype" type="Dash" display="Linienart"
+                default="10" />
             <field name="linecolor" type="Color" display="Linienfarbe"
-                default="204, 204, 204" />
+                />
+            <field name="showlinelabel" type="boolean"
+                display="Beschriftung anzeigen" default="false" />
+            <field name="showpoints" type="boolean" display="Punkte anzeigen"
+                default="false" />
+            <field name="showpointlabel" type="boolean"
+                display="Punktbeschriftung anzeigen" default="false" hints="hidden" />
         </fields>
     </theme>
 
@@ -1124,7 +1139,7 @@
                 display="Beschriftung anzeigen" default="false" hints="hidden" />
         </fields>
     </theme>
-     <theme name="SedimentLoadFineTotalLoad">
+     <theme name="SedimentLoadTotalLoad">
         <inherits>
             <inherit from="LongitudinalSection" />
         </inherits>
@@ -1399,12 +1414,14 @@
 
     <theme name="FixPoints">
         <inherits>
-            <inherit from="Points" />
+            <inherit from="ColorfulPoints" />
             <inherit from="Label" />
         </inherits>
         <fields>
             <field name="pointsize" type="int" display="Punktdicke"
                 default="3" />
+            <field name="showpointlabel" type="boolean"
+                display="Beschriftung anzeigen" default="true" hints="hidden" />
         </fields>
     </theme>
 
--- a/artifacts/doc/conf/themes/virtual.xml	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/doc/conf/themes/virtual.xml	Wed Aug 07 18:53:15 2013 +0200
@@ -30,6 +30,17 @@
         </fields>
     </theme>
 
+    <theme name="ColorfulPoints" type="virtual">
+        <fields>
+            <field name="showpoints" type="boolean" display="Punkte anzeigen"
+                default="true" />
+            <field name="pointsize" type="int" display="Punktdicke"
+                default="5" />
+            <field name="showpointlabel" type="boolean"
+                display="Punktbeschriftung anzeigen" default="false"/>
+        </fields>
+    </theme>
+
     <theme name="Label" type="virtual">
         <fields>
             <field name="labelfontface" type="Font"
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/CrossSectionArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/CrossSectionArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -53,6 +53,9 @@
     /** Name of data item keeping the position. */
     public static final String DATA_KM = "cross_section.km";
 
+    /** Name of data item keeping the parent's km. */
+    public static final String PARENT_KM = "cross_section.parent.km";
+
     /** Name of data item keeping the database id of this c.s.. */
     public static final String DATA_DBID = "cross_section.dbid";
 
@@ -140,7 +143,7 @@
         fs.add(new CrossSectionFacet(0, cs.getDescription()));
 
         // Find out if we are newest and become master if so.
-        boolean isNewest = CrossSectionFactory.isNewest(cs);
+        boolean isNewest = cs.shouldBeMaster(getParentKm());
         String newString = (isNewest) ? "1" : "0";
         addStringData(DATA_IS_NEWEST, newString);
         addStringData(DATA_IS_MASTER, newString);
@@ -154,19 +157,18 @@
     /** Copy km where master-artifact "starts". */
     @Override
     protected void initialize(
-        Artifact artifact,
+        Artifact master,
         Object   context,
         CallMeta callMeta)
     {
-        D4EArtifact flys = (D4EArtifact) artifact;
+        D4EArtifact masterArtifact = (D4EArtifact) master;
 
-        RangeAccess rangeAccess = new RangeAccess(flys);
+        RangeAccess rangeAccess = new RangeAccess(masterArtifact);
         double[] range = rangeAccess.getKmRange();
-        double min = 0.0f;
         if (range != null && range.length > 0) {
-            min = range[0];
+            this.addStringData(DATA_KM, Double.toString(range[0]));
+            this.addStringData(PARENT_KM, Double.toString(range[0]));
         }
-        this.addStringData(DATA_KM, Double.toString(min));
     }
 
 
@@ -242,6 +244,20 @@
 
 
     /**
+     * Return position (km) from parent (initial km), 0 if not found.
+     */
+    private double getParentKm() {
+        String val = getDataAsString(PARENT_KM);
+        try {
+            return Double.valueOf(val);
+        }
+        catch (NumberFormatException e) {
+            logger.warn("Could not get data " + PARENT_KM + " as double", e);
+            return 0;
+        }
+    }
+
+    /**
      * Return position (km) from data, 0 if not found.
      */
     protected double getKm() {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -12,6 +12,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.HashMap;
 
 import org.apache.log4j.Logger;
 
@@ -35,6 +36,7 @@
 
 import org.dive4elements.river.model.Gauge;
 import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.DischargeTable;
 
 import org.dive4elements.river.utils.RiverUtils;
 
@@ -52,6 +54,8 @@
     /** The name of the artifact. */
     public static final String ARTIFACT_NAME = "gaugedischarge";
 
+    /** The name a facet should have */
+    protected String facetWishName;
 
     /**
      * Trivial Constructor.
@@ -79,6 +83,13 @@
         String ids = StaticD4EArtifact.getDatacageIDValue(data);
         addStringData("ids", ids);
         logger.debug("id for gaugedischarge: " + ids);
+        String[] splitIds = ids.split(";");
+       /* We assume that if an ids string containing a ';' is given, the
+        * format is <gauge_name>;<discharge_table_id>;<facet_desc>,
+        * so that a specific discharge table can be selected. */
+        if (splitIds.length > 2) {
+            facetWishName = splitIds[2];
+        }
         super.setup(identifier, factory, context, callMeta, data);
     }
 
@@ -121,7 +132,10 @@
 
     /** Get the Gauges name which came with datacage data-document. */
     public String getGaugeName() {
-        return this.getDataAsString("ids");
+        if (getDataAsString("ids") == null) {
+            return null;
+        }
+        return getDataAsString("ids").split(";")[0];
     }
 
 
@@ -166,13 +180,34 @@
         }
         */
 
-        DischargeTables dt = new DischargeTables(river.getName(), getDataAsString("ids"));
+        Map<String, double [][]> map;
 
-        Map<String, double [][]> map = dt.getValues();
+        String[] ids = getDataAsString("ids").split(";");
+        if (ids.length > 1) {
+           /* We assume that if an ids string containing a ';' is given,
+            * the format is <gauge_name>;<discharge_table_id>;<facet_desc>,
+            * so that a specific discharge table can be selected. */
+            int tableId = 0;
+            try {
+                tableId = Integer.parseInt(ids[1]);
+            } catch (NumberFormatException e) {
+                logger.error("Discharge tables ids string is wrong." +
+                        " Fromat is <gauge_name>;<discharge_table_id>;<facet_desc>" +
+                        " Fix your Datacage!");
+                // Let's rather break down completly then show the wrong data.
+                return null;
+            }
+            DischargeTable table = DischargeTable.getDischargeTableById(tableId);
+            map = new HashMap<String, double [][]>();
+            map.put(getGaugeName(), DischargeTables.loadDischargeTableValues(table));
+        } else {
+            DischargeTables dt = new DischargeTables(river.getName(), getGaugeName());
+            map = dt.getValues();
+        }
 
         ArrayList<WQKms> res = new ArrayList<WQKms>();
 
-        Gauge gauge = river.determineGaugeByName(this.getDataAsString("ids"));
+        Gauge gauge = river.determineGaugeByName(getGaugeName());
 
         String name = getGaugeName();
         double [][] values = map.get(name);
@@ -191,5 +226,13 @@
             res.toArray(new WQKms[res.size()]),
             new Calculation());
     }
+
+    /** Gets the facet wish name.
+     *
+     * This is a hack to enable setting the name of the facet / theme in the
+     * UI from the datacage setting. */
+    public String getFacetWishName() {
+        return facetWishName;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
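
The ids value handed over by the datacage is either a plain gauge name or a ';'-separated triple. A minimal, self-contained sketch of that convention as used by the GaugeDischargeArtifact changes above; the class name and the sample value are invented for illustration:

    // Sketch: the "ids" convention parsed above.
    // With separators the format is <gauge_name>;<discharge_table_id>;<facet_desc>.
    public class GaugeIdsSketch {
        public static void main(String[] args) {
            String ids = "Example Gauge;42;Official discharge table"; // invented sample
            String[] parts = ids.split(";");
            String gaugeName = parts[0];                                // always present
            Integer tableId  = parts.length > 1 ? Integer.valueOf(parts[1]) : null;
            String facetDesc = parts.length > 2 ? parts[2] : null;      // becomes the facet "wish name"
            System.out.println(gaugeName + " / " + tableId + " / " + facetDesc);
        }
    }
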
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -81,7 +81,10 @@
                     String   outputName
                 ) {
                     return outputName.equals("computed_discharge_curve")
-                        || outputName.equals("duration_curve");
+                        || outputName.equals("duration_curve")
+                        || outputName.equals("discharge_curve")
+                        || outputName.equals("fix_wq_curve")
+                        || outputName.equals("historical_discharge_wq");
                 }
             });
     }
@@ -316,6 +319,21 @@
             return spawnState();
     }
 
+    /**
+     * Access the Gauge that the mainvalues are taken from.
+     * @return Gauge that main values are taken from or null in case of
+     *         invalid parameterization.
+     */
+    protected Gauge getGauge(double km) {
+        River river = RiverUtils.getRiver(this);
+
+        if (river == null) {
+            logger.error("River is null");
+            return null;
+        }
+
+        return river.determineGaugeByPosition(km);
+    }
 
     /**
      * Access the Gauge that the mainvalues are taken from.
@@ -358,6 +376,41 @@
      * Get a list of "Q" main values.
      * @return list of Q main values.
      */
+    public List<NamedDouble> getMainValuesQ(double[] kms) {
+        List<NamedDouble> filteredList = new ArrayList<NamedDouble>();
+        boolean atGauge = false;
+        Gauge gauge = getGauge(kms[0]);
+        WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this));
+        Calculation c = new Calculation();
+        double w_out[] = {0.0f};
+        double q_out[] = {0.0f};
+        if (gauge != null) {
+            double gaugeStation = gauge.getStation().doubleValue();
+            atGauge = Math.abs(kms[0] - gaugeStation) < 1e-4;
+            List<MainValue> orig = gauge.getMainValues();
+            for (MainValue mv : orig) {
+                if (mv.getMainValue().getType().getName().equals("Q")) {
+                    if (atGauge) {
+                        q_out[0] = mv.getValue().doubleValue();
+                    }
+                    else {
+                        interpolator.interpolate(mv.getValue().doubleValue(),
+                            gaugeStation, kms, w_out, q_out, c);
+                    }
+                    filteredList.add(new NamedDouble(
+                                mv.getMainValue().getName(),
+                                q_out[0]
+                                ));
+                }
+            }
+        }
+        return filteredList;
+    }
+
+    /**
+     * Get a list of "Q" main values.
+     * @return list of Q main values.
+     */
     public List<NamedDouble> getMainValuesQ(boolean atGauge) {
         List<NamedDouble> filteredList = new ArrayList<NamedDouble>();
         Gauge gauge = getGauge();
@@ -366,8 +419,8 @@
         double w_out[] = {0.0f};
         double q_out[] = {0.0f};
         double kms[] = {getLocation()};
-        double gaugeStation = gauge.getStation().doubleValue();
         if (gauge != null) {
+            double gaugeStation = gauge.getStation().doubleValue();
             List<MainValue> orig = gauge.getMainValues();
             for (MainValue mv : orig) {
                 if (mv.getMainValue().getType().getName().equals("Q")) {
@@ -389,21 +442,26 @@
     }
 
 
-    /**
-     * Get a list of "W" main values.
-     * @param atGauge if true, do not interpolate
-     * @return list of W main values.
-     */
-    public List<NamedDouble> getMainValuesW(boolean atGauge) {
+    /** Get main values of km. */
+    public List<NamedDouble> getMainValuesW(double[] kms) {
         List<NamedDouble> filteredList = new ArrayList<NamedDouble>();
-        Gauge gauge = getGauge();
+        boolean atGauge = false;
+        double gaugeDatum = 0d;
+        Gauge gauge = getGauge(kms[0]);
+        if (gauge == null) {
+            return filteredList;
+        }
+        else if (Math.abs(kms[0] - gauge.getStation().doubleValue()) < 1e-4) {
+            atGauge = true;
+            gaugeDatum = gauge.getDatum().doubleValue();
+        }
+
         WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this));
         Calculation c = new Calculation();
 
         double gaugeStation = gauge.getStation().doubleValue();
         double w_out[] = {0.0f};
         double q_out[] = {0.0f};
-        double kms[] = {getLocation()};
         if (gauge != null) {
             List<MainValue> orig = gauge.getMainValues();
             for (MainValue mv : orig) {
@@ -411,6 +469,41 @@
                     if (mv.getMainValue().getType().getName().equals("W")) {
                         filteredList.add(new NamedDouble(mv.getMainValue().getName(),
                                 mv.getValue().doubleValue()));
+                    }
+                } else
+                // We cannot interpolate the W values, so derive them
+                // from given Q values.
+                if (mv.getMainValue().getType().getName().equals("Q")) {
+                    interpolator.interpolate(mv.getValue().doubleValue(),
+                            gaugeStation, kms, w_out, q_out, c);
+
+                    filteredList.add(new NamedDouble(
+                                "W(" + mv.getMainValue().getName() +")",
+                                w_out[0]
+                                ));
+                }
+            }
+        }
+        return filteredList;
+    }
+
+
+    public List<NamedDouble> getMainValuesW(boolean atGauge, double[] kms) {
+        List<NamedDouble> filteredList = new ArrayList<NamedDouble>();
+        Gauge gauge = getGauge();
+        WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this));
+        Calculation c = new Calculation();
+
+        double w_out[] = {0.0f};
+        double q_out[] = {0.0f};
+        if (gauge != null) {
+            double gaugeStation = gauge.getStation().doubleValue();
+            List<MainValue> orig = gauge.getMainValues();
+            for (MainValue mv : orig) {
+                if (atGauge) {
+                    if (mv.getMainValue().getType().getName().equals("W")) {
+                        filteredList.add(new NamedDouble(mv.getMainValue().getName(),
+                                mv.getValue().doubleValue()));
 
                     }
                 } else
@@ -431,6 +524,16 @@
 
 
     /**
+     * Get a list of "W" main values.
+     * @param atGauge if true, do not interpolate
+     * @return list of W main values.
+     */
+    public List<NamedDouble> getMainValuesW(boolean atGauge) {
+        return getMainValuesW(atGauge, new double[] {getLocation()});
+    }
+
+
+    /**
      * Returns the name of this artifact ('mainvalue').
      *
      * @return 'mainvalue'
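
The new getMainValuesQ(double[]) and getMainValuesW(double[]) above branch on whether the requested km lies at the gauge station. A minimal sketch of that decision; the 1e-4 tolerance is taken from the code above, everything else (names, sample numbers) is illustrative:

    // At the gauge the stored main value is used directly; off the gauge,
    // Q is interpolated via the WstValueTable and W is derived from Q there.
    public class AtGaugeCheckSketch {
        static final double EPS = 1e-4; // same tolerance as in the artifact code

        public static void main(String[] args) {
            double requestedKm  = 101.30002; // invented sample
            double gaugeStation = 101.3;     // invented sample
            boolean atGauge = Math.abs(requestedKm - gaugeStation) < EPS;
            System.out.println(atGauge ? "use stored main value" : "interpolate at km");
        }
    }
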
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -91,6 +91,10 @@
             super(artifact);
         }
 
+        private boolean isUnofficial() {
+            return getIdPart(2) != null && !getIdPart(2).equals("1");
+        }
+
         @Override
         protected String getFacetType() {
             return FLOODMAP_RIVERAXIS;
@@ -98,12 +102,19 @@
 
         @Override
         protected String getLayer() {
+            if (isUnofficial()) {
+                return super.getLayer();
+            }
             return RiverFactory.getRiver(getRiverId()).getName();
         }
 
         @Override
         protected String getUrl() {
-            return RiverUtils.getRiverWMSUrl();
+            if (isUnofficial()) {
+                return RiverUtils.getUserWMSUrl(artifact.identifier());
+            } else {
+                return RiverUtils.getRiverWMSUrl();
+            }
         }
 
         @Override
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWKmsArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWKmsArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -310,7 +310,7 @@
             return -1;
         }
 
-        // Do linear interpolation
+        // Do linear interpolation.
         int mod = kmIncreasing ? -1 : +1;
         return Linear.linear(km, wkms.getKm(idx+mod), wkms.getKm(idx), wkms.getW(idx+mod), wkms.getW(idx));
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWQKmsArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWQKmsArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -59,6 +59,8 @@
 
     private static final String NAME = "staticwqkms";
 
+    private boolean official = false;
+
     static {
         // TODO: Move to configuration.
         FacetActivity.Registry.getInstance().register(
@@ -85,6 +87,10 @@
 
     /**
      * Gets called from factory, to set things up.
+     *
+     * If the id's string starts with official- it will be treated as
+     * an Artifact containing official data for the according special
+     * case handling.
      */
     @Override
     public void setup(
@@ -106,6 +112,10 @@
         if (code != null) {
             String [] parts = code.split("-");
 
+            if (parts.length >= 1) {
+                official = parts[0].toLowerCase().equals("official");
+            }
+
             if (parts.length >= 4) {
                 int col = Integer.parseInt(parts[2]);
                 int wst = Integer.parseInt(parts[3]);
@@ -222,5 +232,10 @@
         return Lines.createWaterLines(points, wAtKm);
     }
     // TODO implement deepCopy.
+
+    public boolean isOfficial()
+    {
+        return official;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
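
The official-data detection above keys on the first '-'-separated token of the datacage id. A small, self-contained sketch of that check (the id string is an invented example, not a real datacage value):

    // Sketch: an id whose first token is "official" marks official data;
    // with four or more tokens, tokens 3 and 4 are parsed as column and wst id.
    public class OfficialIdSketch {
        public static void main(String[] args) {
            String code = "official-wst-3-17"; // invented example
            String[] parts = code.split("-");
            boolean official = parts.length >= 1
                && parts[0].toLowerCase().equals("official");
            System.out.println("official: " + official);
        }
    }
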
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java	Wed Aug 07 18:53:15 2013 +0200
@@ -197,15 +197,26 @@
 
         String calculationMode = getDataAsString("calculation_mode");
 
-        if (calculationMode.equals("calc.discharge.longitudinal.section")
-        ) {
-            return getDischargeLongitudinalSectionData();
-        }
-        else if (calculationMode.equals("calc.extreme.curve")) {
-            return (CalculationResult)
-                this.compute(context, ComputeType.ADVANCE, false);
+        // If this WINFO-Artifact has a calculation trait.
+        if (calculationMode != null) {
+            if (calculationMode.equals("calc.discharge.longitudinal.section")
+            ) {
+                return getDischargeLongitudinalSectionData();
+            }
+            else if (calculationMode.equals("calc.extreme.curve")) {
+                return (CalculationResult)
+                    this.compute(context, ComputeType.ADVANCE, false);
+            }
+            else if (calculationMode.equals("calc.w.differences")) {
+                return (CalculationResult)
+                    this.compute(context, ComputeType.ADVANCE, true);
+            }
+            else {
+                logger.warn("Unhandled calculation_mode " + calculationMode);
+            }
         }
 
+        // Otherwise get it from parameterization.
         River river = RiverUtils.getRiver(this);
         if (river == null) {
             return error(new WQKms[0], "no.river.selected");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/FixAnalysisAccess.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/FixAnalysisAccess.java	Wed Aug 07 18:53:15 2013 +0200
@@ -33,6 +33,7 @@
         super(artifact);
     }
 
+    /** Access the reference date period, return null in case of 'errors'. */
     public DateRange getReferencePeriod() {
         if (referencePeriod == null) {
             StateData refStart = artifact.getData("ref_start");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java	Wed Aug 07 18:53:15 2013 +0200
@@ -91,6 +91,7 @@
         return null;
     }
 
+    /** Returns the selected unit (t/a or m3/a). */
     public String getUnit () {
         if (unit == null) {
             unit = getString("unit");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java	Wed Aug 07 18:53:15 2013 +0200
@@ -70,6 +70,7 @@
     private String SQL_ARTIFACT_ID_NEXTVAL    = "artifact.id.nextval";
     private String SQL_INSERT_ARTIFACT        = "insert.artifact";
     private String SQL_ARTIFACT_DATA_ID_NEXTVAL = "artifact.data.id.nextval";
+    private String SQL_UPDATE_ARTIFACT_STATE  = "update.artifact.state";
     private String SQL_INSERT_ARTIFACT_DATA   = "insert.artifact.data";
     private String SQL_OUT_ID_NEXTVALUE       = "out.id.nextval";
     private String SQL_INSERT_OUT             = "insert.out";
@@ -382,6 +383,7 @@
         SQL_INSERT_ARTIFACT     = sql.get(SQL_INSERT_ARTIFACT);
         SQL_ARTIFACT_DATA_ID_NEXTVAL = sql.get(SQL_ARTIFACT_DATA_ID_NEXTVAL);
         SQL_INSERT_ARTIFACT_DATA = sql.get(SQL_INSERT_ARTIFACT_DATA);
+        SQL_UPDATE_ARTIFACT_STATE = sql.get(SQL_UPDATE_ARTIFACT_STATE);
         SQL_OUT_ID_NEXTVALUE     = sql.get(SQL_OUT_ID_NEXTVALUE);
         SQL_INSERT_OUT           = sql.get(SQL_INSERT_OUT);
         SQL_FACET_ID_NEXTVAL     = sql.get(SQL_FACET_ID_NEXTVAL);
@@ -401,6 +403,7 @@
             sql.get(SQL_DELETE_ARTIFACT_BY_GID);
     }
 
+    /** Sum over facets in outs. */
     protected static final int numFacets(List<Output> outs) {
         int sum = 0;
         for (Output out: outs) {
@@ -608,6 +611,7 @@
         // write new data
         storeData(res[0], flys);
         storeOuts(res[0], flys, context);
+        storeState(res[0], flys);
     }
 
     public void createdUser(
@@ -884,9 +888,30 @@
         }
     }
 
+    /** Update state of artifact. */
+    protected void storeState(
+        final int         artifactId,
+        final D4EArtifact artifact) {
+        SQLExecutor.Instance exec = sqlExecutor.new Instance() {
+            @Override
+            public boolean doIt() throws SQLException {
+                prepareStatement(SQL_UPDATE_ARTIFACT_STATE);
+                stmnt.setInt(1, artifactId);
+                stmnt.setString(2, artifact.getCurrentStateId());
+                stmnt.execute();
+                conn.commit();
+                return true;
+            }
+        };
+
+        if (!exec.runWrite()) {
+            log.error("storing state of artifact failed ("+artifactId+","+artifact.getCurrentStateId()+")");
+        }
+    }
+
     protected void storeData(
-        final int     artifactId,
-        D4EArtifact  artifact
+        final int   artifactId,
+        D4EArtifact artifact
     ) {
         final Collection<StateData> data = artifact.getAllData();
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java	Wed Aug 07 18:53:15 2013 +0200
@@ -61,6 +61,7 @@
         }
     }
 
+    /** Stores the artifact in artifact-db, if any. */
     @Override
     public void storedArtifact(Artifact artifact, Backend backend) {
         log.debug("storedArtifact");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Recommendations.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Recommendations.java	Wed Aug 07 18:53:15 2013 +0200
@@ -173,7 +173,7 @@
 
     /**
      * Append recommendations to \param result.
-     * @param extraParameters parameters (typicall example: 'recommended')
+     * @param extraParameters parameters (typical example: 'recommended')
      */
     public void  recommend(
         D4EArtifact         artifact,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java	Wed Aug 07 18:53:15 2013 +0200
@@ -10,9 +10,13 @@
 
 import java.util.List;
 
-import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
+import org.apache.log4j.Logger;
 
-import org.apache.log4j.Logger;
+/* XXX:
+ * Warning: This class is called StdDevOutlier because it implements the
+ * Standard Deviation method for outlier removal as the BFG calls it.
+ * But the actual calculation used to remove the outliers calculates
+ * the Standard Error and not the Standard Deviation! */
 
 public class StdDevOutlier
 {
@@ -30,12 +34,12 @@
     public static Integer findOutlier(
         List<Double> values,
         double       factor,
-        double []    stdDevResult
+        double []    stdErrResult
     ) {
         boolean debug = log.isDebugEnabled();
 
         if (debug) {
-            log.debug("factor for std dev: " + factor);
+            log.debug("factor for std dev test (that calculates std err): " + factor);
         }
 
         int N = values.size();
@@ -48,31 +52,34 @@
             return null;
         }
 
-        StandardDeviation stdDev = new StandardDeviation();
-
         double maxValue = -Double.MAX_VALUE;
         int    maxIndex = -1;
+
+        double squareSumResiduals = 0;
+        for (Double db: values) {
+            squareSumResiduals += Math.pow(db, 2);
+        }
+
+        double stdErr = Math.sqrt(squareSumResiduals / (N - 2));
+
+        double accepted = factor * stdErr;
+
         for (int i = N-1; i >= 0; --i) {
             double value = Math.abs(values.get(i));
-            stdDev.increment(value);
             if (value > maxValue) {
                 maxValue = value;
                 maxIndex = i;
             }
         }
 
-        double sd = stdDev.getResult();
-
-        double accepted = factor * sd;
-
         if (debug) {
-            log.debug("std dev: " + stdDev);
+            log.debug("std err: " + stdErr);
             log.debug("accepted: " + accepted);
             log.debug("max value: " + maxValue);
         }
 
-        if (stdDevResult != null) {
-            stdDevResult[0] = sd;
+        if (stdErrResult != null) {
+            stdErrResult[0] = stdErr;
         }
 
         return maxValue > accepted ? maxIndex : null;
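
As the XXX comment notes, the quantity computed above is the standard error of the residuals, sqrt(sum(r_i^2) / (N - 2)); an observation is rejected when its absolute value exceeds factor * stdErr. A self-contained sketch of the same test on invented numbers (the small-sample guard threshold is assumed, since the original check is not visible in this hunk):

    import java.util.Arrays;
    import java.util.List;

    public class StdErrOutlierSketch {
        static Integer findOutlier(List<Double> residuals, double factor) {
            int n = residuals.size();
            if (n < 3) {
                return null; // guard against too few values (exact threshold assumed)
            }
            double squareSum = 0d;
            for (double r : residuals) {
                squareSum += r * r;
            }
            double stdErr   = Math.sqrt(squareSum / (n - 2));
            double accepted = factor * stdErr;
            double maxValue = -Double.MAX_VALUE;
            int    maxIndex = -1;
            for (int i = n - 1; i >= 0; --i) {
                double v = Math.abs(residuals.get(i));
                if (v > maxValue) {
                    maxValue = v;
                    maxIndex = i;
                }
            }
            return maxValue > accepted ? maxIndex : null;
        }

        public static void main(String[] args) {
            // one obvious outlier at index 3; prints "3" with factor 2.0
            List<Double> r = Arrays.asList(0.1, -0.2, 0.15, 5.0, -0.1, 0.05, -0.15, 0.2);
            System.out.println(findOutlier(r, 2.0));
        }
    }
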
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/CrossSectionFactory.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/CrossSectionFactory.java	Wed Aug 07 18:53:15 2013 +0200
@@ -59,30 +59,6 @@
     }
 
 
-    /**
-     * True if the given section is the "newest" for that river.
-     * @param section Given section
-     * @return true if the section has the most advanced end of its validity interval
-     *         or the most advanced start of its validity interval.
-     */
-    public static boolean isNewest(CrossSection section) {
-        Session session = SessionHolder.HOLDER.get();
-        Query query = session.createQuery(
-            "from CrossSection where river.id = :riverid "
-            + " order by timeInterval.stopTime desc, timeInterval.startTime desc");
-        query.setParameter("riverid", section.getRiver().getId());
-
-        List result = query.list();
-
-        if (result == null || result.isEmpty()) {
-            return true;
-        }
-        else {
-            CrossSection cs = (CrossSection) result.get(0);
-            return section.getId().equals(cs.getId());
-        }
-    }
-
 
     /**
      *  Get a specific CrossSection from db.
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java	Wed Aug 07 18:53:15 2013 +0200
@@ -313,7 +313,7 @@
     String SEDIMENT_LOAD_SUSP_SEDIMENT = "sedimentload.susp_sediment";
     String SEDIMENT_LOAD_TOTAL         = "sedimentload.total";
     String SEDIMENT_LOAD_TOTAL_LOAD    = "sedimentload.total_load";
-    String SEDIMENT_LOAD_UNKOWN        = "sedimentload.unknown";
+    String SEDIMENT_LOAD_UNKNOWN       = "sedimentload.unknown";
 
     String SQ_OVERVIEW       = "sq_overview";
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FastCrossSectionChunk.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FastCrossSectionChunk.java	Wed Aug 07 18:53:15 2013 +0200
@@ -19,6 +19,10 @@
 
 import org.apache.log4j.Logger;
 
+/**
+ * A chunk of FastCrossSectionLines, e.g. to store in the cache and retrieve
+ * a single contained CrossSectionLine by its km.
+ */
 public class FastCrossSectionChunk
 implements   Serializable
 {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java	Wed Aug 07 18:53:15 2013 +0200
@@ -25,6 +25,7 @@
 import org.dive4elements.river.jfree.StickyAxisAnnotation;
 
 import org.dive4elements.river.exports.DurationCurveGenerator;
+import org.dive4elements.river.exports.fixings.FixChartGenerator;
 
 
 /**
@@ -89,7 +90,8 @@
      * Returns the data this facet requires.
      *
      * @param artifact the owner artifact.
-     * @param context  the CallContext (ignored).
+     * @param context  the CallContext (can be used to find out whether we are
+     *                 in a navigable fixation setting or a duration curve).
      *
      * @return the data.
      */
@@ -105,6 +107,15 @@
             getDataProvider(DurationCurveFacet.BB_DURATIONCURVE);
         if (providers.size() < 1) {
             logger.warn("Could not find durationcurve data provider.");
+            // Do we have a current km in context?
+            // If so, we are likely fetching data for a navigable
+            // diagram (i.e. in fixation branch).
+            if (context.getContextValue(FixChartGenerator.CURRENT_KM) != null) {
+                Double ckm = (Double) context.getContextValue(FixChartGenerator.CURRENT_KM);
+                // Return linearly interpolated values, in m if not at gauge,
+                // in cm if at gauge.
+                qs = mvArtifact.getMainValuesQ(new double[] {ckm});
+            }
         }
         else {
             wqdays = (WQDay) providers.get(0).provideData(
@@ -117,6 +128,10 @@
         StickyAxisAnnotation annotation = null;
         if (this.name.equals(DURATION_MAINVALUES_Q)) {
             for (NamedDouble q: qs) {
+                if (Double.isNaN(q.getValue())) {
+                    logger.warn("NaN MainValue " + q.getName());
+                    continue;
+                }
                 annotation =
                     new StickyAxisAnnotation(
                         q.getName(),
@@ -131,6 +146,10 @@
         }
         else {
             for (NamedDouble q: qs) {
+                if (Double.isNaN(q.getValue())) {
+                    logger.warn("NaN MainValue " + q.getName());
+                    continue;
+                }
                 annotation =
                     new StickyAxisAnnotation(
                         q.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java	Wed Aug 07 18:53:15 2013 +0200
@@ -23,6 +23,7 @@
 import org.dive4elements.river.artifacts.math.Linear;
 import org.dive4elements.river.jfree.RiverAnnotation;
 import org.dive4elements.river.jfree.StickyAxisAnnotation;
+import org.dive4elements.river.exports.fixings.FixChartGenerator;
 
 
 /**
@@ -85,7 +86,8 @@
      * Returns the data this facet provides.
      *
      * @param artifact the owner artifact.
-     * @param context  the CallContext (ignored).
+     * @param context  the CallContext (can be used to find out whether we are
+     *                 in a navigable fixation setting or a duration curve).
      *
      * @return the data.
      */
@@ -102,6 +104,15 @@
             getDataProvider(DurationCurveFacet.BB_DURATIONCURVE);
         if (providers.size() < 1) {
             logger.warn("Could not find durationcurve data provider.");
+            // Do we have a current km in context?
+            // If so, we are likely fetching data for a navigable
+            // diagram (i.e. in fixation branch).
+            if (context.getContextValue(FixChartGenerator.CURRENT_KM) != null) {
+                Double ckm = (Double) context.getContextValue(FixChartGenerator.CURRENT_KM);
+                // Return linearly interpolated values, in m if not at gauge,
+                // in cm over datum if at gauge.
+                ws = mvArtifact.getMainValuesW(new double[] {ckm});
+            }
         }
         else {
             wqdays = (WQDay) providers.get(0).provideData(
@@ -111,6 +122,11 @@
         }
 
         for (NamedDouble w: ws) {
+            logger.debug("W Annotation at " + w.getValue() + " ("+w.getName()+")"+ wqdays);
+            if (Double.isNaN(w.getValue())) {
+                logger.warn("NaN MainValue " + w.getName());
+                continue;
+            }
             StickyAxisAnnotation annotation =
                 new StickyAxisAnnotation(
                     w.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/OfficialLineFinder.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/OfficialLineFinder.java	Wed Aug 07 18:53:15 2013 +0200
@@ -114,7 +114,9 @@
                 " wstId: " + wstId +
                 " pos: " + columnPos +
                 " source: " + source +
-                " date: " + date +"]";
+                " date: " + date +
+                " from: " + start +
+                " to: " + end + "]";
         }
     }
 
@@ -168,9 +170,8 @@
                     NamedMainValue tnmv = mainValue.getMainValue();
                     if (tnmv.getId().equals(mnvId)) {
                         // found gauge with this main value
-
                         double from  = gauge.getRange().getA().doubleValue();
-                        double to    = gauge.getRange().getA().doubleValue();
+                        double to    = gauge.getRange().getB().doubleValue();
                         double value = mainValue.getValue().doubleValue();
                         int    wstId = wst.getId();
                         int    pos   = wc.getPosition();
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java	Wed Aug 07 18:53:15 2013 +0200
@@ -143,5 +143,15 @@
         double to   = getKm(size()-1);
         return from + " - " + to;
     }
+
+    /**
+     * Returns an array of two double values: the first and the last kilometer.
+     *
+     * @return a double array with the first and last km
+     */
+    public double[] getFirstLastKM() {
+        /* Behold: the first km might be larger than the last! */
+        return new double[] {getKm(0), getKm(size()-1)};
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKmsResult.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKmsResult.java	Wed Aug 07 18:53:15 2013 +0200
@@ -12,4 +12,4 @@
 public interface WQKmsResult {
     public WQKms[] getWQKms();
 }
-
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java	Wed Aug 07 18:53:15 2013 +0200
@@ -850,6 +850,12 @@
             q, referenceKm, kms, ws, qs, 0, kms.length, errors);
     }
 
+    /**
+     * Interpolate Q at given positions.
+     * @param kms positions for which to calculate qs and ws
+     * @param ws [out] calculated ws for kms
+     * @param qs [out] looked up qs for kms.
+     */
     public QPosition interpolate(
         double      q,
         double      referenceKm,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisCalculation.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisCalculation.java	Wed Aug 07 18:53:15 2013 +0200
@@ -172,7 +172,7 @@
                 ArrayList<QWD> allQWDs = new ArrayList<QWD>();
 
                 // for all Q sectors.
-                for (int qSector = qSectorStart; qSector < qSectorEnd; ++qSector) {
+                for (int qSector = qSectorStart; qSector <= qSectorEnd; ++qSector) {
 
                     Filter filter = new AndFilter()
                         .add(kmFilter)
@@ -289,6 +289,7 @@
         return results;
     }
 
+    /** Returns the mapped value of colIdx or the size of the hashmap. */
     private static final int getIndex(TIntIntHashMap map, int colIdx) {
         if (map.containsKey(colIdx)) {
             return map.get(colIdx);
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffCalculation.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffCalculation.java	Wed Aug 07 18:53:15 2013 +0200
@@ -18,7 +18,10 @@
 import org.dive4elements.river.artifacts.model.Calculation;
 import org.dive4elements.river.artifacts.model.CalculationResult;
 
-
+/**
+ * Perform calculation of differences of bed height (German "Sohlhoehe").
+ * The input is either single-year data or epochs.
+ */
 public class BedDiffCalculation
 extends Calculation
 {
@@ -84,6 +87,7 @@
        */
     }
 
+    /** Get two BedHeights from factory. */
     private static BedHeight [] getHeightPair(int [] ids, String type) {
         return new BedHeight [] {
             BedHeightFactory.getHeight(type, ids[0], 0),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java	Wed Aug 07 18:53:15 2013 +0200
@@ -62,7 +62,7 @@
 
 
     /**
-     * Get WKms for given column and wst_id, caring about the cache.
+     * Get BedHeight for given type and height_id, caring about the cache.
      */
     public static BedHeight getHeight(String type, int height_id, int time) {
         log.debug("BedHeightFactory.getHeight");
@@ -92,7 +92,7 @@
         return values;
     }
 
-    /** Get name for a WKms. */
+    /** Get name for a BedHeight. */
     public static String getHeightName(String type, int height_id) {
         log.debug("BedHeightFactory.getHeightName height_id/" + height_id);
 
@@ -123,10 +123,9 @@
 
 
     /**
-     * Get WKms from db.
-     * @param column the position columns value
-     * @param wst_id database id of the wst
-     * @return according WKms.
+     * Get BedHeight from db.
+     * @param height_id database id of the bed_height
+     * @return according BedHeight.
      */
     public static BedHeight getBedHeightUncached(
         String type,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurement.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurement.java	Wed Aug 07 18:53:15 2013 +0200
@@ -11,8 +11,11 @@
 import java.util.Date;
 import java.util.Map;
 
+import org.apache.log4j.Logger;
 
+/** A measurement of the bed quality, serving different diameters at a given km. */
 public class QualityMeasurement {
+    private static Logger logger = Logger.getLogger(QualityMeasurement.class);
 
     private double              km;
     private Date                date;
@@ -61,8 +64,16 @@
         this.charDiameter = charDiameter;
     }
 
+    /**
+     * Get the stored diameter for given key (e.g. d10).
+     * @return NaN if no data found in this measurement.
+     */
     public double getDiameter(String key) {
-        return charDiameter.get(key);
+        Double diameter = charDiameter.get(key);
+        if (diameter == null) {
+            logger.warn("No Diameter at km " + km + " for " + key);
+        }
+        return (diameter != null) ? diameter : Double.NaN;
     }
 
     public void setDiameter(String key, double value) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensity.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensity.java	Wed Aug 07 18:53:15 2013 +0200
@@ -93,7 +93,7 @@
         List<SedimentDensityValue> values,
         double km
     ) {
-        boolean found = true;
+        boolean found = false;
         SedimentDensityValue prev = null;
         SedimentDensityValue next = null;
         for (SedimentDensityValue sdv: values) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java	Wed Aug 07 18:53:15 2013 +0200
@@ -36,12 +36,14 @@
     protected double kmUp;
     protected double kmLow;
     protected int[] period;
+    /** Years of chosen epochs. */
     protected int[][] epoch;
     protected String unit;
 
     public SedimentLoadCalculation() {
     }
 
+    /** Returns CalculationResult with array of SedimentLoadResults. */
     public CalculationResult calculate(SedimentLoadAccess access) {
         logger.info("SedimentLoadCalculation.calculate");
 
@@ -87,6 +89,7 @@
         return new CalculationResult();
     }
 
+    /** Returns CalculationResult with array of SedimentLoadResults. */
     private CalculationResult internalCalculate() {
         logger.debug("internalCalulate; mode:" + yearEpoch);
         if (yearEpoch.equals("year")) {
@@ -187,12 +190,28 @@
                 }
             }
             SedimentLoadFraction fr = resLoad.getFraction(km);
-            resLoad.setCoarse(km, fr.getCoarse()/cSum, fr.getCoarseRange());
-            resLoad.setFineMiddle(km, fr.getFineMiddle()/fmSum, fr.getFineMiddleRange());
-            resLoad.setSand(km, fr.getSand()/sSum, fr.getSandRange());
-            resLoad.setSuspSand(km, fr.getSuspSand()/ssSum, fr.getSuspSandRange());
-            resLoad.setSuspSandBed(km, fr.getSuspSandBed()/ssbSum, fr.getSuspSandBedRange());
-            resLoad.setSuspSediment(km, fr.getSuspSediment()/sseSum, fr.getSuspSedimentRange());
+            // Prevent divisions by zero; the fraction defaults to 0d.
+            if (cSum != 0) {
+                resLoad.setCoarse(km, fr.getCoarse()/cSum, fr.getCoarseRange());
+            }
+            if (fmSum != 0) {
+                resLoad.setFineMiddle(km, fr.getFineMiddle()/fmSum,
+                    fr.getFineMiddleRange());
+            }
+            if (sSum != 0) {
+                resLoad.setSand(km, fr.getSand()/sSum, fr.getSandRange());
+            }
+            if (ssSum != 0) {
+                resLoad.setSuspSand(km, fr.getSuspSand()/ssSum,
+                    fr.getSuspSandRange());
+            }
+            if (ssbSum != 0) {
+                resLoad.setSuspSandBed(km, fr.getSuspSandBed()/ssbSum,
+                    fr.getSuspSandBedRange());
+            }
+            if (sseSum != 0) {
+                resLoad.setSuspSediment(km, fr.getSuspSediment()/sseSum, fr.getSuspSedimentRange());
+            }
         }
         resLoad.setDescription("");
         resLoad.setEpoch(true);
@@ -216,6 +235,7 @@
         return result;
     }
 
+    /** Calculate/Fetch values at official epochs. */
     private SedimentLoadResult calculateOffEpoch(int i) {
         SedimentLoad load = SedimentLoadFactory.getLoadWithData(
             this.river,
@@ -243,8 +263,11 @@
         return result;
     }
 
-    /** Fetch loads for a single year, calculate total and
-     * return the result containing both. */
+    /**
+     * Fetch loads for a single year, calculate total and
+     * return the result containing both.
+     * @param y year, e.g. 1980
+     */
     private SedimentLoadResult calculateYear(int y) {
         SedimentLoad load = SedimentLoadFactory.getLoadWithData(
             this.river,
@@ -330,12 +353,15 @@
 
     /**
      * Set total values in load.
-     * Therefore, run over the kms and find ranges where either all
+     *
+     * Therefore, run over the sorted kms and find ranges where either all
      * or all Geschiebe or just the Schwebstoff fractions are set.
      * Merge these ranges and add (maybe new) respective fractions to
-     * load.
+     * load. In the process, remember any 'unfinished' ends from measurements
+     * where the km-ranges did not completely match.
+     *
      * @param load SedimentLoad to add total values (and ranges) to.
-     * @return input param load.
+     * @return input param load, with total values set.
      */
     private SedimentLoad partialTotal(SedimentLoad load) {
         SedimentLoad fairLoad = load;
@@ -485,6 +511,11 @@
     }
 
 
+    /**
+     * Transform values in load.
+     * @param load The load whose values should be transformed.
+     * @return parameter load with transformed values.
+     */
     private SedimentLoad calculateUnit(SedimentLoad load, int year) {
         SedimentDensity density =
             SedimentDensityFactory.getSedimentDensity(river, kmLow, kmUp, year);
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java	Wed Aug 07 18:53:15 2013 +0200
@@ -55,6 +55,7 @@
         super(idx, name, description, type, hash, stateId);
     }
 
+    @Override
     public Object getData(Artifact artifact, CallContext context) {
         logger.debug("Get data for sediment load at index: " + index);
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java	Wed Aug 07 18:53:15 2013 +0200
@@ -29,16 +29,19 @@
 import org.hibernate.Session;
 import org.hibernate.type.StandardBasicTypes;
 
+
 /** Pull Sediment Loads out of db. */
 public class SedimentLoadFactory
 {
     /** Private logger to use here. */
     private static Logger log = Logger.getLogger(SedimentLoadFactory.class);
 
+    // Cache name/keys
     public static final String LOADS_CACHE_NAME = "sedimentloads";
     public static final String LOAD_DATA_CACHE_NAME = "sedimentload-data";
 
-    /** Query to get km and ws for wst_id and column_pos. */
+    /** Query to get description and start year of single type
+     * sediment_yields. */
     public static final String SQL_SELECT_SINGLES =
         "SELECT DISTINCT " +
         "       sy.description AS description, " +
@@ -51,7 +54,23 @@
         "       AND ti.stop_time IS NULL " +
         "       AND syv.station BETWEEN :startKm AND :endKm";
 
-    /** Query to get name for wst_id and column_pos. */
+    /** Query to get the time ranges (start and stop) of official
+     * epoch-type sediment yields. */
+    public static final String SQL_SELECT_OFFEPOCHS =
+        "SELECT DISTINCT " +
+        "       ti.start_time AS startYear, " +
+        "       ti.stop_time AS end " +
+        "   FROM     sediment_yield sy " +
+        "       JOIN rivers r ON sy.river_id = r.id " +
+        "       JOIN sediment_yield_values syv ON sy.id = syv.sediment_yield_id " +
+        "       JOIN time_intervals ti ON sy.time_interval_id = ti.id " +
+        "   WHERE   r.name = :name " +
+        "       AND ti.stop_time IS NOT NULL " +
+        "       AND syv.station BETWEEN :startKm AND :endKm " +
+        "       AND sy.kind = 1";
+
+    /** Query to get description, name and time range for epoch-type
+     * sediment yields. */
     public static final String SQL_SELECT_EPOCHS =
         "SELECT DISTINCT " +
         "       sy.description AS description, " +
@@ -115,8 +134,11 @@
         "    JOIN rivers r ON sy.river_id = r.id " +
         "    JOIN time_intervals ti ON sy.time_interval_id = ti.id " +
         "    JOIN grain_fraction gf ON sy.grain_fraction_id = gf.id " +
-        "WHERE r.name = :river" +
-        "    AND gf.name = 'unknown'";
+        "    JOIN units u ON sy.unit_id = u.id " +
+        "WHERE r.name = :river " +
+        "    AND gf.name = 'unknown' " +
+        "    AND sy.kind = :type " +
+        "    AND u.name = :unit";
 
     public static final String SQL_SELECT_EPOCHS_DATA =
         "SELECT" +
@@ -143,7 +165,8 @@
     }
 
     /**
-     *
+     * @param river name of the river.
+     * @param type the sediment load type ("single", "epoch" or "off_epoch").
      */
     public static SedimentLoad[] getLoads(
         String river,
@@ -180,6 +203,10 @@
         return values;
     }
 
+    /**
+     * Get a sedimentLoad filled with data from db (or cache).
+     * @param type "epoch","off_epoch" or "single"
+     */
     public static SedimentLoad getLoadWithData(
         String river,
         String type,
@@ -287,13 +314,37 @@
             }
             return loads;
         }
+        else if (type.equals("off_epoch")) {
+            sqlQuery = session.createSQLQuery(SQL_SELECT_OFFEPOCHS)
+                .addScalar("startYear", StandardBasicTypes.DATE)
+                .addScalar("end", StandardBasicTypes.DATE);
+            sqlQuery.setString("name", river);
+            sqlQuery.setDouble("startKm", startKm);
+            sqlQuery.setDouble("endKm", endKm);
+            List<Object []> results = sqlQuery.list();
+
+            SedimentLoad[] loads = new SedimentLoad[results.size()];
+            for (int i = 0; i < results.size(); i++) {
+                Object[] row = results.get(i);
+                loads[i] = new SedimentLoad(
+                    ((Date) row[0]).toString() + (Date) row[1],
+                    (Date) row[0],
+                    (Date) row[1],
+                    true,
+                    "");
+            }
+            return loads;
+        }
+        else {
+             log.warn("getSedimentLoadsUncached does not understand type " + type);
+        }
         return new SedimentLoad[0];
     }
 
     /**
      * Get sediment loads from db.
      * @param river the river
-     * @param type the sediment load type (year or epoch)
+     * @param type the sediment load type (year, epoch or off_epoch)
      * @return according sediment loads.
      */
     public static SedimentLoad getSedimentLoadWithDataUncached(
@@ -361,10 +412,13 @@
             return load;
         }
         else if (type.equals("off_epoch")) {
+            // Set calendars to fetch the epochs by their start and end
+            // dates.
             Calendar toStart = Calendar.getInstance();
             toStart.set(eyear - 1, 11, 31);
             Calendar toEnd = Calendar.getInstance();
-            toEnd.set(eyear, 11, 30);
+            toEnd.set(eyear+1, 00, 01);
+            // Set query parameters.
             sqlQuery = session.createSQLQuery(SQL_SELECT_EPOCHS_DATA)
                 .addScalar("description", StandardBasicTypes.STRING)
                 .addScalar("startYear", StandardBasicTypes.DATE)
@@ -377,33 +431,38 @@
             sqlQuery.setDouble("endKm", endKm);
             sqlQuery.setDate("sbegin", start.getTime());
             sqlQuery.setDate("send", end.getTime());
-            sqlQuery.setDate("ebegin",toStart.getTime());
+            sqlQuery.setDate("ebegin", toStart.getTime());
             sqlQuery.setDate("eend", toEnd.getTime());
             sqlQuery.setString("grain", "total");
 
             List<Object[]> results = null;
             results = sqlQuery.list();
 
-            SedimentLoad load = new SedimentLoad();
-            Object[] row = results.get(0);
-            load = new SedimentLoad(
-                    (String) row[0],
-                    (Date) row[1],
-                    (Date) row[4],
-                    true,
-                    (String)row[5]);
-            TDoubleArrayList kms = new TDoubleArrayList();
-            for (int i = 0; i < results.size(); i++) {
-                row = results.get(i);
-                kms.add((Double)row[3]);
-                load.setLoadTotal((Double)row[3], (Double)row[2]);
+            SedimentLoad load = null;
+            if (results.isEmpty()) {
+                log.warn("No total results for off-epoch");
             }
-            getValues("coarse", sqlQuery, load, floatStations);
-            getValues("fine_middle", sqlQuery, load, floatStations);
-            getValues("sand", sqlQuery, load, floatStations);
-            getValues("suspended_sediment", sqlQuery, load, suspStations);
-            getValues("susp_sand_bed", sqlQuery, load, floatStations);
-            getValues("susp_sand", sqlQuery, load, floatStations);
+            else {
+                Object[] row = results.get(0);
+                load = new SedimentLoad(
+                        (String) row[0],
+                        (Date) row[1],
+                        (Date) row[4],
+                        true,
+                        (String)row[5]);
+                TDoubleArrayList kms = new TDoubleArrayList();
+                for (int i = 0; i < results.size(); i++) {
+                    row = results.get(i);
+                    kms.add((Double)row[3]);
+                    load.setLoadTotal((Double)row[3], (Double)row[2]);
+                }
+            }
+            load = getValues("coarse", sqlQuery, load, floatStations);
+            load = getValues("fine_middle", sqlQuery, load, floatStations);
+            load = getValues("sand", sqlQuery, load, floatStations);
+            load = getValues("suspended_sediment", sqlQuery, load, suspStations);
+            load = getValues("susp_sand_bed", sqlQuery, load, floatStations);
+            load = getValues("susp_sand", sqlQuery, load, floatStations);
             return load;
         }
         return new SedimentLoad();
@@ -412,12 +471,15 @@
 
     /**
      * Run query with grain parameter set to fraction, feed result into
-     * load.
+     * load. Create load if null.
+     *
      * @param fraction value to set 'grain' parameter in query to.
      * @param query query in which to set 'grain' parameter and run.
-     * @param load[out] SedimentLoad which to populate with values.
+     * @param load[out] SedimentLoad which to populate with values. If null
+     *                  and values are found, a new load is created and returned.
+     * @return param load, or a new load if it was null.
      */
-    protected static void getValues (
+    protected static SedimentLoad getValues (
         String fraction,
         SQLQuery query,
         SedimentLoad load,
@@ -425,6 +487,18 @@
     ) {
         query.setString("grain", fraction);
         List<Object[]> results = query.list();
+
+        // We have the first results for this query, create new load.
+        if (!results.isEmpty() && load == null) {
+            Object[] row = results.get(0);
+            load = new SedimentLoad(
+                    (String) row[0],
+                    (Date) row[1],
+                    (Date) row[4],
+                    true,
+                    (String) row[5]);
+        }
+
         for (int i = 0; i < results.size(); i++) {
             Object[] row = results.get(i);
             double km = (Double)row[3];
@@ -436,6 +510,7 @@
             Range range = null;
             if (station == null) {
                 log.warn("No measurement station for " + fraction + " km " + km);
+                continue;
             }
             else {
                 if (nextStation != null)
@@ -470,6 +545,8 @@
                 load.setSuspSandBed(km, v, range);
             }
         }
+
+        return load;
     }
 
     public static SedimentLoad getLoadUnknown(
@@ -549,13 +626,28 @@
         return load;
     }
 
-    public static SedimentLoad[] getSedimentLoadUnknown(String river) {
+    /**
+     * Return sediment loads with 'unknown' fraction type.
+     * @param unit Restrict the result set to loads of the given unit.
+     * @param type Type such as year, epoch or off_epoch.
+     */
+    public static SedimentLoad[] getSedimentLoadUnknown(
+        String river,
+        String unit,
+        String type) {
         Session session = SessionHolder.HOLDER.get();
         SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_UNKNOWN)
             .addScalar("description", StandardBasicTypes.STRING)
             .addScalar("startYear", StandardBasicTypes.DATE)
             .addScalar("end", StandardBasicTypes.DATE);
         sqlQuery.setString("river", river);
+        sqlQuery.setString("unit", unit);
+        if (type.equals("off_epoch")) {
+            sqlQuery.setInteger("type", 1);
+        }
+        else {
+            sqlQuery.setInteger("type", 0);
+        }
         List<Object[]> results = sqlQuery.list();
         SedimentLoad[] loads = new SedimentLoad[results.size()];
         int counter = 0;
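
Taken together with the facet change that follows, the 'unknown' sediment loads are now filtered by unit and by kind (kind = 1 selects the official loads matching SQL_SELECT_OFFEPOCHS above, kind = 0 the others), and the unit string is normalised before being passed in. A minimal sketch of the two mappings involved (sample values invented):

    public class UnknownLoadFilterSketch {
        public static void main(String[] args) {
            String type = "off_epoch";                     // invented sample
            int kind = "off_epoch".equals(type) ? 1 : 0;   // 1 = official, 0 = other
            String unit = "t_per_a".replace("_per_", "/"); // -> "t/a", as in the facet below
            System.out.println("kind=" + kind + " unit=" + unit);
        }
    }
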
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java	Wed Aug 07 18:53:15 2013 +0200
@@ -14,7 +14,7 @@
 extends DataFacet
 {
     /** Very own logger. */
-    private static Logger logger = Logger.getLogger(SedimentLoadFacet.class);
+    private static Logger logger = Logger.getLogger(SedimentLoadUnknownFacet.class);
 
     public SedimentLoadUnknownFacet() {
     }
@@ -24,6 +24,7 @@
         super(idx, name, description, type, hash, stateId);
     }
 
+    /** Get data from SedimentLoad with unknown type, from factory. */
     @Override
     public Object getData(Artifact artifact, CallContext context) {
         logger.debug("Get data for sediment load at index: " + index);
@@ -32,25 +33,15 @@
 
         SedimentLoadAccess access = new SedimentLoadAccess(flys);
         String river = access.getRiver();
+        String unit  = access.getUnit();
         SedimentLoad[] unknown =
-            SedimentLoadFactory.getSedimentLoadUnknown(river);
+            SedimentLoadFactory.getSedimentLoadUnknown(
+                river,
+                unit.replace("_per_","/"),
+                access.getYearEpoch());
 
         SedimentLoad load = SedimentLoadFactory.getLoadUnknown(
             river, unknown[index].getDescription());
-        if (access.getUnit().equals("t/a") && load.getUnit().equals("m3/a")) {
-            for (Double km: load.getKms()) {
-                SedimentLoadFraction fraction = load.getFraction(km);
-                fraction.setUnknown(fraction.getUnknown() / 1.8);
-                load.addKm(km, fraction);
-            }
-        }
-        else if (access.getUnit().equals("m3/a") && load.getUnit().equals("t/a")) {
-            for (Double km: load.getKms()) {
-                SedimentLoadFraction fraction = load.getFraction(km);
-                fraction.setUnknown(fraction.getUnknown() * 1.8);
-                load.addKm(km, fraction);
-            }
-        }
         return load;
     }
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Fitting.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Fitting.java	Wed Aug 07 18:53:15 2013 +0200
@@ -9,6 +9,7 @@
 package org.dive4elements.river.artifacts.model.sq;
 
 import org.dive4elements.river.artifacts.math.fitting.Function;
+import org.dive4elements.river.artifacts.math.fitting.Linear;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -17,6 +18,8 @@
 
 import org.apache.commons.math.optimization.fitting.CurveFitter;
 
+import org.apache.commons.math.optimization.general.AbstractLeastSquaresOptimizer;
+import org.apache.commons.math.optimization.general.GaussNewtonOptimizer;
 import org.apache.commons.math.optimization.general.LevenbergMarquardtOptimizer;
 
 import org.apache.log4j.Logger;
@@ -24,6 +27,10 @@
 public class Fitting
 implements   Outlier.Callback
 {
+    // XXX: Hack to force linear fitting!
+    private static final boolean USE_NON_LINEAR_FITTING =
+        Boolean.getBoolean("minfo.sq.fitting.nonlinear");
+
     private static Logger log = Logger.getLogger(Fitting.class);
 
     public interface Callback {
@@ -75,20 +82,51 @@
     @Override
     public void initialize(List<SQ> sqs) throws MathException {
 
-        LevenbergMarquardtOptimizer lmo =
-            new LevenbergMarquardtOptimizer();
+        AbstractLeastSquaresOptimizer optimizer = getOptimizer();
 
-        CurveFitter cf = new CurveFitter(lmo);
+        CurveFitter cf = new CurveFitter(optimizer);
+        double [] values = new double[2];
         for (SQ sq: sqs) {
-            cf.addObservedPoint(sq.getQ(), sq.getS());
+            values[0] = sq.getQ();
+            values[1] = sq.getS();
+            transformInputValues(values);
+            cf.addObservedPoint(values[0], values[1]);
         }
 
         coeffs = cf.fit(
             function, function.getInitialGuess());
 
+        transformCoeffsBack(coeffs);
+
         instance = function.instantiate(coeffs);
 
-        chiSqr = lmo.getChiSquare();
+        chiSqr = optimizer.getChiSquare();
+    }
+
+    protected Function getFunction(Function function) {
+        return USE_NON_LINEAR_FITTING
+            ? function
+            : Linear.INSTANCE;
+    }
+
+    protected void transformInputValues(double [] values) {
+        if (!USE_NON_LINEAR_FITTING) {
+            for (int i = 0; i < values.length; ++i) {
+                values[i] = Math.log(values[i]);
+            }
+        }
+    }
+
+    protected AbstractLeastSquaresOptimizer getOptimizer() {
+        return USE_NON_LINEAR_FITTING
+            ? new LevenbergMarquardtOptimizer()
+            : new GaussNewtonOptimizer(false);
+    }
+
+    protected void transformCoeffsBack(double [] coeffs) {
+        if (!USE_NON_LINEAR_FITTING && coeffs.length > 0) {
+            coeffs[0] = Math.exp(coeffs[0]);
+        }
     }
 
     @Override
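
The default path above linearises the fit: assuming the SQ relation has the power-law form S = a * Q^b (which is what the log/exp transformations imply), taking logarithms gives ln(S) = ln(a) + b * ln(Q), so a straight line can be fitted in log space and only the intercept has to be transformed back with exp, which is what transformCoeffsBack() does. A self-contained sketch with invented sample points, using a closed-form least-squares line instead of the commons-math optimizer:

    public class LogLinearFitSketch {
        public static void main(String[] args) {
            double[] q = {10, 20, 40, 80};     // invented discharges
            double[] s = {1.2, 2.3, 4.1, 8.5}; // invented S values
            int n = q.length;
            double sx = 0, sy = 0, sxx = 0, sxy = 0;
            for (int i = 0; i < n; i++) {
                double x = Math.log(q[i]); // work in log space
                double y = Math.log(s[i]);
                sx += x; sy += y; sxx += x * x; sxy += x * y;
            }
            double b   = (n * sxy - sx * sy) / (n * sxx - sx * sx); // exponent b
            double lnA = (sy - b * sx) / n;                         // intercept = ln(a)
            double a   = Math.exp(lnA);                             // transform back
            System.out.println("S ~= " + a + " * Q^" + b);
        }
    }
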
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java	Wed Aug 07 18:53:15 2013 +0200
@@ -22,6 +22,7 @@
 import org.dive4elements.river.artifacts.model.minfo.SedimentLoadFactory;
 
 
+/** Service delivering info about sediment loads. */
 public class SedimentLoadInfoService
 extends D4EService
 {
@@ -33,6 +34,10 @@
     public static final String FROM_XPATH = "/art:river/art:location/art:from/text()";
     public static final String TO_XPATH = "/art:river/art:location/art:to/text()";
 
+    /**
+     * Create a document with sediment load info for the river, type
+     * and km range given in the request data.
+     */
     @Override
     protected Document doProcess(
         Document data,
@@ -54,17 +59,18 @@
             data,
             TO_XPATH,
             ArtifactNamespaceContext.INSTANCE);
-        double f, t;
+        double fromD, toD;
         try {
-            f = Double.parseDouble(from);
-            t = Double.parseDouble(to);
+            fromD = Double.parseDouble(from);
+            toD = Double.parseDouble(to);
         }
         catch (NumberFormatException nfe) {
             logger.warn("Invalid locations. Cannot return sediment loads.");
             return XMLUtils.newDocument();
         }
 
-        SedimentLoad[] loads = SedimentLoadFactory.getLoads(river, type, f, t);
+        SedimentLoad[] loads =
+            SedimentLoadFactory.getLoads(river, type, fromD, toD);
         return buildDocument(loads);
     }
 
@@ -98,3 +104,4 @@
         return result;
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/CalculationSelect.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/CalculationSelect.java	Wed Aug 07 18:53:15 2013 +0200
@@ -77,10 +77,11 @@
         CALCULATION_DISCHARGE_CURVE,
         CALCULATION_HISTORICAL_DISCHARGE_CURVE,
         CALCULATION_DURATION_CURVE,
-        CALCULATION_DISCHARGE_LONGITUDINAL_CURVE,
+//        CALCULATION_DISCHARGE_LONGITUDINAL_CURVE,
         CALCULATION_W_DIFFERENCES,
-        CALCULATION_REFERENCE_CURVE,
-        CALCULATION_EXTREME};
+        CALCULATION_REFERENCE_CURVE //,
+//        CALCULATION_EXTREME
+    };
 
 
     /** Error message that is thrown if no mode has been chosen. */
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -8,55 +8,35 @@
 
 package org.dive4elements.river.artifacts.states;
 
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.Polygon;
+
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
 import org.apache.velocity.Template;
-
-import org.geotools.feature.FeatureCollection;
-import org.geotools.feature.FeatureCollections;
-
-import org.geotools.feature.simple.SimpleFeatureBuilder;
-
-import org.hibernate.HibernateException;
-
-import org.opengis.feature.simple.SimpleFeature;
-import org.opengis.feature.simple.SimpleFeatureType;
-
-import com.vividsolutions.jts.geom.Coordinate;
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.geom.LineString;
-import com.vividsolutions.jts.geom.Polygon;
-
 import org.dive4elements.artifactdatabase.state.Facet;
-
 import org.dive4elements.artifacts.Artifact;
 import org.dive4elements.artifacts.CallContext;
 import org.dive4elements.artifacts.CallMeta;
 import org.dive4elements.artifacts.GlobalContext;
-
 import org.dive4elements.artifacts.common.utils.FileTools;
-
 import org.dive4elements.river.artifacts.D4EArtifact;
-
 import org.dive4elements.river.artifacts.access.RangeAccess;
-
 import org.dive4elements.river.artifacts.context.RiverContext;
-
 import org.dive4elements.river.artifacts.model.CalculationMessage;
 import org.dive4elements.river.artifacts.model.CalculationResult;
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.LayerInfo;
 import org.dive4elements.river.artifacts.model.WQKms;
-
 import org.dive4elements.river.artifacts.model.map.HWS;
 import org.dive4elements.river.artifacts.model.map.HWSContainer;
 import org.dive4elements.river.artifacts.model.map.HWSFactory;
@@ -64,24 +44,25 @@
 import org.dive4elements.river.artifacts.model.map.WSPLGENCalculation;
 import org.dive4elements.river.artifacts.model.map.WSPLGENJob;
 import org.dive4elements.river.artifacts.model.map.WSPLGENReportFacet;
-
 import org.dive4elements.river.artifacts.resources.Resources;
-
 import org.dive4elements.river.exports.WstWriter;
-
 import org.dive4elements.river.model.CrossSectionTrack;
 import org.dive4elements.river.model.DGM;
 import org.dive4elements.river.model.Floodplain;
 import org.dive4elements.river.model.RiverAxis;
-
 import org.dive4elements.river.utils.ArtifactMapfileGenerator;
-import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.utils.GeometryUtils;
 import org.dive4elements.river.utils.MapfileGenerator;
-
+import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.wsplgen.FacetCreator;
 import org.dive4elements.river.wsplgen.JobObserver;
 import org.dive4elements.river.wsplgen.Scheduler;
+import org.geotools.feature.FeatureCollection;
+import org.geotools.feature.FeatureCollections;
+import org.geotools.feature.simple.SimpleFeatureBuilder;
+import org.hibernate.HibernateException;
+import org.opengis.feature.simple.SimpleFeature;
+import org.opengis.feature.simple.SimpleFeatureType;
 
 public class FloodMapState
 extends      DefaultState
@@ -109,9 +90,9 @@
     public static final String WSPLGEN_FLOODPLAIN     = "talaue.shp";
     public static final String WSPLGEN_WSP_FILE       = "waterlevel.wst";
     public static final String WSPLGEN_OUTPUT_FILE    = "wsplgen.shp";
-    public static final String WSPLGEN_USER_SHAPE     = "user-rgd.shp";
-    public static final String WSPLGEN_USER_ZIP       = "user-rgd.zip";
-    public static final String WSPLGEN_USER_FILENAME  = "user-rgd";
+    public static final String WSPLGEN_USER_RGD_SHAPE = "user-rgd.shp";
+    public static final String WSPLGEN_USER_RGD_ZIP   = "user-rgd.zip";
+    public static final String WSPLGEN_USER_RGD       = "user-rgd";
 
     public static final String WSPLGEN_QPS_NAME = "qps";
 
@@ -428,7 +409,7 @@
             createMapfile(
                 artifact,
                 artifactDir,
-                MapfileGenerator.MS_LAYER_PREFIX + "hws-lines",
+                MapfileGenerator.MS_LAYER_PREFIX + HWS_LINES,
                 HWS_LINES_SHAPE,
                 "LINE",
                 "31467",
@@ -441,7 +422,7 @@
     }
 
 
-    private void createMapfile(
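+    /** Writes a mapfile layer for the given shapefile; made public static
+     *  so that HWSBarriersState can reuse it. */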
+    public static void createMapfile(
         D4EArtifact artifact,
         File artifactDir,
         String name,
@@ -650,13 +631,13 @@
         File         dir,
         WSPLGENJob   job
     ) {
-        File archive = new File(dir, WSPLGEN_USER_SHAPE);
+        File archive = new File(dir, WSPLGEN_USER_RGD_SHAPE);
         boolean exists = archive.exists();
         logger.debug("shp file exists: " + exists);
         if (exists) {
-            job.addLin(dir + "/" + WSPLGEN_USER_SHAPE);
+            job.addLin(dir + "/" + WSPLGEN_USER_RGD_SHAPE);
             facetCreator.createShapeFacet(FacetCreator.I18N_USERSHAPE,
-                MapfileGenerator.MS_LAYER_PREFIX + "user-rgd",
+                MapfileGenerator.MS_LAYER_PREFIX + WSPLGEN_USER_RGD,
                 FLOODMAP_USERSHAPE,
                 4);
         }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -57,6 +57,12 @@
     protected String createFacetName(GaugeDischargeArtifact artifact,
         CallMeta meta) {
 
+        if (artifact.getFacetWishName() != null) {
+            /* We let the Artifact overwrite our name as this allows
+             * injecting the facet name from the Datacage */
+            return artifact.getFacetWishName();
+        }
+
         Gauge gauge = artifact.getGauge();
         Object[] args = new Object[] {
             gauge.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -9,51 +9,29 @@
 package org.dive4elements.river.artifacts.states;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 
 import org.apache.log4j.Logger;
-
-import org.apache.velocity.Template;
-
-import org.geotools.data.shapefile.ShapefileDataStore;
-
-import org.geotools.feature.FeatureCollection;
-import org.geotools.feature.FeatureCollections;
-
-import org.opengis.feature.simple.SimpleFeatureType;
-
-import org.opengis.feature.type.GeometryDescriptor;
-
-import org.w3c.dom.Element;
-
 import org.dive4elements.artifactdatabase.state.Facet;
-
 import org.dive4elements.artifacts.Artifact;
 import org.dive4elements.artifacts.CallContext;
-
 import org.dive4elements.artifacts.common.utils.FileTools;
-
 import org.dive4elements.artifacts.common.utils.XMLUtils.ElementCreator;
-
 import org.dive4elements.river.artifacts.D4EArtifact;
-
 import org.dive4elements.river.artifacts.access.MapAccess;
-
-import org.dive4elements.river.artifacts.model.LayerInfo;
-
 import org.dive4elements.river.artifacts.model.map.HWS;
 import org.dive4elements.river.artifacts.model.map.HWSContainer;
 import org.dive4elements.river.artifacts.model.map.HWSFactory;
-
-import org.dive4elements.river.utils.ArtifactMapfileGenerator;
-import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.utils.GeometryUtils;
 import org.dive4elements.river.utils.MapfileGenerator;
+import org.dive4elements.river.utils.RiverUtils;
+import org.geotools.data.shapefile.ShapefileDataStore;
+import org.geotools.feature.FeatureCollection;
+import org.geotools.feature.FeatureCollections;
+import org.opengis.feature.simple.SimpleFeatureType;
+import org.opengis.feature.type.GeometryDescriptor;
+import org.w3c.dom.Element;
 
 public class HWSBarriersState
 extends DefaultState
@@ -63,9 +41,8 @@
     private static Logger logger = Logger.getLogger(HWSBarriersState.class);
     private static final String HWS_SHAPEFILE_LINES = "hws-lines.shp";
     private static final String HWS_SHAPEFILE_POINTS = "hws-points.shp";
-    private static final String USER_RGD_SHAPE     = "user-rgd.shp";
-    private static final String USER_RGD_ZIP       = "user-rgd.zip";
-    private static final String USER_RGD_FILENAME  = "user-rgd";
+
+
     @Override
     protected String getUIProvider() {
         return "map_digitize";
@@ -146,7 +123,7 @@
         }
 
         if (successLines) {
-            createMapfile(
+            FloodMapState.createMapfile(
                 artifact,
                 artifactDir,
                 MapfileGenerator.MS_LAYER_PREFIX + "hws-lines",
@@ -156,7 +133,7 @@
                 "hws");
         }
         if (successPoints) {
-            createMapfile(
+            FloodMapState.createMapfile(
                 artifact,
                 artifactDir,
                 MapfileGenerator.MS_LAYER_PREFIX + "hws-points",
@@ -172,7 +149,7 @@
                 try {
                     ShapefileDataStore store = new ShapefileDataStore(
                     new File(artifactDir.getCanonicalPath() +
-                        "/" + USER_RGD_SHAPE)
+                        "/" + FloodMapState.WSPLGEN_USER_RGD_SHAPE)
                             .toURI().toURL());
                     GeometryDescriptor desc =
                         store.getSchema().getGeometryDescriptor();
@@ -201,14 +178,14 @@
                     else {
                         type = "POINT";
                     }
-                    createMapfile(
+                    FloodMapState.createMapfile(
                         artifact,
                         artifactDir,
-                        MapfileGenerator.MS_LAYER_PREFIX + USER_RGD_FILENAME,
-                        USER_RGD_SHAPE,
+                        MapfileGenerator.MS_LAYER_PREFIX + FloodMapState.WSPLGEN_USER_RGD,
+                        FloodMapState.WSPLGEN_USER_RGD_SHAPE,
                         type,
                         epsg,
-                        "user-rgd");
+                        FloodMapState.WSPLGEN_USER_RGD);
                 }
                 catch (IOException e) {
                     logger.warn("No mapfile for user-rgd created!");
@@ -219,7 +196,7 @@
     }
 
     private boolean extractUserShp(File dir) {
-        File archive = new File(dir, USER_RGD_ZIP);
+        File archive = new File(dir, FloodMapState.WSPLGEN_USER_RGD_ZIP);
         boolean exists = archive.exists();
         logger.debug("Zip file exists: " + exists);
         if (exists) {
@@ -231,7 +208,7 @@
             }
             catch (IOException ioe) {
                 logger.warn("Zip archive " + dir + "/"
-                    + USER_RGD_ZIP + " could not be extracted.");
+                    + FloodMapState.WSPLGEN_USER_RGD_ZIP + " could not be extracted.");
                 return false;
             }
         }
@@ -261,7 +238,7 @@
                         return true;
                     }
                     try {
-                        FileTools.copyFile(file, new File(target, USER_RGD_FILENAME + "." + suffix));
+                        FileTools.copyFile(file, new File(target, FloodMapState.WSPLGEN_USER_RGD + "." + suffix));
                     }
                     catch (IOException ioe) {
                         logger.warn ("Error while copying file " + file.getName());
@@ -275,41 +252,6 @@
         FileTools.deleteRecursive(source);
     }
 
-    private void createMapfile(
-        D4EArtifact artifact,
-        File artifactDir,
-        String name,
-        String hwsShapefile,
-        String type,
-        String srid,
-        String group
-    ) {
-        LayerInfo info = new LayerInfo();
-        info.setName(name + artifact.identifier());
-        info.setType(type);
-        info.setDirectory(artifact.identifier());
-        info.setTitle(name);
-        info.setData(hwsShapefile);
-        info.setSrid(srid);
-        info.setGroupTitle(group);
-        MapfileGenerator generator = new ArtifactMapfileGenerator();
-        Template tpl = generator.getTemplateByName(MapfileGenerator.SHP_LAYER_TEMPLATE);
-        try {
-            File layer = new File(artifactDir.getCanonicalPath() + "/" + name);
-            generator.writeLayer(info, layer, tpl);
-            List<String> layers = new ArrayList<String>();
-            layers.add(layer.getAbsolutePath());
-            generator.generate();
-        }
-        catch(FileNotFoundException fnfe) {
-            logger.warn("Could not find mapfile for hws layer");
-        }
-        catch (Exception ioe) {
-            logger.warn("Could not create mapfile for hws layer");
-            logger.warn(Arrays.toString(ioe.getStackTrace()));
-        }
-    }
-
 
     @Override
     public void endOfLife(Artifact artifact, Object callContext) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WDifferencesState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WDifferencesState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -18,10 +18,12 @@
 
 import org.dive4elements.artifacts.CallContext;
 import org.dive4elements.artifacts.Artifact;
+import org.dive4elements.river.artifacts.ChartArtifact;
 import org.dive4elements.river.artifacts.D4EArtifact;
+import org.dive4elements.river.artifacts.FixationArtifact;
+import org.dive4elements.river.artifacts.MINFOArtifact;
 import org.dive4elements.river.artifacts.StaticWKmsArtifact;
 import org.dive4elements.river.artifacts.WINFOArtifact;
-import org.dive4elements.river.artifacts.ChartArtifact;
 
 import org.dive4elements.river.artifacts.math.WKmsOperation;
 
@@ -29,15 +31,16 @@
 import org.dive4elements.river.artifacts.model.DataFacet;
 import org.dive4elements.river.artifacts.model.DifferenceCurveFacet;
 import org.dive4elements.river.artifacts.model.EmptyFacet;
-
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.WKms;
 import org.dive4elements.river.artifacts.model.WQKms;
 
+import org.dive4elements.river.artifacts.model.fixings.FixRealizingResult;
+
 import org.dive4elements.river.utils.RiverUtils;
 import org.dive4elements.river.utils.StringUtil;
 
-
+/** State of a WINFOArtifact that computes differences between the data of other artifacts. */
 public class WDifferencesState
 extends      DefaultState
 implements   FacetTypes
@@ -76,9 +79,10 @@
 
 
     /**
-     * Access the data (wkms).
+     * Access the data (WKms) of an artifact, encoded in the "mingle"
+     * string (uuid, facet name and index separated by ";").
      */
     protected WKms getWKms(String mingle, CallContext context) {
+        logger.debug("WDifferencesState.getWKms " + mingle);
         String[] def  = mingle.split(";");
         String   uuid = def[0];
         String   name = def[1];
@@ -96,23 +100,45 @@
             return wkms;
         }
 
-        WINFOArtifact flys = (WINFOArtifact) RiverUtils.getArtifact(
+        D4EArtifact d4eArtifact = RiverUtils.getArtifact(
             uuid,
             context);
 
-        if (flys == null) {
+        if (d4eArtifact == null) {
             logger.warn("One of the artifacts (1) for diff calculation could not be loaded");
             return null;
         }
-        else{
-            WQKms[] wqkms = (WQKms[]) flys.getWaterlevelData().
+
+        if (d4eArtifact instanceof WINFOArtifact) {
+            logger.debug("Get WKms from WINFOArtifact");
+            WINFOArtifact flys = (WINFOArtifact) d4eArtifact;
+
+            WKms[] wkms = (WKms[]) flys.getWaterlevelData(context).
                                               getData();
-            if (wqkms == null)
-            logger.warn("not  waterlevels in artifact");
-            else if (wqkms.length < idx)
-            logger.warn("not enough waterlevels in artifact");
-            return wqkms[idx];
+            if (wkms == null || wkms.length == 0) {
+                logger.warn("no waterlevels in artifact");
+                return new WQKms();
+            }
+            else if (wkms.length < idx+1) {
+                logger.warn("Not enough waterlevels in artifact.");
+                return new WQKms();
+            }
+            return wkms[idx];
         }
+        else if (d4eArtifact instanceof MINFOArtifact) {
+            logger.debug("Get WKms from MINFOArtifact");
+            CalculationResult r = (CalculationResult)
+                d4eArtifact.compute(context, ComputeType.ADVANCE, false);
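+            // The MINFO result is not converted into WKms here yet, so
+            // control still falls through to the error branch below.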
+        }
+        else if (d4eArtifact instanceof FixationArtifact) {
+            logger.debug ("Get WKms from FixationArtifact.");
+            CalculationResult r = (CalculationResult)
+                d4eArtifact.compute(context, ComputeType.ADVANCE, false);
+            FixRealizingResult frR = (FixRealizingResult) r.getData();
+            return frR.getWQKms()[idx];
+        }
+
+        logger.error("Do not know how to handle (getWKms) minuend/subtrahend");
+        return null;
     }
 
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WaterlevelState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WaterlevelState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -138,6 +138,7 @@
         return res;
     }
 
+    /** Returns empty String if argument is null, argument itself otherwise. */
     private static final String nn(String s) {
         return s != null ? s : "";
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java	Wed Aug 07 18:53:15 2013 +0200
@@ -37,6 +37,7 @@
 import org.dive4elements.river.utils.Formatter;
 
 /**
+ * State for BedDifferences.
  * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a>
  */
 public class DifferencesState
@@ -68,7 +69,7 @@
     @Override
     public Object computeAdvance(D4EArtifact artifact, String hash,
         CallContext context, List<Facet> facets, Object old) {
-        logger.debug("BedQualityState.computeAdvance");
+        logger.debug("DifferencesState.computeAdvance");
 
         List<Facet> newFacets = new ArrayList<Facet>();
 
@@ -99,7 +100,7 @@
 
     protected void generateFacets(CallContext context, List<Facet> newFacets,
         BedDifferencesResult[] results, String stateId, String hash) {
-        logger.debug("BedQualityState.generateFacets");
+        logger.debug("DifferencesState.generateFacets");
 
         CallMeta meta = context.getMeta();
 
@@ -412,3 +413,4 @@
             });
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java	Wed Aug 07 18:53:15 2013 +0200
@@ -21,6 +21,7 @@
 import org.dive4elements.river.artifacts.D4EArtifact;
 import org.dive4elements.river.artifacts.access.SedimentLoadAccess;
 import org.dive4elements.river.artifacts.model.CalculationResult;
+import org.dive4elements.river.artifacts.model.DataFacet;
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.ReportFacet;
 import org.dive4elements.river.artifacts.model.minfo.SedimentLoad;
@@ -33,7 +34,7 @@
 import org.dive4elements.river.artifacts.states.DefaultState;
 import org.dive4elements.river.utils.DateGuesser;
 
-
+/** State in which Sediment Load(s) are calculated/retrieved. */
 public class SedimentLoadCalculate
 extends DefaultState
 implements FacetTypes
@@ -73,7 +74,7 @@
                         name.equals(SEDIMENT_LOAD_SUSP_SAND_BED)){
                         return Boolean.FALSE;
                     }
-                    else if (name.equals(SEDIMENT_LOAD_UNKOWN)) {
+                    else if (name.equals(SEDIMENT_LOAD_UNKNOWN)) {
                         D4EArtifact d4e = (D4EArtifact)artifact;
                         SedimentLoadUnknownFacet f =
                             (SedimentLoadUnknownFacet)
@@ -139,10 +140,6 @@
             return res;
         }
 
-        String river = access.getRiver();
-        SedimentLoad[] unknown =
-            SedimentLoadFactory.getSedimentLoadUnknown(river);
-
         String type = access.getYearEpoch();
         if (type.equals("year")) {
             generateYearFacets(context, newFacets, results, getID(), hash);
@@ -154,6 +151,12 @@
             generateOffEpochFacets(context, newFacets, results, getID(), hash);
         }
         logger.debug("Created " + newFacets.size() + " new Facets.");
+
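+        // The unknown sediment loads are looked up with the unit written
+        // as "t/a" or "m3/a", hence the "_per_" replacement below.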
+        String river = access.getRiver();
+        SedimentLoad[] unknown =
+            SedimentLoadFactory.getSedimentLoadUnknown(river,
+                access.getUnit().replace("_per_","/"), type);
+
         if (res.getReport().hasProblems()) {
             newFacets.add(new ReportFacet(ComputeType.ADVANCE, hash, id));
         }
@@ -161,12 +164,16 @@
         for (int i = 0; i < unknown.length; i++) {
             newFacets.add(new SedimentLoadUnknownFacet(
                 i,
-                SEDIMENT_LOAD_UNKOWN,
+                SEDIMENT_LOAD_UNKNOWN,
                 unknown[i].getDescription(),
                 ComputeType.ADVANCE,
                 getID(),
                 hash));
         }
+
+        newFacets.add(
+            new DataFacet(CSV, "CSV data", ComputeType.ADVANCE, hash, id));
+
         facets.addAll(newFacets);
 
         return res;
@@ -520,3 +527,4 @@
         }
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java	Wed Aug 07 18:53:15 2013 +0200
@@ -12,7 +12,7 @@
 
 import org.dive4elements.river.artifacts.states.DefaultState;
 
-
+/** State in which official epoch is selected to calculate sediment load. */
 public class SedimentLoadOffEpochSelect
 extends DefaultState
 {
--- a/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -46,7 +46,7 @@
     protected List<Facet>         newFacets;
 
     /**
-     * "Compatibility matrix", mapws list of facet names to output names.
+     * "Compatibility matrix", maps list of facet names to output names.
      * Any facet that is not found in the list for a specific output will
      * not be added to the resulting document.
      */
--- a/artifacts/src/main/java/org/dive4elements/river/collections/D4EArtifactCollection.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/collections/D4EArtifactCollection.java	Wed Aug 07 18:53:15 2013 +0200
@@ -628,6 +628,30 @@
         return persistent != null ? persistent.getArtifact() : null;
     }
 
-
+    /**
+     * Returns all artifacts of this collection that have the given name.
+     *
+     * @param name The artifact name to search for
+     * @param context The CallContext
+     *
+     * @return a list of artifacts matching this name.
+     */
+    public List<Artifact> getArtifactsByName(String name, CallContext context)
+    {
+        log.debug("Searching for Artifacts: " + name);
+        List<Artifact> ret =  new ArrayList<Artifact>();
+        try {
+            for (String uuid: getArtifactUUIDs(context)) {
+                D4EArtifact subArt = (D4EArtifact)getArtifact(uuid, context);
+                if (subArt.getName() != null && subArt.getName().equals(name)) {
+                    ret.add(subArt);
+                }
+            }
+        } catch (ArtifactDatabaseException e) {
+            log.error("Unexpected Error!", e);
+        }
+        return ret;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/DischargeCurveGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/DischargeCurveGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -72,9 +72,12 @@
      * Returns the PNP (Datum) of gauge, if at gauge, 0 otherwise.
      */
     public static double getCurrentGaugeDatum(double km, D4EArtifact artifact, double tolerance) {
-        // Code borrowed from FixATWriter.
-        Gauge gauge = RiverUtils.getGauge(artifact);
+        // Find the gauge that is defined around the chosen km.
+        Gauge gauge =
+            RiverUtils.getRiver(artifact).determineGauge(km-0.1d, km+0.1d);
         double subtractPNP = 0d;
+        // Subtract its datum only if the km really lies at the gauge.
         if (Math.abs(km - gauge.getStation().doubleValue()) < tolerance) {
             subtractPNP = gauge.getDatum().doubleValue();
         }
@@ -88,6 +91,17 @@
     }
 
 
+    /** Overridden to show the second axis even if no visible data is present. */
+    @Override
+    protected void adjustAxes(XYPlot plot) {
+        super.adjustAxes(plot);
+        if (getCurrentGaugeDatum() != 0d) {
+            // Show the W[*m] axis even if there is no data.
+            plot.setRangeAxis(1, createYAxis(YAXIS.W.idx));
+        }
+    }
+
+
     public DischargeCurveGenerator() {
         super();
     }
@@ -142,6 +156,7 @@
         return zoomin;
     }
 
+    /** Translate river annotations if at a gauge. */
     public void translateRiverAnnotation(RiverAnnotation riverAnnotation) {
         if (getCurrentGaugeDatum() == 0d) {
             return;
--- a/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -134,11 +134,10 @@
             doPoints(artifactFacet.getData(context), artifactFacet, theme,
                 visible, YAXIS.W.idx);
         }
-        else if (HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name)) {
-            doAnnotations((RiverAnnotation)
-                artifactFacet.getData(context), artifactFacet, theme, visible);
-        }
-        else if (HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) {
+        else if (name.equals(MAINVALUES_W) ||
+                 name.equals(MAINVALUES_Q) ||
+                 HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name) ||
+                 HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) {
             doAnnotations((RiverAnnotation)
                 artifactFacet.getData(context), artifactFacet, theme, visible);
         }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -16,7 +16,13 @@
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.WKms;
 import org.dive4elements.river.artifacts.model.WQKms;
+
+import org.dive4elements.river.exports.process.Processor;
+import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
+import org.dive4elements.river.exports.process.BedDiffYearProcessor;
+import org.dive4elements.river.exports.process.BedheightProcessor;
 import org.dive4elements.river.exports.process.WOutProcessor;
+
 import org.dive4elements.river.jfree.RiverAnnotation;
 import org.dive4elements.river.jfree.StyledAreaSeriesCollection;
 import org.dive4elements.river.jfree.StyledXYSeries;
@@ -333,9 +339,22 @@
             return;
         }
 
-        WOutProcessor processor = new WOutProcessor();
-        if (processor.canHandle(name)) {
-            processor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        WOutProcessor wProcessor = new WOutProcessor();
+        Processor bedp = new BedheightProcessor();
+        Processor bdyProcessor = new BedDiffYearProcessor();
+        Processor bdhyProcessor = new BedDiffHeightYearProcessor();
+
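+        // Ask each processor in turn whether it can handle the facet;
+        // all of them plot onto the W axis.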
+        if (wProcessor.canHandle(name)) {
+            wProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bedp.canHandle(name)) {
+           bedp.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdyProcessor.canHandle(name)) {
+           bdyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdhyProcessor.canHandle(name)) {
+           bdhyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx);
         }
         else if (name.equals(LONGITUDINAL_Q)) {
             doQOut(
--- a/artifacts/src/main/java/org/dive4elements/river/exports/OutputHelper.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/OutputHelper.java	Wed Aug 07 18:53:15 2013 +0200
@@ -71,7 +71,7 @@
         boolean debug = log.isDebugEnabled();
 
         if (debug) {
-            log.debug("D4EArtifactCollection.doOut: " + outName);
+            log.debug("OutputHelper.doOut: " + outName);
         }
 
         ThemeList themeList = new ThemeList(attributes);
@@ -154,7 +154,7 @@
 
         if (debug) {
             log.debug(
-                "D4EArtifactCollection.getFacetThemeFromAttribute(facet="
+                "OutputHelper.getFacetThemeFromAttribute(facet="
                 + facet + ", index=" + index + ")");
         }
 
@@ -235,7 +235,7 @@
         Theme       t,
         CallContext context)
     {
-        log.debug("D4EArtifactCollection.addThemeToAttribute: " + uuid);
+        log.debug("OutputHelper.addThemeToAttribute: " + uuid);
 
         if (t == null) {
             log.warn("Theme is empty - cancel adding it to attribute!");
@@ -380,7 +380,7 @@
     protected Artifact getArtifact(String uuid, CallContext context)
     throws    ArtifactDatabaseException
     {
-        log.debug("D4EArtifactCollection.getArtifact");
+        log.debug("OutputHelper.getArtifact");
 
         Backend backend               = Backend.getInstance();
         PersistentArtifact persistent = backend.getArtifact(uuid);
@@ -409,7 +409,7 @@
         boolean debug = log.isDebugEnabled();
 
         if (debug) {
-            log.debug("D4EArtifactCollection.initItemAttribute");
+            log.debug("OutputHelper.initItemAttribute");
         }
 
         Theme t = getThemeForFacet(uuid, facet, pattern, index, outName, context);
@@ -447,7 +447,7 @@
         String outName,
         CallContext context)
     {
-        log.info("D4EArtifactCollection.getThemeForFacet: " + facet);
+        log.info("OutputHelper.getThemeForFacet: " + facet);
 
         RiverContext flysContext = context instanceof RiverContext
             ? (RiverContext) context
--- a/artifacts/src/main/java/org/dive4elements/river/exports/WDifferencesCurveGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/WDifferencesCurveGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -8,11 +8,22 @@
 
 package org.dive4elements.river.exports;
 
+import org.apache.log4j.Logger;
+
+import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
+import org.dive4elements.artifactdatabase.state.Facet;
+
 import org.dive4elements.river.artifacts.model.FacetTypes;
 import org.dive4elements.river.artifacts.model.WKms;
 
+import org.dive4elements.river.exports.process.BedDiffYearProcessor;
+import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
+import org.dive4elements.river.exports.process.BedheightProcessor;
+import org.dive4elements.river.exports.process.Processor;
+
 import org.jfree.chart.JFreeChart;
 import org.jfree.chart.plot.XYPlot;
+import org.w3c.dom.Document;
 
 
 /**
@@ -22,6 +33,9 @@
 extends      LongitudinalSectionGenerator
 implements   FacetTypes
 {
+    /** The logger that is used in this generator. */
+    private static Logger logger = Logger.getLogger(WDifferencesCurveGenerator.class);
+
     public enum YAXIS {
         W(0),
         D(1),
@@ -85,8 +99,46 @@
     }
 
 
+    /** Handle additional facets (bed heights and bed differences). */
+    @Override
+    public void doOut(ArtifactAndFacet bundle, Document attr, boolean visible) {
+        super.doOut(bundle, attr, visible);
+
+        String name = bundle.getFacetName();
+        logger.debug("doOut: " + name);
+
+        if (name == null) {
+            logger.error("No facet name for doOut(). No output generated!");
+            return;
+        }
+
+        Facet facet = bundle.getFacet();
+
+        if (facet == null) {
+            return;
+        }
+
+        Processor bedp = new BedheightProcessor();
+        Processor bdyProcessor = new BedDiffYearProcessor();
+        Processor bdhyProcessor = new BedDiffHeightYearProcessor();
+
+        if (bedp.canHandle(name)) {
+           bedp.doOut(this, bundle, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdyProcessor.canHandle(name)) {
+           bdyProcessor.doOut(this, bundle, attr, visible, YAXIS.W.idx);
+        }
+        else if (bdhyProcessor.canHandle(name)) {
+           bdhyProcessor.doOut(this, bundle, attr, visible, YAXIS.W.idx);
+        }
+        else {
+            logger.warn("WDifferencesCurveGenerator.doOut: unknown facet type " + name);
+        }
+    }
+
+
     /**
-     *
+     * Makes the zero baseline visible.
      */
     @Override
     public JFreeChart generateChart() {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/WaterlevelExporter.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/WaterlevelExporter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -34,6 +34,7 @@
 import net.sf.jasperreports.engine.JasperPrint;
 import net.sf.jasperreports.engine.JRException;
 
+import org.dive4elements.artifacts.Artifact;
 import org.dive4elements.artifacts.CallContext;
 import org.dive4elements.artifacts.CallMeta;
 import org.dive4elements.artifacts.common.utils.Config;
@@ -45,6 +46,7 @@
 import org.dive4elements.river.artifacts.FixationArtifact;
 import org.dive4elements.river.artifacts.D4EArtifact;
 import org.dive4elements.river.artifacts.WINFOArtifact;
+import org.dive4elements.river.artifacts.StaticWQKmsArtifact;
 import org.dive4elements.river.artifacts.model.CalculationResult;
 import org.dive4elements.river.artifacts.model.Segment;
 import org.dive4elements.river.artifacts.model.WQCKms;
@@ -70,6 +72,9 @@
 
     public static final String FACET_WST = "wst";
 
+    /* This should be the same as in the StaticWQKmsArtifact */
+    public static final String STATICWQKMSNAME = "staticwqkms";
+
     public static final String CSV_KM_HEADER =
         "export.waterlevel.csv.header.km";
 
@@ -134,9 +139,11 @@
     public static final String PDF_HEADER_MODE = "export.waterlevel.pdf.mode";
     public static final String JASPER_FILE     = "export.waterlevel.pdf.file";
 
-    /** The storage that contains all WQKms objects for the different facets.*/
+    /** The storage that contains all calculated WQKms objects. */
     protected List<WQKms[]> data;
 
+    /** The storage that contains official fixings, if available. */
+    protected List<WQKms> officalFixings;
 
     public void init(Document request, OutputStream out, CallContext context) {
         logger.debug("WaterlevelExporter.init");
@@ -153,6 +160,37 @@
     {
         logger.debug("WaterlevelExporter.generate");
 
+        /* Check for official fixings. They should also be included in the
+         * export, but addData only receives the calculation results, so
+         * they are collected separately here. */
+
+        officalFixings = new ArrayList<WQKms>();
+
+        for (Artifact art: collection.getArtifactsByName(STATICWQKMSNAME, context)) {
+            if (art instanceof StaticWQKmsArtifact) {
+                StaticWQKmsArtifact sart = (StaticWQKmsArtifact) art;
+                if (!sart.isOfficial()) {
+                    continue;
+                }
+
+                /* Check that we add the data only once */
+                WQKms toAdd = sart.getWQKms();
+                String newName = toAdd.getName();
+
+                boolean exists = false;
+                for (WQKms wqkm: officalFixings) {
+                    /* The same official fixing could be in two
+                       artifacts/outs so let's deduplicate */
+                    if (wqkm.getName().equals(newName)) {
+                        exists = true;
+                    }
+                }
+                if (!exists) {
+                    officalFixings.add(toAdd);
+                    logger.debug("Adding additional official fixing: " + newName);
+                }
+            }
+        }
+
         if (facet != null && facet.equals(AbstractExporter.FACET_CSV)) {
             generateCSV();
         }
@@ -183,7 +221,7 @@
 
 
     /**
-     * This method is used to prepare the column titles of waterlevel exports.
+     * Prepare the column titles of waterlevel exports.
      * Titles in this export include the Q value. If a Q value matches a named
      * main value (as HQ100 or MNQ) this named main value should be used as
      * title. This method resets the name of the <i>wqkms</i> object if such
@@ -294,11 +332,80 @@
         writeCSVMeta(writer);
         writeCSVHeader(writer, atGauge, isQ);
 
+        Double first = Double.NaN;
+        Double last = Double.NaN;
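+        // first/last track the overall km range of the exported waterlevels
+        // so that the official fixings appended below can be clipped to it.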
+
         for (WQKms[] tmp: data) {
             for (WQKms wqkms: tmp) {
                 wQKms2CSV(writer, wqkms, atGauge, isQ);
+                double[] firstLast = wqkms.getFirstLastKM();
+                if (first.isNaN()) {
+                    /* Initialize */
+                    first = firstLast[0];
+                    last = firstLast[1];
+                }
+                if (firstLast[0] > firstLast[1]) {
+                    /* When calculating upstream we assume that the
+                     * direction cannot change within this loop. */
+                    first = Math.max(first, firstLast[0]);
+                    last = Math.min(last, firstLast[1]);
+                } else if (firstLast[0] < firstLast[1]) {
+                    first = Math.min(first, firstLast[0]);
+                    last = Math.max(last, firstLast[1]);
+                } else {
+                    first = last = firstLast[0];
+                }
             }
         }
+        /* Append the official fixing at the bottom */
+        for (WQKms wqkms: officalFixings) {
+            wQKms2CSV(writer, filterWQKms(wqkms, first, last), atGauge, isQ);
+        }
+    }
+
+
+    /** Filter a WQKms object down to a km range.
+     *
+     * To handle upstream / downstream and to limit
+     * the officialFixings to the calculation distance
+     * we create a new wqkms object here and fill it only
+     * with the relevant data.
+     *
+     * @param wqkms The WQKms object to filter
+     * @param first The first kilometer of the range
+     * @param last The last kilometer of the range
+     *
+     * @return A new WQKms with the relevant data sorted by direction
+     */
+    private WQKms filterWQKms(WQKms wqkms, Double first, Double last) {
+        if (first.isNaN() || last.isNaN()) {
+            logger.warn("Filtering official fixing without valid first/last.");
+            return wqkms;
+        }
+        int firstIdx = first > last ? wqkms.size() - 1 : 0;
+        int lastIdx  = first > last ? 0 : wqkms.size() -1;
+        WQKms filtered = new WQKms (wqkms.size());
+        filtered.setReferenceSystem(wqkms.getReferenceSystem());
+        filtered.setName(wqkms.getName());
+        double [] dp = new double [3];
+
+        if (first > last) {
+            for (int i = wqkms.size() - 1; i >= 0; i--) {
+                dp = wqkms.get(i, dp);
+                if (dp[2] <= first + 1E-5 && dp[2] > last - 1E-5) {
+                    filtered.add(dp[0], dp[1], dp[2]);
+                }
+            }
+        } else {
+            for (int i = 0; i < wqkms.size(); i++) {
+                dp = wqkms.get(i, dp);
+                if (dp[2] < last + 1E-5 && dp[2] > first - 1E-5) {
+                    filtered.add(dp[0], dp[1], dp[2]);
+                }
+            }
+        }
+        return filtered;
     }
 
 
@@ -484,13 +591,12 @@
         NumberFormat kmf = getKmFormatter();
         NumberFormat wf  = getWFormatter();
         NumberFormat qf  = getQFormatter();
-
-       writer.writeNext(new String[] {
-           kmf.format(wqkm[2]),
-           wf.format(wqkm[0]),
-           qf.format(wqkm[1]),
-           RiverUtils.getLocationDescription(flys, wqkm[2])
-       });
+        writer.writeNext(new String[] {
+            kmf.format(wqkm[2]),
+            wf.format(wqkm[0]),
+            qf.format(wqkm[1]),
+            RiverUtils.getLocationDescription(flys, wqkm[2])
+        });
     }
 
 
@@ -511,6 +617,40 @@
         });
     }
 
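+    /**
+     * Builds the row description / column title for a waterlevel:
+     * a named main value if one matches, otherwise the CSV row title
+     * (Q mode) or the formatted W value.
+     */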
+    private String getDesc(WQKms wqkms, boolean isQ)
+    {
+        D4EArtifact flys = (D4EArtifact) master;
+        String colDesc = "";
+
+        if (flys instanceof WINFOArtifact && isQ) {
+            colDesc = getCSVRowTitle((WINFOArtifact)flys, wqkms);
+        }
+        else if (!isQ) {
+            Double value = RiverUtils.getValueFromWQ(wqkms);
+            colDesc = (value != null) ?
+                Formatter.getWaterlevelW(context).format(value) : null;
+        }
+
+        if (flys instanceof WINFOArtifact) {
+            if (wqkms != null && wqkms.getRawValue() != null) {
+                WINFOArtifact winfo = (WINFOArtifact) flys;
+                colDesc = RiverUtils.getNamedMainValue(winfo, wqkms.getRawValue());
+                // For 'W am Pegel' (no matching named main value) fall back to the W value.
+                if (colDesc == null) {
+                    Double value = RiverUtils.getValueFromWQ(wqkms);
+                    colDesc = (value != null) ?
+                        Formatter.getWaterlevelW(context).format(value) : null;
+                }
+            }
+        }
+        if (colDesc != null) {
+            /* Quick hack. Can be removed when database strings are
+             * adapted or left in here as it should never be harmful. */
+            colDesc = colDesc.replace("Amtl.Festlegung_", "Amtl. ");
+        }
+
+        return colDesc == null ? "" : colDesc;
+    }
 
     /**
      * Write "rows" of csv data from wqkms with writer.
@@ -543,35 +683,16 @@
         String       notinrange = msg(
             CSV_NOT_IN_GAUGE_RANGE,
             DEFAULT_CSV_NOT_IN_GAUGE_RANGE);
+        List<Segment> segments = null;
+        boolean isFixRealize = false;
 
         double a = gauge.getRange().getA().doubleValue();
         double b = gauge.getRange().getB().doubleValue();
-
-        if (flys instanceof WINFOArtifact && isQ) {
-            desc = getCSVRowTitle((WINFOArtifact)flys, wqkms);
-        }
-        else if (!isQ) {
-            Double value = RiverUtils.getValueFromWQ(wqkms);
-            desc         = value != null
-                ? Formatter.getWaterlevelW(context).format(value) : null;
-        }
-
         long startTime = System.currentTimeMillis();
 
-        String colDesc = desc;
-        List<Segment> segments = null;
-        boolean isFixRealize = false;
-        if (flys instanceof WINFOArtifact) {
-            if (wqkms != null && wqkms.getRawValue() != null) {
-                WINFOArtifact winfo = (WINFOArtifact) flys;
-                colDesc = RiverUtils.getNamedMainValue(winfo, wqkms.getRawValue());
-                // For 'W am Pegel' s
-                if (colDesc == null) {
-                    colDesc = ((D4EArtifact)master).getDataAsString("wq_single");
-                }
-            }
-        }
-        else if (flys instanceof FixationArtifact) {
+        desc = getDesc(wqkms, isQ);
+
+        if (flys instanceof FixationArtifact) {
             // Get W/Q input per gauge for this case.
             FixRealizingAccess fixAccess = new FixRealizingAccess(flys);
             segments = fixAccess.getSegments();
@@ -601,7 +722,7 @@
                         : findSegment(km, segments);
 
                     if (found != null) {
-                        colDesc = nf.format(found.getValues()[0]);
+                        desc = nf.format(found.getValues()[0]);
                     }
                     lastSegment = found;
                 }
@@ -622,7 +743,7 @@
                         ? gaugeName
                         : notinrange;
                 }
-                writeRow6(writer, result, colDesc, flys, gaugeN);
+                writeRow6(writer, result, desc, flys, gaugeN);
             }
         }
         else { // Not at gauge.
@@ -693,7 +814,7 @@
 
 
     /**
-     * This method is used to register a new column at <i>writer</i>. The name /
+     * Register a new column at <i>writer</i>. The name /
      * title of the column depends on the Q or W value of <i>wqkms</i>. If a Q
      * was selected and the Q fits to a named main value, the title is set to
      * the named main value. Otherwise, the name returned by
@@ -715,9 +836,6 @@
     }
 
 
-    /**
-     *
-     */
     @Override
     protected void writePDF(OutputStream out) {
         logger.debug("write PDF");
@@ -751,12 +869,38 @@
         boolean atGauge = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.WGAUGE;
         boolean isQ     = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.QFREE;
 
+        Double first = Double.NaN;
+        Double last = Double.NaN;
+
         addMetaData(source);
         for (WQKms[] tmp: data) {
             for (WQKms wqkms: tmp) {
                 addWKmsData(wqkms, atGauge, isQ, source);
+                double[] firstLast = wqkms.getFirstLastKM();
+                if (first.isNaN()) {
+                    /* Initialize */
+                    first = firstLast[0];
+                    last = firstLast[1];
+                }
+                if (firstLast[0] > firstLast[1]) {
+                    /* When calculating upstream we assume that the
+                     * direction cannot change within this loop. */
+                    first = Math.max(first, firstLast[0]);
+                    last = Math.min(last, firstLast[1]);
+                } else if (firstLast[0] < firstLast[1]) {
+                    first = Math.min(first, firstLast[0]);
+                    last = Math.max(last, firstLast[1]);
+                } else {
+                    first = last = firstLast[0];
+                }
             }
         }
+
+        /* Append the official fixing at the bottom */
+        for (WQKms wqkms: officalFixings) {
+            addWKmsData(filterWQKms(wqkms, first, last), atGauge, isQ, source);
+        }
         return source;
     }
 
@@ -815,15 +959,7 @@
         double a = gauge.getRange().getA().doubleValue();
         double b = gauge.getRange().getB().doubleValue();
 
-        if (flys instanceof WINFOArtifact && isQ) {
-            desc = getCSVRowTitle((WINFOArtifact)flys, wqkms);
-        }
-        else if (!isQ) {
-            Double value = RiverUtils.getValueFromWQ(wqkms);
-            desc         = value != null
-                ? Formatter.getWaterlevelW(context).format(value) : null;
-        }
-
+        desc = getDesc(wqkms, isQ);
         long startTime = System.currentTimeMillis();
 
         for (int i = 0; i < size; i ++) {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/extreme/ExtremeWQCurveGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/extreme/ExtremeWQCurveGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -14,6 +14,7 @@
 import org.jfree.chart.JFreeChart;
 import org.jfree.chart.plot.Marker;
 import org.jfree.chart.plot.ValueMarker;
+import org.jfree.chart.plot.XYPlot;
 import org.jfree.chart.title.TextTitle;
 import org.jfree.data.xy.XYSeries;
 import org.w3c.dom.Document;
@@ -92,6 +93,16 @@
         return false;
     }
 
+    /** Overridden to show the second axis even if no visible data is present. */
+    @Override
+    protected void adjustAxes(XYPlot plot) {
+        super.adjustAxes(plot);
+        if (getCurrentGaugeDatum() != 0d) {
+            // Show the W[*m] axis even if there is no data.
+            plot.setRangeAxis(1, createYAxis(YAXIS.W.idx));
+        }
+    }
+
     /** Do Extreme Curve nonextrapolated points out. */
     protected void doExtremeCurveBaseOut(ArtifactAndFacet aaf, Document theme, boolean visible) {
         logger.debug("doExtremeCurveBaseOut");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -115,7 +115,7 @@
 
         double wMin = minW(invInst, wMax, qMax[0]);
 
-        double wMinCM = wMin * 100d;
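+        // wMinCM (and the W rows printed below) is in cm relative to the
+        // gauge datum (PNP); the absolute W in m is recovered again as
+        // (wMinCM + subtractPNP) / 100.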
+        double wMinCM = wMin * 100d - subtractPNP;
         double wMaxCM = wMax * 100d;
 
         int wRow = ((int)wMinCM / 10) * 10;
@@ -124,7 +124,7 @@
             wMinCM = (int)wMinCM + 1d;
         }
 
-        double w = wMinCM / 100.0;
+        double w = (wMinCM + subtractPNP) / 100.0;
 
         int wcm = ((int)wMinCM) % 10;
 
@@ -132,9 +132,10 @@
             log.debug("wMinCM: " + wMinCM);
             log.debug("wMaxCM: " + wMaxCM);
             log.debug("wcm: " + wcm);
+            log.debug("subtractPNP: " + subtractPNP);
         }
 
-        out.printf(Locale.US, "%8d", wRow - subtractPNP);
+        out.printf(Locale.US, "%8d", wRow);
 
         for (int i = 0; i < wcm; i++) {
             out.print(ATWriter.EMPTY);
@@ -158,7 +159,7 @@
             if (w > wMax) {
                 break;
             }
-            out.printf(Locale.US, "%8d", (wRow += 10) - subtractPNP);
+            out.printf(Locale.US, "%8d", (wRow += 10));
             wcm = 0;
         }
 
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixWQCurveGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixWQCurveGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -21,6 +21,7 @@
 import org.jfree.chart.axis.NumberAxis;
 import org.jfree.chart.plot.Marker;
 import org.jfree.chart.plot.ValueMarker;
+import org.jfree.chart.plot.XYPlot;
 import org.jfree.chart.title.TextTitle;
 import org.jfree.data.xy.XYSeries;
 import org.jfree.ui.RectangleAnchor;
@@ -153,15 +154,25 @@
         return axis;
     }
 
+    /** Returns a value != 0 (the gauge datum) if the current km is at a gauge. */
     public double getCurrentGaugeDatum() {
         if (context.getContextValue(CURRENT_KM) != null) {
             return DischargeCurveGenerator.getCurrentGaugeDatum(
                 (Double) context.getContextValue(CURRENT_KM),
-                (D4EArtifact) getMaster(), 0.1d);
+                (D4EArtifact) getMaster(), 1e-4);
         }
         else return 0d;
     }
 
+    /** Overridden to show the second axis even if no visible data is present. */
+    @Override
+    protected void adjustAxes(XYPlot plot) {
+        super.adjustAxes(plot);
+        if (getCurrentGaugeDatum() != 0d) {
+            // Show the W[*m] axis even if there is no data.
+            plot.setRangeAxis(1, createYAxis(YAXIS.W.idx));
+        }
+    }
 
     @Override
     public void doOut(ArtifactAndFacet aaf, Document doc, boolean visible) {
@@ -226,8 +237,9 @@
                     visible);
         }
         else if (name.equals(MAINVALUES_W) || name.equals(MAINVALUES_Q)) {
+            RiverAnnotation mainValues = (RiverAnnotation) aaf.getData(context);
             doAnnotations(
-                ((RiverAnnotation) aaf.getData(context)),
+                mainValues,
                 aaf,
                 doc,
                 visible);
@@ -341,6 +353,17 @@
             if (Math.abs(kms[i] - ckm) <= EPSILON) {
                 series.add(wqkms.getQ(i), wqkms.getW(i));
                 addAxisSeries(series, YAXIS.W.idx, visible);
+                if(visible && ThemeUtil.parseShowPointLabel(theme)) {
+                    List<XYTextAnnotation> textAnnos = new ArrayList<XYTextAnnotation>();
+                    XYTextAnnotation anno = new CollisionFreeXYTextAnnotation(
+                            title,
+                            wqkms.getQ(i),
+                            wqkms.getW(i));
+                    textAnnos.add(anno);
+                    RiverAnnotation flysAnno = new RiverAnnotation(null, null, null, theme);
+                    flysAnno.setTextAnnotations(textAnnos);
+                    addAnnotations(flysAnno);
+                }
                 return;
             }
         }
@@ -385,6 +408,14 @@
                 addAxisSeries(series, YAXIS.W.idx, visible);
             }
             else {
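+                // Sample the fitted function over the full range [0, maxQ]
+                // and add it as an additional, invisible series on the W axis.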
+                StyledXYSeries series2 = JFreeUtil.sampleFunction2D(
+                        func.getFunction(),
+                        doc,
+                        aaf.getFacetDescription(),
+                        500,   // number of samples
+                        0.0 ,  // start
+                        maxQ); // end
+                addAxisSeries(series2, YAXIS.W.idx, false);
                 // Use second axis at cm if at gauge.
                 for (int i = 0; i < series.getItemCount(); i++) {
                     series.updateByIndex(i, new Double(factor*(series.getY(i).doubleValue()-gaugeDatum)));
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDiffBaseGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -0,0 +1,59 @@
+/* Copyright (C) 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.exports.minfo;
+
+import org.apache.log4j.Logger;
+
+import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
+import org.dive4elements.river.artifacts.model.minfo.BedDifferencesResult;
+import org.dive4elements.river.jfree.Bounds;
+import org.dive4elements.river.jfree.DoubleBounds;
+import org.dive4elements.river.exports.LongitudinalSectionGenerator;
+
+/** Base class for the BedDiff*Year* generators; avoids duplicating setContextBounds(). */
+public abstract class BedDiffBaseGenerator
+extends LongitudinalSectionGenerator
+{
+    /** The logger that is used in this generator. */
+    private static Logger logger = Logger.getLogger(BedDiffBaseGenerator.class);
+
+    /** Set the "startkm" and "endkm" context values. */
+    public void setContextBounds(ArtifactAndFacet bundle) {
+        Bounds xBounds = getXBounds(0);
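+        // Derive the "startkm"/"endkm" context values from the X bounds,
+        // the domain axis zoom, or, as a fallback, the data itself.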
+        if (xBounds != null && getDomainAxisRange() != null) {
+            Bounds bounds =
+                calculateZoom(xBounds, getDomainAxisRange());
+            context.putContextValue("startkm", bounds.getLower());
+            context.putContextValue("endkm", bounds.getUpper());
+        }
+        else if (xBounds != null && getDomainAxisRange() == null) {
+            context.putContextValue("startkm", xBounds.getLower());
+            context.putContextValue("endkm", xBounds.getUpper());
+        }
+        else if (xBounds == null && getDomainAxisRange() == null) {
+            BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
+            if (data.getKms().size() > 0) {
+                context.putContextValue("startkm", data.getKms().min());
+                context.putContextValue("endkm", data.getKms().max());
+            }
+            else {
+                logger.warn("No data to define start and end km");
+            }
+        }
+        else if (xBounds == null && getDomainAxisRange() != null){
+            BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
+            Bounds b = new DoubleBounds(data.getKms().min(), data.getKms().max());
+            Bounds bounds =
+                calculateZoom(b, getDomainAxisRange());
+            context.putContextValue("startkm", bounds.getLower());
+            context.putContextValue("endkm", bounds.getUpper());
+        }
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDiffHeightYearGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDiffHeightYearGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -9,26 +9,21 @@
 package org.dive4elements.river.exports.minfo;
 
 import org.apache.log4j.Logger;
-import org.jfree.data.xy.XYSeries;
 import org.w3c.dom.Document;
 
 import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
 import org.dive4elements.artifactdatabase.state.Facet;
 import org.dive4elements.river.artifacts.model.FacetTypes;
-import org.dive4elements.river.artifacts.model.minfo.BedDiffYearResult;
 import org.dive4elements.river.artifacts.model.minfo.BedDifferencesResult;
-import org.dive4elements.river.exports.StyledSeriesBuilder;
 import org.dive4elements.river.exports.LongitudinalSectionGenerator;
+import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
 import org.dive4elements.river.exports.process.KMIndexProcessor;
 import org.dive4elements.river.exports.process.Processor;
-import org.dive4elements.river.jfree.Bounds;
-import org.dive4elements.river.jfree.DoubleBounds;
 import org.dive4elements.river.jfree.RiverAnnotation;
-import org.dive4elements.river.jfree.StyledXYSeries;
 
 
 public class BedDiffHeightYearGenerator
-extends LongitudinalSectionGenerator
+extends BedDiffBaseGenerator
 implements FacetTypes
 {
     public enum YAXIS {
@@ -89,42 +84,12 @@
         }
 
         if (bundle.getData(context) instanceof BedDifferencesResult) {
-            if (getXBounds(0) != null && getDomainAxisRange() != null) {
-                Bounds bounds =
-                    calculateZoom(getXBounds(0), getDomainAxisRange());
-                context.putContextValue("startkm", bounds.getLower());
-                context.putContextValue("endkm", bounds.getUpper());
-            }
-            else if (getXBounds(0) != null && getDomainAxisRange() == null) {
-                context.putContextValue("startkm", getXBounds(0).getLower());
-                context.putContextValue("endkm", getXBounds(0).getUpper());
-            }
-            else if (getXBounds(0) == null && getDomainAxisRange() == null) {
-                BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
-                context.putContextValue("startkm", data.getKms().min());
-                context.putContextValue("endkm", data.getKms().max());
-            }
-            else if (getXBounds(0) == null && getDomainAxisRange() != null){
-                BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
-                Bounds b = new DoubleBounds(data.getKms().min(), data.getKms().max());
-                Bounds bounds =
-                    calculateZoom(b, getDomainAxisRange());
-                context.putContextValue("startkm", bounds.getLower());
-                context.putContextValue("endkm", bounds.getUpper());
-            }
+            setContextBounds(bundle);
         }
+
         Processor processor = new KMIndexProcessor();
-        if (name.equals(BED_DIFFERENCE_HEIGHT_YEAR)) {
-            doBedDifferenceYearOut(
-                (BedDiffYearResult) bundle.getData(context),
-                bundle, attr, visible);
-        }
-        else if (name.equals(BED_DIFFERENCE_HEIGHT_YEAR_FILTERED)) {
-            doBedDifferenceYearOut(
-                (BedDiffYearResult) bundle.getData(context),
-                bundle, attr, visible);
-        }
-        else if (name.equals(LONGITUDINAL_ANNOTATION)) {
+        Processor bdyProcessor = new BedDiffHeightYearProcessor();
+        if (name.equals(LONGITUDINAL_ANNOTATION)) {
             doAnnotations(
                 (RiverAnnotation) bundle.getData(context),
                  bundle,
@@ -134,22 +99,20 @@
         else if (processor.canHandle(name)) {
             processor.doOut(this, bundle, attr, visible, YAXIS.dW.idx);
         }
+        else if (bdyProcessor.canHandle(name)) {
+            bdyProcessor.doOut(this, bundle, attr, visible, YAXIS.D.idx);
+        }
         else {
             logger.warn("Unknown facet name " + name);
         }
     }
 
+
     @Override
     protected String getDefaultChartTitle() {
         return msg(I18N_CHART_TITLE, I18N_CHART_TITLE_DEFAULT);
     }
 
-    @Override
-    protected String getDefaultXAxisLabel() {
-        return msg(I18N_XAXIS_LABEL,
-            I18N_XAXIS_LABEL_DEFAULT,
-            new Object[] { getRiverName() });
-    }
 
     @Override
     protected String getDefaultYAxisLabel(int pos) {
@@ -161,13 +124,4 @@
         }
         return "default";
     }
-
-    protected void doBedDifferenceYearOut(BedDiffYearResult data,
-        ArtifactAndFacet aandf, Document theme, boolean visible) {
-
-        XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme);
-        StyledSeriesBuilder.addPoints(series, data.getHeightPerYearData(), true);
-
-        addAxisSeries(series, YAXIS.D.idx, visible);
-    }
 }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceExporter.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceExporter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -1,3 +1,11 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
 package org.dive4elements.river.exports.minfo;
 
 import java.io.IOException;
@@ -75,8 +83,7 @@
 
         NumberFormat kmf = Formatter.getCalculationKm(context.getMeta());
         NumberFormat mf = Formatter.getMeterFormat(context);
-        for (int i = 0; i < results.length; i++) {
-            BedDifferencesResult result = results[i];
+        for (BedDifferencesResult result : results) {
             double[][] kms = result.getDifferencesData();
             for (int j = 0; j < kms[0].length; j++) {
                 writer.writeNext(new String[] {
@@ -184,3 +191,4 @@
     }
 
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceYearGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedDifferenceYearGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -21,12 +21,11 @@
 import org.dive4elements.river.artifacts.model.minfo.BedDifferencesResult;
 import org.dive4elements.river.artifacts.model.minfo.MorphologicWidth;
 import org.dive4elements.river.exports.StyledSeriesBuilder;
-import org.dive4elements.river.exports.LongitudinalSectionGenerator;
+import org.dive4elements.river.exports.process.BedDiffYearProcessor;
+import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
 import org.dive4elements.river.exports.process.KMIndexProcessor;
 import org.dive4elements.river.exports.process.Processor;
 import org.dive4elements.river.exports.process.WOutProcessor;
-import org.dive4elements.river.jfree.Bounds;
-import org.dive4elements.river.jfree.DoubleBounds;
 import org.dive4elements.river.jfree.RiverAnnotation;
 import org.dive4elements.river.jfree.StyledXYSeries;
 import org.dive4elements.river.utils.DataUtil;
@@ -34,11 +33,11 @@
 
 
 public class BedDifferenceYearGenerator
-extends LongitudinalSectionGenerator
+extends BedDiffBaseGenerator
 implements FacetTypes
 {
     public enum YAXIS {
-        D(0), M(1), H(2),  dW(3), W(4);
+        D(0), M(1), H(2), dW(3), W(4);
 
         protected int idx;
 
@@ -84,6 +83,7 @@
         };
     }
 
+
     @Override
     public void doOut(ArtifactAndFacet bundle, Document attr, boolean visible) {
         String name = bundle.getFacetName();
@@ -101,66 +101,19 @@
             return;
         }
 
-        if (getXBounds(0) != null && getDomainAxisRange() != null) {
-            Bounds bounds =
-                calculateZoom(getXBounds(0), getDomainAxisRange());
-            context.putContextValue("startkm", bounds.getLower());
-            context.putContextValue("endkm", bounds.getUpper());
-        }
-        else if (getXBounds(0) != null && getDomainAxisRange() == null) {
-            context.putContextValue("startkm", getXBounds(0).getLower());
-            context.putContextValue("endkm", getXBounds(0).getUpper());
+        if (bundle.getData(context) instanceof BedDifferencesResult) {
+            setContextBounds(bundle);
         }
-        else if (getXBounds(0) == null && getDomainAxisRange() == null) {
-            BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
-            context.putContextValue("startkm", data.getKms().min());
-            context.putContextValue("endkm", data.getKms().max());
-        }
-        else if (getXBounds(0) == null && getDomainAxisRange() != null){
-            BedDifferencesResult data = (BedDifferencesResult)bundle.getData(context);
-            Bounds b = new DoubleBounds(data.getKms().min(), data.getKms().max());
-            Bounds bounds =
-                calculateZoom(b, getDomainAxisRange());
-            context.putContextValue("startkm", bounds.getLower());
-            context.putContextValue("endkm", bounds.getUpper());
-        }
+
         Processor processor = new KMIndexProcessor();
         Processor woutp = new WOutProcessor();
-        if (name.equals(BED_DIFFERENCE_YEAR)) {
-            doBedDifferenceYearOut(
-                (BedDiffYearResult) bundle.getData(context),
-                bundle, attr, visible);
-        }
-        else if (name.equals(BED_DIFFERENCE_MORPH_WIDTH)) {
+        Processor bdhyProcessor = new BedDiffHeightYearProcessor();
+        Processor bdyProcessor = new BedDiffYearProcessor();
+        if (name.equals(BED_DIFFERENCE_MORPH_WIDTH)) {
             doBedDifferenceMorphWidthOut(
                 (BedDiffYearResult) bundle.getData(context),
                 bundle, attr, visible);
         }
-        else if (name.equals(BED_DIFFERENCE_YEAR_HEIGHT1)) {
-            doBedDifferenceHeightsOut(
-                (BedDiffYearResult)bundle.getData(context),
-                bundle, attr, visible, 0);
-        }
-        else if (name.equals(BED_DIFFERENCE_YEAR_HEIGHT2)) {
-            doBedDifferenceHeightsOut(
-                (BedDiffYearResult)bundle.getData(context),
-                bundle, attr, visible, 1);
-        }
-        else if (name.equals(BED_DIFFERENCE_YEAR_FILTERED)) {
-            doBedDifferenceYearOut(
-                (BedDiffYearResult) bundle.getData(context),
-                bundle, attr, visible);
-        }
-        else if (name.equals(BED_DIFFERENCE_YEAR_HEIGHT1_FILTERED)) {
-            doBedDifferenceHeightsOut(
-                (BedDiffYearResult)bundle.getData(context),
-                bundle, attr, visible, 0);
-        }
-        else if (name.equals(BED_DIFFERENCE_YEAR_HEIGHT2_FILTERED)) {
-            doBedDifferenceHeightsOut(
-                (BedDiffYearResult)bundle.getData(context),
-                bundle, attr, visible, 1);
-        }
         else if (name.equals(MORPHOLOGIC_WIDTH)) {
             doMorphologicWidthOut(
                 (MorphologicWidth)bundle.getData(context),
@@ -179,6 +132,12 @@
                  attr,
                  visible);
         }
+        else if (bdyProcessor.canHandle(name)) {
+            bdyProcessor.doOut(this, bundle, attr, visible, YAXIS.H.idx);
+        }
+        else if (bdhyProcessor.canHandle(name)) {
+            bdhyProcessor.doOut(this, bundle, attr, visible, YAXIS.D.idx);
+        }
         else if (woutp.canHandle(name)) {
             woutp.doOut(this, bundle, attr, visible, YAXIS.W.idx);
         }
@@ -206,36 +165,12 @@
         addAxisSeries(series, YAXIS.M.idx, visible);
     }
 
-    private void doBedDifferenceHeightsOut(
-        BedDiffYearResult data,
-        ArtifactAndFacet bundle,
-        Document attr,
-        boolean visible,
-        int idx) {
-         logger.debug("doBedDifferenceYearOut()");
-
-        XYSeries series = new StyledXYSeries(bundle.getFacetDescription(), attr);
-        if (idx == 0) {
-            StyledSeriesBuilder.addPoints(series, data.getHeights1Data(), true);
-        }
-        else {
-            StyledSeriesBuilder.addPoints(series, data.getHeights2Data(), true);
-        }
-
-        addAxisSeries(series, YAXIS.H.idx, visible);
-    }
 
     @Override
     protected String getDefaultChartTitle() {
         return msg(I18N_CHART_TITLE, I18N_CHART_TITLE_DEFAULT);
     }
 
-    @Override
-    protected String getDefaultXAxisLabel() {
-        return msg(I18N_XAXIS_LABEL,
-                   I18N_XAXIS_LABEL_DEFAULT,
-                   new Object[] { getRiverName() });
-    }
 
     @Override
     protected String getDefaultYAxisLabel(int pos) {
@@ -265,15 +200,6 @@
         return label;
     }
 
-    protected void doBedDifferenceYearOut(BedDiffYearResult data,
-        ArtifactAndFacet aandf, Document theme, boolean visible) {
-        logger.debug("doBedDifferenceYearOut()");
-
-        XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme);
-        StyledSeriesBuilder.addPoints(series, data.getDifferencesData(), true);
-
-        addAxisSeries(series, YAXIS.D.idx, visible);
-    }
 
     protected void doBedDifferenceMorphWidthOut(BedDiffYearResult data,
         ArtifactAndFacet aandf, Document theme, boolean visible) {
@@ -305,3 +231,4 @@
         }
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadExporter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -0,0 +1,169 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.exports.minfo;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeSet;
+
+import java.io.OutputStream;
+import java.io.IOException;
+
+import java.text.NumberFormat;
+
+import org.w3c.dom.Document;
+
+import org.apache.log4j.Logger;
+
+import org.dive4elements.artifacts.CallContext;
+
+import org.dive4elements.river.artifacts.model.CalculationResult;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoad;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoadFraction;
+import org.dive4elements.river.artifacts.model.minfo.SedimentLoadResult;
+
+import org.dive4elements.river.exports.AbstractExporter;
+
+import org.dive4elements.river.utils.Formatter;
+
+import au.com.bytecode.opencsv.CSVWriter;
+
+
+/**
+ * Do CSV export for sediment load calculations (will also be shown in
+ * client). */
+public class SedimentLoadExporter
+extends      AbstractExporter
+{
+    /** Private logger. */
+    private static Logger logger = Logger.getLogger(SedimentLoadExporter.class);
+
+    // i18n keys.
+    public static final String CSV_KM =
+        "export.sedimentload_ls.csv.header.km";
+
+    public static final String CSV_YEAR =
+        "export.sedimentload_ls.csv.header.year";
+
+    public static final String CSV_COARSE =
+        "export.sedimentload_ls.csv.header.coarse";
+
+    public static final String CSV_FINEMIDDLE =
+        "export.sedimentload_ls.csv.header.finemiddle";
+
+    public static final String CSV_SAND =
+        "export.sedimentload_ls.csv.header.sand";
+
+    public static final String CSV_SUSP_SAND =
+        "export.sedimentload_ls.csv.header.suspsand";
+
+    public static final String CSV_SUSP_SAND_BB =
+        "export.sedimentload_ls.csv.header.suspsandbb";
+
+    public static final String CSV_SUSP_SEDIMENT =
+        "export.sedimentload_ls.csv.header.suspsediment";
+
+    public static final String CSV_TOTAL =
+        "export.sedimentload_ls.csv.header.total";
+
+
+    /** Collected results. */
+    private SedimentLoadResult[] results;
+
+    /** Empty constructor. */
+    public SedimentLoadExporter() {
+    }
+
+    /** Trivial init. */
+    @Override
+    public void init(Document request, OutputStream out, CallContext context) {
+        super.init(request, out, context);
+        logger.debug("init");
+        results = new SedimentLoadResult[0];
+    }
+
+
+    /** Process all stored data and write csv. */
+    @Override
+    protected void writeCSVData(CSVWriter writer) throws IOException {
+        writeCSVHeader(writer);
+
+        for (SedimentLoadResult result: results) {
+            SedimentLoad load = result.getLoad();
+            // TODO: Expose load.getName() and load.getDescription() somewhere?
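+            // The TreeSet iterates the stations in ascending km order.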
+            for (double km: new TreeSet<Double>(load.getKms())) {
+                SedimentLoadFraction fraction = load.getFraction(km);
+                writeRecord(writer, km, result.getStartYear(), result.getEndYear(), fraction);
+            }
+        }
+    }
+
+    /** Write a line. */
+    private void writeRecord(CSVWriter writer, double km, int fromYear, int toYear, SedimentLoadFraction fraction) {
+        // Column order: km, year(s), susp. sediment, susp. sand,
+        // susp. sand (bed), sand, fine/middle, coarse, total.
+        String years = (toYear == 0) ? fromYear+"" : fromYear + "-" + toYear;
+        NumberFormat kmf = Formatter.getCalculationKm(context.getMeta());
+        NumberFormat valf = Formatter.getFormatter(context.getMeta(), 0, 2);
+        writer.writeNext(new String[] {
+            kmf.format(km),
+            years,
+            valf.format(fraction.getSuspSediment()),
+            valf.format(fraction.getSuspSand()),
+            valf.format(fraction.getSuspSandBed()),
+            valf.format(fraction.getSand()),
+            valf.format(fraction.getFineMiddle()),
+            valf.format(fraction.getCoarse()),
+            valf.format(fraction.getTotal())
+        });
+    }
+
+    /** Writes i18ned header for csv file/stream. */
+    protected void writeCSVHeader(CSVWriter writer) {
+        logger.debug("writeCSVHeader()");
+
+        List<String> header = new LinkedList<String>();
+        if (results != null)  {
+            header.add(msg(CSV_KM,          "km"));
+            header.add(msg(CSV_YEAR,        "Jahr"));
+            header.add(msg(CSV_SUSP_SEDIMENT, "Schwebst."));
+            header.add(msg(CSV_SUSP_SAND,   "Susp.Sand"));
+            header.add(msg(CSV_SUSP_SAND_BB, "Susp.Sand(BB)"));
+            header.add(msg(CSV_SAND,        "Sand"));
+            header.add(msg(CSV_FINEMIDDLE,  "Kies(f+m)"));
+            header.add(msg(CSV_COARSE,      "Kies(g)"));
+            header.add(msg(CSV_TOTAL,       "Gesamt"));
+        }
+        writer.writeNext(header.toArray(new String[header.size()]));
+    }
+
+    /** Store data internally; accepts only a SedimentLoadResult[] inside
+     * the CalculationResult's data. */
+    @Override
+    protected void addData(Object data) {
+        if (!(data instanceof CalculationResult)) {
+            logger.warn("Invalid data type.");
+            return;
+        }
+        Object d = ((CalculationResult)data).getData();
+
+        if (!(d instanceof SedimentLoadResult[])) {
+            logger.warn("Invalid result object.");
+            return;
+        }
+        logger.debug("addData: Data added.");
+        results = (SedimentLoadResult[])d;
+    }
+
+    /** Write PDF to outputstream (not implemented yet). */
+    @Override
+    protected void writePDF(OutputStream out) {
+        logger.warn("Not implemented.");
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java	Wed Aug 07 18:53:15 2013 +0200
@@ -36,10 +36,12 @@
 import org.dive4elements.river.utils.DataUtil;
 
 
+/** Generator for Longitudinal Sections of SedimentLoad-Calculations. */
 public class SedimentLoadLSGenerator
 extends LongitudinalSectionGenerator
 implements FacetTypes
 {
+    /** Y-Axis enum defining the four possible axes. */
     public enum YAXIS {
         L(0),
         D(1),
@@ -71,8 +73,7 @@
     public static final String I18N_YAXIS_D_LABEL_DEFAULT = "delta S [m]";
     public static final String I18N_YAXIS_V_LABEL_DEFAULT = "Geschwindigkeit v [m/s]";
 
-    private String yLabel = "";
-
+    /** Enumerator over y-axes. */
     @Override
     protected YAxisWalker getYAxisWalker() {
         return new YAxisWalker() {
@@ -104,14 +105,6 @@
         Facet facet = bundle.getFacet();
         D4EArtifact artifact = (D4EArtifact)bundle.getArtifact();
 
-        SedimentLoadAccess slaccess = new SedimentLoadAccess(artifact);
-        String unit = slaccess.getUnit();
-        if (unit != null && unit.equals("m3_per_a")) {
-            yLabel = msg(I18N_YAXIS_LABEL_2, I18N_YAXIS_LABEL_DEFAULT_2);
-        }
-        else {
-            yLabel = msg(I18N_YAXIS_LABEL_1, I18N_YAXIS_LABEL_DEFAULT_1);
-        }
         if (facet == null) {
             return;
         }
@@ -235,7 +228,15 @@
     protected String getDefaultYAxisLabel(int pos) {
         String label = "default";
         if (pos == YAXIS.L.idx) {
-            label = yLabel;
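+            // The label depends on the unit configured in the master artifact.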
+            SedimentLoadAccess slaccess =
+                new SedimentLoadAccess((D4EArtifact) getMaster());
+            String unit = slaccess.getUnit();
+            if (unit != null && unit.equals("m3_per_a")) {
+                label = msg(I18N_YAXIS_LABEL_2, I18N_YAXIS_LABEL_DEFAULT_2);
+            }
+            else {
+                label = msg(I18N_YAXIS_LABEL_1, I18N_YAXIS_LABEL_DEFAULT_1);
+            }
         }
         else if (pos == YAXIS.V.idx) {
             label = msg(I18N_YAXIS_V_LABEL, I18N_YAXIS_V_LABEL_DEFAULT);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/process/BedDiffHeightYearProcessor.java	Wed Aug 07 18:53:15 2013 +0200
@@ -0,0 +1,70 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.exports.process;
+
+import org.apache.log4j.Logger;
+import org.jfree.data.xy.XYSeries;
+import org.w3c.dom.Document;
+
+import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
+import org.dive4elements.artifacts.CallContext;
+import org.dive4elements.river.artifacts.model.FacetTypes;
+import org.dive4elements.river.artifacts.model.minfo.BedDiffYearResult;
+import org.dive4elements.river.exports.StyledSeriesBuilder;
+import org.dive4elements.river.exports.XYChartGenerator;
+import org.dive4elements.river.jfree.StyledXYSeries;
+
+
+public class BedDiffHeightYearProcessor implements Processor, FacetTypes {
+
+    private final static Logger logger =
+            Logger.getLogger(BedDiffHeightYearProcessor.class);
+
+    @Override
+    public void doOut(
+            XYChartGenerator generator,
+            ArtifactAndFacet aandf,
+            Document theme,
+            boolean visible,
+            int index
+    ) {
+        CallContext context = generator.getCallContext();
+        Object data = aandf.getData(context);
+        if (data instanceof BedDiffYearResult) {
+            doBedDifferenceYearOut(
+               generator,
+               (BedDiffYearResult) data,
+               aandf, theme, visible, index);
+            return;
+        }
+        logger.error("Can't process " + data.getClass().getName() + " objects");
+    }
+
+    @Override
+    public boolean canHandle(String facetType) {
+        return BED_DIFFERENCE_HEIGHT_YEAR.equals(facetType) // from BedDiffYearHeight
+            || BED_DIFFERENCE_HEIGHT_YEAR_FILTERED.equals(facetType) // from BedDiffYearHeight
+            || BED_DIFFERENCE_YEAR.equals(facetType) // from BedDifferencesYear
+            || BED_DIFFERENCE_YEAR_FILTERED.equals(facetType); // from BedDifferencesYear
+    }
+
+    protected void doBedDifferenceYearOut(XYChartGenerator generator,
+        BedDiffYearResult data,
+        ArtifactAndFacet aandf,
+        Document theme,
+        boolean visible,
+        int axidx) {
+
+        XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme);
+        StyledSeriesBuilder.addPoints(series, data.getHeightPerYearData(), true);
+
+        generator.addAxisSeries(series, axidx, visible);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/process/BedDiffYearProcessor.java	Wed Aug 07 18:53:15 2013 +0200
@@ -0,0 +1,83 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.exports.process;
+
+import org.apache.log4j.Logger;
+import org.jfree.data.xy.XYSeries;
+import org.w3c.dom.Document;
+
+import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
+import org.dive4elements.artifacts.CallContext;
+import org.dive4elements.river.artifacts.model.FacetTypes;
+import org.dive4elements.river.artifacts.model.minfo.BedDiffYearResult;
+import org.dive4elements.river.exports.StyledSeriesBuilder;
+import org.dive4elements.river.exports.XYChartGenerator;
+import org.dive4elements.river.jfree.StyledXYSeries;
+
+
+public class BedDiffYearProcessor implements Processor, FacetTypes {
+
+    private final static Logger logger =
+            Logger.getLogger(BedDiffYearProcessor.class);
+
+    @Override
+    public void doOut(
+            XYChartGenerator generator,
+            ArtifactAndFacet aandf,
+            Document theme,
+            boolean visible,
+            int axidx
+    ) {
+        CallContext context = generator.getCallContext();
+        Object data = aandf.getData(context);
+        if (data instanceof BedDiffYearResult) {
+            String facetType = aandf.getFacetName();
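+            // HEIGHT1 facets map to the first height column, all others
+            // to the second.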
+            int index =
+                (BED_DIFFERENCE_YEAR_HEIGHT1.equals(facetType)
+                || BED_DIFFERENCE_YEAR_HEIGHT1_FILTERED.equals(facetType)) ? 0 : 1;
+            doBedDifferenceHeightsOut(
+               generator,
+               (BedDiffYearResult) data,
+               aandf, theme, visible, index, axidx);
+            return;
+        }
+        logger.error("Can't process " + data.getClass().getName() + " objects");
+    }
+
+    @Override
+    public boolean canHandle(String facetType) {
+        return
+               BED_DIFFERENCE_YEAR_HEIGHT1.equals(facetType)
+            || BED_DIFFERENCE_YEAR_HEIGHT2.equals(facetType)
+            || BED_DIFFERENCE_YEAR_HEIGHT1_FILTERED.equals(facetType)
+            || BED_DIFFERENCE_YEAR_HEIGHT2_FILTERED.equals(facetType);
+    }
+
+    private void doBedDifferenceHeightsOut(
+        XYChartGenerator generator,
+        BedDiffYearResult data,
+        ArtifactAndFacet bundle,
+        Document attr,
+        boolean visible,
+        int idx,
+        int axidx) {
+        logger.debug("doBedDifferenceHeightsOut()");
+
+        XYSeries series = new StyledXYSeries(bundle.getFacetDescription(), attr);
+        if (idx == 0) {
+            StyledSeriesBuilder.addPoints(series, data.getHeights1Data(), true);
+        }
+        else {
+            StyledSeriesBuilder.addPoints(series, data.getHeights2Data(), true);
+        }
+
+        generator.addAxisSeries(series, axidx, visible);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/process/KMIndexProcessor.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/process/KMIndexProcessor.java	Wed Aug 07 18:53:15 2013 +0200
@@ -148,3 +148,4 @@
         generator.addAxisDataset(col, idx, visible);
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/process/WOutProcessor.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/process/WOutProcessor.java	Wed Aug 07 18:53:15 2013 +0200
@@ -69,7 +69,13 @@
             generator.addAreaSeries(area, index, visible);
         }
 
-        invertAxis(generator, wkms);
+        if (aaf.getFacetName().equals(FacetTypes.LONGITUDINAL_W) ||
+            aaf.getFacetName().equals(FacetTypes.DISCHARGE_LONGITUDINAL_W) ||
+            aaf.getFacetName().equals(FacetTypes.STATIC_WQKMS_W)) {
+            /* Only use W values to check whether the diagram should be
+             * inverted; see flys/issue1290 for details. */
+            invertAxis(generator, wkms);
+        }
     }
 
     /**
@@ -104,9 +110,8 @@
     public void invertAxis(XYChartGenerator generator, WKms wkms) {
         boolean wsUp = wkms.guessWaterIncreasing();
         boolean kmUp = DataUtil.guessWaterIncreasing(wkms.allKms());
-        boolean inv = (wsUp && kmUp) || (!wsUp && !kmUp);
-
         int size = wkms.size();
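+        // Invert only if water level and km run in the same direction and
+        // there is more than one km.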
+        boolean inv = ((wsUp && kmUp) || (!wsUp && !kmUp)) && size > 1;
 
         if (logger.isDebugEnabled()) {
             logger.debug("(Wkms)Values  : " + size);
@@ -116,6 +121,9 @@
             }
             logger.debug("wsUp: " + wsUp);
             logger.debug("kmUp: " + kmUp);
+            if (size == 1) {
+                logger.debug("InvertAxis not inverting because we have just one km");
+            }
             logger.debug("inv:  " + inv);
         }
         generator.setInverted(inv);
--- a/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java	Wed Aug 07 18:53:15 2013 +0200
@@ -135,7 +135,7 @@
     }
 
     /**
-     * Returns a formatter in engineering notation
+     * Returns a formatter in engineering notation.
      */
     public static NumberFormat getEngFormatter(CallContext c) {
         NumberFormat nf = getRawFormatter(c);
@@ -147,7 +147,7 @@
     }
 
     /**
-     * Returns a number formatter that uses an exponent after max digits
+     * Returns a number formatter that uses an exponent after max digits.
      */
     public static NumberFormat getScientificFormater(CallContext c, int min, int max) {
         NumberFormat nf = getRawFormatter(c);
--- a/artifacts/src/main/java/org/dive4elements/river/utils/RiverUtils.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/utils/RiverUtils.java	Wed Aug 07 18:53:15 2013 +0200
@@ -178,7 +178,7 @@
 
 
     /**
-     * This method returns an WQ_MODE enum which is based on the parameters
+     * Returns a WQ_MODE enum based on the parameters
      * stored in <i>flys</i> Artifact. If there is no <i>wq_isq</i> parameter
      * existing, WQ_MODE.NONE is returned.
      *
@@ -340,7 +340,7 @@
             return ((WINFOArtifact) flys).getQs();
         }
 
-        logger.warn("This method currently supports WINFOArtifact only!");
+        logger.warn("This method (getQs) currently supports WINFOArtifact only!");
 
         return null;
     }
@@ -360,7 +360,7 @@
             return ((WINFOArtifact) flys).getWs();
         }
 
-        logger.warn("This method currently supports WINFOArtifact only!");
+        logger.warn("This method (getWs) currently supports WINFOArtifact only!");
 
         return null;
     }
@@ -697,7 +697,7 @@
 
 
     /**
-     * This method returns the description for a given <i>km</i> for a specific
+     * Returns the description for a given <i>km</i> for a specific
      * river. The river is provided by the D4EArtifact <i>flys</i>.
      *
      * @param flys The D4EArtifact that provides a river.
@@ -718,7 +718,7 @@
 
 
     /**
-     * This method returns the differences for a w-differences calculation.
+     * Returns the differences for a w-differences calculation.
      *
      * @param winfo The WINFOArtifact.
      * @param context The context.
@@ -806,7 +806,7 @@
 
 
     /**
-     * This method transform a string into an int array. Therefore, the string
+     * Transforms a string into an int array. The string
      * <i>raw</i> must consist of int values separated by a <i>';'</i>.
      *
      * @param raw The raw integer array as string separated by a ';'.
@@ -837,7 +837,7 @@
 
 
     /**
-     * This method transform a string into a long array. Therefore, the string
+     * Transforms a string into a long array. The string
      * <i>raw</i> must consist of int values separated by a <i>';'</i>.
      *
      * @param raw The raw long array as string separated by a ';'.
@@ -868,7 +868,7 @@
 
 
     /**
-     * This method transform a string into an double array. Therefore, the
+     * Transforms a string into a double array. The
      * string <i>raw</i> must consist of double values separated by a
      * <i>';'</i>.
      *
--- a/artifacts/src/main/resources/datacage-sql/org-h2-driver.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/resources/datacage-sql/org-h2-driver.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -17,6 +17,7 @@
 insert.out = INSERT INTO outs (id, artifact_id, name, description, out_type) VALUES (?, ?, ?, ?, ?)
 facet.id.nextval = SELECT NEXTVAL('FACETS_ID_SEQ')
 insert.facet = INSERT INTO facets (id, out_id, name, num, state, description) VALUES (?, ?, ?, ?, ?, ?)
+update.artifact.state = UPDATE artifacts SET state = ? WHERE gid = ?
 
 update.collection.name = UPDATE collections SET name = ? WHERE gid = ?
 delete.artifact.from.collection = DELETE FROM collection_items WHERE collection_id = ? AND artifact_id = ?
--- a/artifacts/src/main/resources/messages.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/resources/messages.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Selection of discharge state and channel
 state.minfo.sq.location=Location
 state.minfo.sq.period=Periods
-state.minfo.sq.outliers=Outliers
+state.minfo.sq.outliers=Tolerance
 state.minfo.sq.outlier-method=Outliertest
 state.minfo.bed.year_epoch=Year/Epoch
 state.minfo.bed.difference_select=Differences
@@ -384,6 +384,15 @@
 export.bedheight_middle.csv.header.soundingwidth = Sounding Width [m]
 export.bedheight_middle.csv.header.width = morphological active width [m]
 export.bedheight_middle.csv.header.locations = Location
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = year
+export.sedimentload_ls.csv.header.coarse = coarse
+export.sedimentload_ls.csv.header.finemiddle = finemiddle
+export.sedimentload_ls.csv.header.sand = sand
+export.sedimentload_ls.csv.header.suspsand = susp. sand
+export.sedimentload_ls.csv.header.suspsediment = susp. sediment
+export.sedimentload_ls.csv.header.suspsandbb = susp. sand (BB)
+export.sedimentload_ls.csv.header.total = total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = River-Km
--- a/artifacts/src/main/resources/messages_de.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/resources/messages_de.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Abflusszustand und Gerinne
 state.minfo.sq.location=Ort
 state.minfo.sq.period=Zeitraum
-state.minfo.sq.outliers=Ausrei\u00dfer
+state.minfo.sq.outliers=Toleranz
 state.minfo.sq.outlier-method=Ausrei\u00dfertest
 state.minfo.bed.year_epoch=Jahr/Epoche
 state.minfo.bed.difference_select=Differenzen
@@ -330,7 +330,7 @@
 export.waterlevel.csv.meta.gauge = # Bezugspegel: {0}
 export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
 export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
-export.waterlevel.csv.not.in.gauge.range = au\u00dferhalb gew\u00e4hlter Bezugspegels
+export.waterlevel.csv.not.in.gauge.range = au\u00dferh. d. Bez.pegels
 export.computed.discharge.curve.csv.header.w = W [{0}]
 export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
 export.duration.curve.csv.header.duration = D [Tagen]
@@ -384,6 +384,15 @@
 export.bedheight_middle.csv.header.soundingwidth = Peilbreite [m]
 export.bedheight_middle.csv.header.width = morphologisch aktive Breite [m]
 export.bedheight_middle.csv.header.locations = Streckendaten
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = Jahr
+export.sedimentload_ls.csv.header.coarse = Kies(g)
+export.sedimentload_ls.csv.header.finemiddle = Kies(f+m)
+export.sedimentload_ls.csv.header.sand = Sand
+export.sedimentload_ls.csv.header.suspsand = susp.Sand
+export.sedimentload_ls.csv.header.suspsandbb = susp.Sand(BB)
+export.sedimentload_ls.csv.header.suspsediment = Schwebst.
+export.sedimentload_ls.csv.header.total = Gesamt
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = Fluss-Km
--- a/artifacts/src/main/resources/messages_de_DE.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/resources/messages_de_DE.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Abflusszustand und Gerinne
 state.minfo.sq.location=Ort
 state.minfo.sq.period=Zeitraum
-state.minfo.sq.outliers=Ausrei\u00dfer
+state.minfo.sq.outliers=Toleranz
 state.minfo.sq.outlier-method=Ausrei\u00dfertest
 state.minfo.bed.year_epoch=Jahr/Epoche
 state.minfo.bed.difference_select=Differenzen
@@ -328,7 +328,7 @@
 export.waterlevel.csv.meta.gauge = # Bezugspegel: {0}
 export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
 export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
-export.waterlevel.csv.not.in.gauge.range = au\u00dferhalb gew\u00e4hlter Bezugspegels
+export.waterlevel.csv.not.in.gauge.range = au\u00dferh. d. Bez.pegels
 export.computed.discharge.curve.csv.header.w = W [{0}]
 export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
 export.duration.curve.csv.header.duration = D [Tagen]
@@ -381,6 +381,15 @@
 export.bedheight_middle.csv.header.soundingwidth = Peilbreite [m]
 export.bedheight_middle.csv.header.width = morphologisch aktive Breite [m]
 export.bedheight_middle.csv.header.locations = Streckendaten
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = Jahr
+export.sedimentload_ls.csv.header.coarse = Kies(g)
+export.sedimentload_ls.csv.header.finemiddle = Kies(f+m)
+export.sedimentload_ls.csv.header.sand = Sand
+export.sedimentload_ls.csv.header.suspsand = susp.Sand
+export.sedimentload_ls.csv.header.suspsandbb = susp.Sand(BB)
+export.sedimentload_ls.csv.header.suspsediment = Schwebst.
+export.sedimentload_ls.csv.header.total = Gesamt
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = Fluss-Km
--- a/artifacts/src/main/resources/messages_en.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/artifacts/src/main/resources/messages_en.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -48,7 +48,7 @@
 state.minfo.dischargestate = Selection of discharge state and channel
 state.minfo.sq.location=Location
 state.minfo.sq.period=Periods
-state.minfo.sq.outliers=Outliers
+state.minfo.sq.outliers=Tolerance
 state.minfo.sq.outlier-method=Outliertest
 state.minfo.bed.year_epoch=Year/Epoch
 state.minfo.bed.difference_select=Differences
@@ -386,6 +386,15 @@
 export.bedheight_middle.csv.header.soundingwidth = Sounding Width [m]
 export.bedheight_middle.csv.header.width = morphological active width [m]
 export.bedheight_middle.csv.header.locations = Location
+export.sedimentload_ls.csv.header.km = km
+export.sedimentload_ls.csv.header.year = year
+export.sedimentload_ls.csv.header.coarse = coarse
+export.sedimentload_ls.csv.header.finemiddle = finemiddle
+export.sedimentload_ls.csv.header.sand = sand
+export.sedimentload_ls.csv.header.suspsandbb = susp. sand (BB)
+export.sedimentload_ls.csv.header.suspsand = susp. sand
+export.sedimentload_ls.csv.header.suspsediment = susp. sediment
+export.sedimentload_ls.csv.header.total = total
 export.sqrelation.csv.header.parameter = Parameter
 export.sqrelation.csv.header.station = Station
 export.sqrelation.csv.header.km = River-Km
--- a/backend/doc/schema/oracle-minfo.sql	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/doc/schema/oracle-minfo.sql	Wed Aug 07 18:53:15 2013 +0200
@@ -260,6 +260,7 @@
     unit_id             NUMBER(38,0) NOT NULL,
     time_interval_id    NUMBER(38,0) NOT NULL,
     description         VARCHAR(256),
+    kind                NUMBER(38,0),
     PRIMARY KEY (id),
     CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
--- a/backend/doc/schema/postgresql-minfo.sql	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/doc/schema/postgresql-minfo.sql	Wed Aug 07 18:53:15 2013 +0200
@@ -260,6 +260,7 @@
     unit_id             int NOT NULL,
     time_interval_id    int NOT NULL,
     description         VARCHAR(256),
+    kind                int,
     PRIMARY KEY (id),
     CONSTRAINT fk_sy_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
     CONSTRAINT fk_sy_grain_fraction_id FOREIGN KEY (grain_fraction_id) REFERENCES grain_fraction(id),
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Wed Aug 07 18:53:15 2013 +0200
@@ -109,6 +109,8 @@
 
     public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";
 
+    public static final String SEDIMENT_YIELD_OFF_EPOCH_DIR = "amtliche Epochen";
+
     public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";
 
     public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";
@@ -502,6 +504,22 @@
     }
 
 
+    private void parseSedimentYieldDir(
+        File[] files,
+        SedimentYieldParser parser
+    ) throws IOException {
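+        // Parse every file; directories are descended one level and their
+        // children parsed.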
+        for (File file: files) {
+            if (file.isDirectory()) {
+                for (File child: file.listFiles()) {
+                    parser.parse(child);
+                }
+            }
+            else {
+                parser.parse(file);
+            }
+        }
+    }
+
     protected void parseSedimentYield() throws IOException {
         if (Config.INSTANCE.skipSedimentYield()) {
             log.info("skip parsing sediment yield data");
@@ -513,11 +531,13 @@
         File minfoDir         = getMinfoDir();
         File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);
 
-        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
-        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+        File singleDir   = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
+        File epochDir    = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
+        File offEpochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_OFF_EPOCH_DIR);
 
-        File[] singles = singleDir.listFiles();
-        File[] epochs  = epochDir.listFiles();
+        File[] singles   = singleDir.listFiles();
+        File[] epochs    = epochDir.listFiles();
+        File[] offEpochs = offEpochDir.listFiles();
 
         SedimentYieldParser parser = new SedimentYieldParser();
 
@@ -525,32 +545,21 @@
             log.warn("Cannot read directory '" + singleDir + "'");
         }
         else {
-            for (File file: singles) {
-                if (file.isDirectory()) {
-                    for (File child: file.listFiles()) {
-                        parser.parse(child);
-                    }
-                }
-                else {
-                    parser.parse(file);
-                }
-            }
+            parseSedimentYieldDir(singles, parser);
         }
 
         if (epochs == null || epochs.length == 0) {
             log.warn("Cannot read directory '" + epochDir + "'");
         }
         else {
-            for (File file: epochs) {
-                if (file.isDirectory()) {
-                    for (File child: file.listFiles()) {
-                        parser.parse(child);
-                    }
-                }
-                else {
-                    parser.parse(file);
-                }
-            }
+            parseSedimentYieldDir(epochs, parser);
+        }
+
+        if (offEpochs == null || offEpochs.length == 0) {
+            log.warn("Cannot read directory '" + offEpochDir + "'");
+        }
+        else {
+            parseSedimentYieldDir(offEpochs, parser);
         }
 
         sedimentYields = parser.getSedimentYields();
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentYield.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentYield.java	Wed Aug 07 18:53:15 2013 +0200
@@ -35,6 +35,8 @@
 
     private String description;
 
+    private Integer kind;
+
     private List<ImportSedimentYieldValue> values;
 
     private SedimentYield peer;
@@ -56,6 +58,10 @@
         this.grainFraction = grainFraction;
     }
 
+    public void setKind(Integer kind) {
+        this.kind = kind;
+    }
+
     public void addValue(ImportSedimentYieldValue value) {
         this.values.add(value);
     }
@@ -116,6 +122,7 @@
                 log.debug("create new SedimentYield");
 
                 peer = new SedimentYield(river, u, ti, gf, description);
+                peer.setKind(this.kind);
                 session.save(peer);
             }
             else {
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentYieldParser.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentYieldParser.java	Wed Aug 07 18:53:15 2013 +0200
@@ -29,6 +29,7 @@
 import org.dive4elements.river.model.GrainFraction;
 
 
+/** Parses Sediment Yield files. */
 public class SedimentYieldParser extends LineParser {
 
     private static final Logger log =
@@ -241,15 +242,27 @@
     }
 
 
+    /** Initializes SedimentYields from columns and sets the kind
+     * depending on the file location (official epoch or not). */
     private void initializeSedimentYields() {
         // skip first column (Fluss-km) and last column (Hinweise)
         current = new ImportSedimentYield[columnNames.length-2];
 
+        Integer kind;
+
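+        // Kind 1 marks loads imported from the "amtliche Epochen"
+        // (official epochs) directory, kind 0 all others.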
+        if (inputFile.getAbsolutePath().contains("amtliche Epochen")) {
+            kind = Integer.valueOf(1);
+        }
+        else {
+            kind = Integer.valueOf(0);
+        }
+
         for (int i = 0, n = columnNames.length; i < n-2; i++) {
             current[i] = new ImportSedimentYield(this.description);
             current[i].setTimeInterval(getTimeInterval(columnNames[i+1]));
             current[i].setUnit(unit);
             current[i].setGrainFraction(grainFraction);
+            current[i].setKind(kind);
         }
     }
 
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/W80CSVParser.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/W80CSVParser.java	Wed Aug 07 18:53:15 2013 +0200
@@ -192,8 +192,9 @@
 
     /**
      * Add a Point (YZ,Index) to the current cross section line.
-     * @param y The y coordinate of new point.
-     * @param z The z coordinate of new point.
+     * @param y The y coordinate of the new point in GK.
+     * @param z The z coordinate of the new point in GK.
+     * @param height The height (third coordinate) of the point, in meters.
      * @param idx Ignored, the parameter of new point.
      * @return true if point could been added, false otherwise (e.g. not
      *         parsable y or z values.
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java	Wed Aug 07 18:53:15 2013 +0200
@@ -199,9 +199,12 @@
     }
 
 
-    /** Handle one line of data, add one value for all columns. */
+    /** Handle one line of data, adding one value per column.
+     * @param line the line to parse
+     */
     private void handleDataLine(String line) {
-        String[] cols = line.split(SEPERATOR_CHAR);
+        // Split by separator; do not discard trailing empty strings.
+        String[] cols = line.split(SEPERATOR_CHAR, -1);
 
         if (cols == null || cols.length < 2) {
             log.warn("skip invalid waterlevel-diff line: '" + line + "'");
@@ -209,6 +212,8 @@
         }
 
         try {
+            // In a line like "12,9;4,3;4,5" the first value is the station,
+            // the remaining ones are the actual data values.
             Double station = nf.parse(cols[0]).doubleValue();
 
             for (int i = 0; i < columns.length; i++) {
@@ -221,13 +226,15 @@
 
                 String value = cols[idx];
 
-                try {
-                    columns[i].addColumnValue(
-                        new BigDecimal(station),
-                        new BigDecimal(nf.parse(value).doubleValue()));
-                }
-                catch (ParseException pe) {
-                    log.warn("Could not parse value: '" + value + "'");
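+                // Skip empty columns; since trailing empty strings are kept
+                // when splitting, a column value may be empty.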
+                if (value != null && !value.equals("")) {
+                    try {
+                        columns[i].addColumnValue(
+                            new BigDecimal(station),
+                            new BigDecimal(nf.parse(value).doubleValue()));
+                    }
+                    catch (ParseException pe) {
+                        log.warn("Could not parse value: '" + value + "'");
+                    }
                 }
             }
         }
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java	Wed Aug 07 18:53:15 2013 +0200
@@ -85,6 +85,7 @@
         this.wst = wst;
     }
 
+    /** Returns a new ImportTimeInterval with a date guessed from string. */
     public static ImportTimeInterval guessDate(String string) {
         try {
             Matcher m = YEAR_INTERVAL.matcher(string);
--- a/backend/src/main/java/org/dive4elements/river/model/CrossSection.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/CrossSection.java	Wed Aug 07 18:53:15 2013 +0200
@@ -38,11 +38,16 @@
 
 import org.dive4elements.river.backend.SessionHolder;
 
+import org.apache.log4j.Logger;
+
 @Entity
 @Table(name = "cross_sections")
 public class CrossSection
 implements   Serializable
 {
+    private static Logger logger =
+        Logger.getLogger(CrossSection.class);
+
     public static final MathContext PRECISION = new MathContext(6);
 
     public static final String SQL_FAST_CROSS_SECTION_LINES =
@@ -53,6 +58,20 @@
         "km between :from_km AND :to_km " +
         "ORDER BY csl.km, csl.id, csp.col_pos";
 
+    public static final String SQL_MIN_MAX =
+        "SELECT * FROM ( "+
+            "SELECT cross_section_id, MIN(km) AS minkm, MAX(km) AS maxkm " +
+            "FROM cross_section_lines " +
+            "WHERE cross_section_id IN " +
+            " (SELECT id FROM cross_sections WHERE river_id = :river_id) " +
+            "  GROUP BY cross_section_id" +
+        ") cs_ranges " +
+        "JOIN cross_sections cs ON cs_ranges.cross_section_id = cs.id " +
+        "LEFT OUTER JOIN time_intervals ON cs.time_interval_id = time_intervals.id " +
+        "WHERE :km BETWEEN minkm AND maxkm " +
+        "ORDER BY stop_time desc, start_time asc, :km - minkm";
+    // Order by time interval missing.
+
     private Integer                id;
     private River                  river;
     private TimeInterval           timeInterval;
@@ -203,5 +222,31 @@
 
         return lines;
     }
+
+    /**
+     * True if the given section is the "newest" for that river and has values at km.
+     * @param km Given station.
+     * @return true if the section has the most advanced end, or the most
+     *         advanced start, of its validity interval.
+     */
+    public boolean shouldBeMaster(double km) {
+        Session session = SessionHolder.HOLDER.get();
+
+        SQLQuery sqlQuery = session.createSQLQuery(SQL_MIN_MAX)
+            .addScalar("cross_section_id", StandardBasicTypes.INTEGER);
+
+        sqlQuery
+            .setInteger("river_id", getRiver().getId())
+            .setDouble("km", km);
+
+        List<Integer> results = sqlQuery.list();
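+        // The query yields the ids of all cross sections of this river
+        // whose km range contains the given station.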
+
+        for (Integer result: results) {
+            if (result.equals(getId())) {
+                return true;
+            }
+        }
+        return false;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/model/DischargeTable.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/DischargeTable.java	Wed Aug 07 18:53:15 2013 +0200
@@ -24,6 +24,10 @@
 import javax.persistence.SequenceGenerator;
 import javax.persistence.Table;
 
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.dive4elements.river.backend.SessionHolder;
+
 @Entity
 @Table(name = "discharge_tables")
 public class DischargeTable
@@ -197,5 +201,16 @@
             return 0;
         }
     }
+
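+    /** Fetches a DischargeTable by its database id, or null if none is found. */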
+    public static DischargeTable getDischargeTableById(int dtId)
+    {
+        Session session = SessionHolder.HOLDER.get();
+        Query query = session.createQuery(
+            "from DischargeTable where id =:dtId");
+        query.setParameter("dtId", dtId);
+
+        List<DischargeTable> list = query.list();
+        return list.isEmpty() ? null : list.get(0);
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/model/SedimentYield.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/SedimentYield.java	Wed Aug 07 18:53:15 2013 +0200
@@ -25,6 +25,7 @@
 import org.apache.log4j.Logger;
 
 
+/** SedimentYield of a certain Fraction with possibly many values. */
 @Entity
 @Table(name = "sediment_yield")
 public class SedimentYield
@@ -46,6 +47,8 @@
 
     private List<SedimentYieldValue> values;
 
+    private Integer kind;
+
 
     public SedimentYield() {
         this.values = new ArrayList<SedimentYieldValue>();
@@ -149,5 +152,15 @@
     public void setDescription(String description) {
         this.description = description;
     }
+
+    /** kind == 0: "normal", kind == 1: "official epoch". */
+    @Column(name = "kind")
+    public Integer getKind() {
+        return kind;
+    }
+
+    public void setKind(Integer newKind) {
+        this.kind = newKind;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
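Note: The new kind column tells "normal" sediment yields (kind == 0) apart from "official epochs" (kind == 1), so queries can filter on it. A minimal HQL sketch under that assumption; the query string and surrounding variables are illustrative, not taken from this changeset:

    // Fetch only the "official epoch" sediment yields.
    Session session = SessionHolder.HOLDER.get();
    Query query = session.createQuery("from SedimentYield where kind = 1");
    List<SedimentYield> officialEpochs = query.list();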
--- a/backend/src/main/java/org/dive4elements/river/model/Wst.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/Wst.java	Wed Aug 07 18:53:15 2013 +0200
@@ -33,6 +33,7 @@
 import org.dive4elements.river.backend.SessionHolder;
 
 
+/** DB-mapped WST. */
 @Entity
 @Table(name = "wsts")
 public class Wst
--- a/contrib/make_flys_release/make_release.sh	Fri Jun 28 21:08:23 2013 +0200
+++ b/contrib/make_flys_release/make_release.sh	Wed Aug 07 18:53:15 2013 +0200
@@ -367,6 +367,11 @@
 mv $WORK_DIR/datacagedb $WORK_DIR/server/
 
 echo "INFO: create tarball"
+if [ -d "$WORK_DIR/flys-$VERSION" ]; then
+    echo "INFO: removing old directory"
+    rm -rf "$WORK_DIR/flys-$VERSION"
+fi
+
 mkdir $WORK_DIR/flys-$VERSION
 mv $WORK_DIR/server $WORK_DIR/client $WORK_DIR/flys-$VERSION
 cd $WORK_DIR
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java	Wed Aug 07 18:53:15 2013 +0200
@@ -824,6 +824,12 @@
 
     String waterlevels();
 
+    String beddifferences();
+
+    String bedheight_differences();
+
+    String vollmer_waterlevels();
+
     String old_calculations();
 
     String officiallines();
@@ -1305,5 +1311,9 @@
     String FEDSTATE_KM();
 
     String official_regulation();
+
+    String historical_discharge_curves();
+
+    String current_gauge();
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -220,6 +220,7 @@
 computed_discharge_curves = Discharge Curves
 longitudinal_section = Longitudinal Section Curve
 duration_curve = Duration Curve
+historical_discharge_curves = Historical Discharge Curve
 discharge_longitudinal_section = Discharge Longitudinal Section
 floodmap = Floodmap
 historical_discharge = Time-Chart
@@ -278,6 +279,7 @@
 pdf = PDF
 computed_dischargecurve_at_export = Discharge Curve Export
 gauge_discharge_curve_at_export = Gauge Discharge Curve
+current_gauge = Current Gauge Discharge Curve
 gauge_class = Gauge Class
 eventselect = Eventselection
 events = Events
@@ -423,6 +425,9 @@
 
 # data cage
 waterlevels = Waterlevels
+beddifferences = Bedheight Differences
+bedheight_differences = Bedheight Differences
+vollmer_waterlevels = Vollmer Waterlevels
 old_calculations = Former Calculations
 officiallines = Official lines
 datacageAdd = Add data
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -216,8 +216,10 @@
 discharge_curve = Abflusskurve am Pegel
 discharge_curve_gaugeless = Abflusskurve
 gauge_discharge_curve = Abflusstafel am Pegel
+current_gauge = aktuelle Abflusstafel am Pegel
 computed_discharge_curve = Abflusskurve
 computed_discharge_curves = Abflusskurven
+historical_discharge_curves = Historische Abflusskurven
 longitudinal_section = L\u00e4ngsschnitt
 duration_curve = Dauerlinie
 discharge_longitudinal_section = W f\u00fcr benutzerdefinierten Abflussl\u00e4ngsschnitt
@@ -427,6 +429,9 @@
 
 # data cage
 waterlevels = Wasserst\u00e4nde
+beddifferences = Sohlh\u00f6hendifferenzen
+bedheight_differences = Sohlh\u00f6hendifferenzen
+vollmer_waterlevels = Ausgelagerte WSPL.
 old_calculations = Fr\u00fchere Berechnungen
 officiallines = Amtliche Linien
 datacageAdd = Daten laden
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_en.properties	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_en.properties	Wed Aug 07 18:53:15 2013 +0200
@@ -213,6 +213,7 @@
 static_sqrelation = SQ relation
 discharge_curve = Discharge Curve at Gauge
 discharge_curve_gaugeless = Discharge Curve
+current_gauge = Current Gauge Discharge Curve
 gauge_discharge_curve = Discharge Table at Gauge
 computed_discharge_curve = Discharge Curve
 computed_discharge_curves = Discharge Curves
@@ -221,6 +222,7 @@
 discharge_longitudinal_section = Discharge Longitudinal Section
 floodmap = Floodmap
 historical_discharge = Time Chart
+historical_discharge_curves = Historical Discharge Curve
 historical_discharge_wq = W/Q Chart
 flow_velocity_longitudinal_section = Flow Velocity
 flow_velocity_export = Flow Velocity Export
@@ -425,6 +427,9 @@
 
 # data cage
 waterlevels = Waterlevels
+beddifferences = Bedheight Differences
+bedheight_differences = Bedheight Differences
+vollmer_waterlevels = Vollmer Waterlevels
 old_calculations = Former Calculations
 officiallines = Official lines
 datacageAdd = Add data
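Note: The new keys in FLYSConstants.java and the three .properties files are wired together by GWT's static i18n mechanism: the interface is instantiated via deferred binding and each method returns the value of the key with the same name from the locale-specific properties file. Standard GWT usage sketch, not code added by this changeset:

    // Deferred-binding lookup; beddifferences() resolves to
    // "Bedheight Differences" (en) or "Sohlh\u00f6hendifferenzen" (de).
    FLYSConstants MSG = GWT.create(FLYSConstants.class);
    String datacageTitle = MSG.beddifferences();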
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/CollectionView.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/CollectionView.java	Wed Aug 07 18:53:15 2013 +0200
@@ -393,7 +393,8 @@
     }
 
     /**
-     * Loads all information of a collection
+     * Loads all information of a collection.
+     * If 'recommendations' are present, they are loaded as well.
      * @param c the Collection
      */
     private void loadCollection(Collection c) {
@@ -641,7 +642,7 @@
             );
         }
         else {
-            // Create new collection and add artifact
+            // Create new collection and add artifact.
             final Artifact art = artifact;
             createCollectionService.create(
                 locale,
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/DatacageTwinPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/DatacageTwinPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -175,7 +175,7 @@
         VLayout layout       = new VLayout();
         HLayout helperLayout = new HLayout();
         helperLayout.addMember(new DatacagePairWidget(this.artifact,
-            user, "waterlevels", differencesList));
+            user, "differenceable", differencesList));
 
         layout.addMember(widget);
         layout.addMember(submit);
@@ -277,6 +277,29 @@
         this.removedPairs.add(pr);
     }
 
+    /**
+     * Set factory of recommendation such that the correct artifacts will
+     * be cloned for difference calculations.
+     */
+    public void adjustRecommendation(Recommendation recommendation) {
+        if (recommendation.getIDs() != null) {
+            GWT.log("Setting staticwkms factory for rec with ID "
+                + recommendation.getIDs());
+            recommendation.setFactory("staticwkms");
+        }
+        /*
+        // So far, we do not need to rewrite the factory anymore,
+        // except for staticwkms; probably other cases will pop up later.
+        else if (recommendation.getFactory().equals("winfo")) {
+            GWT.log("Setting waterlevel factory for a winfo rec.");
+            recommendation.setFactory("waterlevel");
+        }
+        */
+        else {
+           GWT.log("Leave rec. id " + recommendation.getIDs() + ", factory "
+               + recommendation.getFactory() + " untouched.");
+        }
+    }
 
     /**
      * Validates data, does nothing if invalid, otherwise clones new selected
@@ -308,29 +331,10 @@
                 // Check whether one of those is a dike or similar.
                 // TODO differentiate and merge: new clones, new, old.
                 Recommendation firstR = r.getFirst();
-                if(firstR.getIDs() != null) {
-                    GWT.log("First IDs: " + firstR.getIDs() + " factory: "
-                            + firstR.getFactory());
-                }
-                if(firstR.getIDs() != null) {
-                    // These do not get cloned but loaded ("spawned").
-                    firstR.setFactory("staticwkms");
-                }
-                else {
-                    firstR.setFactory("waterlevel");
-                }
+                adjustRecommendation(firstR);
+
                 Recommendation secondR = r.getSecond();
-                if(secondR.getIDs() != null) {
-                    GWT.log("Second IDs: " + secondR.getIDs() + " factory: "
-                            + secondR.getFactory());
-                }
-                if (secondR.getIDs() != null) {
-                    // These do not get cloned but loaded ("spawned").
-                    secondR.setFactory("staticwkms");
-                }
-                else {
-                    secondR.setFactory("waterlevel");
-                }
+                adjustRecommendation(secondR);
 
                 ar.add(firstR);
                 ar.add(secondR);
@@ -389,6 +393,7 @@
         }
 
         // Clone new ones (and spawn statics), go forward.
+        parameterList.lockUI();
         loadArtifactService.loadMany(
             this.collection,
             toClone,
@@ -399,6 +404,7 @@
                 @Override
                 public void onFailure(Throwable caught) {
                     GWT.log("Failure of cloning with factories!");
+                    parameterList.unlockUI();
                 }
                 @Override
                 public void onSuccess(Artifact[] artifacts) {
@@ -407,6 +413,7 @@
 
                     fireStepForwardEvent(new StepForwardEvent(
                         getData(toClone, artifacts, toUse)));
+                    parameterList.unlockUI();
                 }
             });
     }
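Note: The new parameterList.lockUI() / unlockUI() calls bracket the asynchronous loadMany() request so the user cannot start a second clone while the first is still in flight; the UI is released in the failure path as well as in the success path. The general shape of the pattern, with placeholder service and result names that are not actual API from this changeset:

    parameterList.lockUI();
    someService.call(arguments, new AsyncCallback<SomeResult>() {
        @Override
        public void onFailure(Throwable caught) {
            parameterList.unlockUI();   // always release the UI again
        }
        @Override
        public void onSuccess(SomeResult result) {
            // ... process the result ...
            parameterList.unlockUI();
        }
    });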
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/FLYSHeader.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/FLYSHeader.java	Wed Aug 07 18:53:15 2013 +0200
@@ -12,6 +12,7 @@
 import com.google.gwt.i18n.client.LocaleInfo;
 import com.google.gwt.user.client.Window;
 import com.google.gwt.user.client.rpc.AsyncCallback;
+import com.google.gwt.user.client.ui.HTML;
 import com.smartgwt.client.types.Alignment;
 import com.smartgwt.client.types.VerticalAlignment;
 import com.smartgwt.client.util.BooleanCallback;
@@ -135,14 +136,30 @@
         info.addClickHandler(new ClickHandler() {
             @Override
             public void onClick(ClickEvent event) {
-                GWT.log("Clicked 'info' button.");
                 String wikiLink = Config.getInstance().getWikiUrl() + "/Info";
-                Window.open(wikiLink, "_blank", null);
+
+                // Create a form which contains the SAML session
+                // for the user who is currently logged in.
+                String html = WikiLinks.imageLinkForm(
+                        getFlys(), wikiLink, "", "wikiLinkForm");
+                HTML htmlObj = new HTML(html);
+                info.addChild(htmlObj);
+                fireWikiLinkSubmit();
+                htmlObj.removeFromParent();
             }
         });
         init();
     }
 
+    /**
+     * Calls the JS submit() function on the dynamically added
+     * wikiLinkForm. This is a workaround for a SmartGWT issue(?) that
+     * clears all form fields when using DynamicForm.submit() or .submitForm().
+     */
+    protected native void fireWikiLinkSubmit() /*-{
+        $doc.wikiLinkForm.submit();
+    }-*/;
+
     public void init() {
         setStyleName("header");
         setWidth100();
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/ImgLink.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/ImgLink.java	Wed Aug 07 18:53:15 2013 +0200
@@ -12,6 +12,7 @@
 import com.smartgwt.client.widgets.HTMLPane;
 
 
+/** An image wrapped in a clickable link. */
 public class ImgLink extends HTMLPane {
 
     protected int width;
@@ -59,3 +60,4 @@
         update();
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/MultiPeriodPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/MultiPeriodPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -36,7 +36,7 @@
 import java.util.List;
 
 /**
- * This UIProvider creates a panel for location or distance input.
+ * This UIProvider creates a panel for input of multiple time periods.
  *
  * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a>
  */
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/ParameterList.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/ParameterList.java	Wed Aug 07 18:53:15 2013 +0200
@@ -83,7 +83,7 @@
 
     public static final String STYLENAME_OLD_PARAMETERS = "oldParameters";
 
-    /** The message class that provides i18n strings.*/
+    /** The message class that provides i18n strings. */
     protected FLYSConstants MSG = GWT.create(FLYSConstants.class);
 
     /** The ArtifactService used to communicate with the Artifact server. */
@@ -103,7 +103,7 @@
         GWT.create(ReportService.class);
 
 
-    /** The list of ParameterizationChangeHandler.*/
+    /** The list of ParameterizationChangeHandler. */
     protected List<ParameterChangeHandler> parameterHandlers;
 
     protected FLYS flys;
@@ -568,7 +568,7 @@
 
 
     /**
-     * This method refreshes the part displaying the data of the current state.
+     * Refreshes the part displaying the data of the current state.
      * The UI is created using the UIProvider stored in the Data object.
      */
     public void refreshCurrent() {
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/QSegmentedInputPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/QSegmentedInputPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -428,14 +428,14 @@
             new AsyncCallback<WQInfoObject[]>() {
                 @Override
                 public void onFailure(Throwable caught) {
-                    GWT.log("Could not recieve wq informations.");
+                    GWT.log("Could not receive wq informations.");
                     SC.warn(caught.getMessage());
                 }
 
                 @Override
                 public void onSuccess(WQInfoObject[] wqi) {
                     int num = wqi != null ? wqi.length :0;
-                    GWT.log("Recieved " + num + " wq informations.");
+                    GWT.log("Received " + num + " wq informations.");
 
                     if (num == 0) {
                         return;
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/RiverInfoPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/RiverInfoPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -99,7 +99,7 @@
             MSG.gauge_river_url() + number :
             MSG.gauge_river_url();
         String wikiBaseUrl = Config.getInstance().getWikiUrl();
-        DynamicForm infoLink = WikiLinks.linkHTML(this.flys, wikiBaseUrl + url,
+        DynamicForm infoLink = WikiLinks.linkDynamicForm(this.flys, wikiBaseUrl + url,
                                         MSG.gauge_river_info_link());
         infoLink.setTop(5);
         LinkItem item = (LinkItem)infoLink.getField("saml");
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQAdaptedInputPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQAdaptedInputPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -140,6 +140,7 @@
     }
 
 
+    /** Create labels, canvasses, layouts. */
     @Override
     public Canvas create(DataList data) {
         readGaugeRanges(data);
@@ -196,7 +197,7 @@
             qdTables.add(qdTable);
 
             qdTable.showSelect();
-            //wTable.showSelect();
+            wTable.showSelect();
             wTab.setPane(wTable);
             qTab.setPane(qdTable);
 
@@ -267,6 +268,57 @@
             qdTable.addCellClickHandler(handler);
             i++;
         }
+
+        i = 0;
+        for (WTable wTable: wTables) {
+            // Register listener such that values are filled in on click.
+            final WTable table = wTable;
+            final int fi = i;
+            CellClickHandler handler = new CellClickHandler() {
+                @Override
+                public void onCellClick(CellClickEvent e) {
+                    if (!isWMode() /*|| table.isLocked()*/) {
+                        return;
+                    }
+
+                    Record r   = e.getRecord();
+                    double val = r.getAttributeAsDouble("value");
+
+                    doubleArrayPanels.get(fi).setValues(new double[]{val});
+                    // If a named value for first gauge is chosen, try to find and set
+                    // the values to the other panels too.
+                    if (fi == 0) {
+                        String valueName = r.getAttribute("name");
+                        int oi = 0;
+                        // TODO instead of oi use random access.
+                        for (WTable otherWTable: wTables) {
+                            if (oi == 0) {
+                                oi++;
+                                continue;
+                            }
+                            Double value = otherWTable.findRecordValue(valueName);
+                            if (value == null) {
+                                // TODO: afterwards it freaks out
+                                SC.warn(MSG.noMainValueAtGauge());
+                            }
+                            else {
+                                doubleArrayPanels.get(oi).setValues(new double[]{value});
+                            }
+                            oi++;
+                        }
+                    }
+                    else {
+                        // Focus next.
+                        if (fi != doubleArrayPanels.size()-1) {
+                            doubleArrayPanels.get(fi+1).focusInItem(1);
+                        }
+                    }
+                }
+            };
+
+            wTable.addCellClickHandler(handler);
+            i++;
+        }
     }
 
 
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQInputPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQInputPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -1811,14 +1811,14 @@
             new AsyncCallback<WQInfoObject[]>() {
                 @Override
                 public void onFailure(Throwable caught) {
-                    GWT.log("Could not recieve wq informations.");
+                    GWT.log("Could not receive wq informations.");
                     SC.warn(caught.getMessage());
                 }
 
                 @Override
                 public void onSuccess(WQInfoObject[] wqi) {
                     int num = wqi != null ? wqi.length :0;
-                    GWT.log("Recieved " + num + " wq informations.");
+                    GWT.log("Received " + num + " wq informations.");
 
                     if (num == 0) {
                         return;
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQSimpleArrayPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WQSimpleArrayPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -265,13 +265,13 @@
             new AsyncCallback<WQInfoObject[]>() {
                 @Override
                 public void onFailure(Throwable caught) {
-                    GWT.log("Could not recieve wq informations.");
+                    GWT.log("Could not receive wq informations.");
                 }
 
                 @Override
                 public void onSuccess(WQInfoObject[] wqi) {
                     int num = wqi != null ? wqi.length :0;
-                    GWT.log("Recieved " + num + " wq informations.");
+                    GWT.log("Received " + num + " wq informations.");
 
                     if (num == 0) {
                         return;
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiImgLink.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiImgLink.java	Wed Aug 07 18:53:15 2013 +0200
@@ -24,7 +24,7 @@
 
     @Override
     protected void update() {
-        setContents(WikiLinks.imageLinkHTML(instance, href, imgUrl));
+        setContents(WikiLinks.imageLinkForm(instance, href, imgUrl, "wikiImgLink" + toString()));
         setWidth(width);
         setHeight(height);
         setOverflow(Overflow.VISIBLE);
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiLinks.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiLinks.java	Wed Aug 07 18:53:15 2013 +0200
@@ -8,18 +8,21 @@
 
 package org.dive4elements.river.client.client.ui;
 
+import com.google.gwt.core.client.GWT;
+import com.google.gwt.safehtml.shared.SafeHtmlUtils;
 import com.smartgwt.client.types.FormMethod;
 import com.smartgwt.client.widgets.form.DynamicForm;
+import com.smartgwt.client.widgets.form.fields.HiddenItem;
 import com.smartgwt.client.widgets.form.fields.LinkItem;
 import com.smartgwt.client.widgets.form.fields.events.ClickEvent;
 import com.smartgwt.client.widgets.form.fields.events.ClickHandler;
-import com.google.gwt.safehtml.shared.SafeHtmlUtils;
 
 import org.dive4elements.river.client.client.FLYS;
+import org.dive4elements.river.client.shared.model.User;
 
 public class WikiLinks
 {
-    public static String imageLinkHTML(FLYS instance, String url, String imageUrl) {
+    public static String imageLinkForm(FLYS instance, String url, String imageUrl, String formName) {
         String saml = null;
         if (instance != null && instance.getCurrentUser() != null) {
             saml = instance.getCurrentUser().getSamlXMLBase64();
@@ -29,7 +32,7 @@
 
         if (saml != null) {
             return "<form method=\"POST\" target=\"_blank\" action=\""
-                + quotedUrl + "\">"
+                + quotedUrl + "\" " + "name=\"" + formName + "\">"
                 + "<input type=\"hidden\" name=\"saml\" value=\""
                 + SafeHtmlUtils.htmlEscape(saml) + "\">"
                 + "<input type=\"image\" src=\""+ quotedImage + "\">"
@@ -40,11 +43,13 @@
         }
     }
 
-    public static DynamicForm linkHTML(FLYS flys, String url, String text) {
-        String saml = flys.getCurrentUser().getSamlXMLBase64();
+    public static DynamicForm linkDynamicForm(FLYS flys, String url, String text) {
+        User currentUser = flys.getCurrentUser();
         String quotedUrl = SafeHtmlUtils.htmlEscape(url);
         String quotedText = SafeHtmlUtils.htmlEscape(text);
-        if (saml != null) {
+
+        if (currentUser != null) {
+            String saml = currentUser.getSamlXMLBase64();
             final DynamicForm form = new DynamicForm();
             form.setMethod(FormMethod.POST);
             form.setTarget("_blank");
@@ -71,4 +76,33 @@
             return form;
         }
     }
+
+    public static DynamicForm dynamicForm(FLYS flys, String url) {
+        User currentUser = flys.getCurrentUser();
+        String quotedUrl = SafeHtmlUtils.htmlEscape(url);
+
+        if (currentUser != null) {
+            String saml = currentUser.getSamlXMLBase64();
+            saml = SafeHtmlUtils.htmlEscape(saml);
+            GWT.log("saml=" + saml);
+            DynamicForm form = new DynamicForm();
+            form.setID("wikiDynamicForm");
+            form.setMethod(FormMethod.POST);
+            form.setTarget("_blank");
+            form.setAction(quotedUrl);
+            form.setCanSubmit(true);
+            HiddenItem item = new HiddenItem("saml");
+            item.setDefaultValue(saml);
+            item.setValue(saml);
+            form.setFields(item);
+            //form.setValue("saml", saml);
+            return form;
+        }
+        else {
+            DynamicForm form = new DynamicForm();
+            form.setTarget("_blank");
+            form.setAction(quotedUrl);
+            return form;
+        }
+    }
 }
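Note: linkHTML() was renamed to linkDynamicForm() and the new dynamicForm()/imageLinkForm() variants were added; all of them wrap the target URL in a POST form that carries the logged-in user's SAML ticket as a hidden field, falling back to an unauthenticated form when no user is present. A hedged usage sketch along the lines of the RiverInfoPanel change below; the URL suffix and the flys/layout variables are examples:

    // Authenticated wiki link; degrades to a plain link without a user session.
    String wikiBaseUrl = Config.getInstance().getWikiUrl();
    DynamicForm infoLink = WikiLinks.linkDynamicForm(
        flys, wikiBaseUrl + "/Info", "Info");
    layout.addMember(infoLink);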
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WspDatacagePanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WspDatacagePanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -78,7 +78,7 @@
 
     /**
      * We need to override this method (defined in AbstractUIProvider) because
-     * we have to create a new Artifact specified by  the Datacage selection via
+     * we have to create a new Artifact specified by the Datacage selection via
      * Async request.
      *
      * @param e The ClickEvent.
@@ -144,6 +144,7 @@
     }
 
 
+    /** Returns a Data Array with one default item. */
     protected Data[] getData(Recommendation r, Artifact newArtifact) {
         String uuid = newArtifact.getUuid();
         r.setMasterArtifact(uuid);
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/chart/ChartPropertiesEditor.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/chart/ChartPropertiesEditor.java	Wed Aug 07 18:53:15 2013 +0200
@@ -188,6 +188,17 @@
         PropertyGroup origPg = (PropertyGroup)orig;
 
         if (pg.getName().equals("axis")) {
+            // Certain axes shall be skipped (especially the cm axis of W/Q diagrams).
+            String outputName = tab.getOutputName();
+            if (outputName.equals("fix_wq_curve") || outputName.equals("computed_discharge_curve")
+                || outputName.equals("extreme_wq_curve")) {
+                String labelString = ((StringProperty)origPg.getPropertyByName("label")).getValue();
+                if(labelString.equals("W [cm]")) {
+                    VLayout layout = new VLayout();
+                    layout.setHeight(0);
+                    return layout;
+                }
+            }
             Label scale = new Label(MSG.scale() + " :");
             scale.setHeight(25);
             scale.setMargin(2);
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/chart/CrossSectionChartThemePanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/chart/CrossSectionChartThemePanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -312,7 +312,7 @@
 
 
     /**
-     * Feed a single artifact with the km of the crosssection to display.
+     * Feed artifacts with the km of the crosssection to display.
      * If its the selected master, also feed the collectionmaster.
      *
      * @param artifacts List of artifacts to feed.
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/fixation/FixGaugeSelectPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/fixation/FixGaugeSelectPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -30,7 +30,8 @@
 import java.util.List;
 
 /**
- * This UIProvider creates a panel for location or distance input.
+ * This UIProvider creates a panel to select discharge classes / sectors
+ * (German: Abflussklassen).
  *
  * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a>
  */
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/minfo/SedLoadOffEpochPanel.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/minfo/SedLoadOffEpochPanel.java	Wed Aug 07 18:53:15 2013 +0200
@@ -38,7 +38,7 @@
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoObject;
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoRecord;
 
-
+/** Show input to select an official epoch. */
 public class SedLoadOffEpochPanel
 extends PeriodPanel
 {
@@ -47,6 +47,7 @@
 
     private ListGrid sedLoadTable;
 
+    /** Creates layout with title. */
     public Canvas createWidget(DataList data) {
         VLayout root = new VLayout();
 
@@ -58,6 +59,7 @@
         return root;
     }
 
+    /** Create layout for data entered previously. */
     @Override
     public Canvas createOld(DataList dataList) {
         HLayout layout = new HLayout();
@@ -104,6 +106,7 @@
         return layout;
     }
 
+    /** Creates the helper grid in which off epochs can be selected. */
     protected Canvas createHelper() {
         sedLoadTable = new ListGrid();
         sedLoadTable.setShowHeaderContextMenu(false);
@@ -119,15 +122,11 @@
         date.setType(ListGridFieldType.TEXT);
         date.setWidth(100);
 
-        ListGridField descr =
-            new ListGridField("description", MSG.description());
-        descr.setType(ListGridFieldType.TEXT);
-        descr.setWidth("*");
-
-        sedLoadTable.setFields(date, descr);
+        sedLoadTable.setFields(date);
         return sedLoadTable;
     }
 
+    /** Get data via listgrid selection. */
     @Override
     public Data[] getData() {
         List<Data> data = new ArrayList<Data>();
@@ -153,9 +152,10 @@
         return data.toArray(new Data[data.size()]);
     }
 
+    /** Fetch load info from service and populate table. */
     protected void fetchSedimentLoadData() {
-        Config config    = Config.getInstance();
-        String locale    = config.getLocale ();
+        Config config = Config.getInstance();
+        String locale = config.getLocale();
 
         ArtifactDescription adescr = artifact.getArtifactDescription();
         DataList[] data = adescr.getOldData();
@@ -163,16 +163,16 @@
         double[] km = artifact.getArtifactDescription().getKMRange();
         String river = artifact.getArtifactDescription().getRiver();
 
-        sedLoadInfoService.getSedimentLoadInfo(locale, river, "epoch", km[0], km[1],
+        sedLoadInfoService.getSedimentLoadInfo(locale, river, "off_epoch", km[0], km[1],
             new AsyncCallback<SedimentLoadInfoObject[]>() {
                 public void onFailure(Throwable caught) {
-                    GWT.log("Could not recieve sediment load informations.");
+                    GWT.log("Could not receive sediment load informations.");
                     SC.warn(caught.getMessage());
                 }
 
                 public void onSuccess(SedimentLoadInfoObject[] sedLoad) {
                     int num = sedLoad != null ? sedLoad.length :0;
-                    GWT.log("Recieved " + num + " sediment load informations.");
+                    GWT.log("Received " + num + " sediment load informations.");
 
                     if (num == 0) {
                         return;
@@ -185,11 +185,12 @@
     }
 
 
+    /** Add record to input helper listgrid. */
     protected void addSedimentLoadInfo (SedimentLoadInfoObject[] sedLoad) {
         for(SedimentLoadInfoObject sl: sedLoad) {
             SedimentLoadInfoRecord rec = new SedimentLoadInfoRecord(sl);
             sedLoadTable.addData(rec);
         }
     }
-
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/InfoListGrid.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/InfoListGrid.java	Wed Aug 07 18:53:15 2013 +0200
@@ -46,7 +46,7 @@
     ) {
         String name = this.getFieldName(colNum);
         if (name.equals("infolink")) {
-            return WikiLinks.linkHTML(
+            return WikiLinks.linkDynamicForm(
                 flys,
                 record.getAttribute("link"),
                 record.getLinkText());
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/QDTable.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/QDTable.java	Wed Aug 07 18:53:15 2013 +0200
@@ -22,7 +22,12 @@
 
 
 /**
- * Table showing Q and D main values, allowing for selection.
+ * Table showing Q and D main values, allowing for selection, if
+ * showSelect is called. In that case, a CellClickHandler should
+ * be registered.
+ *
+ * TODO becomes very similar to WTable. Probably mergeable.
+ *
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  */
 public class QDTable extends ListGrid {
@@ -119,6 +124,7 @@
         hideField("min");
     }
 
+    /** Whether or not clicks on this table are currently locked. */
     public boolean isLocked() {
         return lockClick;
     }
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/WTable.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/wq/WTable.java	Wed Aug 07 18:53:15 2013 +0200
@@ -21,6 +21,8 @@
 import org.dive4elements.river.client.client.FLYSConstants;
 
 /**
+ * Table showing W main values.
+ * TODO becomes very similar to QDTable. Probably mergeable.
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  */
 public class WTable extends ListGrid {
@@ -30,6 +32,8 @@
 
 
     public WTable() {
+        String baseUrl = GWT.getHostPageBaseURL();
+
         setWidth100();
         setHeight100();
         setSelectionType(SelectionStyle.NONE);
@@ -68,7 +72,31 @@
             }
         });
 
-        setFields(name, type, value);
+        ListGridField select = new ListGridField("select", MESSAGE.selection());
+        select.setType(ListGridFieldType.ICON);
+        select.setWidth(70);
+        select.setCellIcon(baseUrl + MESSAGE.markerGreen());
+
+        setFields(select, name, type, value);
+        hideField("select");
+    }
+
+    public void showSelect() {
+        showField("select");
+    }
+
+
+    /**
+     * Search all records for one whose "name" attribute equals the given name.
+     * @return the "value" attribute of that record, or null if none found.
+     */
+    public Double findRecordValue(String name) {
+        for (ListGridRecord record : getRecords()) {
+            if (record.getAttribute("name").equals(name)) {
+                return record.getAttributeAsDouble("value");
+            }
+        }
+        return null;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
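Note: findRecordValue(String) is what the new CellClickHandler in WQAdaptedInputPanel uses to mirror a named main value from the first gauge's W table to the tables of the other gauges. Minimal lookup sketch; the main value name "MW" and the surrounding variables are illustrative only:

    // Null means this gauge's table has no main value with that name.
    Double value = otherWTable.findRecordValue("MW");
    if (value != null) {
        doubleArrayPanels.get(i).setValues(new double[] { value });
    }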
--- a/gwt-client/src/main/java/org/dive4elements/river/client/server/MapPrintServiceImpl.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/server/MapPrintServiceImpl.java	Wed Aug 07 18:53:15 2013 +0200
@@ -212,7 +212,7 @@
             Map<String, Object> legend = new LinkedHashMap<String, Object>();
             List<Object> classes = new ArrayList<Object>(1);
             Map<String, Object> clazz = new LinkedHashMap<String, Object>();
-            String lgu = encode(MapUtils.getLegendGraphicUrl(layer.url, layer.layers, dpi));
+            String lgu = MapUtils.getLegendGraphicUrl(layer.url, encode(layer.layers), dpi);
             clazz.put("icon", lgu);
             clazz.put("name", layer.description);
             classes.add(clazz);
--- a/gwt-client/src/main/java/org/dive4elements/river/client/server/SedimentLoadInfoServiceImpl.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/server/SedimentLoadInfoServiceImpl.java	Wed Aug 07 18:53:15 2013 +0200
@@ -29,9 +29,10 @@
 import org.dive4elements.river.client.shared.model.SedimentLoadInfoObjectImpl;
 
 
+/** Service to fetch info about sediment load. */
 public class SedimentLoadInfoServiceImpl
-extends RemoteServiceServlet
-implements SedimentLoadInfoService
+extends      RemoteServiceServlet
+implements   SedimentLoadInfoService
 {
     private static final Logger logger =
         Logger.getLogger(SedimentLoadInfoServiceImpl.class);
@@ -50,7 +51,7 @@
     {
         logger.info("SedimentLoadInfoServiceImpl.getSedimentLoadInfo");
 
-        String url  = getServletContext().getInitParameter("server-url");
+        String url = getServletContext().getInitParameter("server-url");
 
         Document doc = XMLUtils.newDocument();
 
@@ -158,3 +159,4 @@
         return null;
     }
 }
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/shared/model/DefaultCollection.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/shared/model/DefaultCollection.java	Wed Aug 07 18:53:15 2013 +0200
@@ -284,6 +284,10 @@
     }
 
 
+    /**
+     * Returns true if a recommendation with given factory and id
+     * is already a member of this collection.
+     */
     public boolean loadedRecommendation(Recommendation recommendation) {
         String factory = recommendation.getFactory();
         String dbids   = recommendation.getIDs();
@@ -307,7 +311,7 @@
     }
 
     /**
-     * Returns the name of the collection or uuid if no name is set
+     * Returns the name of the collection or uuid if no name is set.
      */
     @Override
     public String getDisplayName() {
--- a/gwt-client/src/main/java/org/dive4elements/river/client/shared/model/ToLoad.java	Fri Jun 28 21:08:23 2013 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/shared/model/ToLoad.java	Wed Aug 07 18:53:15 2013 +0200
@@ -20,6 +20,7 @@
 public class ToLoad implements Serializable
 {
 
+    /** Two strings. */
     public class StringPair {
         public String first;
         public String second;
@@ -51,16 +52,17 @@
         }
         return key;
     }
-   public void add(
-        String artifactName,
-        String factory,
-        String out,
-        String name,
-        String ids,
-        String displayName
-    ) {
-       add(artifactName, factory, out, name, ids, displayName, null);
-    }
+
+    public void add(
+        String artifactName,
+        String factory,
+        String out,
+        String name,
+        String ids,
+        String displayName
+    ) {
+        add(artifactName, factory, out, name, ids, displayName, null);
+    }
 
     public void add(
         String artifactName,
@@ -114,7 +116,7 @@
             for (Map.Entry<StringPair, ArtifactFilter> entry:
                 all.getValue().entrySet()
             ) {
-                StringPair pair         = entry.getKey();
+                StringPair pair = entry.getKey();
                 String factory = pair.first;
                 ArtifactFilter artifactFilter = entry.getValue();
 

http://dive4elements.wald.intevation.org