Mercurial repository: dive4elements/river
changeset 6995:e0311aa32efb
merged accidental head (nothing changed)
--- a/.hgtags	Sat Jun 29 00:20:58 2013 +0200
+++ b/.hgtags	Thu Sep 12 10:13:09 2013 +0200
@@ -55,3 +55,15 @@
 eec895f6ec801a7faaed96e9f01721e1143e7bb8 3.0.8
 41152c3a532d63a25e9fa348ca4286bdd76069ef 3.0.9
 5f4893db41e4fed644ddbd91bf4cdf0ac7952161 3.0.10
+5f4893db41e4fed644ddbd91bf4cdf0ac7952161 3.0.10
+0000000000000000000000000000000000000000 3.0.10
+0000000000000000000000000000000000000000 3.0.10
+bfcb513c1fdace07ce39616bc9fda3899e8ee914 3.0.10
+63baa1873b1f86a78ab10d19ae4221ec01cc75a2 3.0.11
+3999162f474fb5a6bced33521f81c9ccf274c4e7 3.0.12
+3999162f474fb5a6bced33521f81c9ccf274c4e7 3.0.12
+0000000000000000000000000000000000000000 3.0.12
+0000000000000000000000000000000000000000 3.0.12
+da197a9236fde564d45379c0826510c69a5709ce 3.0.12
+71da3d4ffb4a46a2f8de7e6a9e1e4a32657802aa 3.0.13
+84b1e87e86692db4202c5d68e0c521185ef0f9d2 3.0.14
--- a/artifacts/doc/conf/artifacts/chart.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/chart.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -58,8 +58,18 @@
             <facet name="w_differences" description="facet.w_differences"/>
             <facet name="other.wkms" description="facet.other.wkms"/>
             <facet name="other.wqkms" description="facet.other.wqkms"/>
+            <facet name="other.wqkms.w" description="W-Type of data" />
+            <facet name="other.wqkms.q" description="Q-Type of data" />
             <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
+            <facet name="w_differences.manualpoints" description="Manuelle Punkte"/>
             <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
+            <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+            <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
         </facets>
     </outputmode>
 </outputmodes>
@@ -70,6 +80,7 @@
     <outputmode name="longitudinal_section" description="output.discharge_longitudinal_section" mime-type="image/png" type="chart">
         <facets>
             <facet name="empty.facet" decription= "Empty"/>
+            <facet name="w_differences" decription= "W Differences"/>
             <facet name="discharge_longitudinal_section.w"/>
             <facet name="discharge_longitudinal_section.q"/>
             <facet name="discharge_longitudinal_section.c"/>
@@ -83,6 +94,13 @@
             <facet name="longitudinal_section.w"/>
             <facet name="longitudinal_section.q"/>
             <facet name="longitudinal_section.manualpoints"/>
+            <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+            <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
         </facets>
     </outputmode>
 </outputmodes>
--- a/artifacts/doc/conf/artifacts/fixanalysis.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/fixanalysis.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -171,8 +171,8 @@
             <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
             <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
             <facet name="fix_wq_curve.manualpoints" description="Manual points"/>
-            <facet name="mainvalues.w" description="facet.fix_wq.mainvalues.w"/>
-            <facet name="mainvalues.q" description="facet.fix_wq.mainvalues.q"/>
+            <facet name="mainvalues.q" description="mainvalues.q"/>
+            <facet name="mainvalues.w" description="mainvalues.w"/>
         </facets>
     </outputmode>
     <outputmode name="fix_deltawt_curve" description="output.fix_deltawt_curve" mine-type="image/png" type="chart">
--- a/artifacts/doc/conf/artifacts/gaugedischarge.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischarge.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -8,8 +8,15 @@
     <outputmode name="computed_discharge_curve" description="computed_discharge_curve" mime-type="image/png" type="chart">
         <facets>
             <facet name="discharge_curve.curve" description="facet.discharge_curve.curve" />
-            <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-            <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+            <facet name="computed_discharge_curve.mainvalues.q" description="mainvalues.q"/>
+            <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+            <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+            <facet name="heightmarks_points" description="facet.other.wqkms"/>
+            <facet name="other.wqkms" description="facet.other.wqkms"/>
+            <facet name="other.wq" description="Point-like data like fixations"/>
+            <facet name="other.wkms" description="Point-like data like fixations"/>
+            <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+            <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
         </facets>
     </outputmode>
 </outputmodes>
--- a/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/gaugedischargecurve.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -6,11 +6,20 @@
     <data name="reference_gauge" type="Long" />
     <data name="gauge_name" type="String" />
     <outputmodes>
-        <outputmode name="discharge_curve" description="output.discharge_curve" type="chart">
+        <outputmode name="discharge_curve" description="output.discharge_curve" mime-type="image/png" type="chart">
            <facets>
                <facet name="gauge_discharge_curve"/>
-               <facet name="mainvalues.q" description="facet.mainvalues.q"/>
-               <facet name="mainvalues.w" description="facet.mainvalues.w"/>
+               <facet name="mainvalues.q" description="mainvalues.q"/>
+               <facet name="computed_discharge_curve.mainvalues.w" description="mainvalues.w"/>
+               <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
+               <facet name="heightmarks_points" description="facet.other.wqkms"/>
+               <facet name="other.wqkms" description="facet.other.wqkms"/>
+               <facet name="other.wq" description="Point-like data like fixations"/>
+               <facet name="other.wkms" description="Point-like data like fixations"/>
+               <facet name="other.wkms.interpol" description="Height over km, like flood protections."/>
+               <facet name="computed_discharge_curve.manualpoints" description="Manuelle Punkte"/>
+               <facet name="other.w.interpol" description="Interpolated (likely single) W Values"/>
+               <facet name="other.wqkms.w" description="facet.other.wqkms"/>
            </facets>
        </outputmode>
        <outputmode name="computed_dischargecurve_at_export" description="output.computed_dischargecurve_at_export" mime-type="text/plain" type="export">
--- a/artifacts/doc/conf/artifacts/minfo.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/minfo.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -524,6 +524,14 @@
             <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
         </facets>
     </outputmode>
+    <outputmode name="sedimentload_ls_export" description="output.sedimentload_ls_export" mime-type="text/plain" type="export">
+        <facets>
+            <facet name="csv" description="facet.sedimentload_ls_export.csv" />
+            <!--
+            <facet name="pdf" description=".pdf" />
+            -->
+        </facets>
+    </outputmode>
     <outputmode name="sedimentload_ls_report" description="output.sedimentload_ls_report" mime-type="text/xml" type="report">
         <facets>
             <facet name="report" description="facet.sedimentload_ls_export.report" />
--- a/artifacts/doc/conf/artifacts/winfo.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/artifacts/winfo.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -271,6 +271,14 @@
             <facet name="heightmarks_points" description="facet.other.wkms.heightmarks_points"/>
             <facet name="w_differences.manualpoints" description="Manuelle Punkte"/>
             <facet name="longitudinal_section.annotations" description="facet.longitudinal_section.annotations"/>
+            <facet name="bedheight_difference.year" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.year.filtered" description="A facet for bed height differences"/>
+            <facet name="bedheight_difference.morph_width" description="A facet for morphologic width"/>
+            <facet name="bedheight_difference.year.height1" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height1.filtered" description="A facet for raw heights."/>
+            <facet name="bedheight_difference.year.height2.filtered" description="A facet for raw heights."/>
+            <facet name="morph-width" description="morphologic width, not sounding width!"/>
         </facets>
     </outputmode>
     <outputmode name="w_differences_export" description="output.w_differences.export" mime-type="text/plain" type="export">
@@ -318,6 +326,8 @@
             <facet name="computed_discharge_curve.mainvalues.w" description="facet.computed_discharge_curve.mainvalues.w"/>
             <facet name="discharge_curve.curve" description="facet.discharge_curve.curve"/>
             <facet name="heightmarks_points" description="facet.other.wqkms"/>
+            <facet name="other.wqkms.w" description="facet.other.wqkms"/>
+            <facet name="other.wqkms.q" description="facet.other.wqkms"/>
             <facet name="other.wqkms" description="facet.other.wqkms"/>
             <facet name="other.wq" description="Point-like data like fixations"/>
             <facet name="other.wkms" description="Point-like data like fixations"/>
@@ -374,6 +384,8 @@
     </outputmode>
     <outputmode name="waterlevel_export" description="output.waterlevel_export" mime-type="text/plain" type="export">
         <facets>
+            <!-- include other.wqkms.w to be able to include official lines (issue1384) -->
+            <facet name="other.wqkms.w" description="W-Type of data" />
             <facet name="csv" description="facet.waterlevel_export.csv" />
             <facet name="wst" description="facet.waterlevel_export.wst" />
             <facet name="pdf" description="facet.waterlevel_export.pdf" />
@@ -630,11 +642,12 @@
     </outputmode>
     <outputmode name="historical_discharge_wq" description="output.historical_discharge_wq.description" mime-type="image/png" type="chart">
         <facets>
+            <facet name="discharge_curve.curve"/>
             <facet name="historical_discharge.wq.q"/>
             <facet name="historical_discharge.wq.w"/>
             <facet name="historical_discharge.wq.curve"/>
-            <facet name="historical_discharge.mainvalues.q"/>
-            <facet name="historical_discharge.mainvalues.w"/>
+            <facet name="mainvalues.q"/>
+            <facet name="mainvalues.w"/>
             <facet name="historical_discharge_wq.manualpoints"/>
         </facets>
     </outputmode>
--- a/artifacts/doc/conf/conf.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/conf.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -363,6 +363,7 @@
         <output-generator name="fix_vollmer_wq_curve">org.dive4elements.river.exports.fixings.FixWQCurveGenerator</output-generator>
         <output-generator name="fix_vollmer_wq_curve_chartinfo">org.dive4elements.river.exports.fixings.FixWQCurveInfoGenerator</output-generator>
         <output-generator name="sedimentload_ls">org.dive4elements.river.exports.minfo.SedimentLoadLSGenerator</output-generator>
+        <output-generator name="sedimentload_ls_export">org.dive4elements.river.exports.minfo.SedimentLoadExporter</output-generator>
         <output-generator name="sedimentload_ls_chartinfo">org.dive4elements.river.exports.minfo.SedimentLoadLSInfoGenerator</output-generator>
         <!-- Error report generators. -->
         <output-generator name="discharge_longitudinal_section_report">org.dive4elements.river.exports.ReportGenerator</output-generator>
--- a/artifacts/doc/conf/datacage.sql	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/datacage.sql	Thu Sep 12 10:13:09 2013 +0200
@@ -41,7 +41,7 @@
     artifact_id INT NOT NULL REFERENCES artifacts(id) ON DELETE CASCADE,
     kind VARCHAR(256) NOT NULL,
     k VARCHAR(256) NOT NULL,
-    v VARCHAR(256), -- Maybe too short
+    v TEXT,
     UNIQUE (artifact_id, k)
 );
--- a/artifacts/doc/conf/mapserver/barrier_lines_class.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/barrier_lines_class.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -1,6 +1,6 @@
 CLASS
     NAME "Damm"
-    EXPRESSION ("[TYP]"="Damm")
+    EXPRESSION /damm/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#008000"
@@ -8,7 +8,7 @@
 END
 CLASS
     NAME "Rohr 1"
-    EXPRESSION ("[TYP]"="Rohr 1")
+    EXPRESSION /rohr.1/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#800080"
@@ -16,7 +16,7 @@
 END
 CLASS
     NAME "Rohr 2"
-    EXPRESSION ("[TYP]"="Rohr 2")
+    EXPRESSION /rohr.2/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#808080"
@@ -24,7 +24,7 @@
 END
 CLASS
     NAME "Graben"
-    EXPRESSION ("[TYP]"="Graben")
+    EXPRESSION /graben/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#800000"
@@ -32,7 +32,7 @@
 END
 CLASS
     NAME "Ringdeich"
-    EXPRESSION ("[TYP]"="Ringdeich")
+    EXPRESSION /ringdeich/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#800000"
--- a/artifacts/doc/conf/mapserver/barrier_polygons_class.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/barrier_polygons_class.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -1,6 +1,6 @@
 CLASS
     NAME "Ringdeich"
-    EXPRESSION ("[TYP]"="Ringdeich")
+    EXPRESSION /ringdeich/i
     STYLE
         SIZE 5
         OUTLINECOLOR "#FF8000"
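The two barrier class files above replace exact string comparisons on the TYP attribute with case-insensitive MapServer regular expressions, so values such as "damm", "Damm" or "Rohr_1" all match. A minimal Java sketch of the same matching idea, with illustrative sample values that are not taken from the repository:

import java.util.regex.Pattern;

public class BarrierTypeMatch {
    public static void main(String[] args) {
        // MapServer's /rohr.1/i corresponds to a case-insensitive regex in which
        // '.' matches any single character (space, underscore, dash, ...).
        Pattern rohr1 = Pattern.compile("rohr.1", Pattern.CASE_INSENSITIVE);
        for (String typ : new String[] {"Rohr 1", "rohr_1", "ROHR-1", "Rohr 2"}) {
            System.out.println(typ + " -> " + rohr1.matcher(typ).find());
        }
        // Prints true for the first three values and false for "Rohr 2".
    }
}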
--- a/artifacts/doc/conf/mapserver/mapfile.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/mapfile.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -19,23 +19,27 @@
 WEB
   METADATA
     "wms_title" "FLYS Web Map Service"
-    "wms_onlineresource" "$MAPSERVERURL"
+    "wms_onlineresource" "${MAPSERVERURL}user-wms"
     "wms_encoding" "UTF-8"
     "wms_accessconstraints" "none"
     "wms_fees" "none"
     "wms_addresstype" "postal"
-    "wms_address" "Any Street"
-    "wms_city" "Any City"
-    "wms_stateorprovince" "Any state"
-    "wms_postcode" "My Postalcode"
-    "wms_country" "Any Country"
-    "wms_contactperson" "Any Person"
-    "wms_contactorganization" "Any Orga"
-    "wms_contactelectronicmailaddress" "any-email@example.com"
-    "wms_contactvoicetelephone" "Any's telephone number"
+    "wms_address" ""
+    "wms_city" ""
+    "wms_stateorprovince" ""
+    "wms_postcode" ""
+    "wms_country" ""
+    "wms_contactperson" ""
+    "wms_contactorganization" ""
+    "wms_contactelectronicmailaddress" ""
+    "wms_contactvoicetelephone" ""
     "wms_srs" "EPSG:4326 EPSG:31466 EPSG:31467"
     "wms_feature_info_mime_type" "text/html"
+    "wms_getmap_formatlist" "image/png,image/png; mode=24bit,image/jpeg"
     "ows_enable_request" "*"
+    "ows_sld_enabled" "false"
+    "ows_title" "FLYS Web Map Service"
+    "ows_extent" "3233232 5303455 3421524 5585825"
   END
 END
--- a/artifacts/doc/conf/mapserver/river-mapfile.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/river-mapfile.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -19,7 +19,7 @@
 WEB
   METADATA
     "wms_title" "FLYS Rivers Web Map Service"
-    #"wms_onlineresource" "http://localhost:7777/river-wms" # "$MAPSERVERURL"
+    "wms_onlineresource" "${MAPSERVERURL}river-wms"
     "wms_encoding" "UTF-8"
     "wms_accessconstraints" "none"
     "wms_fees" "none"
--- a/artifacts/doc/conf/mapserver/shapefile_layer.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/shapefile_layer.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -21,6 +21,8 @@
       "wms_group_title" "$LAYER.getGroupTitle()"
     #end
     END
+
+    CLASSITEM "TYP"
 
     #if ( !$LAYER.getStyle() )
     #if ( $LAYER.getGroupTitle() )
--- a/artifacts/doc/conf/mapserver/wsplgen_layer.vm	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/mapserver/wsplgen_layer.vm	Thu Sep 12 10:13:09 2013 +0200
@@ -19,6 +19,7 @@
     "gml_include_items" "all"
     "ows_enable_request" "GetFeatureInfo"
     "wms_feature_info_mime_type" "gml"
+    "wms_srs" "EPSG:4326 EPSG:31466 EPSG:31467"
     #if ( $LAYER.getGroupTitle() )
     "wms_group_title" "$LAYER.getGroupTitle()"
     #end
--- a/artifacts/doc/conf/meta-data.xml Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/doc/conf/meta-data.xml Thu Sep 12 10:13:09 2013 +0200 @@ -36,6 +36,7 @@ </dc:macro> <dc:macro name="generate-system-content"> + <dc:message>Generate system content with variables: {dc:dump-variables()}</dc:message> <dc:call-macro name="artifact-range"> <dc:call-macro name="km-filtered-wsts"> <dc:choose> @@ -44,6 +45,7 @@ Recommendations (client shall load immediately). </dc:comment> <dc:iterate var="out" container="artifact-outs"> + <dc:message>Rec out iteration for: {$out}</dc:message> <dc:choose> <dc:when test="$out = 'w_differences'"> <dc:call-macro name="annotations"/> @@ -85,14 +87,20 @@ <dc:when test="$out = 'fix_longitudinal_section_curve'"> <dc:call-macro name="annotations"/> </dc:when> + <dc:when test="$out = 'bedheight_middle'"> + <dc:call-macro name="annotations"/> + </dc:when> <dc:when test="$out = 'bed_difference_epoch'"> <dc:call-macro name="annotations"/> + <dc:call-macro name="basedata_6_delta_w"/> </dc:when> <dc:when test="$out = 'bed_difference_year'"> <dc:call-macro name="annotations"/> + <dc:call-macro name="basedata_6_delta_w"/> </dc:when> <dc:when test="$out = 'bed_difference_height_year'"> <dc:call-macro name="annotations"/> + <dc:call-macro name="basedata_6_delta_w"/> </dc:when> <dc:when test="$out = 'floodmap'"> <dc:call-macro name="flood-map-recommended"/> @@ -119,6 +127,7 @@ Non - Recommendations. </dc:comment> <dc:iterate var="out" container="artifact-outs"> + <dc:message>Non Rec out iteration for: {$out}</dc:message> <dc:choose> <dc:when test="$out = 'cross_section'"> <dc:call-macro name="basedata_0"/> @@ -135,12 +144,34 @@ </dc:when> <dc:when test="$out = 'w_differences'"> <dc:call-macro name="longitudinal-section-prototype"/> + <dc:call-macro name="bedheight_differences"/> </dc:when> <dc:when test="$out = 'discharge_longitudinal_section'"> <dc:call-macro name="longitudinal-section-prototype"/> </dc:when> + <dc:when test="$out = 'historical_discharge_wq'"> + <dc:call-macro name="historical_discharge_curve"/> + <dc:call-macro name="discharge_table_gauge"/> + <dc:call-macro name="basedata_2_fixations_wqkms"/> + <dc:call-macro name="basedata_5_flood-protections"/> + <dc:call-macro name="basedata_0"/> + <dc:call-macro name="basedata_1_additionals"/> + <dc:call-macro name="basedata_4_heightmarks-points"/> + <computed_discharge_curve> + <dc:call-macro name="mainvalues"/> + </computed_discharge_curve> + </dc:when> <dc:when test="$out = 'discharge_curve'"> - <dc:call-macro name="mainvalues"/> + <dc:call-macro name="historical_discharge_curve"/> + <dc:call-macro name="discharge_table_gauge"/> + <dc:call-macro name="basedata_2_fixations_wqkms"/> + <dc:call-macro name="basedata_5_flood-protections"/> + <dc:call-macro name="basedata_0_wq"/> + <dc:call-macro name="basedata_1_additionals"/> + <dc:call-macro name="basedata_4_heightmarks-points"/> + <computed_discharge_curve> + <dc:call-macro name="mainvalues"/> + </computed_discharge_curve> </dc:when> <dc:when test="$out = 'duration_curve'"> <dc:call-macro name="mainvalues"/> @@ -157,14 +188,14 @@ <dc:call-macro name="basedata_5_flood-protections_relative_points"/> </dc:when> <dc:when test="$out = 'fix_wq_curve'"> + <dc:call-macro name="historical_discharge_curve"/> + <dc:call-macro name="discharge_table_gauge"/> <dc:call-macro name="basedata_0_wq"/> <dc:call-macro name="basedata_1_additionals_marks"/> <dc:call-macro name="basedata_2_fixations_wqkms"/> <dc:call-macro name="basedata_3_officials"/> <dc:call-macro 
name="basedata_4_heightmarks-points"/> <dc:call-macro name="basedata_5_flood-protections_relative_points"/> - <dc:call-macro name="discharge_table_gauge"/> - <dc:call-macro name="discharge_fix_wq"/> </dc:when> <dc:when test="$out = 'fix_longitudinal_section_curve'"> <dc:call-macro name="longitudinal-section-prototype"/> @@ -250,11 +281,12 @@ </dc:choose> </dc:when> <dc:when test="$out = 'computed_discharge_curve'"> - <dc:call-macro name="discharge_computed"/> + <dc:call-macro name="historical_discharge_curve"/> + <dc:call-macro name="discharge_table_gauge"/> <dc:call-macro name="basedata_2_fixations_wqkms"/> <dc:call-macro name="basedata_5_flood-protections"/> - <dc:call-macro name="basedata_0"/> - <dc:call-macro name="basedata_1_additionals"/> + <dc:call-macro name="basedata_0_wq"/> + <dc:call-macro name="basedata_1_additionals_wq"/> <dc:call-macro name="basedata_4_heightmarks-points"/> <computed_discharge_curve> <dc:call-macro name="mainvalues"/> @@ -268,6 +300,16 @@ <dc:call-macro name="bed-heights-epoch"/> </bedheights> </dc:when> + <dc:when test="$out = 'differenceable'"> + <dc:if test="$current-state-id != 'state.winfo.uesk.wsp'"> + <dc:call-macro name="basedata_0"/> + <dc:call-macro name="basedata_1_additionals"/> + <dc:call-macro name="basedata_2_fixations"/> + <dc:call-macro name="basedata_4_heightmarks-points"/> + <dc:call-macro name="basedata_5_flood-protections"/> + <dc:call-macro name="minfo-heights"/> + </dc:if> + </dc:when> <dc:when test="$out = 'waterlevels'"> <dc:if test="$current-state-id != 'state.winfo.uesk.wsp'"> <dc:call-macro name="basedata_0"/> @@ -311,6 +353,7 @@ <dc:when test="$out = 'longitudinal_section'"> <dc:call-macro name="longitudinal"/> <dc:call-macro name="differences"/> + <dc:call-macro name="bedheight_differences"/> </dc:when> <dc:when test="$out = 'discharge_longitudinal_section'"> <dc:call-macro name="longitudinal"/> @@ -319,6 +362,7 @@ <dc:when test="$out = 'w_differences'"> <dc:call-macro name="longitudinal"/> <dc:call-macro name="differences"/> + <dc:call-macro name="bedheight_differences"/> </dc:when> <dc:when test="$out = 'fix_deltawt_curve'"> <dc:call-macro name="delta-wt"/> @@ -326,9 +370,6 @@ <dc:when test="$out = 'reference_curve'"> <dc:call-macro name="reference-curves"/> </dc:when> - <dc:when test="$out = 'computed_discharge_curve'"> - <dc:call-macro name="computed-discharge-curve"/> - </dc:when> <dc:when test="$out = 'cross_section'"> <dc:call-macro name="waterlevels"/> </dc:when> @@ -340,16 +381,18 @@ </dc:when> <dc:when test="$out = 'fix_wq_curve'"> <dc:call-macro name="fix-wq-curve"/> + <dc:call-macro name="waterlevels-fix"/> </dc:when> <dc:when test="$out = 'duration_curve'"> <dc:call-macro name="duration-curve"/> </dc:when> + <dc:when test="$out = 'differenceable'"> + <dc:call-macro name="differenceable-fix"/> + <dc:call-macro name="differences"/> + </dc:when> <dc:when test="$out = 'waterlevels'"> <dc:call-macro name="waterlevels-fix"/> </dc:when> - <dc:when test="$out = 'fix_wq_curve'"> - <dc:call-macro name="waterlevels-fix"/> - </dc:when> <dc:when test="$out = 'floodmap'"> <dc:call-macro name="flood-map"/> </dc:when> @@ -358,7 +401,8 @@ </dc:when> <dc:when test="$out = 'bedheight_middle'"> <dc:call-macro name="waterlevels-discharge"/> - <dc:call-macro name="waterlevels-fix"/> + <dc:call-macro name="differenceable-fix"/> + <dc:call-macro name="differences"/> </dc:when> <dc:when test="$out = 'floodmap-hws'"> <dc:call-macro name="floodmap-hws-user"/> @@ -375,20 +419,20 @@ </dc:when> <dc:when test="$out = 'sedimentload_ls'"> 
<dc:call-macro name="differences"/> - <dc:call-macro name="bedheight-differences"/> + <dc:call-macro name="bedheight_differences"/> <dc:call-macro name="flow-velocity"/> <dc:call-macro name="sediment-load"/> </dc:when> <dc:when test="$out = 'bed_difference_year'"> <dc:call-macro name="waterlevels-discharge"/> - <dc:call-macro name="bedheight-differences"/> + <dc:call-macro name="bedheight_differences"/> <dc:call-macro name="differences"/> <dc:call-macro name="waterlevels-fix"/> <dc:call-macro name="delta-wt-ls"/> </dc:when> <dc:when test="$out = 'bed_difference_epoch'"> <dc:call-macro name="waterlevels-discharge"/> - <dc:call-macro name="bedheight-differences"/> + <dc:call-macro name="bedheight_differences"/> <dc:call-macro name="differences"/> <dc:call-macro name="waterlevels-fix"/> <dc:call-macro name="delta-wt-ls"/> @@ -411,9 +455,10 @@ <dc:for-each> <w_differences description="{$facet_description}" - factory="winfo" target_out="{$out}" - artifact-id="{$aid}" - ids="{$aid}" + factory="winfo" + target_out="{$out}" + artifact-id="{$a_gid}" + ids="{$facet_num}" out="w_differences"/> </dc:for-each> </differences> @@ -440,31 +485,50 @@ </dc:filter> </dc:macro> - <dc:macro name="computed-discharge-curve"> - <dc:filter expr="$facet_name = 'computed_discharge_curve.q'"> + + <dc:macro name="historical_discharge_curve"> + <dc:variable name="refgauge" type="number" expr="$reference_gauge"/> + <dc:context> + <dc:statement> + SELECT g.id AS gauge_id, + g.name AS gauge_name, + dt.id AS dt_id, + t.start_time AS start_time, + t.stop_time AS stop_time, + dt.description AS desc, + dt.bfg_id AS bfg_id + FROM gauges g + JOIN discharge_tables dt ON g.id = dt.gauge_id + LEFT JOIN time_intervals t ON dt.time_interval_id = t.id + WHERE g.river_id = ${river_id} + AND dt.kind <> 0 + AND ((g.station = ${fromkm} AND g.station = ${tokm}) + OR g.official_number = ${refgauge}) + ORDER BY start_time + </dc:statement> <dc:if test="dc:has-result()"> - <computed_discharge_curves> - <dc:for-each> - <dc:element name="${facet_name}"> - <dc:attribute name="description" value="${facet_description}"/> - <dc:attribute name="factory" value="winfo"/> - <dc:attribute name="target_out" value="${out}"/> - <dc:attribute name="artifact-id" value="${a_id}"/> - <dc:attribute name="ids" value="${a_id}"/> - <dc:attribute name="out" value="computed_discharge_curve"/> - </dc:element> - </dc:for-each> - </computed_discharge_curves> + <historical_discharge_curves> + <dc:group expr="$gauge_name"> + <dc:for-each> + <dc:variable name="combined_desc" expr="concat($bfg_id, ' ', dc:date-format('dd.MM.yyyy', $start_time), ' - ', dc:date-format('dd.MM.yyyy', $stop_time))"/> + <histdis name="{$combined_desc}" + description="{$combined_desc}" + factory="gaugedischarge" target_out="{$out}" + ids="{$gauge_name};{$dt_id};{$combined_desc}"/> + </dc:for-each> + </dc:group> + </historical_discharge_curves> </dc:if> - </dc:filter> + </dc:context> </dc:macro> + <dc:macro name="flood-map"> <dc:filter expr="$facet_name = 'floodmap.wsplgen'"> <dc:if test="dc:has-result()"> <floodmap> <dc:for-each> - <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"/> + <dc:variable name="combined_desc" expr="concat($facet_description, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"/> <dc:element name="${facet_name}"> <dc:attribute name="description" value="${combined_desc}"/> <dc:attribute name="factory" value="winfo"/> @@ -483,7 +547,7 @@ <dc:filter 
expr="$out_name = 'cross_section'"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))"> <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment> <longitudinal_section_columns description="{dc:group-key()}"> <dc:for-each> @@ -507,7 +571,7 @@ <dc:filter expr="$out_name = 'longitudinal_section'"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation))"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation))"> <dc:comment>Aheinecke: Why is this grouping different from the rest?</dc:comment> <longitudinal_section_columns description="{dc:group-key()}"> <dc:for-each> @@ -535,7 +599,7 @@ starts-with($facet_name, 'fix_reference_events_ls'))"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -563,7 +627,7 @@ starts-with($facet_name, 'fix_sector_average_dwt'))"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -588,7 +652,7 @@ starts-with($facet_name, 'fix_deviation_dwt'))"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -611,7 +675,7 @@ <dc:filter expr="$out_name = 'fix_derivate_curve' and $facet_name = 'fix_derivate_curve'"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -638,7 +702,7 @@ $facet_name = 'fix_wq_curve')"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -682,7 +746,7 @@ (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location'))"> <dc:if test="dc:has-result()"> <waterlevels> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> <dc:choose> @@ 
-710,6 +774,77 @@ </dc:filter> </dc:macro> + <dc:comment comment="For building differences." /> + <dc:macro name="differenceable-fix"> + <dc:comment> + No diffs between beddiffs and others, for now. + <beddifferences> + <dc:call-macro name="bedheight_differences"/> + </beddifferences> + </dc:comment> + <dc:comment comment="Vollmer curves need own factory"/> + <dc:filter expr="$a_state = 'state.fix.vollmer.compute' and $facet_name = 'longitudinal_section.w'"> + <dc:if test="dc:has-result()"> + <vollmer_waterlevels> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> + <waterlevels description="{dc:group-key()}"> + <dc:for-each> + <dc:choose> + <dc:when test="$ld_m = 'location'"> + <dc:variable name="combined_desc" expr="concat($facet_description, ' an KM ', $deffrom)"/> + </dc:when> + <dc:otherwise> + <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ', + $deffrom, ' bis KM ', $defto)"/> + </dc:otherwise> + </dc:choose> + <dc:element name="${facet_name}"> + <dc:attribute name="description" value="${combined_desc}"/> + <dc:attribute name="ids" value="${facet_num}"/> + <dc:attribute name="factory" value="fixanalysis"/> + <dc:attribute name="target_out" value="${out}"/> + <dc:attribute name="artifact-id" value="${a_gid}"/> + <dc:attribute name="out" value="longitudinal_section"/> + </dc:element> + </dc:for-each> + </waterlevels> + </dc:group> + </vollmer_waterlevels> + </dc:if> + </dc:filter> + <dc:filter expr="not($a_state = 'state.fix.vollmer.compute') and (($out_name = 'longitudinal_section' and $facet_name = 'longitudinal_section.w') and + (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location')))"> + <dc:if test="dc:has-result()"> + <waterlevels> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> + <waterlevels description="{dc:group-key()}"> + <dc:for-each> + <dc:choose> + <dc:when test="$ld_m = 'location'"> + <dc:variable name="combined_desc" expr="concat($facet_description, ' an KM ', $deffrom)"/> + </dc:when> + <dc:otherwise> + <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ', + $deffrom, ' bis KM ', $defto)"/> + </dc:otherwise> + </dc:choose> + <dc:element name="${facet_name}"> + <dc:attribute name="description" value="${combined_desc}"/> + <dc:attribute name="ids" value="${facet_num}"/> + <dc:attribute name="factory" value="winfo"/> + <dc:attribute name="target_out" value="${out}"/> + <dc:attribute name="artifact-id" value="${a_gid}"/> + <dc:attribute name="out" value="longitudinal_section"/> + </dc:element> + </dc:for-each> + </waterlevels> + </dc:group> + </waterlevels> + </dc:if> + </dc:filter> + </dc:macro> + + <dc:macro name="floodmap-hws-user"> <dc:comment>No grouping in this?</dc:comment> <dc:filter expr="$out_name = 'floodmap' and $facet_name = 'floodmap.usershape'"> @@ -734,7 +869,7 @@ $facet_name = 'bed_longitudinal_section.bed_diameter_sublayer')"> <dc:if test="dc:has-result()"> <bed_quality_bed> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <quality-bed description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -758,7 +893,7 @@ $facet_name = 'bed_longitudinal_section.bedload_diameter'"> <dc:if test="dc:has-result()"> <bed_quality_load> - <dc:group expr="concat($river, ' 
', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <quality-load description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -783,7 +918,7 @@ $facet_name = 'bed_longitudinal_section.sediment_density_sublayer')"> <dc:if test="dc:has-result()"> <bed_quality_density> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <density description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -808,7 +943,7 @@ $facet_name = 'bed_longitudinal_section.porosity_sublayer')"> <dc:if test="dc:has-result()"> <bed_quality_porosity> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <porosity description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -835,7 +970,7 @@ $facet_name = 'flow_velocity.mainchannel.filtered')"> <dc:if test="dc:has-result()"> <flow-velocity> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <flow description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -858,7 +993,7 @@ <dc:filter expr="$out_name = 'sedimentload_ls' and starts-with($facet_name, 'sedimentload')"> <dc:if test="dc:has-result()"> <sediment-load> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <load description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -877,35 +1012,35 @@ </dc:filter> </dc:macro> - <dc:macro name="bedheight-differences"> + <dc:macro name="bedheight_differences"> <dc:filter expr="($out_name = 'bed_difference_year' or $out_name = 'bed_difference_epoch') and (starts-with($facet_name, 'bedheight_difference.year') or starts-with($facet_name, 'bedheight_difference.epoch'))"> <dc:if test="dc:has-result()"> - <bedheight-differences> - <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <bedheight_differences> + <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <difference description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> <dc:attribute name="factory" value="minfo"/> <dc:attribute name="target_out" value="${out}"/> <dc:attribute name="description" value="${facet_description}"/> - <dc:attribute name="ids" value="${facet_num}-${facet_name}"/> - <dc:attribute name="artifact-id" value="${a_id}"/> - <dc:attribute name="out" value="${out}"/> + <dc:attribute name="ids" value="${facet_num}"/> + <dc:attribute name="artifact-id" value="${a_gid}"/> + <dc:attribute name="out" value="${out_name}"/> </dc:element> </dc:for-each> </difference> </dc:group> - </bedheight-differences> + </bedheight_differences> </dc:if> </dc:filter> </dc:macro> <dc:macro name="waterlevels-discharge"> - <dc:filter expr="$out_name = 
'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w"> + <dc:filter expr="$out_name = 'discharge_longitudinal_section' and $facet_name = 'discharge_longitudinal_section.w'"> <dc:if test="dc:has-result()"> <waterlevels-discharge> - <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> + <dc:group expr="concat($oid, ' ', $river, ' ', $a_id, ' ', dc:date-format('dd.MM.yyyy - H:mm:ss', $a_creation), ' ', $collection_name)"> <discharge description="{dc:group-key()}"> <dc:for-each> <dc:element name="${facet_name}"> @@ -1193,36 +1328,27 @@ <dc:macro name="discharge_table_gauge"> <dc:context> <dc:statement> - SELECT id AS gauge_id, - name AS gauge_name - FROM gauges WHERE river_id = ${river_id} + SELECT g.id AS gauge_id, + g.name AS gauge_name, + t.start_time AS start_time, + t.stop_time AS stop_time + FROM gauges g + JOIN discharge_tables dt ON g.id = dt.gauge_id + LEFT JOIN time_intervals t ON dt.time_interval_id = t.id + WHERE g.river_id = ${river_id} + AND dt.kind = 0 + AND ((g.station = ${fromkm} AND g.station = ${tokm}) + OR g.official_number = ${refgauge}) </dc:statement> <dc:if test="dc:has-result()"> - <discharge_table_nn> - <discharge_table_gauge> - <dc:for-each> - <gauge name="{$gauge_name}" - factory="gaugedischarge" target_out="{$out}" - from="{$g_start}" - to="{$g_stop}" - ids="{$gauge_name}"/> - </dc:for-each> - </discharge_table_gauge> - </discharge_table_nn> + <dc:for-each> + <current_gauge factory="gaugedischarge" target_out="{$out}" + ids="{$gauge_name}"/> + </dc:for-each> </dc:if> </dc:context> </dc:macro> - <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment> - <dc:macro name="discharge_computed"> - <dc:call-macro name="discharge_table_gauge"/> - </dc:macro> - - <dc:comment>TODO: Why is this just a copy of the discharge_table_gauge?</dc:comment> - <dc:macro name="discharge_fix_wq"> - <dc:call-macro name="discharge_table_gauge"/> - </dc:macro> - <dc:macro name="qsectors"> <qsector factory="qsectors" ids="{$river_id}" target_out="{$out}" /> </dc:macro> @@ -1845,7 +1971,6 @@ FROM floodplain fp JOIN floodplain_kinds flk on fp.kind_id = flk.id WHERE river_id = ${river_id} - AND kind_id=1 </dc:statement> <dc:if test="dc:has-result()"> <floodplain> @@ -2332,8 +2457,8 @@ <officiallines> <dc:for-each> <column name="{$olname}" - ids="additionals-wstv-{$wstcolpos}-{$wstid}" - factory="staticwqkms" target_out="{$out}"/> + ids="official-wstv-{$wstcolpos}-{$wstid}" + factory="staticwqkms" target_out="{$out}"/> </dc:for-each> </officiallines> </dc:if> @@ -2367,17 +2492,21 @@ <dc:macro name="all-user-artifacts"> <dc:context connection="user"> - <dc:comment>Select collections and masterartifacts.</dc:comment> + <dc:comment>Select collections and masterartifacts. + XXX: The cast is a quick hack because the ld_* values are + now TEXT fields. To properly fix / asses the problems here + an SLT evaluation is needed. 
+ </dc:comment> <dc:statement> SELECT c.name AS collection_name, ma.id AS a_id, ma.state AS a_state, ma.gid AS a_gid, ma.creation AS a_creation, - COALESCE(ma.ld_mode, '') AS ld_m, - COALESCE(ma.ld_locations, '') AS ld_l, - COALESCE(ma.ld_from, '') AS ld_f, - COALESCE(ma.ld_to, '') AS ld_t, + CAST(COALESCE(ma.ld_mode, '') AS VARCHAR(255)) AS ld_m, + CAST(COALESCE(ma.ld_locations, '') AS VARCHAR(255)) AS ld_l, + CAST(COALESCE(ma.ld_from, '') AS VARCHAR(255)) AS ld_f, + CAST(COALESCE(ma.ld_to, '') AS VARCHAR(255)) AS ld_t, o.name AS out_name, o.id AS out_id, f.name AS facet_name,
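A recurring detail in the meta-data.xml diff above is the timestamp pattern change from H:m:s to H:mm:ss in the dc:date-format calls, which zero-pads minutes and seconds so the grouped descriptions line up. A small, self-contained Java comparison of the two patterns (the sample date is arbitrary; dc:date-format is assumed here to follow the usual SimpleDateFormat semantics):

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class TimestampPatternDemo {
    public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2013, Calendar.SEPTEMBER, 12, 9, 5, 7); // 09:05:07

        // Single-letter minute/second fields are not zero-padded ...
        System.out.println(new SimpleDateFormat("dd.MM.yyyy - H:m:s").format(cal.getTime()));
        // ... while the two-letter fields are.
        System.out.println(new SimpleDateFormat("dd.MM.yyyy - H:mm:ss").format(cal.getTime()));
        // Typical output:
        // 12.09.2013 - 9:5:7
        // 12.09.2013 - 9:05:07
    }
}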
--- a/artifacts/doc/conf/themes.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/themes.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -177,9 +177,12 @@
         <mapping from="other.wq" to="WQPoints" />
         <mapping from="other.wkms" to="WKms" />
         <mapping from="other.wkms.marks" to="WKmsAnnotation" />
+        <mapping from="other.wqkms" to="WQKmsHorizontal" masterAttr="calculation_mode==calc.discharge.curve" />
         <mapping from="other.wqkms" to="WQKms" />
+        <mapping from="other.wqkms.w" to="WQKmsHorizontal" masterAttr="calculation_mode==calc.discharge.curve" />
         <mapping from="other.wqkms.w" to="WQKms" />
         <mapping from="other.wqkms.q" to="WQKms" />
+        <mapping from="other.w.interpol" to="MainValuesW" />
         <mapping from="heightmarks_points" to="heightmarks_points" />
         <mapping from="area" to="Area" />
         <mapping from="cross_section.area" to="CrossSectionArea" />
@@ -207,6 +210,7 @@
         <mapping from="flow_velocity.mainchannel.filtered" to="FlowVelocityVMainChannel" />
         <mapping from="flow_velocity.tau.filtered" to="FlowVelocityTau" />
         <mapping from="flow_velocity.discharge" to="FlowVelocityDischarge" />
+        <mapping from="flow_velocity.measurement" to="FlowVelocityDischarge" />
         <mapping from="bedheight_middle.single" to="MiddleBedHeightSingle" />
         <mapping from="bedheight_middle.epoch" to="MiddleBedHeightEpoch" />
         <mapping from="bed_longitudinal_section.porosity_toplayer" to="PorosityTopLayer" />
@@ -273,6 +277,7 @@
         <mapping from="fix_analysis_events_wq" to="FixingAnalysisEventsWQ" />
         <mapping from="fix_outlier" to="FixingOutlier" />
         <mapping from="fix_wq_curve" to="FixingWQCurve" />
+        <mapping from="fix_wq_ls" to="FixingCalculatedPoint" />
         <mapping from="fix_reference_events_wq" to="FixingReferenceEvents" />
         <mapping from="fix_sector_average_dwt_0" to="FixingDeltaWtAverage0" />
         <mapping from="fix_sector_average_dwt_1" to="FixingDeltaWtAverage1" />
--- a/artifacts/doc/conf/themes/default.xml Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/doc/conf/themes/default.xml Thu Sep 12 10:13:09 2013 +0200 @@ -84,6 +84,8 @@ display="Punktbeschriftung anzeigen" default="false" hints="hidden" /> <field name="linecolor" type="Color" display="Linienfarbe" default="204, 204, 204" /> + <field name="textorientation" type="boolean" display="Textausrichtung" + default="true" /> </fields> </theme> @@ -116,6 +118,19 @@ </fields> </theme> + <theme name="WQKmsHorizontal"> + <inherits> + <inherit from="WQKms" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Linienfarbe" + default="204, 204, 204" /> + <field name="textorientation" type="boolean" display="Textausrichtung" + default="true" /> + </fields> + </theme> + + <theme name="WQPoints"> <inherits> <inherit from="Points" /> @@ -834,11 +849,26 @@ <!-- MIDDLE BED HEIGHT --> <theme name="MiddleBedHeightSingle"> <inherits> - <inherit from="LongitudinalSection" /> + <inherit from="Lines" /> + <inherit from="Points" /> + <inherit from="MinMaxPoints" /> + <inherit from="Label" /> </inherits> <fields> + <field name="showlines" type="boolean" display="Linie anzeigen" + default="true" /> + <field name="linesize" type="int" display="Liniendicke" + default="1" /> + <field name="linetype" type="Dash" display="Linienart" + default="10" /> <field name="linecolor" type="Color" display="Linienfarbe" - default="204, 204, 204" /> + /> + <field name="showlinelabel" type="boolean" + display="Beschriftung anzeigen" default="false" /> + <field name="showpoints" type="boolean" display="Punkte anzeigen" + default="false" /> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false" hints="hidden" /> </fields> </theme> @@ -1124,7 +1154,7 @@ display="Beschriftung anzeigen" default="false" hints="hidden" /> </fields> </theme> - <theme name="SedimentLoadFineTotalLoad"> + <theme name="SedimentLoadTotalLoad"> <inherits> <inherit from="LongitudinalSection" /> </inherits> @@ -1399,12 +1429,14 @@ <theme name="FixPoints"> <inherits> - <inherit from="Points" /> + <inherit from="ColorfulPoints" /> <inherit from="Label" /> </inherits> <fields> <field name="pointsize" type="int" display="Punktdicke" default="3" /> + <field name="showpointlabel" type="boolean" + display="Beschriftung anzeigen" default="true" hints="hidden" /> </fields> </theme> @@ -1442,6 +1474,20 @@ </fields> </theme> + <theme name="FixingCalculatedPoint"> + <inherits> + <inherit from="FixPoints" /> + </inherits> + <fields> + <field name="pointcolor" type="Color" display="Punktfarbe" + default="0, 0, 0" /> + <field name="pointsize" type="int" display="Punktdicke" + default="5"/> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false"/> + </fields> + </theme> + <theme name="FixingWQCurve"> <inherits> <inherit from="FixLines" /> @@ -1544,9 +1590,21 @@ </fields> </theme> + <theme name="FixingDeltaWtAverage"> + <inherits> + <inherit from="FixLines" /> + </inherits> + <fields> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false" hints="hidden"/> + <field name="showlinelabel" type="boolean" + display="Beschriftung anzeigen" default="true" /> + </fields> + </theme> + <theme name="FixingDeltaWtAverage0"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1556,7 +1614,7 @@ <theme name="FixingDeltaWtAverage1"> 
<inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1566,7 +1624,7 @@ <theme name="FixingDeltaWtAverage2"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1576,7 +1634,7 @@ <theme name="FixingDeltaWtAverage3"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe"
--- a/artifacts/doc/conf/themes/second.xml Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/doc/conf/themes/second.xml Thu Sep 12 10:13:09 2013 +0200 @@ -14,6 +14,20 @@ </fields> </theme> + <theme name="MainValuesQVerticalText"> + <inherits> + <inherit from="Lines" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Farbe" + default="200, 0, 15" /> + <field name="textorientation" type="boolean" display="Textausrichtung" + default="false" /> + <field name="showlinelabel" type="boolean" + display="Linienbeschriftung anzeigen" default="false" hints="hidden" /> + </fields> + </theme> + <theme name="MainValuesW"> <inherits> <inherit from="Lines" /> @@ -28,20 +42,6 @@ </fields> </theme> - <theme name="MainValuesQVerticalText"> - <inherits> - <inherit from="Lines" /> - </inherits> - <fields> - <field name="linecolor" type="Color" display="Farbe" - default="200, 0, 15" /> - <field name="textorientation" type="boolean" display="Textausrichtung" - default="false" /> - <field name="showlinelabel" type="boolean" - display="Linienbeschriftung anzeigen" default="false" hints="hidden" /> - </fields> - </theme> - <theme name="RelativePoint"> <inherits> <inherit from="Points" /> @@ -834,11 +834,26 @@ <!-- MIDDLE BED HEIGHT --> <theme name="MiddleBedHeightSingle"> <inherits> - <inherit from="LongitudinalSection" /> + <inherit from="Lines" /> + <inherit from="Points" /> + <inherit from="MinMaxPoints" /> + <inherit from="Label" /> </inherits> <fields> + <field name="showlines" type="boolean" display="Linie anzeigen" + default="true" /> + <field name="linesize" type="int" display="Liniendicke" + default="1" /> + <field name="linetype" type="Dash" display="Linienart" + default="10" /> <field name="linecolor" type="Color" display="Linienfarbe" - default="204, 204, 204" /> + /> + <field name="showlinelabel" type="boolean" + display="Beschriftung anzeigen" default="false" /> + <field name="showpoints" type="boolean" display="Punkte anzeigen" + default="false" /> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false" hints="hidden" /> </fields> </theme> @@ -1124,7 +1139,7 @@ display="Beschriftung anzeigen" default="false" hints="hidden" /> </fields> </theme> - <theme name="SedimentLoadFineTotalLoad"> + <theme name="SedimentLoadTotalLoad"> <inherits> <inherit from="LongitudinalSection" /> </inherits> @@ -1399,12 +1414,14 @@ <theme name="FixPoints"> <inherits> - <inherit from="Points" /> + <inherit from="ColorfulPoints" /> <inherit from="Label" /> </inherits> <fields> <field name="pointsize" type="int" display="Punktdicke" default="3" /> + <field name="showpointlabel" type="boolean" + display="Beschriftung anzeigen" default="true" hints="hidden" /> </fields> </theme> @@ -1442,6 +1459,20 @@ </fields> </theme> + <theme name="FixingCalculatedPoint"> + <inherits> + <inherit from="FixPoints" /> + </inherits> + <fields> + <field name="pointcolor" type="Color" display="Punktfarbe" + default="0, 0, 0" /> + <field name="pointsize" type="int" display="Punktdicke" + default="5"/> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false"/> + </fields> + </theme> + <theme name="FixingWQCurve"> <inherits> <inherit from="FixLines" /> @@ -1544,9 +1575,21 @@ </fields> </theme> + <theme name="FixingDeltaWtAverage"> + <inherits> + <inherit from="FixLines" /> + </inherits> + <fields> + <field name="showpointlabel" type="boolean" + display="Punktbeschriftung anzeigen" default="false" hints="hidden"/> + <field 
name="showlinelabel" type="boolean" + display="Beschriftung anzeigen" default="true" /> + </fields> + </theme> + <theme name="FixingDeltaWtAverage0"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1556,7 +1599,7 @@ <theme name="FixingDeltaWtAverage1"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1566,7 +1609,7 @@ <theme name="FixingDeltaWtAverage2"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe" @@ -1576,7 +1619,7 @@ <theme name="FixingDeltaWtAverage3"> <inherits> - <inherit from="FixLines" /> + <inherit from="FixingDeltaWtAverage" /> </inherits> <fields> <field name="linecolor" type="Color" display="Linienfarbe"
--- a/artifacts/doc/conf/themes/virtual.xml	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/doc/conf/themes/virtual.xml	Thu Sep 12 10:13:09 2013 +0200
@@ -30,6 +30,17 @@
         </fields>
     </theme>
 
+    <theme name="ColorfulPoints" type="virtual">
+        <fields>
+            <field name="showpoints" type="boolean" display="Punkte anzeigen"
+                default="true" />
+            <field name="pointsize" type="int" display="Punktdicke"
+                default="5" />
+            <field name="showpointlabel" type="boolean"
+                display="Punktbeschriftung anzeigen" default="false"/>
+        </fields>
+    </theme>
+
     <theme name="Label" type="virtual">
         <fields>
             <field name="labelfontface" type="Font"
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/CrossSectionArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/CrossSectionArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -53,6 +53,9 @@ /** Name of data item keeping the position. */ public static final String DATA_KM = "cross_section.km"; + /** Name of data item keeping the 'parents' km. */ + public static final String PARENT_KM = "cross_section.parent.km"; + /** Name of data item keeping the database id of this c.s.. */ public static final String DATA_DBID = "cross_section.dbid"; @@ -132,7 +135,8 @@ CrossSectionLine csl = csls.get(0); // Find min-km of cross sections, // then set DATA_KM to min(DATA_KM, minCross). - double dataKm = Double.valueOf(getDataAsString(DATA_KM)); + String dataKmValue = getDataAsString(DATA_KM); + double dataKm = (dataKmValue != null) ? Double.valueOf(dataKmValue) : Double.MIN_VALUE; if (dataKm < csl.getKm().doubleValue()) { addStringData(DATA_KM, csl.getKm().toString()); } @@ -140,7 +144,7 @@ fs.add(new CrossSectionFacet(0, cs.getDescription())); // Find out if we are newest and become master if so. - boolean isNewest = CrossSectionFactory.isNewest(cs); + boolean isNewest = cs.shouldBeMaster(getParentKm()); String newString = (isNewest) ? "1" : "0"; addStringData(DATA_IS_NEWEST, newString); addStringData(DATA_IS_MASTER, newString); @@ -154,19 +158,18 @@ /** Copy km where master-artifact "starts". */ @Override protected void initialize( - Artifact artifact, + Artifact master, Object context, CallMeta callMeta) { - D4EArtifact flys = (D4EArtifact) artifact; + D4EArtifact masterArtifact = (D4EArtifact) master; - RangeAccess rangeAccess = new RangeAccess(flys); + RangeAccess rangeAccess = new RangeAccess(masterArtifact); double[] range = rangeAccess.getKmRange(); - double min = 0.0f; if (range != null && range.length > 0) { - min = range[0]; + this.addStringData(DATA_KM, Double.toString(range[0])); + this.addStringData(PARENT_KM, Double.toString(range[0])); } - this.addStringData(DATA_KM, Double.toString(min)); } @@ -242,6 +245,24 @@ /** + * Return position (km) from parent (initial km), 0 if not found. + */ + private double getParentKm() { + String val = getDataAsString(PARENT_KM); + if (val == null) { + logger.warn("Empty data: " + PARENT_KM); + return 0; + } + try { + return Double.valueOf(val); + } + catch (NumberFormatException e) { + logger.warn("Could not get data " + PARENT_KM + " as double", e); + return 0; + } + } + + /** * Return position (km) from data, 0 if not found. */ protected double getKm() {
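The CrossSectionArtifact changes above stop assuming that the cross_section.km data item is always present; Double.valueOf(null) would throw a NullPointerException. A minimal sketch of the defensive parsing pattern used there (method name and sentinel are illustrative, not the project's API):

public final class SafeKmParse {
    /** Parse a km string, falling back to a sentinel when it is missing or malformed. */
    static double parseKm(String value, double fallback) {
        if (value == null) {
            return fallback;
        }
        try {
            return Double.valueOf(value);
        }
        catch (NumberFormatException nfe) {
            return fallback;
        }
    }

    public static void main(String[] args) {
        System.out.println(parseKm("47.5", Double.MIN_VALUE)); // 47.5
        System.out.println(parseKm(null, Double.MIN_VALUE));   // 4.9E-324 (sentinel)
    }
}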
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/D4EArtifact.java	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/D4EArtifact.java	Thu Sep 12 10:13:09 2013 +0200
@@ -1491,7 +1491,7 @@
     /**
      * Method to dump the artifacts state/data.
      */
-    protected void dumpArtifact() {
+    public void dumpArtifact() {
         log.debug("++++++++++++++ DUMP ARTIFACT DATA +++++++++++++++++");
         // Include uuid, type, name
         log.debug(" - Name: " + getName());
@@ -1525,7 +1525,7 @@
     }
 
 
-    protected void debugFacets() {
+    public void debugFacets() {
         log.debug("######### FACETS #########");
 
         for (Map.Entry<String, List<Facet>> entry: facets.entrySet()) {
@@ -1541,7 +1541,7 @@
     }
 
 
-    protected void dumpFilterFacets() {
+    public void dumpFilterFacets() {
         log.debug("######## FILTER FACETS ########");
 
         if (filterFacets == null || filterFacets.isEmpty()) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/FlowVelocityMeasurementArtifact.java	Sat Jun 29 00:20:58 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/FlowVelocityMeasurementArtifact.java	Thu Sep 12 10:13:09 2013 +0200
@@ -8,6 +8,8 @@
 
 package org.dive4elements.river.artifacts;
 
+import java.text.DateFormat;
+
 import java.util.ArrayList;
 import java.util.List;
@@ -29,6 +31,8 @@
 
 import org.dive4elements.river.artifacts.model.FacetTypes;
 
+import org.dive4elements.river.utils.Formatter;
+
 /** Artefact to access flow velocity measurements. */
 public class FlowVelocityMeasurementArtifact
@@ -111,13 +115,24 @@
         List<Facet> fs = new ArrayList<Facet>();
         String code = getDatacageIDValue(data);
 
+        DateFormat dateFormatter = Formatter.getDateFormatter(
+            callMeta, "dd.MM.yyy HH:mm");
         if (code != null) {
             // parse code, interact with factory, add real facets.
             // store relevant parts of code as data.
+            FlowVelocityMeasurementValue.FastFlowVelocityMeasurementValue
+                flowVelocityMeasurement =
+                    FlowVelocityMeasurementFactory.getFlowVelocityMeasurement(
+                        Integer.parseInt(code));
+            String name = flowVelocityMeasurement.getDescription();
+            logger.debug ("datetime " + flowVelocityMeasurement.getDatetime());
+            name += " - " + dateFormatter.format(
+                flowVelocityMeasurement.getDatetime());
+
             Facet facet = new FlowVelocityMeasurementFacet(
                 FLOW_VELOCITY_MEASUREMENT,
-                "flowvelocity-name");
+                name);
             fs.add(facet);
             addFacets(state.getID(), fs);
             addStringData(DATA_NAME, code);
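The facet name built above appends a timestamp formatted with the pattern dd.MM.yyy HH:mm. The three-letter yyy looks like a typo for yyyy, but with SimpleDateFormat any year pattern other than yy prints the full year, so the resulting label is unaffected. A short demonstration (assuming the project's Formatter.getDateFormatter wraps a standard DateFormat; the sample date is arbitrary):

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class YearPatternDemo {
    public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2013, Calendar.SEPTEMBER, 12, 10, 13);

        // "yyy" and "yyyy" both print the full year; only "yy" truncates to two digits.
        System.out.println(new SimpleDateFormat("dd.MM.yyy HH:mm").format(cal.getTime()));  // 12.09.2013 10:13
        System.out.println(new SimpleDateFormat("dd.MM.yyyy HH:mm").format(cal.getTime())); // 12.09.2013 10:13
        System.out.println(new SimpleDateFormat("dd.MM.yy HH:mm").format(cal.getTime()));   // 12.09.13 10:13
    }
}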
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -12,6 +12,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.HashMap; import org.apache.log4j.Logger; @@ -35,6 +36,7 @@ import org.dive4elements.river.model.Gauge; import org.dive4elements.river.model.River; +import org.dive4elements.river.model.DischargeTable; import org.dive4elements.river.utils.RiverUtils; @@ -52,6 +54,8 @@ /** The name of the artifact. */ public static final String ARTIFACT_NAME = "gaugedischarge"; + /** The name a facet should have */ + protected String facetWishName; /** * Trivial Constructor. @@ -79,6 +83,13 @@ String ids = StaticD4EArtifact.getDatacageIDValue(data); addStringData("ids", ids); logger.debug("id for gaugedischarge: " + ids); + String[] splitIds = ids.split(";"); + /* We assume that if an ids string with a ';' is given, the + * format is <gauge_name>;<discharge_table_id>;<facet_desc> + * so that a specific discharge table can be selected */ + if (splitIds.length > 2) { + facetWishName = splitIds[2]; + } super.setup(identifier, factory, context, callMeta, data); } @@ -121,7 +132,10 @@ /** Get the Gauges name which came with datacage data-document. */ public String getGaugeName() { - return this.getDataAsString("ids"); + if (getDataAsString("ids") == null) { + return null; + } + return getDataAsString("ids").split(";")[0]; } @@ -166,13 +180,34 @@ } */ - DischargeTables dt = new DischargeTables(river.getName(), getDataAsString("ids")); + Map<String, double [][]> map; - Map<String, double [][]> map = dt.getValues(); + String[] ids = getDataAsString("ids").split(";"); + if (ids.length > 1) { + /* We assume that if an ids string with a ';' is given, the + * format is <gauge_name>;<discharge_table_id>;<facet_desc> + * so that a specific discharge table can be selected */ + int tableId = 0; + try { + tableId = Integer.parseInt(ids[1]); + } catch (NumberFormatException e) { + logger.error("Discharge tables ids string is wrong." + " Format is <gauge_name>;<discharge_table_id>;<facet_desc>" + " Fix your Datacage!"); + // Let's rather break down completely than show the wrong data. + return null; + } + DischargeTable table = DischargeTable.getDischargeTableById(tableId); + map = new HashMap<String, double [][]>(); + map.put(getGaugeName(), DischargeTables.loadDischargeTableValues(table)); + } else { + DischargeTables dt = new DischargeTables(river.getName(), getGaugeName()); + map = dt.getValues(); + } ArrayList<WQKms> res = new ArrayList<WQKms>(); - Gauge gauge = river.determineGaugeByName(this.getDataAsString("ids")); + Gauge gauge = river.determineGaugeByName(getGaugeName()); String name = getGaugeName(); double [][] values = map.get(name); @@ -191,5 +226,13 @@ res.toArray(new WQKms[res.size()]), new Calculation()); } + + /** Gets the facet wish name. + * + * This is a hack to enable setting the name of the facet / theme in the + * UI from the datacage setting. */ + public String getFacetWishName() { + return facetWishName; + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
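The ';'-separated datacage id handled above follows the convention <gauge_name>;<discharge_table_id>;<facet_desc>: the gauge name is mandatory, the discharge table id and the facet wish name are optional. A minimal, self-contained sketch of that parsing and of the fail-fast handling of a broken table id; class names and example values are illustrative only and not part of the changeset.

    // Illustrative parser for the "<gauge_name>;<discharge_table_id>;<facet_desc>"
    // convention used above; class name and sample values are made up.
    public class GaugeDischargeIdsSketch {

        static void describe(String ids) {
            String[] parts = ids.split(";");
            String gaugeName = parts[0];                             // always present
            String facetName = parts.length > 2 ? parts[2] : null;   // optional wish name
            Integer tableId = null;                                  // optional table id

            if (parts.length > 1) {
                try {
                    tableId = Integer.valueOf(parts[1]);
                }
                catch (NumberFormatException nfe) {
                    // The artifact above logs an error and returns null instead of
                    // risking values from the wrong discharge table.
                    System.err.println("Broken discharge table id: " + parts[1]);
                    return;
                }
            }
            System.out.println("gauge=" + gaugeName + " table=" + tableId + " facet=" + facetName);
        }

        public static void main(String[] args) {
            describe("Example Gauge");                    // plain gauge name only
            describe("Example Gauge;42;W/Q (official)");  // select table 42, set facet wish name
        }
    }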
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/MainValuesArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -81,7 +81,10 @@ String outputName ) { return outputName.equals("computed_discharge_curve") - || outputName.equals("duration_curve"); + || outputName.equals("duration_curve") + || outputName.equals("discharge_curve") + || outputName.equals("fix_wq_curve") + || outputName.equals("historical_discharge_wq"); } }); } @@ -316,6 +319,21 @@ return spawnState(); } + /** + * Access the Gauge that the mainvalues are taken from. + * @return Gauge that main values are taken from or null in case of + * invalid parameterization. + */ + protected Gauge getGauge(double km) { + River river = RiverUtils.getRiver(this); + + if (river == null) { + logger.error("River is null"); + return null; + } + + return river.determineGaugeByPosition(km); + } /** * Access the Gauge that the mainvalues are taken from. @@ -358,6 +376,41 @@ * Get a list of "Q" main values. * @return list of Q main values. */ + public List<NamedDouble> getMainValuesQ(double[] kms) { + List<NamedDouble> filteredList = new ArrayList<NamedDouble>(); + boolean atGauge = false; + Gauge gauge = getGauge(kms[0]); + WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this)); + Calculation c = new Calculation(); + double w_out[] = {0.0f}; + double q_out[] = {0.0f}; + if (gauge != null) { + double gaugeStation = gauge.getStation().doubleValue(); + atGauge = Math.abs(kms[0] - gaugeStation) < 1e-4; + List<MainValue> orig = gauge.getMainValues(); + for (MainValue mv : orig) { + if (mv.getMainValue().getType().getName().equals("Q")) { + if (atGauge) { + q_out[0] = mv.getValue().doubleValue(); + } + else { + interpolator.interpolate(mv.getValue().doubleValue(), + gaugeStation, kms, w_out, q_out, c); + } + filteredList.add(new NamedDouble( + mv.getMainValue().getName(), + q_out[0] + )); + } + } + } + return filteredList; + } + + /** + * Get a list of "Q" main values. + * @return list of Q main values. + */ public List<NamedDouble> getMainValuesQ(boolean atGauge) { List<NamedDouble> filteredList = new ArrayList<NamedDouble>(); Gauge gauge = getGauge(); @@ -366,8 +419,8 @@ double w_out[] = {0.0f}; double q_out[] = {0.0f}; double kms[] = {getLocation()}; - double gaugeStation = gauge.getStation().doubleValue(); if (gauge != null) { + double gaugeStation = gauge.getStation().doubleValue(); List<MainValue> orig = gauge.getMainValues(); for (MainValue mv : orig) { if (mv.getMainValue().getType().getName().equals("Q")) { @@ -389,21 +442,26 @@ } - /** - * Get a list of "W" main values. - * @param atGauge if true, do not interpolate - * @return list of W main values. - */ - public List<NamedDouble> getMainValuesW(boolean atGauge) { + /** Get main values of km. 
*/ + public List<NamedDouble> getMainValuesW(double[] kms) { List<NamedDouble> filteredList = new ArrayList<NamedDouble>(); - Gauge gauge = getGauge(); + boolean atGauge = false; + double gaugeDatum = 0d; + Gauge gauge = getGauge(kms[0]); + if (gauge == null) { + return filteredList; + } + else if (Math.abs(kms[0] - gauge.getStation().doubleValue()) < 1e-4) { + atGauge = true; + gaugeDatum = gauge.getDatum().doubleValue(); + } + WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this)); Calculation c = new Calculation(); double gaugeStation = gauge.getStation().doubleValue(); double w_out[] = {0.0f}; double q_out[] = {0.0f}; - double kms[] = {getLocation()}; if (gauge != null) { List<MainValue> orig = gauge.getMainValues(); for (MainValue mv : orig) { @@ -411,6 +469,41 @@ if (mv.getMainValue().getType().getName().equals("W")) { filteredList.add(new NamedDouble(mv.getMainValue().getName(), mv.getValue().doubleValue())); + } + } else + // We cannot interpolate the W values, so derive them + // from given Q values. + if (mv.getMainValue().getType().getName().equals("Q")) { + interpolator.interpolate(mv.getValue().doubleValue(), + gaugeStation, kms, w_out, q_out, c); + + filteredList.add(new NamedDouble( + "W(" + mv.getMainValue().getName() +")", + w_out[0] + )); + } + } + } + return filteredList; + } + + + public List<NamedDouble> getMainValuesW(boolean atGauge, double[] kms) { + List<NamedDouble> filteredList = new ArrayList<NamedDouble>(); + Gauge gauge = getGauge(); + WstValueTable interpolator = WstValueTableFactory.getTable(RiverUtils.getRiver(this)); + Calculation c = new Calculation(); + + double w_out[] = {0.0f}; + double q_out[] = {0.0f}; + if (gauge != null) { + double gaugeStation = gauge.getStation().doubleValue(); + List<MainValue> orig = gauge.getMainValues(); + for (MainValue mv : orig) { + if (atGauge) { + if (mv.getMainValue().getType().getName().equals("W")) { + filteredList.add(new NamedDouble(mv.getMainValue().getName(), + mv.getValue().doubleValue())); } } else @@ -431,6 +524,16 @@ /** + * Get a list of "W" main values. + * @param atGauge if true, do not interpolate + * @return list of W main values. + */ + public List<NamedDouble> getMainValuesW(boolean atGauge) { + return getMainValuesW(atGauge, new double[] {getLocation()}); + } + + + /** * Returns the name of this artifact ('mainvalue'). * * @return 'mainvalue'
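The new getMainValuesW(double[] kms) above distinguishes two cases: on the gauge itself the stored W main values are used unchanged (cm above the gauge datum), while away from the gauge a W is derived from each Q main value through the Wst interpolation and labelled "W(<name>)". A minimal sketch of that decision, with a made-up linear W(Q) relation standing in for WstValueTable.interpolate() and invented main values:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the "at gauge vs. derived from Q" decision in getMainValuesW(double[]).
    // The linear function below is a stand-in for WstValueTable.interpolate();
    // real values come from the database.
    public class MainValuesWSketch {

        static double interpolateW(double q) {      // fake W(Q) relation, purely illustrative
            return 110.0 + 0.01 * q;
        }

        static List<String> mainValuesW(boolean atGauge,
                                        String[] names, double[] qs, double[] ws) {
            List<String> result = new ArrayList<String>();
            for (int i = 0; i < names.length; i++) {
                if (atGauge) {
                    // On the gauge: use the stored W main value directly.
                    result.add(names[i] + " = " + ws[i]);
                }
                else {
                    // Off the gauge: derive a W from the corresponding Q main value.
                    result.add("W(" + names[i] + ") = " + interpolateW(qs[i]));
                }
            }
            return result;
        }

        public static void main(String[] args) {
            String[] names = { "MNQ", "MQ", "MHQ" };
            double[] qs = { 100, 250, 900 };
            double[] ws = { 120, 180, 310 };
            System.out.println(mainValuesW(true,  names, qs, ws));  // W values as stored
            System.out.println(mainValuesW(false, names, qs, ws));  // W(name) derived from Q
        }
    }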
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/RiverAxisArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -91,6 +91,10 @@ super(artifact); } + private boolean isUnofficial() { + return getIdPart(2) != null && !getIdPart(2).equals("1"); + } + @Override protected String getFacetType() { return FLOODMAP_RIVERAXIS; @@ -98,12 +102,19 @@ @Override protected String getLayer() { + if (isUnofficial()) { + return super.getLayer(); + } return RiverFactory.getRiver(getRiverId()).getName(); } @Override protected String getUrl() { - return RiverUtils.getRiverWMSUrl(); + if (isUnofficial()) { + return RiverUtils.getUserWMSUrl(artifact.identifier()); + } else { + return RiverUtils.getRiverWMSUrl(); + } } @Override
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWKmsArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWKmsArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -310,7 +310,7 @@ return -1; } - // Do linear interpolation + // Do linear interpolation. int mod = kmIncreasing ? -1 : +1; return Linear.linear(km, wkms.getKm(idx+mod), wkms.getKm(idx), wkms.getW(idx+mod), wkms.getW(idx)); }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWQKmsArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/StaticWQKmsArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -59,6 +59,8 @@ private static final String NAME = "staticwqkms"; + private boolean official = false; + static { // TODO: Move to configuration. FacetActivity.Registry.getInstance().register( @@ -85,6 +87,10 @@ /** * Gets called from factory, to set things up. + * + * If the id's string starts with official- it will be treated as + * an Artifact containing official data for the according special + * case handling. */ @Override public void setup( @@ -98,7 +104,7 @@ // Store the 'ids' (from datacage). if (logger.isDebugEnabled()) { - logger.debug("StaticWQKmsArtiact.setup" + XMLUtils.toString(data)); + logger.debug("StaticWQKmsArtifact.setup" + XMLUtils.toString(data)); } String code = getDatacageIDValue(data); @@ -106,6 +112,10 @@ if (code != null) { String [] parts = code.split("-"); + if (parts.length >= 1) { + official = parts[0].toLowerCase().equals("official"); + } + if (parts.length >= 4) { int col = Integer.parseInt(parts[2]); int wst = Integer.parseInt(parts[3]); @@ -222,5 +232,10 @@ return Lines.createWaterLines(points, wAtKm); } // TODO implement deepCopy. + + public boolean isOfficial() + { + return official; + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -197,15 +197,26 @@ String calculationMode = getDataAsString("calculation_mode"); - if (calculationMode.equals("calc.discharge.longitudinal.section") - ) { - return getDischargeLongitudinalSectionData(); - } - else if (calculationMode.equals("calc.extreme.curve")) { - return (CalculationResult) - this.compute(context, ComputeType.ADVANCE, false); + // If this WINFO-Artifact has a calculation trait. + if (calculationMode != null) { + if (calculationMode.equals("calc.discharge.longitudinal.section") + ) { + return getDischargeLongitudinalSectionData(); + } + else if (calculationMode.equals("calc.extreme.curve")) { + return (CalculationResult) + this.compute(context, ComputeType.ADVANCE, false); + } + else if (calculationMode.equals("calc.w.differences")) { + return (CalculationResult) + this.compute(context, ComputeType.ADVANCE, true); + } + else { + logger.warn("Unhandled calculation_mode " + calculationMode); + } } + // Otherwise get it from parameterization. River river = RiverUtils.getRiver(this); if (river == null) { return error(new WQKms[0], "no.river.selected");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/WQKmsInterpolArtifact.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/WQKmsInterpolArtifact.java Thu Sep 12 10:13:09 2013 +0200 @@ -132,7 +132,15 @@ name = STATIC_WKMS_INTERPOL; } else { - name = STATIC_WQ; + // If all Qs are zero, add different facet to + // signalize that we want data to be drawn as marks + // on axis. + if (wstValueHasZeroQ()) { + name = STATIC_W_INTERPOL; + } + else { + name = STATIC_WQ; + } } Facet wQFacet = new WQFacet(name, @@ -204,23 +212,51 @@ } + /** True if Wst has only 'fake' (zero) Q-ranges. */ + private boolean wstValueHasZeroQ() { + WstValueTable table = getValueTable(); + return table.hasEmptyQ(); + } + + + /** Get the WstValueTable that matches parameterization. */ + private WstValueTable getValueTable() { + // Get WstValueTable + int wstId = getDataAsInt("wst_id"); + if (getDataAsString("col_pos") != null) { + return WstValueTableFactory.getWstColumnTable( + wstId, getDataAsInt("col_pos")); + } + else { + return WstValueTableFactory.getTable(wstId); + } + } + + + /** + * Get WQ Values at a certain km, interpolating only if distance + * between two stations is smaller than given distance. + */ + public double [][] getWQAtKm( + Double currentKm, + double maxKmInterpolDistance + ) { + // TODO yet to be implemented (issue1378). + return null; + } + + /** * Get WQ at a given km. + * * @param currentKm the requested km. If NULL, ld_location data * will be used. + * @return [[q1,q2,q2],[w1,w2,w3]] ... */ public double [][] getWQAtKm(Double currentKm) { - WstValueTable interpolator = null; - // Get WstValueTable - if (getDataAsString("col_pos") != null) { - interpolator = WstValueTableFactory.getWstColumnTable( - getDataAsInt("wst_id"), getDataAsInt("col_pos")); - } - else { - interpolator = WstValueTableFactory.getTable( - getDataAsInt("wst_id")); - } + // TODO issue1378: only interpolate if dist <= 100m + WstValueTable interpolator = getValueTable(); Double tmp = (currentKm != null) ? currentKm
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/FixAnalysisAccess.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/FixAnalysisAccess.java Thu Sep 12 10:13:09 2013 +0200 @@ -33,6 +33,7 @@ super(artifact); } + /** Access the reference date period, return null in case of 'errors'. */ public DateRange getReferencePeriod() { if (referencePeriod == null) { StateData refStart = artifact.getData("ref_start");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/RangeAccess.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/RangeAccess.java Thu Sep 12 10:13:09 2013 +0200 @@ -111,6 +111,14 @@ return locations.toNativeArray(); } + public boolean hasFrom() { + return from != null || (from = getDouble("ld_from")) != null; + } + + public boolean hasTo() { + return to != null || (to = getDouble("ld_to")) != null; + } + /** Return ld_from data (in km). */ public double getFrom() {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SQRelationAccess.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SQRelationAccess.java Thu Sep 12 10:13:09 2013 +0200 @@ -9,11 +9,13 @@ package org.dive4elements.river.artifacts.access; import java.util.Date; +import java.util.List; import org.apache.log4j.Logger; import org.dive4elements.river.artifacts.D4EArtifact; import org.dive4elements.river.artifacts.model.DateRange; +import org.dive4elements.river.model.MeasurementStation; public class SQRelationAccess extends RiverAccess @@ -28,6 +30,8 @@ private String method; + protected MeasurementStation measurementStation; + public SQRelationAccess() { } @@ -80,5 +84,30 @@ } return method; } + + public String getMeasurementStationName() { + MeasurementStation station = getMeasurementStation(); + return station == null ? null : station.getName(); + } + + public String getMeasurementStationGaugeName() { + MeasurementStation station = getMeasurementStation(); + return station == null ? null : station.getGaugeName(); + } + + public MeasurementStation getMeasurementStation() { + if (measurementStation != null) { + return measurementStation; + } + List<MeasurementStation> candidates = MeasurementStation.getStationsAtKM( + getRiver(), getLocation()); + if (candidates != null) { + // Just take the first one as we only use the name + // and that "should" be unique at the location + measurementStation = candidates.get(0); + } + + return measurementStation; + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/access/SedimentLoadAccess.java Thu Sep 12 10:13:09 2013 +0200 @@ -91,6 +91,7 @@ return null; } + /** Returns the selected unit (t/a or m3/a). */ public String getUnit () { if (unit == null) { unit = getString("unit");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/context/RiverContextFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/context/RiverContextFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -93,6 +93,8 @@ private static final String XPATH_DGM_PATH = "/artifact-database/options/dgm-path/text()"; + private static GlobalContext GLOBAL_CONTEXT_INSTANCE; + /** * Creates a new D4EArtifactContext object and initialize all @@ -115,9 +117,17 @@ configureZoomScales(config, context); configureDGMPath(config, context); + synchronized (RiverContextFactory.class) { + GLOBAL_CONTEXT_INSTANCE = context; + } + return context; } + public static synchronized GlobalContext getGlobalContext() { + return GLOBAL_CONTEXT_INSTANCE; + } + private void configureDGMPath(Document config, RiverContext context) { String dgmPath = (String) XMLUtils.xpath(
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Datacage.java Thu Sep 12 10:13:09 2013 +0200 @@ -70,6 +70,7 @@ private String SQL_ARTIFACT_ID_NEXTVAL = "artifact.id.nextval"; private String SQL_INSERT_ARTIFACT = "insert.artifact"; private String SQL_ARTIFACT_DATA_ID_NEXTVAL = "artifact.data.id.nextval"; + private String SQL_UPDATE_ARTIFACT_STATE = "update.artifact.state"; private String SQL_INSERT_ARTIFACT_DATA = "insert.artifact.data"; private String SQL_OUT_ID_NEXTVALUE = "out.id.nextval"; private String SQL_INSERT_OUT = "insert.out"; @@ -382,6 +383,7 @@ SQL_INSERT_ARTIFACT = sql.get(SQL_INSERT_ARTIFACT); SQL_ARTIFACT_DATA_ID_NEXTVAL = sql.get(SQL_ARTIFACT_DATA_ID_NEXTVAL); SQL_INSERT_ARTIFACT_DATA = sql.get(SQL_INSERT_ARTIFACT_DATA); + SQL_UPDATE_ARTIFACT_STATE = sql.get(SQL_UPDATE_ARTIFACT_STATE); SQL_OUT_ID_NEXTVALUE = sql.get(SQL_OUT_ID_NEXTVALUE); SQL_INSERT_OUT = sql.get(SQL_INSERT_OUT); SQL_FACET_ID_NEXTVAL = sql.get(SQL_FACET_ID_NEXTVAL); @@ -401,6 +403,7 @@ sql.get(SQL_DELETE_ARTIFACT_BY_GID); } + /** Sum over facets in outs. */ protected static final int numFacets(List<Output> outs) { int sum = 0; for (Output out: outs) { @@ -608,6 +611,7 @@ // write new data storeData(res[0], flys); storeOuts(res[0], flys, context); + storeState(res[0], flys); } public void createdUser( @@ -884,9 +888,30 @@ } } + /** Update state of artifact. */ + protected void storeState( + final int artifactId, + final D4EArtifact artifact) { + SQLExecutor.Instance exec = sqlExecutor.new Instance() { + @Override + public boolean doIt() throws SQLException { + prepareStatement(SQL_UPDATE_ARTIFACT_STATE); + stmnt.setString(1, artifact.getCurrentStateId()); + stmnt.setInt(2, artifactId); + stmnt.execute(); + conn.commit(); + return true; + } + }; + + if (!exec.runWrite()) { + log.error("storing state of artifact failed ("+artifactId+","+artifact.getCurrentStateId()+")"); + } + } + protected void storeData( - final int artifactId, - D4EArtifact artifact + final int artifactId, + D4EArtifact artifact ) { final Collection<StateData> data = artifact.getAllData();
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/DatacageBackendListener.java Thu Sep 12 10:13:09 2013 +0200 @@ -61,6 +61,7 @@ } } + /** Stores the artifact in artifact-db, if any. */ @Override public void storedArtifact(Artifact artifact, Backend backend) { log.debug("storedArtifact");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Recommendations.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/Recommendations.java Thu Sep 12 10:13:09 2013 +0200 @@ -142,6 +142,7 @@ ) { parameters.put("CURRENT-STATE-ID", artifact.getCurrentStateId()); parameters.put("ARTIFACT-ID", artifact.identifier()); + parameters.put("ARTIFACT-NAME", artifact.getName()); for (StateData sd: artifact.getAllData()) { Object value = sd.getValue(); @@ -173,7 +174,7 @@ /** * Append recommendations to \param result. - * @param extraParameters parameters (typicall example: 'recommended') + * @param extraParameters parameters (typical example: 'recommended') */ public void recommend( D4EArtifact artifact,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/templating/FunctionResolver.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/templating/FunctionResolver.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,10 +10,12 @@ import java.text.SimpleDateFormat; import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import java.util.HashMap; +import java.util.Set; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -25,6 +27,10 @@ import javax.xml.xpath.XPathFunctionResolver; import org.apache.log4j.Logger; +import org.dive4elements.artifactdatabase.transition.TransitionEngine; +import org.dive4elements.artifacts.GlobalContext; +import org.dive4elements.river.artifacts.context.RiverContext; +import org.dive4elements.river.artifacts.context.RiverContextFactory; /** Resolves functions (e.g. dc:contains) in Datacage/Meta-Data system. */ @@ -151,6 +157,19 @@ return StackFrames.NULL; } }); + + addFunction("all-state-successors", 2, new XPathFunction() { + @Override + public Object evaluate(List args) throws XPathFunctionException { + Object artifactName = args.get(0); + Object stateId = args.get(1); + + return artifactName instanceof String + && stateId instanceof String + ? allStateSuccessors((String)artifactName, (String)stateId) + : Collections.<String>emptySet(); + } + }); } /** @@ -378,5 +397,18 @@ } return ""; } + + public Set<String> allStateSuccessors(String artifactName, String stateId) { + GlobalContext gc = RiverContextFactory.getGlobalContext(); + if (gc == null) { + return Collections.<String>emptySet(); + } + Object o = gc.get(RiverContext.TRANSITION_ENGINE_KEY); + if (o instanceof TransitionEngine) { + TransitionEngine te = (TransitionEngine)o; + return te.allRecursiveSuccessorStateIds(artifactName, stateId); + } + return Collections.<String>emptySet(); + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/math/MovingAverage.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/math/MovingAverage.java Thu Sep 12 10:13:09 2013 +0200 @@ -37,7 +37,11 @@ return new double [][] { xs, ys }; } - public static double[][] weighted(double[][] values, double radius) { + /** Build moving average over values. Weight them. */ + public static double[][] weighted( + double[][] values, + double radius + ) { TreeMap<Double, Double> map = toMap(values); int N = map.size(); double [] xs = new double[N]; @@ -62,6 +66,7 @@ return new double [][] { xs, ys }; } + /** From [x1,x2][y1,y2] makes {x1:y1,x2:y2}. Sorted by x! */ private static TreeMap<Double, Double> toMap(double[][] values) { TreeMap<Double, Double> map = new TreeMap<Double, Double>(); double [] xs = values[0];
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/math/StdDevOutlier.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,9 +10,13 @@ import java.util.List; -import org.apache.commons.math.stat.descriptive.moment.StandardDeviation; +import org.apache.log4j.Logger; -import org.apache.log4j.Logger; +/* XXX: + * Warning: This class is called StdDevOutlier because it calculates the + * Standard Deviation method for outlier removal as the BFG calls it. + * But the actual calculation used to remove the outliers calculates + * the Standard Error and not the Standard Deviation! */ public class StdDevOutlier { @@ -30,12 +34,12 @@ public static Integer findOutlier( List<Double> values, double factor, - double [] stdDevResult + double [] stdErrResult ) { boolean debug = log.isDebugEnabled(); if (debug) { - log.debug("factor for std dev: " + factor); + log.debug("factor for std dev test (that calculates std err): " + factor); } int N = values.size(); @@ -48,31 +52,34 @@ return null; } - StandardDeviation stdDev = new StandardDeviation(); - double maxValue = -Double.MAX_VALUE; int maxIndex = -1; + + double squareSumResiduals = 0; + for (Double db: values) { + squareSumResiduals += Math.pow(db, 2); + } + + double stdErr = Math.sqrt(squareSumResiduals / (N - 2)); + + double accepted = factor * stdErr; + for (int i = N-1; i >= 0; --i) { double value = Math.abs(values.get(i)); - stdDev.increment(value); if (value > maxValue) { maxValue = value; maxIndex = i; } } - double sd = stdDev.getResult(); - - double accepted = factor * sd; - if (debug) { - log.debug("std dev: " + stdDev); + log.debug("std err: " + stdErr); log.debug("accepted: " + accepted); log.debug("max value: " + maxValue); } - if (stdDevResult != null) { - stdDevResult[0] = sd; + if (stdErrResult != null) { + stdErrResult[0] = stdErr; } return maxValue > accepted ? maxIndex : null;
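The replaced outlier test above computes the standard error of the residuals, stdErr = sqrt(sum(residual^2) / (N - 2)), accepts everything within factor * stdErr, and flags only the largest absolute residual if it exceeds that bound. A small, self-contained numeric example of the rule; all input values are invented:

    import java.util.Arrays;
    import java.util.List;

    // Worked example of the rule used above: reject the largest |residual|
    // if it exceeds factor * sqrt(sum(residual^2) / (N - 2)).
    public class StdErrOutlierExample {

        public static void main(String[] args) {
            List<Double> residuals = Arrays.asList(0.1, -0.2, 0.15, 2.5, -0.05);
            double factor = 2.0;

            double squareSum = 0;
            for (double r: residuals) {
                squareSum += r * r;                       // 6.325 in total
            }
            int n = residuals.size();
            double stdErr = Math.sqrt(squareSum / (n - 2));   // ~1.45
            double accepted = factor * stdErr;                // ~2.90

            double maxValue = -Double.MAX_VALUE;
            int maxIndex = -1;
            for (int i = 0; i < n; i++) {
                double v = Math.abs(residuals.get(i));
                if (v > maxValue) {
                    maxValue = v;
                    maxIndex = i;
                }
            }
            // 2.5 < 2.90, so nothing is removed; with factor = 1.5 index 3 would be flagged.
            System.out.println(maxValue > accepted ? "outlier at " + maxIndex : "no outlier");
        }
    }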
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Calculation6.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Calculation6.java Thu Sep 12 10:13:09 2013 +0200 @@ -37,6 +37,7 @@ private long [] timerange; private double [] values; private Long officialGaugeNumber; + private String riverName; public Calculation6(HistoricalDischargeAccess access) { @@ -45,6 +46,7 @@ double [] vs = mode != null && mode == EvaluationMode.W ? access.getWs() : access.getQs(); + riverName = access.getRiver(); Long officialGaugeNumber = access.getOfficialGaugeNumber(); @@ -98,7 +100,8 @@ return null; } - Gauge gauge = Gauge.getGaugeByOfficialNumber(officialGaugeNumber); + Gauge gauge = Gauge.getGaugeByOfficialNumber(officialGaugeNumber, + riverName); if (gauge == null) { // TODO: i18n return error("hist.discharge.gauge.not.found");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/CrossSectionFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/CrossSectionFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -59,30 +59,6 @@ } - /** - * True if the given section is the "newest" for that river. - * @param section Given section - * @return true if the section has the most advanced end of its validity interval - * or the most advanced start of its validity interval. - */ - public static boolean isNewest(CrossSection section) { - Session session = SessionHolder.HOLDER.get(); - Query query = session.createQuery( - "from CrossSection where river.id = :riverid " - + " order by timeInterval.stopTime desc, timeInterval.startTime desc"); - query.setParameter("riverid", section.getRiver().getId()); - - List result = query.list(); - - if (result == null || result.isEmpty()) { - return true; - } - else { - CrossSection cs = (CrossSection) result.get(0); - return section.getId().equals(cs.getId()); - } - } - /** * Get a specific CrossSection from db.
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FacetTypes.java Thu Sep 12 10:13:09 2013 +0200 @@ -240,6 +240,7 @@ String STATIC_WQKMS_W = "other.wqkms.w"; String STATIC_WQKMS_Q = "other.wqkms.q"; String STATIC_WKMS_INTERPOL = "other.wkms.interpol"; + String STATIC_W_INTERPOL = "other.w.interpol"; String HEIGHTMARKS_POINTS = "heightmarks_points"; @@ -313,7 +314,7 @@ String SEDIMENT_LOAD_SUSP_SEDIMENT = "sedimentload.susp_sediment"; String SEDIMENT_LOAD_TOTAL = "sedimentload.total"; String SEDIMENT_LOAD_TOTAL_LOAD = "sedimentload.total_load"; - String SEDIMENT_LOAD_UNKOWN = "sedimentload.unknown"; + String SEDIMENT_LOAD_UNKNOWN = "sedimentload.unknown"; String SQ_OVERVIEW = "sq_overview";
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FixingsOverview.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/FixingsOverview.java Thu Sep 12 10:13:09 2013 +0200 @@ -28,8 +28,10 @@ import org.w3c.dom.Document; import org.w3c.dom.Element; +import org.dive4elements.river.utils.BatchLoader; -/** Generate Fixings Table chart. */ + +/** Generate Fixings Table overview data structure to be stored in cache. */ public class FixingsOverview implements Serializable { @@ -56,39 +58,156 @@ "WHERE" + " river_id = :river_id AND kind = 2"; - /** All columns from given wst. */ - public static final String SQL_FIXING_COLUMNS = - "SELECT" + - " wc.id AS wst_column_id," + - " ti.start_time AS start_time," + - " wc.name AS name " + - "FROM wst_columns wc" + - " JOIN time_intervals ti ON wc.time_interval_id = ti.id " + - "WHERE" + - " wc.wst_id = :wst_id " + - "ORDER BY position"; + public static final String SQL_FIXING_COLUMNS_BATCH = + "SELECT " + + "wc.wst_id AS wst_id," + + "wc.id AS wst_column_id," + + "ti.start_time AS start_time," + + "wc.name AS name " + + "FROM wst_columns wc " + + "JOIN time_intervals ti ON wc.time_interval_id = ti.id " + + "WHERE " + + "wc.wst_id IN ($IDS) " + + "ORDER BY wc.wst_id, position"; - public static final String SQL_FIXING_COLUMN_Q_RANGES = - "SELECT" + - " wqr.q AS q," + - " r.a AS start_km," + - " r.b AS stop_km " + - "FROM wst_column_q_ranges wcqr" + - " JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id" + - " JOIN ranges r ON wqr.range_id = r.id " + - "WHERE" + - " wcqr.wst_column_id = :column_id " + - "ORDER BY r.a"; + public static final String SQL_FIXING_COLUMN_Q_RANGES_BATCH = + "SELECT " + + "wcqr.wst_column_id AS wst_column_id," + + "wqr.q AS q," + + "r.a AS start_km," + + "r.b AS stop_km " + + "FROM wst_column_q_ranges wcqr " + + "JOIN wst_q_ranges wqr ON wcqr.wst_q_range_id = wqr.id " + + "JOIN ranges r ON wqr.range_id = r.id " + + "WHERE " + + "wcqr.wst_column_id IN ($IDS) " + + "ORDER BY wcqr.wst_column_id, r.a"; - public static final String SQL_FIXING_COLUMN_KM_RANGE = - "SELECT" + - " MIN(position) AS start_km," + - " MAX(position) AS stop_km " + - "FROM" + - " wst_column_values " + - "WHERE" + - " wst_column_id = :column_id"; + public static final String SQL_FIXING_COLUMN_KM_RANGE_BATCH = + "SELECT " + + "wst_column_id," + + "MIN(position) AS start_km," + + "MAX(position) AS stop_km " + + "FROM " + + "wst_column_values " + + "WHERE " + + "wst_column_id IN ($IDS) " + + "GROUP BY wst_column_id"; + public static final class KMRangeLoader extends BatchLoader<double []> { + + public KMRangeLoader(List<Integer> columns, Session session) { + super(columns, session, SQL_FIXING_COLUMN_KM_RANGE_BATCH); + } + + @Override + protected void fill(SQLQuery query) { + query + .addScalar("wst_column_id", StandardBasicTypes.INTEGER) + .addScalar("start_km", StandardBasicTypes.DOUBLE) + .addScalar("stop_km", StandardBasicTypes.DOUBLE); + + List<Object []> ranges = query.list(); + for (Object [] r: ranges) { + Integer cid = (Integer)r[0]; + double [] vs = new double [] { (Double)r[1], (Double)r[2] }; + cache(cid, vs); + } + } + } // class KMRangeLoader + + public static final class ColumnQRangeLoader + extends BatchLoader<List<double []>> + { + public ColumnQRangeLoader(List<Integer> columns, Session session) { + super(columns, session, SQL_FIXING_COLUMN_Q_RANGES_BATCH); + } + + @Override + protected void fill(SQLQuery query) { + query + .addScalar("wst_column_id", StandardBasicTypes.INTEGER) 
+ .addScalar("q", StandardBasicTypes.DOUBLE) + .addScalar("start_km", StandardBasicTypes.DOUBLE) + .addScalar("stop_km", StandardBasicTypes.DOUBLE); + + int lastId = Integer.MIN_VALUE; + List<double []> column = new ArrayList<double []>(); + + List<Object []> ranges = query.list(); + for (Object [] r: ranges) { + int cid = (Integer)r[0]; + + if (cid != lastId && !column.isEmpty()) { + cache(lastId, column); + column = new ArrayList<double []>(); + } + column.add(new double [] { + (Double)r[1], + (Double)r[2], + (Double)r[3] + }); + + lastId = cid; + } + + if (!column.isEmpty()) { + cache(lastId, column); + } + } + } // class ColumnQRangeLoader + + /** Helper class to store data from batching fixing columns. */ + private static final class FixColumn { + int columnId; + Date startTime; + String name; + + FixColumn(int columnId, Date startTime, String name) { + this.columnId = columnId; + this.startTime = startTime; + this.name = name; + } + } // class FixColumn + + public static final class FixColumnLoader + extends BatchLoader<List<FixColumn>> + { + public FixColumnLoader(List<Integer> columns, Session session) { + super(columns, session, SQL_FIXING_COLUMNS_BATCH); + } + + @Override + protected void fill(SQLQuery query) { + query + .addScalar("wst_id", StandardBasicTypes.INTEGER) + .addScalar("wst_column_id", StandardBasicTypes.INTEGER) + .addScalar("start_time", StandardBasicTypes.TIMESTAMP) + .addScalar("name", StandardBasicTypes.STRING); + + int lastId = Integer.MIN_VALUE; + List<FixColumn> cols = new ArrayList<FixColumn>(); + + List<Object []> columns = query.list(); + for (Object [] c: columns) { + int wid = (Integer)c[0]; + + if (wid != lastId && !cols.isEmpty()) { + cache(lastId, cols); + cols = new ArrayList<FixColumn>(); + } + cols.add(new FixColumn( + (Integer)c[1], + (Date) c[2], + (String) c[3])); + + lastId = wid; + } + if (!cols.isEmpty()) { + cache(lastId, cols); + } + } + } // class FixColumnLoader public static class QRange extends Range { @@ -255,34 +374,35 @@ } // for all Q ranges } - public void loadKmRange(SQLQuery query) { - query.setInteger("column_id", columnId); - - List<Object []> kms = query.list(); + public void loadKmRange(KMRangeLoader loader) { - if (kms.isEmpty()) { + double [] range = loader.get(columnId); + + if (range == null) { log.warn("No km range for column " + columnId + "."); + return; } - else { - Object [] obj = kms.get(0); - start = (Double)obj[0]; - end = (Double)obj[1]; - } + start = range[0]; + end = range[1]; } public void loadQRanges( - SQLQuery query, - GaugeFinder gaugeFinder + ColumnQRangeLoader loader, + GaugeFinder gaugeFinder ) { - query.setInteger("column_id", columnId); - List<Object []> list = query.list(); + List<double []> qrs = loader.get(columnId); + if (qrs == null) { + log.warn("No q ranges found for column " + columnId); + return; + } - List<QRange> qRanges = new ArrayList<QRange>(list.size()); + List<QRange> qRanges = new ArrayList<QRange>(qrs.size()); - for (Object [] row: list) { - double q = (Double)row[0]; - double start = (Double)row[1]; - double end = (Double)row[2]; + for (double [] qr: qrs) { + double q = qr[0]; + double start = qr[1]; + double end = qr[2]; + QRange qRange = new QRange(start, end, q); if (qRange.clip(this)) { qRanges.add(qRange); @@ -314,20 +434,26 @@ columns = new ArrayList<Column>(); } - public void loadColumns(SQLQuery query) { - query.setInteger("wst_id", wstId); - List<Object []> list = query.list(); - for (Object [] row: list) { - int columnId = (Integer)row[0]; - Date startTime = (Date) row[1]; 
- String name = (String) row[2]; - columns.add(new Column(columnId, startTime, name)); + public void allColumnIds(List<Integer> cIds) { + for (Column column: columns) { + cIds.add(column.columnId); } } - public void loadColumnsKmRange(SQLQuery query) { + public void loadColumns(FixColumnLoader loader) { + List<FixColumn> fcs = loader.get(wstId); + if (fcs == null) { + log.warn("No columns for wst " + wstId); + return; + } + for (FixColumn fc: fcs) { + columns.add(new Column(fc.columnId, fc.startTime, fc.name)); + } + } + + public void loadColumnsKmRange(KMRangeLoader loader) { for (Column column: columns) { - column.loadKmRange(query); + column.loadKmRange(loader); } } @@ -338,11 +464,11 @@ } public void loadColumnsQRanges( - SQLQuery query, - GaugeFinder gaugeFinder + ColumnQRangeLoader loader, + GaugeFinder gaugeFinder ) { for (Column column: columns) { - column.loadQRanges(query, gaugeFinder); + column.loadQRanges(loader, gaugeFinder); } } @@ -427,23 +553,40 @@ } protected void loadFixingsColumns(Session session) { - SQLQuery query = session.createSQLQuery(SQL_FIXING_COLUMNS) - .addScalar("wst_column_id", StandardBasicTypes.INTEGER) - .addScalar("start_time", StandardBasicTypes.DATE) - .addScalar("name", StandardBasicTypes.STRING); + + FixColumnLoader loader = new FixColumnLoader( + allFixingIds(), + session); for (Fixing fixing: fixings) { - fixing.loadColumns(query); + fixing.loadColumns(loader); } } + protected List<Integer> allFixingIds() { + List<Integer> ids = new ArrayList<Integer>(fixings.size()); + for (Fixing fixing: fixings) { + ids.add(fixing.getId()); + } + return ids; + } + + protected List<Integer> allColumnIds() { + List<Integer> cIds = new ArrayList<Integer>(); + for (Fixing fixing: fixings) { + fixing.allColumnIds(cIds); + } + return cIds; + } + protected void loadFixingsColumnsKmRange(Session session) { - SQLQuery query = session.createSQLQuery(SQL_FIXING_COLUMN_KM_RANGE) - .addScalar("start_km", StandardBasicTypes.DOUBLE) - .addScalar("stop_km", StandardBasicTypes.DOUBLE); + + KMRangeLoader loader = new KMRangeLoader( + allColumnIds(), + session); for (Fixing fixing: fixings) { - fixing.loadColumnsKmRange(query); + fixing.loadColumnsKmRange(loader); } } @@ -451,13 +594,13 @@ Session session, GaugeFinder gaugeFinder ) { - SQLQuery query = session.createSQLQuery(SQL_FIXING_COLUMN_Q_RANGES) - .addScalar("q", StandardBasicTypes.DOUBLE) - .addScalar("start_km", StandardBasicTypes.DOUBLE) - .addScalar("stop_km", StandardBasicTypes.DOUBLE); + + ColumnQRangeLoader loader = new ColumnQRangeLoader( + allColumnIds(), + session); for (Fixing fixing: fixings) { - fixing.loadColumnsQRanges(query, gaugeFinder); + fixing.loadColumnsQRanges(loader, gaugeFinder); } }
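The loaders above (FixColumnLoader, KMRangeLoader, ColumnQRangeLoader) replace one SQL round trip per wst column with a single query per batch: all ids are collected first (allFixingIds()/allColumnIds()), substituted into the $IDS placeholder, the result set is grouped by wst_id or wst_column_id, and later lookups are served from a per-key cache. The sketch below shows only that pattern; it is a simplified stand-in for org.dive4elements.river.utils.BatchLoader, whose real implementation (Hibernate query handling, possible chunking of long id lists) is not part of this hunk.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Simplified stand-in for the batch-loading pattern above: expand $IDS once,
    // run one query for the whole key set, cache results per key.
    public abstract class SimpleBatchLoader<T> {

        private final Map<Integer, T> cache = new HashMap<Integer, T>();
        private final List<Integer> keys;
        private final String sqlTemplate;
        private boolean loaded;

        protected SimpleBatchLoader(List<Integer> keys, String sqlTemplate) {
            this.keys = keys;
            this.sqlTemplate = sqlTemplate;
        }

        /** Subclasses run the expanded SQL and call cache() once per key. */
        protected abstract void fill(String expandedSql);

        protected void cache(int key, T value) {
            cache.put(key, value);
        }

        public T get(int key) {
            if (!loaded) {
                StringBuilder ids = new StringBuilder();
                for (int i = 0; i < keys.size(); i++) {
                    if (i > 0) ids.append(',');
                    ids.append(keys.get(i));
                }
                fill(sqlTemplate.replace("$IDS", ids.toString()));
                loaded = true;
            }
            return cache.get(key);
        }

        public static void main(String[] args) {
            SimpleBatchLoader<double[]> kmRanges =
                new SimpleBatchLoader<double[]>(
                    Arrays.asList(11, 12),
                    "SELECT wst_column_id, MIN(position), MAX(position) "
                        + "FROM wst_column_values WHERE wst_column_id IN ($IDS) "
                        + "GROUP BY wst_column_id") {
                    @Override
                    protected void fill(String expandedSql) {
                        // A real loader would execute expandedSql; here we fake two rows.
                        System.out.println("would run: " + expandedSql);
                        cache(11, new double[] { 0.0, 12.5 });
                        cache(12, new double[] { 12.5, 34.0 });
                    }
                };
            System.out.println(kmRanges.get(12)[1]); // 34.0, served from the single batch query
        }
    }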
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesQFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -25,6 +25,7 @@ import org.dive4elements.river.jfree.StickyAxisAnnotation; import org.dive4elements.river.exports.DurationCurveGenerator; +import org.dive4elements.river.exports.fixings.FixChartGenerator; /** @@ -89,7 +90,8 @@ * Returns the data this facet requires. * * @param artifact the owner artifact. - * @param context the CallContext (ignored). + * @param context the CallContext (can be used to find out if in + * navigable fixation-setting, or durationcurve). * * @return the data. */ @@ -105,6 +107,15 @@ getDataProvider(DurationCurveFacet.BB_DURATIONCURVE); if (providers.size() < 1) { logger.warn("Could not find durationcurve data provider."); + // Do we have a current km in context? + // If so, we are likely fetching data for a navigable + // diagram (i.e. in fixation branch). + if (context.getContextValue(FixChartGenerator.CURRENT_KM) != null) { + Double ckm = (Double) context.getContextValue(FixChartGenerator.CURRENT_KM); + // Return linearly interpolated values, in m if not at gauge, + // in cm if at gauge. + qs = mvArtifact.getMainValuesQ(new double[] {ckm}); + } } else { wqdays = (WQDay) providers.get(0).provideData( @@ -117,6 +128,10 @@ StickyAxisAnnotation annotation = null; if (this.name.equals(DURATION_MAINVALUES_Q)) { for (NamedDouble q: qs) { + if (Double.isNaN(q.getValue())) { + logger.warn("NaN MainValue " + q.getName()); + continue; + } annotation = new StickyAxisAnnotation( q.getName(), @@ -131,6 +146,10 @@ } else { for (NamedDouble q: qs) { + if (Double.isNaN(q.getValue())) { + logger.warn("NaN MainValue " + q.getName()); + continue; + } annotation = new StickyAxisAnnotation( q.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MainValuesWFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -23,6 +23,7 @@ import org.dive4elements.river.artifacts.math.Linear; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StickyAxisAnnotation; +import org.dive4elements.river.exports.fixings.FixChartGenerator; /** @@ -85,7 +86,8 @@ * Returns the data this facet provides. * * @param artifact the owner artifact. - * @param context the CallContext (ignored). + * @param context the CallContext (can be used to find out if in + * navigable fixation-setting, or durationcurve). * * @return the data. */ @@ -102,6 +104,15 @@ getDataProvider(DurationCurveFacet.BB_DURATIONCURVE); if (providers.size() < 1) { logger.warn("Could not find durationcurve data provider."); + // Do we have a current km in context? + // If so, we are likely fetching data for a navigable + // diagram (i.e. in fixation branch). + if (context.getContextValue(FixChartGenerator.CURRENT_KM) != null) { + Double ckm = (Double) context.getContextValue(FixChartGenerator.CURRENT_KM); + // Return linearly interpolated values, in m if not at gauge, + // in cm over datum if at gauge. + ws = mvArtifact.getMainValuesW(new double[] {ckm}); + } } else { wqdays = (WQDay) providers.get(0).provideData( @@ -111,6 +122,11 @@ } for (NamedDouble w: ws) { + logger.debug("W Annotation at " + w.getValue() + " ("+w.getName()+")"+ wqdays); + if (Double.isNaN(w.getValue())) { + logger.warn("NaN MainValue " + w.getName()); + continue; + } StickyAxisAnnotation annotation = new StickyAxisAnnotation( w.getName(),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/ManagedFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/ManagedFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -16,7 +16,7 @@ import org.dive4elements.artifactdatabase.state.Facet; import org.dive4elements.artifacts.ArtifactNamespaceContext; import org.dive4elements.artifacts.common.utils.XMLUtils.ElementCreator; - +import org.dive4elements.river.utils.CompareUtil; /** * Facet with user-supplied theme-control-information (pos in list, @@ -140,5 +140,24 @@ return 0; } } + + /** + * Returns true if the other is likely the same facet. + * This happens if a facet is defined for two outs. + */ + public boolean isSame(Object other) { + if (!(other instanceof ManagedFacet)) { + return false; + } + ManagedFacet otherFacet = (ManagedFacet) other; + return this.getVisible() == otherFacet.getVisible() && + this.getActive() == otherFacet.getActive() && + CompareUtil.areSame(this.getArtifact(), otherFacet.getArtifact()) && + this.getIndex() == otherFacet.getIndex() && + CompareUtil.areSame(this.getName(), otherFacet.getName()) && + CompareUtil.areSame(this.getBoundToOut(), otherFacet.getBoundToOut()) && + CompareUtil.areSame(this.getDescription(), otherFacet.getDescription()); + // Missing properties are blackboard, data, position. + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/OfficialLineFinder.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/OfficialLineFinder.java Thu Sep 12 10:13:09 2013 +0200 @@ -114,7 +114,9 @@ " wstId: " + wstId + " pos: " + columnPos + " source: " + source + - " date: " + date +"]"; + " date: " + date + + " from: " + start + + " to: " + end + "]"; } } @@ -168,9 +170,8 @@ NamedMainValue tnmv = mainValue.getMainValue(); if (tnmv.getId().equals(mnvId)) { // found gauge with this main value - double from = gauge.getRange().getA().doubleValue(); - double to = gauge.getRange().getA().doubleValue(); + double to = gauge.getRange().getB().doubleValue(); double value = mainValue.getValue().doubleValue(); int wstId = wst.getId(); int pos = wc.getPosition(); @@ -273,6 +274,18 @@ return list; } + private static List<ValueRange> filterByQValues(double[] values, List<ValueRange> ranges) { + List<ValueRange> list = new ArrayList<ValueRange>(ranges.size()); + for (ValueRange r: ranges) { + for (double val: values) { + if (r.sameValue(val) && !list.contains(r)) { + list.add(r); + } + } + } + return list; + } + private static boolean isQ(D4EArtifact artifact) { Boolean b = artifact.getDataAsBoolean("wq_isq"); return b != null && b; @@ -285,25 +298,23 @@ public static final Range Q_OUT_OF_RANGE = new Range(-10000, -9999); - private static Range singleQs(D4EArtifact artifact) { + private static double[] singleQs(D4EArtifact artifact) { String singleData = nn(artifact.getDataAsString("wq_single")); - double min = Double.MAX_VALUE; - double max = -Double.MAX_VALUE; + String[] values = singleData.split(" "); + double[] ret = new double[values.length]; + int i = 0; - for (String value: singleData.split(" ")) { + for (String value: values) { try { - double x = Double.parseDouble(value); - if (x < min) min = x; - if (x > max) max = x; + ret[i] = Double.parseDouble(value); } catch (NumberFormatException nfe) { + ret[i] = -1; // INVALID_Q_VALUE } + i++; } - return min == Double.MAX_VALUE - ? Q_OUT_OF_RANGE - : new Range(min, max); - + return ret; } private static Range qRange(D4EArtifact artifact) { @@ -382,21 +393,20 @@ return Collections.<ValueRange>emptyList(); } - Range qRange = isRange(artifact) - ? qRange(artifact) - : singleQs(artifact); - - if (qRange == Q_OUT_OF_RANGE) { - qRange = tripleQRange(artifact); + if (isRange(artifact)) { + Range qRange = qRange(artifact); + if (qRange == Q_OUT_OF_RANGE) { + qRange = tripleQRange(artifact); + } + ranges = filterByQRange(qRange, ranges); + if (debug) { + log.debug("Q range filter: " + qRange); + } + } else { + ranges = filterByQValues(singleQs(artifact), ranges); } if (debug) { - log.debug("Q range filter: " + qRange); - } - - ranges = filterByQRange(qRange, ranges); - - if (debug) { log.debug("After q range filter: " + ranges); }
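For single Q values the filtering above no longer collapses the input into a min/max range: every entered value is parsed individually (unparseable tokens become -1) and a range is kept as soon as one value matches it via ValueRange.sameValue(). The tolerance used by sameValue() is not visible in this changeset, so the epsilon in the following sketch is an assumption, as are the class name and sample numbers:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the single-Q filtering idea above. The 1e-4 tolerance is an
    // assumption; the real comparison lives in ValueRange.sameValue().
    public class SingleQFilterSketch {

        static double[] parseSingleQs(String singleData) {
            String[] tokens = singleData.trim().split(" ");
            double[] qs = new double[tokens.length];
            for (int i = 0; i < tokens.length; i++) {
                try {
                    qs[i] = Double.parseDouble(tokens[i]);
                }
                catch (NumberFormatException nfe) {
                    qs[i] = -1; // marker for an invalid Q value
                }
            }
            return qs;
        }

        static List<Double> matching(double[] qs, double[] rangeValues) {
            List<Double> hits = new ArrayList<Double>();
            for (double rv: rangeValues) {
                for (double q: qs) {
                    if (Math.abs(rv - q) < 1e-4 && !hits.contains(rv)) {
                        hits.add(rv); // keep ranges whose value matches an entered Q
                    }
                }
            }
            return hits;
        }

        public static void main(String[] args) {
            double[] qs = parseSingleQs("1200 x 2100");             // "x" becomes -1
            System.out.println(matching(qs, new double[] { 2100.0, 3000.0 })); // [2100.0]
        }
    }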
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/QRangeTree.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/QRangeTree.java Thu Sep 12 10:13:09 2013 +0200 @@ -297,6 +297,7 @@ return max; } + /** @param pos the station (km). */ public double findQ(double pos) { return root != null ? root.findQ(pos) : Double.NaN; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java Thu Sep 12 10:13:09 2013 +0200 @@ -143,5 +143,15 @@ double to = getKm(size()-1); return from + " - " + to; } + + /** + * Returns an array of two double values the first and last kilometer. + * + * @return a double array with the first and last km + */ + public double[] getFirstLastKM() { + /* Behold the first km might be larger then the last! */ + return new double[] {getKm(0), getKm(size()-1)}; + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKmsResult.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKmsResult.java Thu Sep 12 10:13:09 2013 +0200 @@ -12,4 +12,4 @@ public interface WQKmsResult { public WQKms[] getWQKms(); } - +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WstValueTable.java Thu Sep 12 10:13:09 2013 +0200 @@ -850,6 +850,12 @@ q, referenceKm, kms, ws, qs, 0, kms.length, errors); } + /** + * Interpolate Q at given positions. + * @param kms positions for which to calculate qs and ws + * @param ws [out] calculated ws for kms + * @param qs [out] looked up qs for kms. + */ public QPosition interpolate( double q, double referenceKm, @@ -1269,6 +1275,27 @@ } + /** True if no QRange is given or Q equals zero. */ + public boolean hasEmptyQ() { + for (Column column: columns) { + if (column.getQRangeTree() == null) { + return true; + } + else { + if (Math.abs(column.getQRangeTree().maxQ()) <= 0.01d) { + return true; + } + } + } + + if (columns.length == 0) { + log.warn("No columns in WstValueTable."); + } + + return false; + } + + /** Find ranges that are between km1 and km2 (inclusive?) */ public List<Range> findSegments(double km1, double km2) { return columns.length != 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/DateUniqueMaker.java Thu Sep 12 10:13:09 2013 +0200 @@ -0,0 +1,41 @@ +package org.dive4elements.river.artifacts.model.fixings; + +import java.util.Date; + +import gnu.trove.TIntObjectHashMap; +import gnu.trove.TLongHashSet; + +public class DateUniqueMaker { + + private TLongHashSet times; + private TIntObjectHashMap already; + + public DateUniqueMaker() { + times = new TLongHashSet(); + already = new TIntObjectHashMap(); + } + + public <T extends QWI> void makeUnique(T t) { + + // Map same index to same new value + if (already.containsKey(t.index)) { + t.date = (Date)already.get(t.index); + return; + } + long time = t.date.getTime(); + if (!times.add(time)) { // same found before + do { + time += 30L*1000L; // Add 30secs + } + while (!times.add(time)); + Date newDate = new Date(time); + already.put(t.index, newDate); + // Write back modified time. + t.date = newDate; + } + else { + // register as seen. + already.put(t.index, t.date); + } + } +}
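DateUniqueMaker shifts colliding event timestamps forward in 30-second steps so that every event ends up with a distinct date, while events sharing the same index keep sharing one (possibly shifted) date; this matters because the dates are used as TreeMap/TreeSet keys in FixResult and FixAnalysisResult below. A self-contained demo of that behaviour, using a tiny stand-in event class instead of the real QWI:

    import java.util.Date;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    // Self-contained illustration of the DateUniqueMaker behaviour above,
    // with a stand-in event instead of the real QWI class.
    public class DateUniqueMakerDemo {

        static class Event {
            int index;
            Date date;
            Event(int index, Date date) { this.index = index; this.date = date; }
        }

        public static void main(String[] args) {
            Date d = new Date(1000000000000L);
            Event a = new Event(1, new Date(d.getTime()));
            Event b = new Event(2, new Date(d.getTime())); // same timestamp, other event
            Event c = new Event(2, new Date(d.getTime())); // same event index as b

            Set<Long> times = new HashSet<Long>();
            Map<Integer, Date> already = new HashMap<Integer, Date>();

            for (Event e: new Event[] { a, b, c }) {
                if (already.containsKey(e.index)) {   // same index -> reuse mapped date
                    e.date = already.get(e.index);
                    continue;
                }
                long time = e.date.getTime();
                while (!times.add(time)) {            // collision -> shift by 30 seconds
                    time += 30L * 1000L;
                }
                e.date = new Date(time);
                already.put(e.index, e.date);
            }

            // a keeps its date, b is shifted by 30s, c shares b's shifted date.
            System.out.println(a.date + " / " + b.date + " / " + c.date);
        }
    }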
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/Fitting.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/Fitting.java Thu Sep 12 10:13:09 2013 +0200 @@ -121,8 +121,9 @@ double maxQ = -Double.MAX_VALUE; if (referenced != null) { for (QWI qw: referenced) { - if (qw.getQ() > maxQ) { - maxQ = qw.getQ(); + double q = qw.getQ(); + if (q > maxQ) { + maxQ = q; } } }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisCalculation.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisCalculation.java Thu Sep 12 10:13:09 2013 +0200 @@ -101,6 +101,15 @@ fitResult.getOutliers(), analysisPeriods); + // Workaround to deal with same dates in data set + far.makeReferenceEventsDatesUnique(); + far.remapReferenceIndicesToRank(); + + far.makeAnalysisEventsUnique(); + for (int i = 0; i < this.analysisPeriods.length; ++i) { + far.remapAnalysisEventsIndicesToRank(i); + } + return new CalculationResult(far, this); } @@ -172,7 +181,7 @@ ArrayList<QWD> allQWDs = new ArrayList<QWD>(); // for all Q sectors. - for (int qSector = qSectorStart; qSector < qSectorEnd; ++qSector) { + for (int qSector = qSectorStart; qSector <= qSectorEnd; ++qSector) { Filter filter = new AndFilter() .add(kmFilter) @@ -289,6 +298,7 @@ return results; } + /** Returns the mapped value of colIdx or the size of the hashmap. */ private static final int getIndex(TIntIntHashMap map, int colIdx) { if (map.containsKey(colIdx)) { return map.get(colIdx);
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisEventsFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisEventsFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -64,48 +64,47 @@ public Object getData(Artifact artifact, CallContext context) { logger.debug("FixAnalysisEventsFacet.getData"); - if (artifact instanceof D4EArtifact) { - D4EArtifact flys = (D4EArtifact)artifact; - - CalculationResult res = - (CalculationResult) flys.compute(context, - ComputeType.ADVANCE, - false); - - FixAnalysisResult result = (FixAnalysisResult) res.getData(); - double currentKm = getCurrentKm(context); - - KMIndex<AnalysisPeriod []> kmPeriods = result.getAnalysisPeriods(); - KMIndex.Entry<AnalysisPeriod []> kmPeriodsEntry = - kmPeriods.binarySearch(currentKm); - - if(kmPeriodsEntry == null) { - logger.debug("getData: kmPeriodsEntry == null"); - return null; - } - - AnalysisPeriod[] periods = kmPeriodsEntry.getValue(); - if (periods == null) { - logger.debug("getData: periods == null"); - return null; - } - int ndx = index >> 8; - QWD[] qwdData = periods[ndx].getQWDs(); - if (qwdData == null) { - return null; - } - int ndy = index & 255; - for (int i = 0; i < qwdData.length; i++) { - if (qwdData[i].getIndex() == ndy) { - return qwdData[i]; - } - } - return null; - } - else { + if (!(artifact instanceof D4EArtifact)) { logger.debug("Not an instance of FixationArtifact."); return null; } + D4EArtifact flys = (D4EArtifact)artifact; + + CalculationResult res = + (CalculationResult) flys.compute(context, + ComputeType.ADVANCE, + false); + + FixAnalysisResult result = (FixAnalysisResult) res.getData(); + double currentKm = getCurrentKm(context); + + KMIndex<AnalysisPeriod []> kmPeriods = result.getAnalysisPeriods(); + KMIndex.Entry<AnalysisPeriod []> kmPeriodsEntry = + kmPeriods.binarySearch(currentKm); + + if (kmPeriodsEntry == null) { + logger.debug("getData: kmPeriodsEntry == null"); + return null; + } + + AnalysisPeriod[] periods = kmPeriodsEntry.getValue(); + if (periods == null) { + logger.debug("getData: periods == null"); + return null; + } + int ndx = index >> 8; + QWD[] qwdData = periods[ndx].getQWDs(); + if (qwdData == null) { + return null; + } + int ndy = index & 255; + + for (QWD qwd: qwdData) { + if (qwd.getIndex() == ndy) { + return qwd; + } + } + return null; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisResult.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixAnalysisResult.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,8 +10,10 @@ import java.util.Collection; import java.util.Date; +import java.util.TreeMap; import java.util.TreeSet; +import org.apache.log4j.Logger; import org.dive4elements.river.artifacts.model.Parameters; import org.dive4elements.river.utils.KMIndex; @@ -19,6 +21,9 @@ public class FixAnalysisResult extends FixResult { + private static Logger log = + Logger.getLogger(FixAnalysisResult.class); + protected KMIndex<AnalysisPeriod []> analysisPeriods; public FixAnalysisResult() { @@ -49,31 +54,65 @@ return result; } - public Collection<Date> getReferenceEventsDates() { - TreeSet<Date> dates = new TreeSet<Date>(); - for (KMIndex.Entry<QWD []> entry: referenced) { - QWD [] values = entry.getValue(); - for (int i = 0; i < values.length; i++) { - dates.add(values[i].date); + + public void makeAnalysisEventsUnique() { + // Actually it would be enough to make dates + // unique in one analysis period but to simplify things + // we make them unique in all periods. + DateUniqueMaker dum = new DateUniqueMaker(); + for (KMIndex.Entry<AnalysisPeriod []> entry: analysisPeriods) { + for (AnalysisPeriod ap: entry.getValue()) { + QWD [] qwds = ap.getQWDs(); + if (qwds != null) { + for (QWD qwd: qwds) { + dum.makeUnique(qwd); + } + } } } - return dates; } public Collection<Date> getAnalysisEventsDates(int analysisPeriod) { TreeSet<Date> dates = new TreeSet<Date>(); for (KMIndex.Entry<AnalysisPeriod []> entry: analysisPeriods) { QWD [] qwds = entry.getValue()[analysisPeriod].getQWDs(); - if (qwds == null) { - continue; - } - for (int i = 0; i < qwds.length; i++) { - dates.add(qwds[i].date); + if (qwds != null) { + for (QWD qwd: qwds) { + dates.add(qwd.date); + } } } return dates; } + public Collection<Integer> getAnalysisEventsIndices(int analysisPeriod) { + TreeMap<Date, Integer> dates = new TreeMap<Date, Integer>(); + for (KMIndex.Entry<AnalysisPeriod []> entry: analysisPeriods) { + QWD [] qwds = entry.getValue()[analysisPeriod].getQWDs(); + if (qwds != null) { + for (QWD qwd: qwds) { + dates.put(qwd.date, qwd.index); + } + } + } + return dates.values(); + } + + public void remapAnalysisEventsIndicesToRank(int analysisPeriod) { + RankRemapper remapper = new RankRemapper(); + for (Integer index: getAnalysisEventsIndices(analysisPeriod)) { + remapper.toMap(index); + } + for (KMIndex.Entry<AnalysisPeriod []> entry: analysisPeriods) { + QWD [] qwds = entry.getValue()[analysisPeriod].getQWDs(); + if (qwds != null) { + for (QWD qwd: qwds) { + remapper.remap(qwd); + } + } + } + } + public KMIndex<AnalysisPeriod []> getAnalysisPeriods() { return analysisPeriods; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixCalculation.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixCalculation.java Thu Sep 12 10:13:09 2013 +0200 @@ -114,6 +114,10 @@ return index; } + public int getId() { + return meta.getId(); + } + public boolean getQW( double km, double [] qs, @@ -313,7 +317,7 @@ column.getDate(), interpolated[i], 0d, - column.getIndex()); + column.getId()); // Use database id here } } log.warn("cannot find column for (" + q + ", " + w + ")"); @@ -364,7 +368,9 @@ continue; } - referenced.add(km, fitting.referencedToArray()); + QWD [] refs = fitting.referencedToArray(); + + referenced.add(km, refs); if (fitting.hasOutliers()) { outliers.add(km, fitting.outliersToArray());
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixReferenceEventsFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixReferenceEventsFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -61,38 +61,36 @@ public Object getData(Artifact artifact, CallContext context) { logger.debug("FixReferenceEventsFacet.getData"); - if (artifact instanceof D4EArtifact) { - D4EArtifact flys = (D4EArtifact)artifact; - - CalculationResult res = - (CalculationResult) flys.compute(context, - ComputeType.ADVANCE, - false); - - FixResult result = (FixResult) res.getData(); - double currentKm = getCurrentKm(context); - - logger.debug("current km in FRE: " + currentKm); - - KMIndex<QWD []> kmQWs = result.getReferenced(); - KMIndex.Entry<QWD []> kmQWsEntry = kmQWs.binarySearch(currentKm); - QWD[] qwds = null; - if (kmQWsEntry != null) { - int ndx = index & 255; - qwds = kmQWsEntry.getValue(); - for (int i = 0; i < qwds.length; i++) { - if (qwds[i].getIndex() == ndx) { - return qwds[i]; - } - } - return null; - } - return null; - } - else { + if (!(artifact instanceof D4EArtifact)) { logger.debug("Not an instance of FixationArtifact."); return null; } + + D4EArtifact flys = (D4EArtifact)artifact; + + CalculationResult res = + (CalculationResult) flys.compute(context, + ComputeType.ADVANCE, + false); + + FixResult result = (FixResult) res.getData(); + double currentKm = getCurrentKm(context); + + if (logger.isDebugEnabled()) { + logger.debug("current km in FRE: " + currentKm); + } + + KMIndex<QWD []> kmQWs = result.getReferenced(); + KMIndex.Entry<QWD []> kmQWsEntry = kmQWs.binarySearch(currentKm); + if (kmQWsEntry != null) { + int ndx = index & 255; + for (QWD qwd: kmQWsEntry.getValue()) { + if (qwd.getIndex() == ndx) { + return qwd; + } + } + } + return null; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixResult.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixResult.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,15 +8,23 @@ package org.dive4elements.river.artifacts.model.fixings; +import org.apache.log4j.Logger; import org.dive4elements.river.artifacts.model.Parameters; import org.dive4elements.river.utils.KMIndex; import java.io.Serializable; +import java.util.Collection; +import java.util.Date; +import java.util.TreeMap; +import java.util.TreeSet; public class FixResult implements Serializable { + private static Logger log = + Logger.getLogger(FixResult.class); + protected Parameters parameters; protected KMIndex<QWD []> referenced; protected KMIndex<QWI []> outliers; @@ -42,6 +50,48 @@ this.referenced = referenced; } + public void makeReferenceEventsDatesUnique() { + DateUniqueMaker dum = new DateUniqueMaker(); + for (KMIndex.Entry<QWD []> entry: referenced) { + for (QWD ref: entry.getValue()) { + dum.makeUnique(ref); + } + } + } + + public Collection<Integer> getReferenceEventsIndices() { + TreeMap<Date, Integer> dates = new TreeMap<Date, Integer>(); + for (KMIndex.Entry<QWD []> entry: referenced) { + for (QWD value: entry.getValue()) { + dates.put(value.date, value.index); + } + } + return dates.values(); + } + + public void remapReferenceIndicesToRank() { + RankRemapper remapper = new RankRemapper(); + for (Integer idx: getReferenceEventsIndices()) { + remapper.toMap(idx); + } + for (KMIndex.Entry<QWD []> entry: referenced) { + for (QWD value: entry.getValue()) { + remapper.remap(value); + } + } + } + + public Collection<Date> getReferenceEventsDates() { + TreeSet<Date> dates = new TreeSet<Date>(); + for (KMIndex.Entry<QWD []> entry: referenced) { + for (QWD qwd: entry.getValue()) { + dates.add(qwd.date); + } + } + return dates; + } + + public KMIndex<QWI []> getOutliers() { return outliers; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixWQCurveFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/FixWQCurveFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -66,57 +66,57 @@ */ @Override public Object getData(Artifact artifact, CallContext context) { - logger.debug("getData"); - if (artifact instanceof D4EArtifact) { - D4EArtifact flys = (D4EArtifact)artifact; - FixAnalysisAccess access = new FixAnalysisAccess(flys); - - CalculationResult res = - (CalculationResult) flys.compute(context, - ComputeType.ADVANCE, - false); - - FixResult result = (FixResult) res.getData(); - double currentKm = getCurrentKm(context); - - logger.debug("getData: km = " + currentKm); - - String function = access.getFunction(); - Function ff = FunctionFactory.getInstance().getFunction(function); - - if (ff == null) { - logger.warn("getData: ff == null"); - return null; - } - Parameters params = result.getParameters(); - String[] paramNames = ff.getParameterNames(); - - double [] coeffs = params.interpolate("km", currentKm, paramNames); - - if (coeffs == null) { - logger.warn("getData: coeffs == null"); - return null; - } - - org.dive4elements.river.artifacts.math.Function mf = - ff.instantiate(coeffs); - - double maxQ = FixFacetUtils.getMaxQ(params, currentKm); - logger.debug("getData: maxQ = " + maxQ); - - FixFunction fix = new FixFunction( - ff.getName(), - ff.getDescription(), - mf, - maxQ); - - return fix; - } - else { + logger.debug("getData"); + if (!(artifact instanceof D4EArtifact)) { logger.debug("Not an instance of D4EArtifact / FixationArtifact."); return null; } + + D4EArtifact flys = (D4EArtifact)artifact; + FixAnalysisAccess access = new FixAnalysisAccess(flys); + + CalculationResult res = + (CalculationResult) flys.compute(context, + ComputeType.ADVANCE, + false); + + FixResult result = (FixResult) res.getData(); + double currentKm = getCurrentKm(context); + + logger.debug("getData: km = " + currentKm); + + String function = access.getFunction(); + Function ff = FunctionFactory.getInstance().getFunction(function); + + if (ff == null) { + logger.warn("getData: ff == null"); + return null; + } + + Parameters params = result.getParameters(); + String[] paramNames = ff.getParameterNames(); + + double [] coeffs = params.interpolate("km", currentKm, paramNames); + + if (coeffs == null) { + logger.warn("getData: coeffs == null"); + return null; + } + + org.dive4elements.river.artifacts.math.Function mf = + ff.instantiate(coeffs); + + double maxQ = FixFacetUtils.getMaxQ(params, currentKm); + logger.debug("getData: maxQ = " + maxQ); + + FixFunction fix = new FixFunction( + ff.getName(), + ff.getDescription(), + mf, + maxQ); + + return fix; } /**
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/fixings/RankRemapper.java Thu Sep 12 10:13:09 2013 +0200 @@ -0,0 +1,44 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.artifacts.model.fixings; + +import java.util.IdentityHashMap; + +import org.apache.log4j.Logger; + +import gnu.trove.TIntIntHashMap; + +public class RankRemapper { + + private static Logger log = Logger.getLogger(RankRemapper.class); + + private TIntIntHashMap index2rank; + private IdentityHashMap<QWI, Boolean> visited; + + public RankRemapper() { + index2rank = new TIntIntHashMap(); + visited = new IdentityHashMap<QWI, Boolean>(); + } + + public void toMap(int index) { + index2rank.put(index, index2rank.size()); + } + + public <I extends QWI> void remap(I qwi) { + if (!visited.containsKey(qwi)) { + int idx = qwi.index; + if (index2rank.containsKey(idx)) { + qwi.index = index2rank.get(idx); + } else if (log.isDebugEnabled()) { + log.debug("Cannot remap " + idx); + } + visited.put(qwi, true); + } + } +}
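RankRemapper registers indices in the order they are passed to toMap() and hands out consecutive ranks starting at 0; remap() then rewrites a raw index to its rank and leaves unknown indices untouched. A hand-rolled sketch of the same idea using a plain HashMap instead of the trove map (this is not the shipped class):

    import java.util.HashMap;
    import java.util.Map;

    public class RankRemapSketch {

        private final Map<Integer, Integer> index2rank =
            new HashMap<Integer, Integer>();

        /** Register a raw index; the first one gets rank 0, the next rank 1, ... */
        public void toMap(int index) {
            if (!index2rank.containsKey(index)) {
                index2rank.put(index, index2rank.size());
            }
        }

        /** Return the rank of a registered index, or the index itself if unknown. */
        public int remap(int index) {
            Integer rank = index2rank.get(index);
            return rank != null ? rank : index;
        }

        public static void main(String[] args) {
            RankRemapSketch remapper = new RankRemapSketch();
            for (int dbId: new int[] { 42, 7, 13 }) { // fed in chronological order
                remapper.toMap(dbId);
            }
            System.out.println(remapper.remap(7));  // 1
            System.out.println(remapper.remap(13)); // 2
            System.out.println(remapper.remap(99)); // 99: unknown, left as is
        }
    }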
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffCalculation.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffCalculation.java Thu Sep 12 10:13:09 2013 +0200 @@ -18,7 +18,10 @@ import org.dive4elements.river.artifacts.model.Calculation; import org.dive4elements.river.artifacts.model.CalculationResult; - +/** + * Perform calculation of differences of bed height (German: Sohlhoehe). + * The input is either single-year data or epochs. + */ public class BedDiffCalculation extends Calculation { @@ -84,6 +87,7 @@ */ } + /** Get two BedHeights from factory. */ private static BedHeight [] getHeightPair(int [] ids, String type) { return new BedHeight [] { BedHeightFactory.getHeight(type, ids[0], 0), @@ -176,3 +180,4 @@ s2.getName()); } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffYearResult.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedDiffYearResult.java Thu Sep 12 10:13:09 2013 +0200 @@ -11,10 +11,10 @@ import gnu.trove.TDoubleArrayList; +/** Result of a bed diff year calculation. */ public class BedDiffYearResult extends BedDifferencesResult { - protected TDoubleArrayList bedHeights; protected TDoubleArrayList dataGap; protected TDoubleArrayList morphWidth; @@ -115,3 +115,4 @@ }; } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -24,6 +24,8 @@ import org.dive4elements.river.artifacts.model.StaticBedHeightCacheKey; import org.dive4elements.river.backend.SessionHolder; + +/** Create BedHeights from database. */ public class BedHeightFactory { /** Private logger to use here. */ private static Logger log = Logger.getLogger(BedHeightFactory.class); @@ -62,7 +64,7 @@ /** - * Get WKms for given column and wst_id, caring about the cache. + * Get BedHeight for given type and height_id, caring about the cache. */ public static BedHeight getHeight(String type, int height_id, int time) { log.debug("BedHeightFactory.getHeight"); @@ -92,7 +94,7 @@ return values; } - /** Get name for a WKms. */ + /** Get name for a BedHeight. */ public static String getHeightName(String type, int height_id) { log.debug("BedHeightFactory.getHeightName height_id/" + height_id); @@ -123,10 +125,9 @@ /** - * Get WKms from db. - * @param column the position columns value - * @param wst_id database id of the wst - * @return according WKms. + * Get BedHeight from db. + * @param height_id database id of the bed_height + * @return according BedHeight. */ public static BedHeight getBedHeightUncached( String type,
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/FlowVelocityMeasurementFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/FlowVelocityMeasurementFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,12 +8,15 @@ package org.dive4elements.river.artifacts.model.minfo; +import java.util.Date; import java.util.List; import org.apache.log4j.Logger; import org.hibernate.SQLQuery; import org.hibernate.Session; +import org.hibernate.type.StandardBasicTypes; + import org.dive4elements.river.model.FlowVelocityMeasurementValue; import org.dive4elements.river.backend.SessionHolder; @@ -40,7 +43,13 @@ log.debug("FlowVelocityMeasurementFactory.getFlowVelocityMeasurementValue"); Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = null; - sqlQuery = session.createSQLQuery(SQL_SELECT_ONE); + sqlQuery = session.createSQLQuery(SQL_SELECT_ONE) + .addScalar("station", StandardBasicTypes.DOUBLE) + .addScalar("datetime", StandardBasicTypes.DATE) + .addScalar("w", StandardBasicTypes.DOUBLE) + .addScalar("q", StandardBasicTypes.DOUBLE) + .addScalar("v", StandardBasicTypes.DOUBLE) + .addScalar("description", StandardBasicTypes.STRING); sqlQuery.setParameter("id", id); List<Object []> results = sqlQuery.list(); @@ -50,10 +59,12 @@ return null; } return FlowVelocityMeasurementValue.getUnmapped( - Double.parseDouble(row[0].toString()), - Double.parseDouble(row[2].toString()), - Double.valueOf(row[3].toString()), - Double.valueOf(row[4].toString()), null, row[5].toString()); + (Double) row[0], + (Double) row[2], + (Double) row[3], + (Double) row[4], + (Date) row[1], + (String) row[5]); } return null; }
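The change above types each column of the native query with addScalar() so the result cells can be cast directly instead of being re-parsed from strings. A minimal sketch of that Hibernate pattern; the table and column names here are invented for illustration:

    import java.util.List;

    import org.hibernate.SQLQuery;
    import org.hibernate.Session;
    import org.hibernate.type.StandardBasicTypes;

    public class ScalarQuerySketch {

        /** Read one typed value from a native query. */
        public static Double stationFor(Session session, int id) {
            SQLQuery query = session.createSQLQuery(
                    "SELECT station FROM example_values WHERE id = :id")
                .addScalar("station", StandardBasicTypes.DOUBLE);
            query.setParameter("id", id);
            List results = query.list();
            // Without addScalar() the driver may deliver e.g. a BigDecimal here
            // and the cast would fail; with it we reliably get a Double.
            return results.isEmpty() ? null : (Double) results.get(0);
        }
    }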
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurement.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurement.java Thu Sep 12 10:13:09 2013 +0200 @@ -11,8 +11,11 @@ import java.util.Date; import java.util.Map; +import org.apache.log4j.Logger; +/** A measurement of the bed quality, providing different characteristic diameters at a given km. */ public class QualityMeasurement { + private static Logger logger = Logger.getLogger(QualityMeasurement.class); private double km; private Date date; @@ -20,7 +23,7 @@ private double depth2; private Map<String, Double> charDiameter; - public QualityMeasurement() { + private QualityMeasurement() { } @@ -61,8 +64,16 @@ this.charDiameter = charDiameter; } + /** + * Get the stored diameter for the given key (e.g. d10). + * @return NaN if no data is found in this measurement. + */ public double getDiameter(String key) { - return charDiameter.get(key); + Double diameter = charDiameter.get(key); + if (diameter == null) { + logger.warn("No diameter at km " + km + " for " + key); + } + return (diameter != null) ? diameter : Double.NaN; } public void setDiameter(String key, double value) {
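getDiameter() now signals missing data with Double.NaN instead of risking a NullPointerException. Callers have to test for that with Double.isNaN(), since NaN compares unequal to everything, including itself; a two-line reminder:

    public class NanCheck {
        public static void main(String[] args) {
            double missing = Double.NaN;
            System.out.println(missing == Double.NaN);  // false
            System.out.println(Double.isNaN(missing));  // true
        }
    }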
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurementFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurementFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -86,14 +86,18 @@ " m.datum BETWEEN :start AND :end " + "ORDER BY m.km"; + /** Transform query result into objects, use INSTANCE singleton. */ public static final class QualityMeasurementResultTransformer extends BasicTransformerAdapter { - public static QualityMeasurementResultTransformer INSTANCE = new QualityMeasurementResultTransformer(); + // Make a singleton + public static QualityMeasurementResultTransformer INSTANCE = + new QualityMeasurementResultTransformer(); - public QualityMeasurementResultTransformer() { + private QualityMeasurementResultTransformer() { } + /** tuples is a row. */ @Override public Object transformTuple(Object[] tuple, String[] aliases) { Map<String, Double> map = new HashMap<String, Double>(); @@ -172,6 +176,7 @@ return new QualityMeasurements(query.list()); } + /** Get all measurements. */ public static QualityMeasurements getBedMeasurements( String river, double from,
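The transformer touched above follows Hibernate's ResultTransformer contract: transformTuple() receives one result row plus its column aliases and returns whatever object should appear in query.list(). A generic sketch of that pattern (the map-based result is illustrative, not the project's transformer):

    import java.util.HashMap;
    import java.util.Map;

    import org.hibernate.transform.BasicTransformerAdapter;

    public class RowToMapTransformer extends BasicTransformerAdapter {

        /** Stateless, so one shared instance is enough. */
        public static final RowToMapTransformer INSTANCE =
            new RowToMapTransformer();

        private RowToMapTransformer() {
        }

        @Override
        public Object transformTuple(Object[] tuple, String[] aliases) {
            // One map per row: column alias -> column value.
            Map<String, Object> row = new HashMap<String, Object>(tuple.length);
            for (int i = 0; i < tuple.length; i++) {
                row.put(aliases[i], tuple[i]);
            }
            return row;
        }
    }

It would typically be attached with query.setResultTransformer(RowToMapTransformer.INSTANCE) before calling list().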
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurements.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurements.java Thu Sep 12 10:13:09 2013 +0200 @@ -56,3 +56,4 @@ this.measurements.add(qm); } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensity.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensity.java Thu Sep 12 10:13:09 2013 +0200 @@ -65,23 +65,25 @@ this.years = years; } + /** + * Get the density at year. + * measured densities are valid until the next measurement. + * if no measurement was found 1.8 is returned. + */ public double getDensity(double km, int year) { Collections.sort(this.years); - if (this.years.size() == 1) { - return getDensityAtKm(densities.get(year), km); + if (this.years.size() == 1 && years.get(0) <= year) { + logger.debug("get density from year " + year + " at km " + km); + return getDensityAtKm(densities.get(years.get(0)), km); } - else { + else if (this.years.size() > 1) { for (int i = 0; i < years.size() -1; i++) { int y1 = years.get(i); int y2 = years.get(i + 1); - int mid = Math.round((y1 + y2) / 2); - if (year < mid) { + if (year >= y1 && year < y2) { return getDensityAtKm(densities.get(y1), km); } - else if (i == years.size() -1) { - continue; - } - else { + else if (year >= y2 && i == years.size() -1) { return getDensityAtKm(densities.get(y2), km); } } @@ -93,15 +95,12 @@ List<SedimentDensityValue> values, double km ) { - boolean found = true; SedimentDensityValue prev = null; SedimentDensityValue next = null; for (SedimentDensityValue sdv: values) { -logger.debug("year: " + sdv.getYear()); - if (sdv.getKm() == km) { - prev = sdv; - found = true; - break; + logger.debug("year: " + sdv.getYear()); + if (Math.abs(sdv.getKm() - km) < 0.00001) { + return prev.getDensity(); } if (sdv.getKm() > km) { next = sdv; @@ -109,32 +108,39 @@ } prev = sdv; } - if (found) { - return prev.getDensity(); - } - else { - return spline(prev, next, km); - } + return spline(prev, next, km); } - private double spline( + private static double spline( SedimentDensityValue prev, SedimentDensityValue next, double km ) { + if (prev == null && next == null) { + logger.warn("prev and next are null -> NaN"); + return Double.NaN; + } + + if (prev == null) return next.getDensity(); + if (next == null) return prev.getDensity(); + + // XXX: This is no spline interpolation! double lower = prev.getKm(); double upper = next.getKm(); double upperDensity = next.getDensity(); double lowerDensity = prev.getDensity(); - double m =(upperDensity - lowerDensity)/(upper - lower) * km; - double b = lowerDensity - - ((upperDensity - lowerDensity)/(upper - lower) * lower); - return (m * km) + b; + double m = (upperDensity - lowerDensity)/(upper - lower); + double b = lowerDensity - (m * lower); + return m * km + b; } + + /** If multiple values for same year and station are found, + * build and store average, dismiss multiple values. */ public void cleanUp() { Set<Integer> keys = densities.keySet(); + // Walk over years for (Integer key : keys) { List<SedimentDensityValue> list = densities.get(key); if (list.size() == 0) { @@ -146,6 +152,7 @@ int counter = 0; double sum = 0d; for (SedimentDensityValue value : list) { + // Apparently we can assume that values are ordered by km. if (value.getKm() == prevkm) { sum += value.getDensity(); counter++; @@ -164,3 +171,4 @@ } } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
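As the new XXX comment notes, spline() performs a plain linear interpolation between the two neighbouring density values, not a spline. The formula, isolated into a runnable sketch:

    public class LinearDensityInterpolation {

        /** Interpolate the density at km between two known stations. */
        static double interpolate(
            double lowerKm, double lowerDensity,
            double upperKm, double upperDensity,
            double km
        ) {
            double m = (upperDensity - lowerDensity) / (upperKm - lowerKm);
            double b = lowerDensity - m * lowerKm;
            return m * km + b;
        }

        public static void main(String[] args) {
            // Density 1.7 at km 10 and 1.9 at km 20 -> 1.8 at km 15.
            System.out.println(interpolate(10, 1.7, 20, 1.9, 15));
        }
    }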
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensityFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensityFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -79,14 +79,13 @@ int year ) { log.debug("getSedimentDensityUncached"); - List<Object[]> results = null; Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_DENSITY) .addScalar("km", StandardBasicTypes.DOUBLE) .addScalar("density", StandardBasicTypes.DOUBLE) .addScalar("year", StandardBasicTypes.INTEGER); sqlQuery.setString("name", river); - results = sqlQuery.list(); + List<Object[]> results = sqlQuery.list(); SedimentDensity density = new SedimentDensity(); for (Object[] row : results) { if (row[0] != null && row[1] != null && row[2] != null) { @@ -98,3 +97,4 @@ return density; } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensityValue.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentDensityValue.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,7 +8,7 @@ package org.dive4elements.river.artifacts.model.minfo; - +/** A sediment density value at a given km and year. */ public class SedimentDensityValue {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoad.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoad.java Thu Sep 12 10:13:09 2013 +0200 @@ -9,11 +9,13 @@ package org.dive4elements.river.artifacts.model.minfo; import java.util.Date; -import java.util.HashMap; +import java.util.Map; import java.util.Set; +import java.util.TreeMap; import org.dive4elements.river.artifacts.model.NamedObjectImpl; import org.dive4elements.river.artifacts.model.Range; +import org.dive4elements.river.utils.EpsilonComparator; /** Gives access to Fractions (at kms). */ @@ -26,10 +28,10 @@ protected boolean isEpoch; protected String unit; - protected HashMap<Double, SedimentLoadFraction> kms; + protected Map<Double, SedimentLoadFraction> kms; public SedimentLoad() { - kms = new HashMap<Double, SedimentLoadFraction>(); + kms = new TreeMap<Double, SedimentLoadFraction>(EpsilonComparator.CMP); } public SedimentLoad( @@ -88,124 +90,71 @@ } public SedimentLoadFraction getFraction(double km) { - if (kms.get(km) == null) { - return new SedimentLoadFraction(); + SedimentLoadFraction f = kms.get(km); + if (f == null) { + f = new SedimentLoadFraction(); + kms.put(km, f); } - return kms.get(km); + return f; } public void setCoarse(double km, double coarse, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setCoarse(coarse); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setCoarse(coarse); - f.setCoarseRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setCoarse(coarse); + f.setCoarseRange(range); } public void setFineMiddle(double km, double fine_middle, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setFineMiddle(fine_middle); - kms.get(km).setFineMiddleRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setFineMiddle(fine_middle); - f.setFineMiddleRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setFineMiddle(fine_middle); + f.setFineMiddleRange(range); } + public void setSand(double km, double sand, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setSand(sand); - kms.get(km).setSandRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setSand(sand); - f.setSandRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setSand(sand); + f.setSandRange(range); } public void setSuspSand(double km, double susp_sand, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setSuspSand(susp_sand); - kms.get(km).setSuspSandRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setSuspSand(susp_sand); - f.setSuspSandRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setSuspSand(susp_sand); + f.setSuspSandRange(range); } public void setSuspSandBed(double km, double susp_sand_bed, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setSuspSandBed(susp_sand_bed); - kms.get(km).setSuspSandBedRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setSuspSandBed(susp_sand_bed); - f.setSuspSandBedRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setSuspSandBed(susp_sand_bed); + f.setSuspSandBedRange(range); } public void setSuspSediment(double km, double susp_sediment, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setSuspSediment(susp_sediment); - 
kms.get(km).setSuspSedimentRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setSuspSediment(susp_sediment); - f.setSuspSedimentRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setSuspSediment(susp_sediment); + f.setSuspSedimentRange(range); } public void setLoadTotal(double km, double total) { - if (kms.containsKey(km)) { - kms.get(km).setLoadTotal(total); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setLoadTotal(total); - kms.put(km, f); - } + setLoadTotal(km, total, null); + } + + public void setLoadTotal(double km, double total, Range range) { + SedimentLoadFraction f = getFraction(km); + f.setLoadTotal(total); + f.setLoadTotalRange(range); } public void setTotal(double km, double total, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setTotal(total); - kms.get(km).setTotalRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setTotal(total); - f.setTotalRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setTotal(total); + f.setTotalRange(range); } public void setUnknown(double km, double unknown, Range range) { - if (kms.containsKey(km)) { - kms.get(km).setUnknown(unknown); - kms.get(km).setUnknownRange(range); - } - else { - SedimentLoadFraction f = new SedimentLoadFraction(); - f.setUnknown(unknown); - f.setUnknownRange(range); - kms.put(km, f); - } + SedimentLoadFraction f = getFraction(km); + f.setUnknown(unknown); + f.setUnknownRange(range); } public String getUnit() {
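Replacing the HashMap with a TreeMap keyed through EpsilonComparator.CMP has two effects: km values that differ only by rounding noise hit the same entry, and getKms() now iterates in ascending km order. A hand-written stand-in for such a comparator to illustrate the first point (it is not the project's EpsilonComparator, and the tolerance is made up):

    import java.io.Serializable;
    import java.util.Comparator;
    import java.util.TreeMap;

    public class EpsilonKeyedMap {

        static final class EpsCmp implements Comparator<Double>, Serializable {
            private final double eps;

            EpsCmp(double eps) {
                this.eps = eps;
            }

            @Override
            public int compare(Double a, Double b) {
                if (Math.abs(a - b) < eps) {
                    return 0;          // treat as the same km
                }
                return a < b ? -1 : 1;
            }
        }

        public static void main(String[] args) {
            TreeMap<Double, String> kms =
                new TreeMap<Double, String>(new EpsCmp(1e-4));
            kms.put(42.1, "fraction A");
            kms.put(42.10000000001, "fraction B"); // same key within tolerance
            System.out.println(kms.size());         // 1
            System.out.println(kms.get(42.100001)); // fraction B
        }
    }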
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadCalculation.java Thu Sep 12 10:13:09 2013 +0200 @@ -11,7 +11,6 @@ import gnu.trove.TDoubleArrayList; import java.util.ArrayList; -import java.util.TreeSet; import java.util.List; import org.apache.log4j.Logger; @@ -36,12 +35,14 @@ protected double kmUp; protected double kmLow; protected int[] period; + /** Years of chosen epochs. */ protected int[][] epoch; protected String unit; public SedimentLoadCalculation() { } + /** Returns CalculationResult with array of SedimentLoadResults. */ public CalculationResult calculate(SedimentLoadAccess access) { logger.info("SedimentLoadCalculation.calculate"); @@ -87,6 +88,7 @@ return new CalculationResult(); } + /** Returns CalculationResult with array of SedimentLoadResults. */ private CalculationResult internalCalculate() { logger.debug("internalCalulate; mode:" + yearEpoch); if (yearEpoch.equals("year")) { @@ -119,9 +121,85 @@ return new CalculationResult( results.toArray(new SedimentLoadResult[results.size()]), this); } + else { + logger.error("Unknown mode " + yearEpoch); + } return null; } + /** + * @param[out] resLoad resulting SedimentLoad + */ + private void calculateEpochKm( + List<SedimentLoad> epochLoads, + SedimentLoad resLoad, + double km + ) { + int cSum = 0; + int fmSum = 0; + int sSum = 0; + int ssSum = 0; + int ssbSum = 0; + int sseSum = 0; + for (SedimentLoad load : epochLoads) { + SedimentLoadFraction f = load.getFraction(km); + if (f.getCoarse() > 0d) { + double c = resLoad.getFraction(km).getCoarse(); + resLoad.setCoarse(km, c + f.getCoarse(), f.getCoarseRange()); + cSum++; + } + if (f.getFineMiddle() > 0d) { + double fm = resLoad.getFraction(km).getFineMiddle(); + resLoad.setFineMiddle(km, fm + f.getFineMiddle(), f.getFineMiddleRange()); + fmSum++; + } + if (f.getSand() > 0d) { + double s = resLoad.getFraction(km).getSand(); + resLoad.setSand(km, s + f.getSand(), f.getSandRange()); + sSum++; + } + if (f.getSuspSand() > 0d) { + double s = resLoad.getFraction(km).getSuspSand(); + resLoad.setSuspSand(km, s + f.getSuspSand(), f.getSuspSandRange()); + ssSum++; + } + if (f.getSuspSandBed() > 0d) { + double s = resLoad.getFraction(km).getSuspSandBed(); + resLoad.setSuspSandBed(km, s + f.getSuspSandBed(), f.getSuspSandBedRange()); + ssbSum++; + } + if (f.getSuspSediment() > 0d) { + double s = resLoad.getFraction(km).getSuspSediment(); + resLoad.setSuspSediment(km, s + f.getSuspSediment(), f.getSuspSedimentRange()); + sseSum++; + } + } + + SedimentLoadFraction fr = resLoad.getFraction(km); + // Prevent divisions by zero, the fraction defaults to 0d. 
+ if (cSum != 0) { + resLoad.setCoarse(km, fr.getCoarse()/cSum, fr.getCoarseRange()); + } + if (fmSum != 0) { + resLoad.setFineMiddle(km, fr.getFineMiddle()/fmSum, + fr.getFineMiddleRange()); + } + if (sSum != 0) { + resLoad.setSand(km, fr.getSand()/sSum, fr.getSandRange()); + } + if (ssSum != 0) { + resLoad.setSuspSand(km, fr.getSuspSand()/ssSum, + fr.getSuspSandRange()); + } + if (ssbSum != 0) { + resLoad.setSuspSandBed(km, fr.getSuspSandBed()/ssbSum, + fr.getSuspSandBedRange()); + } + if (sseSum != 0) { + resLoad.setSuspSediment(km, fr.getSuspSediment()/sseSum, fr.getSuspSedimentRange()); + } + } + private SedimentLoadResult calculateEpoch(int i) { List<SedimentLoad> epochLoads = new ArrayList<SedimentLoad>(); for (int j = epoch[i][0]; j < epoch[i][1]; j++) { @@ -146,53 +224,7 @@ } for (int j = 0; j < kms.size(); j++) { - int cSum = 0; - int fmSum = 0; - int sSum = 0; - int ssSum = 0; - int ssbSum = 0; - int sseSum = 0; - double km = kms.get(j); - for (SedimentLoad load : epochLoads) { - SedimentLoadFraction f = load.getFraction(km); - if (f.getCoarse() > 0d) { - double c = resLoad.getFraction(km).getCoarse(); - resLoad.setCoarse(km, c + f.getCoarse(), f.getCoarseRange()); - cSum++; - } - if (f.getFineMiddle() > 0d) { - double fm = resLoad.getFraction(km).getFineMiddle(); - resLoad.setFineMiddle(km, fm + f.getFineMiddle(), f.getFineMiddleRange()); - fmSum++; - } - if (f.getSand() > 0d) { - double s = resLoad.getFraction(km).getSand(); - resLoad.setSand(km, s + f.getSand(), f.getSandRange()); - sSum++; - } - if (f.getSuspSand() > 0d) { - double s = resLoad.getFraction(km).getSuspSand(); - resLoad.setSuspSand(km, s + f.getSuspSand(), f.getSuspSandRange()); - ssSum++; - } - if (f.getSuspSandBed() > 0d) { - double s = resLoad.getFraction(km).getSuspSandBed(); - resLoad.setSuspSandBed(km, s + f.getSuspSandBed(), f.getSuspSandBedRange()); - ssbSum++; - } - if (f.getSuspSediment() > 0d) { - double s = resLoad.getFraction(km).getSuspSediment(); - resLoad.setSuspSediment(km, s + f.getSuspSediment(), f.getSuspSedimentRange()); - sseSum++; - } - } - SedimentLoadFraction fr = resLoad.getFraction(km); - resLoad.setCoarse(km, fr.getCoarse()/cSum, fr.getCoarseRange()); - resLoad.setFineMiddle(km, fr.getFineMiddle()/fmSum, fr.getFineMiddleRange()); - resLoad.setSand(km, fr.getSand()/sSum, fr.getSandRange()); - resLoad.setSuspSand(km, fr.getSuspSand()/ssSum, fr.getSuspSandRange()); - resLoad.setSuspSandBed(km, fr.getSuspSandBed()/ssbSum, fr.getSuspSandBedRange()); - resLoad.setSuspSediment(km, fr.getSuspSediment()/sseSum, fr.getSuspSedimentRange()); + calculateEpochKm(epochLoads, resLoad, kms.get(j)); } resLoad.setDescription(""); resLoad.setEpoch(true); @@ -216,6 +248,10 @@ return result; } + /** + * Calculate/Fetch values at off. epochs. + * @param i index in epochs. + */ private SedimentLoadResult calculateOffEpoch(int i) { SedimentLoad load = SedimentLoadFactory.getLoadWithData( this.river, @@ -243,8 +279,11 @@ return result; } - /** Fetch loads for a single year, calculate total and - * return the result containing both. */ + /** + * Fetch loads for a single year, calculate total and + * return the result containing both. + * @param y year, e.g. 1980 + */ private SedimentLoadResult calculateYear(int y) { SedimentLoad load = SedimentLoadFactory.getLoadWithData( this.river, @@ -298,44 +337,18 @@ } - /** Returns true if all fraction values except SuspSediment are unset. 
*/ - private boolean hasOnlySuspValues(SedimentLoadFraction fraction) { - return (fraction.getSuspSediment() != 0d && - fraction.getCoarse() == 0d && - fraction.getFineMiddle() == 0d && - fraction.getSand() == 0d && - fraction.getSuspSand() == 0d); - } - - - /** Returns true if all fraction values except SuspSediment are set. */ - private boolean hasButSuspValues(SedimentLoadFraction fraction) { - return (fraction.getSuspSediment() == 0d && - fraction.getCoarse() != 0d && - fraction.getFineMiddle() != 0d && - fraction.getSand() != 0d && - fraction.getSuspSand() != 0d); - } - - - /** Returns true if all fraction needed for total calculation are set. */ - private boolean complete(SedimentLoadFraction fraction) { - return (fraction.getCoarse() != 0d && - fraction.getFineMiddle() != 0d && - fraction.getSand() != 0d && - fraction.getSuspSand() != 0d && - fraction.getSuspSediment() != 0d); - } - /** * Set total values in load. - * Therefore, run over the kms and find ranges where either all + * + * Therefore, run over the sorted kms and find ranges where either all * or all Geschiebe or just the Schwebstoff fractions are set. * Merge these ranges and add (maybe new) respective fractions to - * load. + * load. In the process, remember any 'unfished' ends from measurements + * where the km-ranges did not completely match. + * * @param load SedimentLoad to add total values (and ranges) to. - * @return input param load. + * @return input param load, with total values set. */ private SedimentLoad partialTotal(SedimentLoad load) { SedimentLoad fairLoad = load; @@ -346,12 +359,10 @@ Range lastSuspRange = null; double lastSuspValue = 0d; - TreeSet<Double> kms = new TreeSet<Double>(load.getKms()); - - for (double km: kms) { + for (double km: load.getKms()) { // kms are already sorted! logger.debug ("Trying to add at km " + km); SedimentLoadFraction fraction = load.getFraction(km); - if (complete(fraction)) { + if (fraction.isComplete()) { double total = fraction.getCoarse() + fraction.getFineMiddle() + fraction.getSand() + @@ -387,7 +398,7 @@ } } } - else if (hasOnlySuspValues(fraction) && lastOtherRange != null) { + else if (fraction.hasOnlySuspValues() && lastOtherRange != null) { // Split stuff. Range suspSedimentRange = fraction.getSuspSedimentRange(); // if intersects with last other range, cool! merge and add! @@ -412,7 +423,8 @@ lastOtherRange.setStart(suspSedimentRange.getEnd()); lastSuspRange = null; } - if (Math.abs(suspSedimentRange.getEnd() - lastOtherRange.getEnd()) < 0.1d) { + if (lastOtherRange != null + && Math.abs(suspSedimentRange.getEnd() - lastOtherRange.getEnd()) < 0.1d) { lastOtherRange = null; lastSuspRange = null; } @@ -424,7 +436,7 @@ lastOtherRange = null; } } - else if (hasButSuspValues(fraction) && lastSuspRange != null) { + else if (fraction.hasButSuspValues() && lastSuspRange != null) { // If intersects with last suspsed range, merge and add double total = fraction.getCoarse() + fraction.getFineMiddle() + @@ -449,7 +461,9 @@ lastSuspRange = null; lastOtherValue = total - lastSuspValue; } - if (lastSuspRange != null && Math.abs(lastSuspRange.getEnd() - lastOtherRange.getEnd()) < 0.1d) { + if (lastSuspRange != null + && lastOtherRange != null + && Math.abs(lastSuspRange.getEnd() - lastOtherRange.getEnd()) < 0.1d) { lastOtherRange = null; lastSuspRange = null; } @@ -465,7 +479,7 @@ else { // Some values are missing or no intersection with former values. // Stay as we are. 
- if (hasButSuspValues(fraction)) { + if (fraction.hasButSuspValues()) { double total = fraction.getCoarse() + fraction.getFineMiddle() + fraction.getSand() + @@ -474,7 +488,7 @@ lastOtherValue = total; lastSuspRange = null; } - else if (hasOnlySuspValues(fraction)) { + else if (fraction.hasOnlySuspValues()) { lastSuspRange = fraction.getSuspSedimentRange(); lastSuspValue = fraction.getSuspSediment(); lastOtherRange = null; @@ -485,9 +499,20 @@ } + /** + * Transform values in load. + * Background is to transform values measured in + * t/a to m^3/a using the specific measured densities. + * + * @param load The load of which values should be transformed. + * @param year The year at which to look at density (e.g. 2003). + * + * @return parameter load with transformed values. + */ private SedimentLoad calculateUnit(SedimentLoad load, int year) { SedimentDensity density = SedimentDensityFactory.getSedimentDensity(river, kmLow, kmUp, year); + for (double km: load.getKms()) { double dens = density.getDensity(km, year); SedimentLoadFraction fraction = load.getFraction(km); @@ -498,6 +523,7 @@ double bedSand = fraction.getSuspSandBed(); double sediment = fraction.getSuspSediment(); double total = fraction.getTotal(); + double loadTotal = fraction.getLoadTotal(); load.setCoarse(km, (coarse * dens), fraction.getCoarseRange()); load.setFineMiddle(km, (fineMiddle * dens), fraction.getFineMiddleRange()); load.setSand(km, (sand * dens), fraction.getSandRange()); @@ -505,6 +531,7 @@ load.setSuspSandBed(km, (bedSand * dens), fraction.getSuspSandBedRange()); load.setSuspSediment(km, (sediment * dens), fraction.getSuspSedimentRange()); load.setTotal(km, (total * dens), fraction.getTotalRange()); + load.setLoadTotal(km, (loadTotal * dens), fraction.getLoadTotalRange()); } return load; }
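The extracted calculateEpochKm() averages each fraction over all loads of the epoch: values are only summed where a load actually has data (> 0) and the sum is then divided by the number of contributors, which also removes the former risk of dividing by zero. The core of that averaging, reduced to a runnable sketch:

    public class EpochAverageSketch {

        /** Average only over loads that carry data; 0 means "no value". */
        public static double average(double[] valuesPerLoad) {
            double sum = 0d;
            int count = 0;
            for (double value: valuesPerLoad) {
                if (value > 0d) {
                    sum += value;
                    count++;
                }
            }
            return count != 0 ? sum / count : 0d;
        }

        public static void main(String[] args) {
            // Three loads of an epoch at one km; the second one has no data.
            System.out.println(average(new double[] { 12d, 0d, 18d })); // 15.0
        }
    }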
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -31,6 +31,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.TreeSet; import java.util.TreeMap; import org.apache.log4j.Logger; @@ -55,6 +56,7 @@ super(idx, name, description, type, hash, stateId); } + @Override public Object getData(Artifact artifact, CallContext context) { logger.debug("Get data for sediment load at index: " + index); @@ -104,7 +106,7 @@ TDoubleArrayList xPos = new TDoubleArrayList(); TDoubleArrayList yPos = new TDoubleArrayList(); double lastX = -1d; - for (double km: load.getKms()) { + for (double km: new TreeSet<Double>(load.getKms())) { SedimentLoadFraction fraction = load.getFraction(km); if (fraction.getTotal() != 0) { if (Math.abs(lastX-km) >= EPSILON) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -29,16 +29,19 @@ import org.hibernate.Session; import org.hibernate.type.StandardBasicTypes; + /** Pull Sediment Loads out of db. */ public class SedimentLoadFactory { /** Private logger to use here. */ private static Logger log = Logger.getLogger(SedimentLoadFactory.class); + // Cache name/keys public static final String LOADS_CACHE_NAME = "sedimentloads"; public static final String LOAD_DATA_CACHE_NAME = "sedimentload-data"; - /** Query to get km and ws for wst_id and column_pos. */ + /** Query to get description and start year of single type + * sediment_yields. */ public static final String SQL_SELECT_SINGLES = "SELECT DISTINCT " + " sy.description AS description, " + @@ -51,7 +54,23 @@ " AND ti.stop_time IS NULL " + " AND syv.station BETWEEN :startKm AND :endKm"; - /** Query to get name for wst_id and column_pos. */ + /** Query to get description, name and time range for official + * epoch-type sediment yields. */ + public static final String SQL_SELECT_OFFEPOCHS = + "SELECT DISTINCT " + + " ti.start_time AS startYear, " + + " ti.stop_time AS end " + + " FROM sediment_yield sy " + + " JOIN rivers r ON sy.river_id = r.id " + + " JOIN sediment_yield_values syv ON sy.id = syv.sediment_yield_id " + + " JOIN time_intervals ti ON sy.time_interval_id = ti.id " + + " WHERE r.name = :name " + + " AND ti.stop_time IS NOT NULL " + + " AND syv.station BETWEEN :startKm AND :endKm " + + " AND sy.kind = 1"; + + /** Query to get description, name and time range for epoch-type + * sediment yields. */ public static final String SQL_SELECT_EPOCHS = "SELECT DISTINCT " + " sy.description AS description, " + @@ -115,8 +134,11 @@ " JOIN rivers r ON sy.river_id = r.id " + " JOIN time_intervals ti ON sy.time_interval_id = ti.id " + " JOIN grain_fraction gf ON sy.grain_fraction_id = gf.id " + - "WHERE r.name = :river" + - " AND gf.name = 'unknown'"; + " JOIN units u ON sy.unit_id = u.id " + + "WHERE r.name = :river " + + " AND gf.name = 'unknown' " + + " AND sy.kind = :type " + + " AND u.name = :unit"; public static final String SQL_SELECT_EPOCHS_DATA = "SELECT" + @@ -143,7 +165,8 @@ } /** - * + * @param river name of river. + * @param type */ public static SedimentLoad[] getLoads( String river, @@ -180,6 +203,10 @@ return values; } + /** + * Get a sedimentLoad filled with data from db (or cache). + * @param type "epoch", "off_epoch" or "single" + */ public static SedimentLoad getLoadWithData( String river, String type, @@ -287,13 +314,37 @@ } return loads; } + else if (type.equals("off_epoch")) { + sqlQuery = session.createSQLQuery(SQL_SELECT_OFFEPOCHS) + .addScalar("startYear", StandardBasicTypes.DATE) + .addScalar("end", StandardBasicTypes.DATE); + sqlQuery.setString("name", river); + sqlQuery.setDouble("startKm", startKm); + sqlQuery.setDouble("endKm", endKm); + List<Object []> results = sqlQuery.list(); + + SedimentLoad[] loads = new SedimentLoad[results.size()]; + for (int i = 0; i < results.size(); i++) { + Object[] row = results.get(i); + loads[i] = new SedimentLoad( + ((Date) row[0]).toString() + (Date) row[1], + (Date) row[0], + (Date) row[1], + true, + ""); + } + return loads; + } + else { + log.warn("getSedimentLoadsUncached does not understand type " + type); + } return new SedimentLoad[0]; } /** * Get sediment loads from db. 
* @param river the river - * @param type the sediment load type (year or epoch) + * @param type the sediment load type (year, epoch or off_epoch) * @return according sediment loads. */ public static SedimentLoad getSedimentLoadWithDataUncached( @@ -308,22 +359,35 @@ Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = null; - List<MeasurementStation> allStations = RiverFactory.getRiver(river).getMeasurementStations(); - TreeMap<Double,MeasurementStation> floatStations = new TreeMap<Double, MeasurementStation>(); - TreeMap<Double,MeasurementStation> suspStations = new TreeMap<Double, MeasurementStation>(); + // Measurement stations: all, for float-stuff, for suspended stuff. + // Because they need fast sorted access, use TreeMaps. + // They map the starting validity range km to the station itself. + List<MeasurementStation> allStations = + RiverFactory.getRiver(river).getMeasurementStations(); + TreeMap<Double,MeasurementStation> floatStations = + new TreeMap<Double, MeasurementStation>(); + TreeMap<Double,MeasurementStation> suspStations = + new TreeMap<Double, MeasurementStation>(); + + // From all stations, sort into the two kinds, skip undefined ones. for (MeasurementStation measurementStation: allStations) { if (measurementStation.getMeasurementType() == null || measurementStation.getRange() == null) { continue; } if (measurementStation.getMeasurementType().equals("Schwebstoff")) { - suspStations.put(measurementStation.getRange().getA().doubleValue(), measurementStation); + suspStations.put( + measurementStation.getRange().getA().doubleValue(), + measurementStation); } else if (measurementStation.getMeasurementType().equals("Geschiebe")) { - floatStations.put(measurementStation.getRange().getA().doubleValue(), measurementStation); + floatStations.put( + measurementStation.getRange().getA().doubleValue(), + measurementStation); } } + // Construct date constraint. Calendar start = Calendar.getInstance(); start.set(syear - 1, 11, 31); Calendar end = Calendar.getInstance(); @@ -344,27 +408,35 @@ sqlQuery.setString("grain", "total"); List<Object []> results = sqlQuery.list(); SedimentLoad load = new SedimentLoad(); - Object[] row = results.get(0); - load = new SedimentLoad( - (String) row[0], - (Date) row[1], - null, - false, - (String) row[4]); - getValues("coarse", sqlQuery, load, floatStations); - getValues("fine_middle", sqlQuery, load, floatStations); - getValues("sand", sqlQuery, load, floatStations); - getValues("suspended_sediment", sqlQuery, load, suspStations); - getValues("susp_sand_bed", sqlQuery, load, floatStations); - getValues("susp_sand", sqlQuery, load, floatStations); + if (results.isEmpty()) { + log.warn("Empty result for year calculation."); + } + else { + Object[] row = results.get(0); + load = new SedimentLoad( + (String) row[0], //description + (Date) row[1], //start + null, //end + false, //isEpoch + (String) row[4]); //unit + } + load = getValues("coarse", sqlQuery, load, floatStations); + load = getValues("fine_middle", sqlQuery, load, floatStations); + load = getValues("sand", sqlQuery, load, floatStations); + load = getValues("suspended_sediment", sqlQuery, load, suspStations); + load = getValues("susp_sand_bed", sqlQuery, load, floatStations); + load = getValues("susp_sand", sqlQuery, load, floatStations); return load; } else if (type.equals("off_epoch")) { + // Set calendars to fetch the epochs by their start and end + // dates. 
Calendar toStart = Calendar.getInstance(); toStart.set(eyear - 1, 11, 31); Calendar toEnd = Calendar.getInstance(); - toEnd.set(eyear, 11, 30); + toEnd.set(eyear+1, 00, 01); + // Set query parameters. sqlQuery = session.createSQLQuery(SQL_SELECT_EPOCHS_DATA) .addScalar("description", StandardBasicTypes.STRING) .addScalar("startYear", StandardBasicTypes.DATE) @@ -377,47 +449,79 @@ sqlQuery.setDouble("endKm", endKm); sqlQuery.setDate("sbegin", start.getTime()); sqlQuery.setDate("send", end.getTime()); - sqlQuery.setDate("ebegin",toStart.getTime()); + sqlQuery.setDate("ebegin", toStart.getTime()); sqlQuery.setDate("eend", toEnd.getTime()); sqlQuery.setString("grain", "total"); List<Object[]> results = null; results = sqlQuery.list(); - SedimentLoad load = new SedimentLoad(); - Object[] row = results.get(0); - load = new SedimentLoad( - (String) row[0], - (Date) row[1], - (Date) row[4], - true, - (String)row[5]); - TDoubleArrayList kms = new TDoubleArrayList(); - for (int i = 0; i < results.size(); i++) { - row = results.get(i); - kms.add((Double)row[3]); - load.setLoadTotal((Double)row[3], (Double)row[2]); + SedimentLoad load = null; + if (results.isEmpty()) { + log.warn("No total results for off-epoch"); } - getValues("coarse", sqlQuery, load, floatStations); - getValues("fine_middle", sqlQuery, load, floatStations); - getValues("sand", sqlQuery, load, floatStations); - getValues("suspended_sediment", sqlQuery, load, suspStations); - getValues("susp_sand_bed", sqlQuery, load, floatStations); - getValues("susp_sand", sqlQuery, load, floatStations); + else { + Object[] row = results.get(0); + load = new SedimentLoad( + (String) row[0], + (Date) row[1], + (Date) row[4], + true, + (String)row[5]); + TDoubleArrayList kms = new TDoubleArrayList(); + for (int i = 0; i < results.size(); i++) { + row = results.get(i); + kms.add((Double)row[3]); + load.setLoadTotal((Double)row[3], (Double)row[2]); + } + } + load = getValues("coarse", sqlQuery, load, floatStations); + load = getValues("fine_middle", sqlQuery, load, floatStations); + load = getValues("sand", sqlQuery, load, floatStations); + load = getValues("suspended_sediment", sqlQuery, load, suspStations); + load = getValues("susp_sand_bed", sqlQuery, load, floatStations); + load = getValues("susp_sand", sqlQuery, load, floatStations); return load; } return new SedimentLoad(); } + protected static Range findMeasurementStationRange( + TreeMap<Double, MeasurementStation> stations, + double km + ) { + MeasurementStation station = stations.get(km); + if (station == null) { + return null; + } + + double endKm; + + if (stations.ceilingEntry(km + 0.1d) != null) { + MeasurementStation nextStation = stations.ceilingEntry(km + 0.1d).getValue(); + endKm = nextStation.getRange().getA().doubleValue(); + } + else { + // TODO end-of-river instead of B. + endKm = station.getRange().getB().doubleValue(); + } + + return new Range( + station.getRange().getA().doubleValue(), + endKm); + } /** * Run query with grain parameter set to fraction, feed result into - * load. + * load. Create load if null. + * * @param fraction value to set 'grain' parameter in query to. * @param query query in which to set 'grain' parameter and run. - * @param load[out] SedimentLoad which to populate with values. + * @param load[out] SedimentLoad which to populate with values. if null + * and values are found, return a new load. + * @return param load or new load if null. 
*/ - protected static void getValues ( + protected static SedimentLoad getValues ( String fraction, SQLQuery query, SedimentLoad load, @@ -425,27 +529,25 @@ ) { query.setString("grain", fraction); List<Object[]> results = query.list(); + + // We have the first results for this query, create new load. + if (!results.isEmpty() && load == null) { + Object[] row = results.get(0); + load = new SedimentLoad( + (String) row[0], + (Date) row[1], + (Date) row[4], + true, + (String)row[5]); + } + for (int i = 0; i < results.size(); i++) { Object[] row = results.get(i); double km = (Double)row[3]; - MeasurementStation station = stations.get(km); - MeasurementStation nextStation = null; - if (stations.ceilingEntry(km + 0.1d) != null) { - nextStation = stations.ceilingEntry(km + 0.1d).getValue(); - } - Range range = null; - if (station == null) { + Range range = findMeasurementStationRange(stations, km); + if (range == null) { log.warn("No measurement station for " + fraction + " km " + km); - } - else { - if (nextStation != null) - range = new Range(station.getRange().getA().doubleValue(), - nextStation.getRange().getA().doubleValue()); - else { - // TODO end-of-river instead of B. - range = new Range(station.getRange().getA().doubleValue(), - station.getRange().getB().doubleValue()); - } + continue; } double v = -1; if (row[2] != null) { @@ -470,6 +572,8 @@ load.setSuspSandBed(km, v, range); } } + + return load; } public static SedimentLoad getLoadUnknown( @@ -549,13 +653,29 @@ return load; } - public static SedimentLoad[] getSedimentLoadUnknown(String river) { + /** + * Return sediment loads with 'unknown' fraction type. + * @param river Name of the river + * @param unit Restrict result set to those of given unit. + * @param type Type like year, epoch, off_epoch + */ + public static SedimentLoad[] getSedimentLoadUnknown( + String river, + String unit, + String type) { Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_UNKNOWN) .addScalar("description", StandardBasicTypes.STRING) .addScalar("startYear", StandardBasicTypes.DATE) .addScalar("end", StandardBasicTypes.DATE); sqlQuery.setString("river", river); + sqlQuery.setString("unit", unit); + if (type.equals("off_epoch")) { + sqlQuery.setInteger("type", 1); + } + else { + sqlQuery.setInteger("type", 0); + } List<Object[]> results = sqlQuery.list(); SedimentLoad[] loads = new SedimentLoad[results.size()]; int counter = 0;
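findMeasurementStationRange() gives a station's values a validity range from its own start km up to the start km of the next station of the same kind (found via TreeMap.ceilingEntry), falling back to the station's own end km when there is no next station. A simplified stand-alone version of that lookup, with stations reduced to a start-km to end-km map and made-up numbers:

    import java.util.Map;
    import java.util.TreeMap;

    public class StationRangeSketch {

        /** Returns { startKm, endKm } or null if no station starts at km. */
        public static double[] rangeFor(
            TreeMap<Double, Double> stationStartToEnd,
            double km
        ) {
            Double ownEnd = stationStartToEnd.get(km);
            if (ownEnd == null) {
                return null;
            }
            Map.Entry<Double, Double> next =
                stationStartToEnd.ceilingEntry(km + 0.1d);
            double endKm = next != null ? next.getKey() : ownEnd;
            return new double[] { km, endKm };
        }

        public static void main(String[] args) {
            TreeMap<Double, Double> stations = new TreeMap<Double, Double>();
            stations.put(10.0, 12.0);  // station starting at km 10
            stations.put(25.0, 30.0);  // next station starts at km 25
            double[] range = rangeFor(stations, 10.0);
            System.out.println(range[0] + " - " + range[1]); // 10.0 - 25.0
        }
    }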
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFraction.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFraction.java Thu Sep 12 10:13:09 2013 +0200 @@ -25,15 +25,15 @@ double total; double unknown; /** Values are valid within this km range. */ - Range sandRange = null; - Range fineMiddleRange = null; - Range coarseRange = null; - Range suspSandRange = null; - Range suspSandBedRange = null; - Range suspSedimentRange = null; - Range loadTotalRange = null; - Range totalRange = null; - Range unknownRange = null; + Range sandRange; + Range fineMiddleRange; + Range coarseRange; + Range suspSandRange; + Range suspSandBedRange; + Range suspSedimentRange; + Range loadTotalRange; + Range totalRange; + Range unknownRange; public SedimentLoadFraction() { sand = 0d; @@ -166,6 +166,10 @@ this.loadTotal = total; } + public Range getLoadTotalRange() { + return this.loadTotalRange; + } + public void setLoadTotalRange(Range range) { this.loadTotalRange = range; } @@ -185,5 +189,35 @@ public void setUnknownRange(Range unknownRange) { this.unknownRange = unknownRange; } + + /** Returns true if all fraction values except SuspSediment are unset. */ + public boolean hasOnlySuspValues() { + return + getSuspSediment() != 0d && + getCoarse() == 0d && + getFineMiddle() == 0d && + getSand() == 0d && + getSuspSand() == 0d; + } + + /** Returns true if all fraction values except SuspSediment are set. */ + public boolean hasButSuspValues() { + return + getSuspSediment() == 0d && + getCoarse() != 0d && + getFineMiddle() != 0d && + getSand() != 0d && + getSuspSand() != 0d; + } + + /** Returns true if all fraction needed for total calculation are set. */ + public boolean isComplete() { + return + getCoarse() != 0d && + getFineMiddle() != 0d && + getSand() != 0d && + getSuspSand() != 0d && + getSuspSediment() != 0d; + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadUnknownFacet.java Thu Sep 12 10:13:09 2013 +0200 @@ -14,7 +14,7 @@ extends DataFacet { /** Very own logger. */ - private static Logger logger = Logger.getLogger(SedimentLoadFacet.class); + private static Logger logger = Logger.getLogger(SedimentLoadUnknownFacet.class); public SedimentLoadUnknownFacet() { } @@ -24,6 +24,7 @@ super(idx, name, description, type, hash, stateId); } + /** Get data from SedimentLoad with unknown type, from factory. */ @Override public Object getData(Artifact artifact, CallContext context) { logger.debug("Get data for sediment load at index: " + index); @@ -32,25 +33,15 @@ SedimentLoadAccess access = new SedimentLoadAccess(flys); String river = access.getRiver(); + String unit = access.getUnit(); SedimentLoad[] unknown = - SedimentLoadFactory.getSedimentLoadUnknown(river); + SedimentLoadFactory.getSedimentLoadUnknown( + river, + unit.replace("_per_","/"), + access.getYearEpoch()); SedimentLoad load = SedimentLoadFactory.getLoadUnknown( river, unknown[index].getDescription()); - if (access.getUnit().equals("t/a") && load.getUnit().equals("m3/a")) { - for (Double km: load.getKms()) { - SedimentLoadFraction fraction = load.getFraction(km); - fraction.setUnknown(fraction.getUnknown() / 1.8); - load.addKm(km, fraction); - } - } - else if (access.getUnit().equals("m3/a") && load.getUnit().equals("t/a")) { - for (Double km: load.getKms()) { - SedimentLoadFraction fraction = load.getFraction(km); - fraction.setUnknown(fraction.getUnknown() * 1.8); - load.addKm(km, fraction); - } - } return load; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Fitting.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Fitting.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,6 +8,7 @@ package org.dive4elements.river.artifacts.model.sq; +import org.dive4elements.artifacts.common.utils.StringUtils; import org.dive4elements.river.artifacts.math.fitting.Function; import java.util.ArrayList; @@ -18,12 +19,17 @@ import org.apache.commons.math.optimization.fitting.CurveFitter; import org.apache.commons.math.optimization.general.LevenbergMarquardtOptimizer; +import org.apache.commons.math.stat.regression.SimpleRegression; import org.apache.log4j.Logger; public class Fitting implements Outlier.Callback { + // XXX: Hack to force linear fitting! + private static final boolean USE_NON_LINEAR_FITTING = + Boolean.getBoolean("minfo.sq.fitting.nonlinear"); + private static Logger log = Logger.getLogger(Fitting.class); public interface Callback { @@ -47,13 +53,15 @@ protected Callback callback; + protected SQ.View sqView; + public Fitting() { } - public Fitting(Function function, double stdDevFactor) { - this(); + public Fitting(Function function, double stdDevFactor, SQ.View sqView) { this.function = function; this.stdDevFactor = stdDevFactor; + this.sqView = sqView; } public Function getFunction() { @@ -75,12 +83,70 @@ @Override public void initialize(List<SQ> sqs) throws MathException { - LevenbergMarquardtOptimizer lmo = + if (USE_NON_LINEAR_FITTING + || function.getParameterNames().length != 2) { + nonLinearFitting(sqs); + } + else { + linearFitting(sqs); + } + } + + protected void linearFitting(List<SQ> sqs) { + coeffs = linearRegression(sqs); + instance = function.instantiate(coeffs); + } + + protected double [] linearRegression(List<SQ> sqs) { + + String [] pns = function.getParameterNames(); + double [] result = new double[pns.length]; + + if (sqs.size() < 2) { + log.debug("not enough points"); + return result; + } + + SimpleRegression reg = new SimpleRegression(); + + for (SQ sq: sqs) { + double s = sqView.getS(sq); + double q = sqView.getQ(sq); + reg.addData(q, s); + } + + double m = reg.getIntercept(); + double b = reg.getSlope(); + + if (log.isDebugEnabled()) { + log.debug("m: " + m); + log.debug("b: " + b); + } + + int mIdx = StringUtils.indexOf("m", pns); + int bIdx = StringUtils.indexOf("b", pns); + + if (mIdx == -1 || bIdx == -1) { + log.error("index not found: " + mIdx + " " + bIdx); + return result; + } + + result[bIdx] = m; + result[mIdx] = b; + + return result; + } + + + protected void nonLinearFitting(List<SQ> sqs) throws MathException { + + LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer(); - CurveFitter cf = new CurveFitter(lmo); + CurveFitter cf = new CurveFitter(optimizer); + for (SQ sq: sqs) { - cf.addObservedPoint(sq.getQ(), sq.getS()); + cf.addObservedPoint(sqView.getQ(sq), sqView.getS(sq)); } coeffs = cf.fit( @@ -88,13 +154,13 @@ instance = function.instantiate(coeffs); - chiSqr = lmo.getChiSquare(); + chiSqr = optimizer.getChiSquare(); } @Override public double eval(SQ sq) { - double s = instance.value(sq.q); - return sq.s - s; + double s = instance.value(sqView.getQ(sq)); + return sqView.getS(sq) - s; } @Override @@ -119,28 +185,15 @@ chiSqr); } - protected static final List<SQ> onlyValid(List<SQ> sqs) { - - List<SQ> good = new ArrayList<SQ>(sqs.size()); - - for (SQ sq: sqs) { - if (sq.isValid()) { - good.add(sq); - } - } - - return good; - } - - public boolean fit(List<SQ> sqs, String 
method, Callback callback) { - - sqs = onlyValid(sqs); + public boolean fit(List<SQ> sqs, String method, Callback callback) { if (sqs.size() < 2) { log.warn("Too less points for fitting."); return false; } + sqs = new ArrayList<SQ>(sqs); + this.callback = callback; try {
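The rewritten Fitting above switches from the Levenberg-Marquardt optimizer to a plain linear regression whenever the target function has exactly two parameters, unless the minfo.sq.fitting.nonlinear system property forces the non-linear path. The idea, which the diff itself does not spell out, is that the S/Q power law s = a * q^b turns into a straight line in log-log space: log(s) = log(a) + b * log(q). A minimal, self-contained sketch of that trick, assuming the commons-math SimpleRegression imported above; the class name and sample numbers are illustrative only, not project code:

    import org.apache.commons.math.stat.regression.SimpleRegression;

    public class LogLinearFitSketch {

        /**
         * Fits s = a * q^b by regressing log(s) on log(q). The regression
         * line log(s) = log(a) + b * log(q) has slope b and intercept log(a).
         * Returns {a, b}; only strictly positive pairs can be used.
         */
        public static double[] fitPowerLaw(double[] s, double[] q) {
            SimpleRegression reg = new SimpleRegression();
            for (int i = 0; i < s.length; i++) {
                if (s[i] > 0d && q[i] > 0d) {           // log() is undefined otherwise
                    reg.addData(Math.log(q[i]), Math.log(s[i]));
                }
            }
            return new double[] {
                Math.exp(reg.getIntercept()),           // a
                reg.getSlope()                          // b
            };
        }

        public static void main(String[] args) {
            double[] q = { 10, 20, 40, 80 };
            double[] s = { 2.0, 3.0, 4.6, 7.0 };        // roughly s = 0.5 * q^0.6
            double[] ab = fitPowerLaw(s, q);
            System.out.println("a = " + ab[0] + ", b = " + ab[1]);
        }
    }

With LogSQ.LOG_SQ_VIEW (introduced in the next file) the values handed to Fitting.linearRegression are already log-transformed, so the SimpleRegression call in the changeset fits exactly this line without transforming again.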
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/LogSQ.java Thu Sep 12 10:13:09 2013 +0200 @@ -0,0 +1,68 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.artifacts.model.sq; + +public class LogSQ extends SQ { + + public static final View LOG_SQ_VIEW = new View() { + @Override + public double getS(SQ sq) { + return ((LogSQ)sq).getLogS(); + } + + @Override + public double getQ(SQ sq) { + return ((LogSQ)sq).getLogQ(); + } + }; + + public static final Factory LOG_SQ_FACTORY = new Factory() { + @Override + public SQ createSQ(double s, double q) { + return new LogSQ(s, q); + } + }; + + protected double logS; + protected double logQ; + + protected boolean logTrans; + + public LogSQ() { + } + + public LogSQ(double s, double q) { + super(s, q); + } + + /** important: We cannot process negative s/q. */ + @Override + public boolean isValid() { + return super.isValid() && s > 0d && q > 0d; + } + + protected void ensureLogTrans() { + if (!logTrans) { + logTrans = true; + logS = Math.log(s); + logQ = Math.log(q); + } + } + + public double getLogS() { + ensureLogTrans(); + return logS; + } + + public double getLogQ() { + ensureLogTrans(); + return logQ; + } +} +
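LogSQ computes the log-transformed coordinates lazily in ensureLogTrans() and caches them, presumably so that repeated reads (for example during the outlier iterations of the fitting) do not call Math.log again for the same point. A short usage sketch of the classes just added; the sample numbers are made up:

    SQ sq = LogSQ.LOG_SQ_FACTORY.createSQ(2.5, 40.0);
    if (sq.isValid()) {                              // LogSQ additionally requires s > 0 and q > 0
        double x = LogSQ.LOG_SQ_VIEW.getQ(sq);       // Math.log(40.0), computed once and cached
        double y = LogSQ.LOG_SQ_VIEW.getS(sq);       // Math.log(2.5)
        double again = LogSQ.LOG_SQ_VIEW.getQ(sq);   // served from the cached field
    }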
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/MeasurementFactory.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/MeasurementFactory.java Thu Sep 12 10:13:09 2013 +0200 @@ -58,32 +58,32 @@ "gp.LFDNR AS LFDNR," + "g.UFERABST AS UFERABST," + "g.UFERABLINKS AS UFERABLINKS," + - "m.TSCHWEB AS TSCHWEB," + - "m.TSAND AS TSAND," + - "gp.GTRIEB_F AS GTRIEB," + - "m.TGESCHIEBE AS TGESCHIEBE," + - "si.SIEB01 AS SIEB01, si.SIEB02 AS SIEB02," + - "si.SIEB03 AS SIEB03, si.SIEB04 AS SIEB04," + - "si.SIEB05 AS SIEB05, si.SIEB06 AS SIEB06," + - "si.SIEB07 AS SIEB07, si.SIEB08 AS SIEB08," + - "si.SIEB09 AS SIEB09, si.SIEB10 AS SIEB10," + - "si.SIEB11 AS SIEB11, si.SIEB12 AS SIEB12," + - "si.SIEB13 AS SIEB13, si.SIEB14 AS SIEB14," + - "si.SIEB15 AS SIEB15, si.SIEB16 AS SIEB16," + - "si.SIEB17 AS SIEB17, si.SIEB18 AS SIEB18," + - "si.SIEB19 AS SIEB19, si.SIEB20 AS SIEB20," + - "si.SIEB21 AS SIEB21," + - "gs.RSIEB01 AS RSIEB01, gs.RSIEB02 AS RSIEB02," + - "gs.RSIEB03 AS RSIEB03, gs.RSIEB04 AS RSIEB04," + - "gs.RSIEB05 AS RSIEB05, gs.RSIEB06 AS RSIEB06," + - "gs.RSIEB07 AS RSIEB07, gs.RSIEB08 AS RSIEB08," + - "gs.RSIEB09 AS RSIEB09, gs.RSIEB10 AS RSIEB10," + - "gs.RSIEB11 AS RSIEB11, gs.RSIEB12 AS RSIEB12," + - "gs.RSIEB13 AS RSIEB13, gs.RSIEB14 AS RSIEB14," + - "gs.RSIEB15 AS RSIEB15, gs.RSIEB16 AS RSIEB16," + - "gs.RSIEB17 AS RSIEB17, gs.RSIEB18 AS RSIEB18," + - "gs.RSIEB19 AS RSIEB19, gs.RSIEB20 AS RSIEB20," + - "gs.RSIEB21 AS RSIEB21, gs.REST AS REST " + + "COALESCE(m.TSCHWEB, 0) AS TSCHWEB," + + "COALESCE(m.TSAND, 0) AS TSAND," + + "COALESCE(gp.GTRIEB_F, 0) AS GTRIEB," + + "COALESCE(m.TGESCHIEBE, 0) AS TGESCHIEBE," + + "COALESCE(si.SIEB01, 0) AS SIEB01, COALESCE(si.SIEB02, 0) AS SIEB02," + + "COALESCE(si.SIEB03, 0) AS SIEB03, COALESCE(si.SIEB04, 0) AS SIEB04," + + "COALESCE(si.SIEB05, 0) AS SIEB05, COALESCE(si.SIEB06, 0) AS SIEB06," + + "COALESCE(si.SIEB07, 0) AS SIEB07, COALESCE(si.SIEB08, 0) AS SIEB08," + + "COALESCE(si.SIEB09, 0) AS SIEB09, COALESCE(si.SIEB10, 0) AS SIEB10," + + "COALESCE(si.SIEB11, 0) AS SIEB11, COALESCE(si.SIEB12, 0) AS SIEB12," + + "COALESCE(si.SIEB13, 0) AS SIEB13, COALESCE(si.SIEB14, 0) AS SIEB14," + + "COALESCE(si.SIEB15, 0) AS SIEB15, COALESCE(si.SIEB16, 0) AS SIEB16," + + "COALESCE(si.SIEB17, 0) AS SIEB17, COALESCE(si.SIEB18, 0) AS SIEB18," + + "COALESCE(si.SIEB19, 0) AS SIEB19, COALESCE(si.SIEB20, 0) AS SIEB20," + + "COALESCE(si.SIEB21, 0) AS SIEB21," + + "COALESCE(gs.RSIEB01, 0) AS RSIEB01, COALESCE(gs.RSIEB02, 0) AS RSIEB02," + + "COALESCE(gs.RSIEB03, 0) AS RSIEB03, COALESCE(gs.RSIEB04, 0) AS RSIEB04," + + "COALESCE(gs.RSIEB05, 0) AS RSIEB05, COALESCE(gs.RSIEB06, 0) AS RSIEB06," + + "COALESCE(gs.RSIEB07, 0) AS RSIEB07, COALESCE(gs.RSIEB08, 0) AS RSIEB08," + + "COALESCE(gs.RSIEB09, 0) AS RSIEB09, COALESCE(gs.RSIEB10, 0) AS RSIEB10," + + "COALESCE(gs.RSIEB11, 0) AS RSIEB11, COALESCE(gs.RSIEB12, 0) AS RSIEB12," + + "COALESCE(gs.RSIEB13, 0) AS RSIEB13, COALESCE(gs.RSIEB14, 0) AS RSIEB14," + + "COALESCE(gs.RSIEB15, 0) AS RSIEB15, COALESCE(gs.RSIEB16, 0) AS RSIEB16," + + "COALESCE(gs.RSIEB17, 0) AS RSIEB17, COALESCE(gs.RSIEB18, 0) AS RSIEB18," + + "COALESCE(gs.RSIEB19, 0) AS RSIEB19, COALESCE(gs.RSIEB20, 0) AS RSIEB20," + + "COALESCE(gs.RSIEB21, 0) AS RSIEB21, COALESCE(gs.REST , 0) AS REST " + "FROM MESSUNG m " + "JOIN STATION s ON m.STATIONID = s.STATIONID " + "JOIN GEWAESSER r ON s.GEWAESSERID = r.GEWAESSERID " + @@ -190,9 +190,10 @@ } public static Measurements getMeasurements( - String 
river, - double location, - DateRange dateRange + String river, + double location, + DateRange dateRange, + SQ.Factory sqFactory ) { Session session = SedDBSessionHolder.HOLDER.get(); try { @@ -202,7 +203,7 @@ List<Measurement> accumulated = loadFractions( session, river, location, dateRange); - return new Measurements(totals, accumulated); + return new Measurements(totals, accumulated, sqFactory); } finally { session.close();
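Wrapping the transport and sieve columns in COALESCE(..., 0) means a missing (NULL) value in SedDB now reaches the measurement objects as 0 instead of NULL, since COALESCE returns its first non-NULL argument. In Java terms the effect is roughly this null check, where the variable names are only illustrative:

    Double tschweb = rowValueFromDatabase;            // may be null if nothing was measured
    double safe = (tschweb != null) ? tschweb : 0d;   // what COALESCE(m.TSCHWEB, 0) delivers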
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Measurements.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Measurements.java Thu Sep 12 10:13:09 2013 +0200 @@ -66,13 +66,17 @@ protected List<Measurement> measuments; protected List<Measurement> accumulated; + protected SQ.Factory sqFactory; + public Measurements() { } public Measurements( List<Measurement> measuments, - List<Measurement> accumulated + List<Measurement> accumulated, + SQ.Factory sqFactory ) { + this.sqFactory = sqFactory; if (log.isDebugEnabled()) { log.debug("number of measuments: " + measuments.size()); log.debug("number of accumulated: " + accumulated.size()); @@ -81,14 +85,14 @@ this.accumulated = accumulated; } - public static List<SQ> extractSQ( + public List<SQ> extractSQ( List<Measurement> measuments, SExtractor extractor ) { List<SQ> result = new ArrayList<SQ>(measuments.size()); int invalid = 0; for (Measurement measument: measuments) { - SQ sq = new SQ(extractor.getS(measument), measument.Q()); + SQ sq = sqFactory.createSQ(extractor.getS(measument), measument.Q()); if (sq.isValid()) { result.add(sq); }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQ.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQ.java Thu Sep 12 10:13:09 2013 +0200 @@ -11,8 +11,37 @@ import java.io.Serializable; +/** Represents S/Q pairs. They are immutable! */ public class SQ implements Serializable { + public interface Factory { + SQ createSQ(double s, double q); + } + + public static final Factory SQ_FACTORY = new Factory() { + @Override + public SQ createSQ(double s, double q) { + return new SQ(s, q); + } + }; + + public interface View { + double getS(SQ sq); + double getQ(SQ sq); + } + + public static final View SQ_VIEW = new View() { + @Override + public double getS(SQ sq) { + return sq.getS(); + } + + @Override + public double getQ(SQ sq) { + return sq.getQ(); + } + }; + protected double s; protected double q; @@ -29,19 +58,10 @@ return s; } - public void setS(double s) { - this.s = s; - } - - public double getQ() { return q; } - public void setQ(double q) { - this.q = q; - } - public boolean isValid() { return !Double.isNaN(s) && !Double.isNaN(q); }
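The new Factory and View interfaces keep SQ objects immutable (the setters are gone) and let callers decide how to read the coordinates. A minimal sketch of a consumer written against SQ.View, so the same code works on raw and on log-transformed values; the method is illustrative only, not part of the changeset:

    /** Mean discharge as seen through the given view. */
    static double meanQ(java.util.List<SQ> sqs, SQ.View view) {
        if (sqs.isEmpty()) {
            return Double.NaN;
        }
        double sum = 0d;
        for (SQ sq: sqs) {
            sum += view.getQ(sq);    // raw q with SQ.SQ_VIEW, log(q) with LogSQ.LOG_SQ_VIEW
        }
        return sum / sqs.size();
    }

    // meanQ(sqs, SQ.SQ_VIEW)         -> mean of the discharges
    // meanQ(sqs, LogSQ.LOG_SQ_VIEW)  -> mean of the log discharges, same call site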
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQRelationCalculation.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQRelationCalculation.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,6 +8,7 @@ package org.dive4elements.river.artifacts.model.sq; +import org.dive4elements.artifacts.common.utils.StringUtils; import org.dive4elements.river.artifacts.access.SQRelationAccess; import org.dive4elements.river.artifacts.math.fitting.Function; @@ -21,6 +22,7 @@ import org.dive4elements.river.backend.SedDBSessionHolder; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.apache.log4j.Logger; @@ -30,7 +32,20 @@ private static final Logger log = Logger.getLogger(SQRelationCalculation.class); - public static final String SQ_FUNCTION_NAME = "sq-pow"; + public static final boolean NON_LINEAR_FITTING = + Boolean.getBoolean("minfo.sq.calcution.non.linear.fitting"); + + public static final String SQ_POW_FUNCTION_NAME = "sq-pow"; + public static final String SQ_LIN_FUNCTION_NAME = "linear"; + + public static final String [] EXTRA_PARAMETERS = { + "chi_sqr", + "std_dev", + "max_q", + "c_ferguson", + "c_duan", + "r2" + }; protected String river; protected double location; @@ -49,8 +64,6 @@ Double outliers = access.getOutliers(); String method = access.getOutlierMethod(); - //river = "Rhein"; - if (river == null) { // TODO: i18n addProblem("sq.missing.river"); @@ -102,31 +115,95 @@ } } + public interface TransformCoeffs { + double [] transform(double [] coeffs); + } + + public static final TransformCoeffs IDENTITY_TRANS = + new TransformCoeffs() { + @Override + public double [] transform(double [] coeffs) { + return coeffs; + } + }; + + public static final TransformCoeffs LINEAR_TRANS = + new TransformCoeffs() { + @Override + public double [] transform(double [] coeffs) { + log.debug("before transform: " + Arrays.toString(coeffs)); + if (coeffs.length == 2) { + coeffs = new double [] { Math.exp(coeffs[1]), coeffs[0] }; + } + log.debug("after transform: " + Arrays.toString(coeffs)); + return coeffs; + } + }; + protected CalculationResult internalCalculate() { - Function function = FunctionFactory + Function powFunction = FunctionFactory .getInstance() - .getFunction(SQ_FUNCTION_NAME); + .getFunction(SQ_POW_FUNCTION_NAME); - if (function == null) { - log.error("No '" + SQ_FUNCTION_NAME + "' function found."); + if (powFunction == null) { + log.error("No '" + SQ_POW_FUNCTION_NAME + "' function found."); // TODO: i18n addProblem("sq.missing.sq.function"); + return new CalculationResult(new SQResult[0], this); + } + + Function function; + SQ.View sqView; + SQ.Factory sqFactory; + ParameterCreator pc; + + if (NON_LINEAR_FITTING) { + log.debug("Use non linear fitting."); + sqView = SQ.SQ_VIEW; + sqFactory = SQ.SQ_FACTORY; + function = powFunction; + pc = new ParameterCreator( + powFunction.getParameterNames(), + powFunction.getParameterNames(), + powFunction, + sqView); + } + else { + log.debug("Use linear fitting."); + sqView = LogSQ.LOG_SQ_VIEW; + sqFactory = LogSQ.LOG_SQ_FACTORY; + function = FunctionFactory + .getInstance() + .getFunction(SQ_LIN_FUNCTION_NAME); + if (function == null) { + log.error("No '" + SQ_LIN_FUNCTION_NAME + "' function found."); + // TODO: i18n + addProblem("sq.missing.sq.function"); + return new CalculationResult(new SQResult[0], this); + } + pc = new LinearParameterCreator( + powFunction.getParameterNames(), + function.getParameterNames(), + function, + sqView); } 
Measurements measurements = - MeasurementFactory.getMeasurements(river, location, period); + MeasurementFactory.getMeasurements( + river, location, period, sqFactory); SQFractionResult [] fractionResults = new SQFractionResult[SQResult.NUMBER_FRACTIONS]; + for (int i = 0; i < fractionResults.length; ++i) { List<SQ> sqs = measurements.getSQs(i); SQFractionResult fractionResult; List<SQFractionResult.Iteration> iterations = - doFitting(function, sqs); + doFitting(function, sqs, sqView, pc); if (iterations == null) { // TODO: i18n @@ -148,13 +225,15 @@ } protected List<SQFractionResult.Iteration> doFitting( - final Function function, - List<SQ> sqs + final Function function, + List<SQ> sqs, + SQ.View sqView, + final ParameterCreator pc ) { final List<SQFractionResult.Iteration> iterations = new ArrayList<SQFractionResult.Iteration>(); - boolean success = new Fitting(function, outliers).fit( + boolean success = new Fitting(function, outliers, sqView).fit( sqs, method, new Fitting.Callback() { @@ -166,11 +245,11 @@ double standardDeviation, double chiSqr ) { - Parameters parameters = createParameters( - function.getParameterNames(), + Parameters parameters = pc.createParameters( coeffs, standardDeviation, - chiSqr); + chiSqr, + measurements); iterations.add(new SQFractionResult.Iteration( parameters, measurements, @@ -181,22 +260,178 @@ return success ? iterations : null; } - public static final Parameters createParameters( - String [] names, - double [] values, - double standardDeviation, - double chiSqr - ) { - String [] columns = new String[names.length + 2]; - columns[0] = "chi_sqr"; - columns[1] = "std_dev"; - System.arraycopy(names, 0, columns, 2, names.length); - Parameters parameters = new Parameters(columns); - int row = parameters.newRow(); - parameters.set(row, names, values); - parameters.set(row, "chi_sqr", chiSqr); - parameters.set(row, "std_dev", standardDeviation); - return parameters; + public static class ParameterCreator { + + protected String [] origNames; + protected String [] proxyNames; + + protected Function function; + protected SQ.View view; + + public ParameterCreator( + String [] origNames, + String [] proxyNames, + Function function, + SQ.View view + ) { + this.origNames = origNames; + this.proxyNames = proxyNames; + this.function = function; + this.view = view; + } + + protected double [] transformCoeffs(double [] coeffs) { + return coeffs; + } + + private static double maxQ(SQ [] sqs) { + double max = -Double.MAX_VALUE; + for (SQ sq: sqs) { + double q = sq.getQ(); // Don't use view here! 
+ if (q > max) { + max = q; + } + } + return Math.max(0d, max); + } + + private double cFerguson( + org.dive4elements.river.artifacts.math.Function instance, + SQ [] sqs + ) { + double sqrSum = 0d; + + for (SQ sq: sqs) { + double s = view.getS(sq); + double q = view.getQ(sq); + double diffS = s - instance.value(q); + sqrSum += diffS*diffS; + } + + return Math.exp(0.5d * sqrSum/(sqs.length-2)); + } + + private double cDuan( + org.dive4elements.river.artifacts.math.Function instance, + SQ [] sqs + ) { + double sum = 0d; + + for (SQ sq: sqs) { + double s = view.getS(sq); + double q = view.getQ(sq); + double diffS = s - instance.value(q); + sum += Math.exp(diffS); + } + return sum / sqs.length; + } + + private double r2( + org.dive4elements.river.artifacts.math.Function instance, + SQ [] sqs + ) { + double xm = 0; + double ym = 0; + for (SQ sq: sqs) { + double s = view.getS(sq); + double q = view.getQ(sq); + double fs = instance.value(q); + xm += s; + ym += fs; + } + xm /= sqs.length; + ym /= sqs.length; + + double mixXY = 0d; + double sumX = 0d; + double sumY = 0d; + + for (SQ sq: sqs) { + double s = view.getS(sq); + double q = view.getQ(sq); + double fs = instance.value(q); + + double xDiff = xm - s; + double yDiff = ym - fs; + + mixXY += xDiff*yDiff; + + sumX += xDiff*xDiff; + sumY += yDiff*yDiff; + } + + double r = mixXY/Math.sqrt(sumX*sumY); + return r*r; + } + + + public Parameters createParameters( + double [] coeffs, + double standardDeviation, + double chiSqr, + SQ [] measurements + ) { + String [] columns = StringUtils.join(EXTRA_PARAMETERS, origNames); + + Parameters parameters = new Parameters(columns); + int row = parameters.newRow(); + parameters.set(row, origNames, transformCoeffs(coeffs)); + parameters.set(row, "chi_sqr", chiSqr); + parameters.set(row, "std_dev", standardDeviation); + parameters.set(row, "max_q", maxQ(measurements)); + + // We need to instantiate the function to calculate + // the remaining values. + org.dive4elements.river.artifacts.math.Function f = + function.instantiate(coeffs); + + parameters.set(row, "c_ferguson", cFerguson(f, measurements)); + parameters.set(row, "c_duan", cDuan(f, measurements)); + parameters.set(row, "r2", r2(f, measurements)); + + return parameters; + } + } + + /** We need to transform the coeffs back to the original function. */ + public static class LinearParameterCreator extends ParameterCreator { + + public LinearParameterCreator( + String [] origNames, + String [] proxyNames, + Function function, + SQ.View view + ) { + super(origNames, proxyNames, function, view); + } + + @Override + protected double [] transformCoeffs(double [] coeffs) { + + int bP = StringUtils.indexOf("m", proxyNames); + int mP = StringUtils.indexOf("b", proxyNames); + + int aO = StringUtils.indexOf("a", origNames); + int bO = StringUtils.indexOf("b", origNames); + + if (bP == -1 || mP == -1 || aO == -1 || bO == -1) { + log.error("index not found: " + + bP + " " + mP + " " + + aO + " " + bO); + return coeffs; + } + + double [] ncoeffs = (double [])coeffs.clone(); + ncoeffs[aO] = Math.exp(coeffs[mP]); + ncoeffs[bO] = coeffs[bP]; + + if (log.isDebugEnabled()) { + log.debug("before transform: " + Arrays.toString(coeffs)); + log.debug("after transform: " + Arrays.toString(ncoeffs)); + } + + return ncoeffs; + } } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
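LinearParameterCreator.transformCoeffs maps the coefficients of the linear proxy fit back onto the original power function: if the fit in log space yields the line log(s) = m * log(q) + c, then s = exp(c) * q^m, so the factor of the power law is exp(intercept) and the exponent is the slope. The new c_ferguson and c_duan columns look like retransformation bias corrections for exactly this kind of log-space fit (exp of half the residual variance, and the mean of exp(residual), i.e. Duan's smearing estimator); that reading is an interpretation of the code above, not something the changeset states. A tiny illustrative check of the back-transformation with made-up numbers:

    double m = 0.6, c = Math.log(0.5);               // slope and intercept of the log-space line
    double a = Math.exp(c), b = m;                   // coefficients of s = a * q^b
    double q = 40d;
    double viaLine  = Math.exp(m * Math.log(q) + c);
    double viaPower = a * Math.pow(q, b);
    System.out.println(viaLine + " " + viaPower);    // both print ~4.57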
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQRelationJRDataSource.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/SQRelationJRDataSource.java Thu Sep 12 10:13:09 2013 +0200 @@ -73,6 +73,12 @@ else if ("periods".equals(fieldName)) { value = metaData.get("periods"); } + else if ("msName".equals(fieldName)) { + value = metaData.get("msName"); + } + else if ("msGauge".equals(fieldName)) { + value = metaData.get("msGauge"); + } else if ("km".equals(fieldName)) { value = data.get(index)[0]; } @@ -86,12 +92,24 @@ value = data.get(index)[3]; } else if ("total".equals(fieldName)) { - value = data.get(index)[4]; + value = data.get(index)[7]; } else if ("out".equals(fieldName)) { + value = data.get(index)[8]; + } + else if ("sd".equals(fieldName)) { + value = data.get(index)[4]; + } + else if ("qmax".equals(fieldName)) { value = data.get(index)[5]; } - else if ("variance".equals(fieldName)) { + else if ("cferg".equals(fieldName)) { + value = data.get(index)[10]; + } + else if ("cduan".equals(fieldName)) { + value = data.get(index)[9]; + } + else if ("r2".equals(fieldName)) { value = data.get(index)[6]; } return value;
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/D4EService.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/D4EService.java Thu Sep 12 10:13:09 2013 +0200 @@ -44,6 +44,7 @@ } + /** Override to do the meat work (called in processXML). */ protected abstract Document doProcess( Document data, GlobalContext globalContext, @@ -56,6 +57,7 @@ } + /** Called when processing done, close session. */ protected void shutdown() { logger.debug("shutdown"); Session session = SessionHolder.HOLDER.get();
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/DischargeInfoService.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/DischargeInfoService.java Thu Sep 12 10:13:09 2013 +0200 @@ -40,6 +40,8 @@ public static final String GAUGE_XPATH = "/art:gauge/text()"; + public static final String RIVER_NAME_XPATH = "/art:gauge/art:river/text()"; + public DischargeInfoService() { } @@ -58,30 +60,39 @@ String gaugeNumber = XMLUtils.xpathString( data, GAUGE_XPATH, ArtifactNamespaceContext.INSTANCE); + String river = XMLUtils.xpathString( + data, RIVER_NAME_XPATH, ArtifactNamespaceContext.INSTANCE); + if (gaugeNumber == null || (gaugeNumber = gaugeNumber.trim()).length() == 0) { logger.warn("No gauge specified. Cannot return discharge info."); return XMLUtils.newDocument(); } - logger.debug("Getting discharge for gauge: " + gaugeNumber); + logger.debug("Getting discharge for gauge: " + gaugeNumber + " at river: " + river); long gn; try { gn = Long.parseLong(gaugeNumber); } catch (NumberFormatException nfe) { - logger.warn("Invalid gauge number. Cannot return discharg info."); + logger.warn("Invalid gauge number. Cannot return discharge info."); return XMLUtils.newDocument(); } - Gauge gauge = Gauge.getGaugeByOfficialNumber(gn); + Gauge gauge; + if (river == null || river.isEmpty()) { + gauge = Gauge.getGaugeByOfficialNumber(gn); + } else { + gauge = Gauge.getGaugeByOfficialNumber(gn, river); + } + if (gauge == null) { logger.warn("No such gauge found."); return XMLUtils.newDocument(); } - logger.debug("Found gauge: " + gauge.getName()); + logger.debug("Found gauge: " + gauge.getName() + " id: " + gauge.getId()); return buildDocument(gauge); }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/RiverInfoService.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/RiverInfoService.java Thu Sep 12 10:13:09 2013 +0200 @@ -37,6 +37,7 @@ protected River river; protected Element riverele; + @Override protected Document doProcess( Document data, GlobalContext globalContext, @@ -91,3 +92,4 @@ ? Double.toString(value.doubleValue()) : ""; } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/SedimentLoadInfoService.java Thu Sep 12 10:13:09 2013 +0200 @@ -22,6 +22,7 @@ import org.dive4elements.river.artifacts.model.minfo.SedimentLoadFactory; +/** Service delivering info about sediment loads. */ public class SedimentLoadInfoService extends D4EService { @@ -33,6 +34,10 @@ public static final String FROM_XPATH = "/art:river/art:location/art:from/text()"; public static final String TO_XPATH = "/art:river/art:location/art:to/text()"; + /** + * Create document with sedimentload infos, + * constrained by contents in data. + */ @Override protected Document doProcess( Document data, @@ -54,17 +59,18 @@ data, TO_XPATH, ArtifactNamespaceContext.INSTANCE); - double f, t; + double fromD, toD; try { - f = Double.parseDouble(from); - t = Double.parseDouble(to); + fromD = Double.parseDouble(from); + toD = Double.parseDouble(to); } catch (NumberFormatException nfe) { logger.warn("Invalid locations. Cannot return sediment loads."); return XMLUtils.newDocument(); } - SedimentLoad[] loads = SedimentLoadFactory.getLoads(river, type, f, t); + SedimentLoad[] loads = + SedimentLoadFactory.getLoads(river, type, fromD, toD); return buildDocument(loads); } @@ -98,3 +104,4 @@ return result; } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,55 +8,35 @@ package org.dive4elements.river.artifacts.states; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.Polygon; + import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.log4j.Logger; - import org.apache.velocity.Template; - -import org.geotools.feature.FeatureCollection; -import org.geotools.feature.FeatureCollections; - -import org.geotools.feature.simple.SimpleFeatureBuilder; - -import org.hibernate.HibernateException; - -import org.opengis.feature.simple.SimpleFeature; -import org.opengis.feature.simple.SimpleFeatureType; - -import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.geom.LineString; -import com.vividsolutions.jts.geom.Polygon; - import org.dive4elements.artifactdatabase.state.Facet; - import org.dive4elements.artifacts.Artifact; import org.dive4elements.artifacts.CallContext; import org.dive4elements.artifacts.CallMeta; import org.dive4elements.artifacts.GlobalContext; - import org.dive4elements.artifacts.common.utils.FileTools; - import org.dive4elements.river.artifacts.D4EArtifact; - import org.dive4elements.river.artifacts.access.RangeAccess; - import org.dive4elements.river.artifacts.context.RiverContext; - import org.dive4elements.river.artifacts.model.CalculationMessage; import org.dive4elements.river.artifacts.model.CalculationResult; import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.LayerInfo; import org.dive4elements.river.artifacts.model.WQKms; - import org.dive4elements.river.artifacts.model.map.HWS; import org.dive4elements.river.artifacts.model.map.HWSContainer; import org.dive4elements.river.artifacts.model.map.HWSFactory; @@ -64,24 +44,25 @@ import org.dive4elements.river.artifacts.model.map.WSPLGENCalculation; import org.dive4elements.river.artifacts.model.map.WSPLGENJob; import org.dive4elements.river.artifacts.model.map.WSPLGENReportFacet; - import org.dive4elements.river.artifacts.resources.Resources; - import org.dive4elements.river.exports.WstWriter; - import org.dive4elements.river.model.CrossSectionTrack; import org.dive4elements.river.model.DGM; import org.dive4elements.river.model.Floodplain; import org.dive4elements.river.model.RiverAxis; - import org.dive4elements.river.utils.ArtifactMapfileGenerator; -import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.utils.GeometryUtils; import org.dive4elements.river.utils.MapfileGenerator; - +import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.wsplgen.FacetCreator; import org.dive4elements.river.wsplgen.JobObserver; import org.dive4elements.river.wsplgen.Scheduler; +import org.geotools.feature.FeatureCollection; +import org.geotools.feature.FeatureCollections; +import org.geotools.feature.simple.SimpleFeatureBuilder; +import org.hibernate.HibernateException; +import org.opengis.feature.simple.SimpleFeature; +import org.opengis.feature.simple.SimpleFeatureType; public class FloodMapState extends 
DefaultState @@ -109,9 +90,9 @@ public static final String WSPLGEN_FLOODPLAIN = "talaue.shp"; public static final String WSPLGEN_WSP_FILE = "waterlevel.wst"; public static final String WSPLGEN_OUTPUT_FILE = "wsplgen.shp"; - public static final String WSPLGEN_USER_SHAPE = "user-rgd.shp"; - public static final String WSPLGEN_USER_ZIP = "user-rgd.zip"; - public static final String WSPLGEN_USER_FILENAME = "user-rgd"; + public static final String WSPLGEN_USER_RGD_SHAPE = "user-rgd.shp"; + public static final String WSPLGEN_USER_RGD_ZIP = "user-rgd.zip"; + public static final String WSPLGEN_USER_RGD = "user-rgd"; public static final String WSPLGEN_QPS_NAME = "qps"; @@ -428,7 +409,7 @@ createMapfile( artifact, artifactDir, - MapfileGenerator.MS_LAYER_PREFIX + "hws-lines", + MapfileGenerator.MS_LAYER_PREFIX + HWS_LINES, HWS_LINES_SHAPE, "LINE", "31467", @@ -441,7 +422,7 @@ } - private void createMapfile( + public static void createMapfile( D4EArtifact artifact, File artifactDir, String name, @@ -650,13 +631,13 @@ File dir, WSPLGENJob job ) { - File archive = new File(dir, WSPLGEN_USER_SHAPE); + File archive = new File(dir, WSPLGEN_USER_RGD_SHAPE); boolean exists = archive.exists(); logger.debug("shp file exists: " + exists); if (exists) { - job.addLin(dir + "/" + WSPLGEN_USER_SHAPE); + job.addLin(dir + "/" + WSPLGEN_USER_RGD_SHAPE); facetCreator.createShapeFacet(FacetCreator.I18N_USERSHAPE, - MapfileGenerator.MS_LAYER_PREFIX + "user-rgd", + MapfileGenerator.MS_LAYER_PREFIX + WSPLGEN_USER_RGD, FLOODMAP_USERSHAPE, 4); } @@ -901,11 +882,22 @@ Floodplain plain = Floodplain.getFloodplain(river); + if (plain == null) { + logger.debug("No flood plain for river '" + river + "'"); + return; + } + + Polygon polygon = plain.getGeom(); + if (polygon == null) { + logger.warn("Floodplain has no geometry."); + return; + } + SimpleFeatureType ft = GeometryUtils.buildFeatureType( "talaue", srs, Polygon.class); SimpleFeatureBuilder builder = new SimpleFeatureBuilder(ft); - builder.add(plain.getGeom()); + builder.add(polygon); FeatureCollection collection = FeatureCollections.newCollection(); collection.add(builder.buildFeature("0"));
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeDischargeState.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,6 +10,8 @@ import java.util.List; +import java.text.DateFormat; + import org.apache.log4j.Logger; import org.dive4elements.artifacts.CallMeta; @@ -31,7 +33,10 @@ import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.model.Gauge; +import org.dive4elements.river.model.DischargeTable; +import org.dive4elements.river.model.TimeInterval; +import org.dive4elements.river.utils.Formatter; /** * The only state for an GaugeDischargeState (River and km known). @@ -57,10 +62,25 @@ protected String createFacetName(GaugeDischargeArtifact artifact, CallMeta meta) { + if (artifact.getFacetWishName() != null) { + /* We let the Artifact overwrite our name as this allows + * injecting the facet name from the Datacage */ + return artifact.getFacetWishName(); + } + Gauge gauge = artifact.getGauge(); + DischargeTable mdt = gauge.fetchMasterDischargeTable(); + TimeInterval validity = mdt.getTimeInterval(); + DateFormat df = Formatter.getDateFormatter(meta, "dd.MM.yyyy"); + String start = validity.getStartTime() != null ? + df.format(validity.getStartTime()) : "Unknown"; + String stop = validity.getStopTime() != null ? + df.format(validity.getStopTime()) : ""; + Object[] args = new Object[] { gauge.getName(), - gauge.getStation() + start, + stop }; String name = Resources.getMsg(
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeTimerangeState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/GaugeTimerangeState.java Thu Sep 12 10:13:09 2013 +0200 @@ -67,7 +67,8 @@ } } - logger.warn("Could not determine time range for gauge: " + gauge); + logger.warn("Could not determine time range for gauge: " + gauge.getName() + + " id: " + gauge.getId()); return null; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/HWSBarriersState.java Thu Sep 12 10:13:09 2013 +0200 @@ -9,51 +9,29 @@ package org.dive4elements.river.artifacts.states; import java.io.File; -import java.io.FileNotFoundException; import java.io.IOException; - -import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import org.apache.log4j.Logger; - -import org.apache.velocity.Template; - -import org.geotools.data.shapefile.ShapefileDataStore; - -import org.geotools.feature.FeatureCollection; -import org.geotools.feature.FeatureCollections; - -import org.opengis.feature.simple.SimpleFeatureType; - -import org.opengis.feature.type.GeometryDescriptor; - -import org.w3c.dom.Element; - import org.dive4elements.artifactdatabase.state.Facet; - import org.dive4elements.artifacts.Artifact; import org.dive4elements.artifacts.CallContext; - import org.dive4elements.artifacts.common.utils.FileTools; - import org.dive4elements.artifacts.common.utils.XMLUtils.ElementCreator; - import org.dive4elements.river.artifacts.D4EArtifact; - import org.dive4elements.river.artifacts.access.MapAccess; - -import org.dive4elements.river.artifacts.model.LayerInfo; - import org.dive4elements.river.artifacts.model.map.HWS; import org.dive4elements.river.artifacts.model.map.HWSContainer; import org.dive4elements.river.artifacts.model.map.HWSFactory; - -import org.dive4elements.river.utils.ArtifactMapfileGenerator; -import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.utils.GeometryUtils; import org.dive4elements.river.utils.MapfileGenerator; +import org.dive4elements.river.utils.RiverUtils; +import org.geotools.data.shapefile.ShapefileDataStore; +import org.geotools.feature.FeatureCollection; +import org.geotools.feature.FeatureCollections; +import org.opengis.feature.simple.SimpleFeatureType; +import org.opengis.feature.type.GeometryDescriptor; +import org.w3c.dom.Element; public class HWSBarriersState extends DefaultState @@ -63,9 +41,8 @@ private static Logger logger = Logger.getLogger(HWSBarriersState.class); private static final String HWS_SHAPEFILE_LINES = "hws-lines.shp"; private static final String HWS_SHAPEFILE_POINTS = "hws-points.shp"; - private static final String USER_RGD_SHAPE = "user-rgd.shp"; - private static final String USER_RGD_ZIP = "user-rgd.zip"; - private static final String USER_RGD_FILENAME = "user-rgd"; + + @Override protected String getUIProvider() { return "map_digitize"; @@ -146,7 +123,7 @@ } if (successLines) { - createMapfile( + FloodMapState.createMapfile( artifact, artifactDir, MapfileGenerator.MS_LAYER_PREFIX + "hws-lines", @@ -156,7 +133,7 @@ "hws"); } if (successPoints) { - createMapfile( + FloodMapState.createMapfile( artifact, artifactDir, MapfileGenerator.MS_LAYER_PREFIX + "hws-points", @@ -172,7 +149,7 @@ try { ShapefileDataStore store = new ShapefileDataStore( new File(artifactDir.getCanonicalPath() + - "/" + USER_RGD_SHAPE) + "/" + FloodMapState.WSPLGEN_USER_RGD_SHAPE) .toURI().toURL()); GeometryDescriptor desc = store.getSchema().getGeometryDescriptor(); @@ -201,14 +178,14 @@ else { type = "POINT"; } - createMapfile( + FloodMapState.createMapfile( artifact, artifactDir, - MapfileGenerator.MS_LAYER_PREFIX + USER_RGD_FILENAME, - USER_RGD_SHAPE, + MapfileGenerator.MS_LAYER_PREFIX + FloodMapState.WSPLGEN_USER_RGD, + FloodMapState.WSPLGEN_USER_RGD_SHAPE, type, epsg, - "user-rgd"); + 
FloodMapState.WSPLGEN_USER_RGD); } catch (IOException e) { logger.warn("No mapfile for user-rgd created!"); @@ -219,7 +196,7 @@ } private boolean extractUserShp(File dir) { - File archive = new File(dir, USER_RGD_ZIP); + File archive = new File(dir, FloodMapState.WSPLGEN_USER_RGD_ZIP); boolean exists = archive.exists(); logger.debug("Zip file exists: " + exists); if (exists) { @@ -231,7 +208,7 @@ } catch (IOException ioe) { logger.warn("Zip archive " + dir + "/" - + USER_RGD_ZIP + " could not be extracted."); + + FloodMapState.WSPLGEN_USER_RGD_ZIP + " could not be extracted."); return false; } } @@ -261,7 +238,7 @@ return true; } try { - FileTools.copyFile(file, new File(target, USER_RGD_FILENAME + "." + suffix)); + FileTools.copyFile(file, new File(target, FloodMapState.WSPLGEN_USER_RGD + "." + suffix)); } catch (IOException ioe) { logger.warn ("Error while copying file " + file.getName()); @@ -275,41 +252,6 @@ FileTools.deleteRecursive(source); } - private void createMapfile( - D4EArtifact artifact, - File artifactDir, - String name, - String hwsShapefile, - String type, - String srid, - String group - ) { - LayerInfo info = new LayerInfo(); - info.setName(name + artifact.identifier()); - info.setType(type); - info.setDirectory(artifact.identifier()); - info.setTitle(name); - info.setData(hwsShapefile); - info.setSrid(srid); - info.setGroupTitle(group); - MapfileGenerator generator = new ArtifactMapfileGenerator(); - Template tpl = generator.getTemplateByName(MapfileGenerator.SHP_LAYER_TEMPLATE); - try { - File layer = new File(artifactDir.getCanonicalPath() + "/" + name); - generator.writeLayer(info, layer, tpl); - List<String> layers = new ArrayList<String>(); - layers.add(layer.getAbsolutePath()); - generator.generate(); - } - catch(FileNotFoundException fnfe) { - logger.warn("Could not find mapfile for hws layer"); - } - catch (Exception ioe) { - logger.warn("Could not create mapfile for hws layer"); - logger.warn(Arrays.toString(ioe.getStackTrace())); - } - } - @Override public void endOfLife(Artifact artifact, Object callContext) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WDifferencesState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WDifferencesState.java Thu Sep 12 10:13:09 2013 +0200 @@ -18,10 +18,12 @@ import org.dive4elements.artifacts.CallContext; import org.dive4elements.artifacts.Artifact; +import org.dive4elements.river.artifacts.ChartArtifact; import org.dive4elements.river.artifacts.D4EArtifact; +import org.dive4elements.river.artifacts.FixationArtifact; +import org.dive4elements.river.artifacts.MINFOArtifact; import org.dive4elements.river.artifacts.StaticWKmsArtifact; import org.dive4elements.river.artifacts.WINFOArtifact; -import org.dive4elements.river.artifacts.ChartArtifact; import org.dive4elements.river.artifacts.math.WKmsOperation; @@ -29,15 +31,16 @@ import org.dive4elements.river.artifacts.model.DataFacet; import org.dive4elements.river.artifacts.model.DifferenceCurveFacet; import org.dive4elements.river.artifacts.model.EmptyFacet; - import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.WKms; import org.dive4elements.river.artifacts.model.WQKms; +import org.dive4elements.river.artifacts.model.fixings.FixRealizingResult; + import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.utils.StringUtil; - +/** State of a WINFOArtifact to get differences of data of other artifacts. */ public class WDifferencesState extends DefaultState implements FacetTypes @@ -76,9 +79,10 @@ /** - * Access the data (wkms). + * Access the data (wkms) of an artifact, coded in mingle. */ protected WKms getWKms(String mingle, CallContext context) { + logger.debug("WDifferencesState.getWKms " + mingle); String[] def = mingle.split(";"); String uuid = def[0]; String name = def[1]; @@ -96,23 +100,45 @@ return wkms; } - WINFOArtifact flys = (WINFOArtifact) RiverUtils.getArtifact( + D4EArtifact d4eArtifact = RiverUtils.getArtifact( uuid, context); - if (flys == null) { + if (d4eArtifact == null) { logger.warn("One of the artifacts (1) for diff calculation could not be loaded"); return null; } - else{ - WQKms[] wqkms = (WQKms[]) flys.getWaterlevelData(). + + if (d4eArtifact instanceof WINFOArtifact) { + logger.debug("Get WKms from WINFOArtifact"); + WINFOArtifact flys = (WINFOArtifact) d4eArtifact; + + WKms[] wkms = (WKms[]) flys.getWaterlevelData(context). getData(); - if (wqkms == null) - logger.warn("not waterlevels in artifact"); - else if (wqkms.length < idx) - logger.warn("not enough waterlevels in artifact"); - return wqkms[idx]; + if (wkms == null || wkms.length == 0) { + logger.warn("no waterlevels in artifact"); + } + else if (wkms.length < idx+1) { + logger.warn("Not enough waterlevels in artifact."); + return new WQKms(); + } + return wkms[idx]; } + else if (d4eArtifact instanceof MINFOArtifact) { + logger.debug("Get WKms from MINFOArtifact"); + CalculationResult r = (CalculationResult) + d4eArtifact.compute(context, ComputeType.ADVANCE, false); + } + else if (d4eArtifact instanceof FixationArtifact) { + logger.debug ("Get WKms from FixationArtifact."); + CalculationResult r = (CalculationResult) + d4eArtifact.compute(context, ComputeType.ADVANCE, false); + FixRealizingResult frR = (FixRealizingResult) r.getData(); + return frR.getWQKms()[idx]; + } + + logger.error("Do not know how to handle (getWKms) minuend/subtrahend"); + return null; }
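getWKms now dispatches on the concrete artifact type instead of assuming a WINFOArtifact. The "mingle" string it receives is split at ';' into the artifact uuid, the facet name and, judging from the idx variable used above, an index; the layout beyond the first two parts is an assumption here, as is the sample value:

    String mingle = "some-artifact-uuid;waterlevel;0";   // hypothetical example
    String[] def  = mingle.split(";");
    String uuid   = def[0];                              // artifact to load
    String name   = def[1];                              // facet name
    int idx       = Integer.parseInt(def[2]);            // assumed: which waterlevel to pick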
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WaterlevelState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WaterlevelState.java Thu Sep 12 10:13:09 2013 +0200 @@ -138,6 +138,7 @@ return res; } + /** Returns empty String if argument is null, argument itself otherwise. */ private static final String nn(String s) { return s != null ? s : ""; }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/fixation/FixAnalysisCompute.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/fixation/FixAnalysisCompute.java Thu Sep 12 10:13:09 2013 +0200 @@ -9,6 +9,7 @@ package org.dive4elements.river.artifacts.states.fixation; import java.text.DateFormat; +import java.util.Collection; import java.util.Date; import java.util.List; @@ -41,7 +42,9 @@ import org.dive4elements.river.artifacts.model.fixings.FixWQCurveFacet; import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.artifacts.states.DefaultState; +import org.dive4elements.river.utils.Formatter; import org.dive4elements.river.utils.IdGenerator; +import org.dive4elements.river.utils.UniqueDateFormatter; /** * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a> @@ -76,8 +79,6 @@ "fix.hq5" }; - // TODO Why does this happen here? In other cases its implemented in the - // respective artifact, not State. static { // Active/deactivate facets. FacetActivity.Registry.getInstance().register( @@ -171,15 +172,15 @@ int qsS = access.getQSectorStart(); int qsE = access.getQSectorEnd(); - // TODO: i18n - DateFormat df = DateFormat.getDateInstance(DateFormat.MEDIUM); + DateFormat df = Formatter.getDateFormatter(context.getMeta(), "dd.MM.yyyy"); + DateFormat lf = Formatter.getDateFormatter(context.getMeta(), "dd.MM.yyy'T'HH:mm"); DateRange [] periods = access.getAnalysisPeriods(); for (int i = 0; i < periods.length; i++) { DateRange period = periods[i]; String startDate = df.format(period.getFrom()); - String endDate = df.format(period.getTo()); + String endDate = df.format(period.getTo()); for (int j = qsS; j <= qsE; j++) { @@ -233,19 +234,22 @@ I18N_ANALYSIS, I18N_ANALYSIS); + Collection<Date> aeds = fr.getAnalysisEventsDates(i); + UniqueDateFormatter cf = new UniqueDateFormatter(df, lf, aeds); + int k = 0; - for (Date d: fr.getAnalysisEventsDates(i)) { + for (Date d: aeds) { int anaNdx = i << 8; anaNdx = anaNdx | k; facets.add(new FixAnalysisEventsFacet(anaNdx, FIX_ANALYSIS_EVENTS_DWT, - eventDesc + (i+1) + " - " + df.format(d))); + eventDesc + (i+1) + " - " + cf.format(d))); facets.add(new FixLongitudinalAnalysisFacet(anaNdx, FIX_ANALYSIS_EVENTS_LS, - eventDesc + (i+1) + " - " + df.format(d))); + eventDesc + (i+1) + " - " + cf.format(d))); facets.add(new FixAnalysisEventsFacet(anaNdx, FIX_ANALYSIS_EVENTS_WQ, - eventDesc + (i+1) +" - " + df.format(d))); + eventDesc + (i+1) +" - " + cf.format(d))); k++; } } @@ -259,27 +263,29 @@ I18N_REFERENCEDEVIATION, I18N_REFERENCEDEVIATION); + Collection<Date> reds = fr.getReferenceEventsDates(); + UniqueDateFormatter cf = new UniqueDateFormatter(df, lf, reds); + int i = 0; - for (Date d: fr.getReferenceEventsDates()) { + for (Date d: reds) { int refNdx = idg.next() << 8; refNdx |= i; facets.add(new FixReferenceEventsFacet(refNdx, FIX_REFERENCE_EVENTS_DWT, - i18n_ref + " - " + df.format(d))); + i18n_ref + " - " + cf.format(d))); refNdx = idg.next() << 8; refNdx = refNdx | i; facets.add(new FixLongitudinalReferenceFacet(refNdx, FIX_REFERENCE_EVENTS_LS, - i18n_ref + " - " + df.format(d))); + i18n_ref + " - " + cf.format(d))); refNdx = idg.next() << 8; refNdx |= i; facets.add(new FixReferenceEventsFacet(refNdx, FIX_REFERENCE_EVENTS_WQ, - i18n_ref + " - " + df.format(d))); + i18n_ref + " - " + cf.format(d))); i++; } - facets.add(new FixLongitudinalDeviationFacet(idg.next(), FIX_DEVIATION_LS, i18n_dev));
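The UniqueDateFormatter introduced here receives a short date pattern, a longer date-time pattern and the full collection of event dates. Presumably it falls back to the longer pattern only when two events would otherwise get identical labels, for example two fixing events on the same day; that behaviour is inferred from the constructor arguments and is not shown in this changeset. A purely hypothetical illustration of that idea (not the implementation of UniqueDateFormatter), reusing the aeds, df and lf variables from the code above:

    java.util.Set<String> seen = new java.util.HashSet<String>();
    java.util.Set<String> ambiguous = new java.util.HashSet<String>();
    for (java.util.Date d: aeds) {
        String day = df.format(d);                       // short pattern, e.g. "12.09.2013"
        if (!seen.add(day)) {
            ambiguous.add(day);                          // at least two events on this day
        }
    }
    // label for an event d:
    // ambiguous.contains(df.format(d)) ? lf.format(d) : df.format(d)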
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/DifferencesState.java Thu Sep 12 10:13:09 2013 +0200 @@ -37,6 +37,7 @@ import org.dive4elements.river.utils.Formatter; /** + * State for BedDifferences. * @author <a href="mailto:raimund.renkert@intevation.de">Raimund Renkert</a> */ public class DifferencesState @@ -68,7 +69,7 @@ @Override public Object computeAdvance(D4EArtifact artifact, String hash, CallContext context, List<Facet> facets, Object old) { - logger.debug("BedQualityState.computeAdvance"); + logger.debug("DifferencesState.computeAdvance"); List<Facet> newFacets = new ArrayList<Facet>(); @@ -97,9 +98,15 @@ return res; } - protected void generateFacets(CallContext context, List<Facet> newFacets, - BedDifferencesResult[] results, String stateId, String hash) { - logger.debug("BedQualityState.generateFacets"); + /** Generate Facets based on given results. */ + protected void generateFacets( + CallContext context, + List<Facet> newFacets, + BedDifferencesResult[] results, + String stateId, + String hash + ) { + logger.debug("DifferencesState.generateFacets"); CallMeta meta = context.getMeta(); @@ -120,9 +127,7 @@ newFacets.add(new BedDiffYearFacet( idx, BED_DIFFERENCE_MORPH_WIDTH, - createBedDiffMorphDescription( - meta, - (BedDiffYearResult)results[idx]), + createBedDiffMorphDescription(meta), ComputeType.ADVANCE, stateId, hash)); @@ -335,12 +340,9 @@ } protected String createBedDiffMorphDescription( - CallMeta meta, - BedDiffYearResult result) { - String range = result.getStart() + " - " + result.getEnd(); - + CallMeta meta) { return Resources.getMsg(meta, I18N_FACET_BED_DIFF_MORPH, - I18N_FACET_BED_DIFF_MORPH, new Object[] { range }); + I18N_FACET_BED_DIFF_MORPH); } protected String createBedDiffAbsoluteDescription( @@ -412,3 +414,4 @@ }); } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java Thu Sep 12 10:13:09 2013 +0200 @@ -21,6 +21,7 @@ import org.dive4elements.river.artifacts.D4EArtifact; import org.dive4elements.river.artifacts.access.SedimentLoadAccess; import org.dive4elements.river.artifacts.model.CalculationResult; +import org.dive4elements.river.artifacts.model.DataFacet; import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.ReportFacet; import org.dive4elements.river.artifacts.model.minfo.SedimentLoad; @@ -33,7 +34,7 @@ import org.dive4elements.river.artifacts.states.DefaultState; import org.dive4elements.river.utils.DateGuesser; - +/** State in which Sediment Load(s) are calculated/retrieved. */ public class SedimentLoadCalculate extends DefaultState implements FacetTypes @@ -73,7 +74,7 @@ name.equals(SEDIMENT_LOAD_SUSP_SAND_BED)){ return Boolean.FALSE; } - else if (name.equals(SEDIMENT_LOAD_UNKOWN)) { + else if (name.equals(SEDIMENT_LOAD_UNKNOWN)) { D4EArtifact d4e = (D4EArtifact)artifact; SedimentLoadUnknownFacet f = (SedimentLoadUnknownFacet) @@ -139,10 +140,6 @@ return res; } - String river = access.getRiver(); - SedimentLoad[] unknown = - SedimentLoadFactory.getSedimentLoadUnknown(river); - String type = access.getYearEpoch(); if (type.equals("year")) { generateYearFacets(context, newFacets, results, getID(), hash); @@ -154,6 +151,12 @@ generateOffEpochFacets(context, newFacets, results, getID(), hash); } logger.debug("Created " + newFacets.size() + " new Facets."); + + String river = access.getRiver(); + SedimentLoad[] unknown = + SedimentLoadFactory.getSedimentLoadUnknown(river, + access.getUnit().replace("_per_","/"), type); + if (res.getReport().hasProblems()) { newFacets.add(new ReportFacet(ComputeType.ADVANCE, hash, id)); } @@ -161,12 +164,16 @@ for (int i = 0; i < unknown.length; i++) { newFacets.add(new SedimentLoadUnknownFacet( i, - SEDIMENT_LOAD_UNKOWN, + SEDIMENT_LOAD_UNKNOWN, unknown[i].getDescription(), ComputeType.ADVANCE, getID(), hash)); } + + newFacets.add( + new DataFacet(CSV, "CSV data", ComputeType.ADVANCE, hash, id)); + facets.addAll(newFacets); return res; @@ -520,3 +527,4 @@ } } } +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf-8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadOffEpochSelect.java Thu Sep 12 10:13:09 2013 +0200 @@ -12,7 +12,7 @@ import org.dive4elements.river.artifacts.states.DefaultState; - +/** State in which official epoch is selected to calculate sediment load. */ public class SedimentLoadOffEpochSelect extends DefaultState {
--- a/artifacts/src/main/java/org/dive4elements/river/collections/AttributeParser.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/collections/AttributeParser.java Thu Sep 12 10:13:09 2013 +0200 @@ -51,6 +51,7 @@ protected CollectionAttribute attribute; + /** Just store reference to document. */ public AttributeParser(Document attributeDocument) { this.attributeDocument = attributeDocument; }
--- a/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/collections/AttributeWriter.java Thu Sep 12 10:13:09 2013 +0200 @@ -46,7 +46,7 @@ protected List<Facet> newFacets; /** - * "Compatibility matrix", mapws list of facet names to output names. + * "Compatibility matrix", maps list of facet names to output names. * Any facet that is not found in the list for a specific output will * not be added to the resulting document. */ @@ -211,7 +211,12 @@ ManagedFacet picked = pickFacet(facet, oldFacets); if (facet.equals(picked)) { - genuinelyNewFacets.add(picked); + if (!facetInTwoOuts(facet, genuinelyNewFacets)) { + genuinelyNewFacets.add(picked); + } + else { + logger.debug("Skip clone facet that shall be present in two outs"); + } } else { currentFacets.add(picked); @@ -260,17 +265,20 @@ // Preparations to be able to detect gaps. Map<Integer, ManagedFacet> mfmap = new HashMap<Integer, ManagedFacet>(); - int max = 0; + int maxPosition = 0; for (ManagedFacet mf: currentFacets) { int pos = mf.getPosition(); mfmap.put(Integer.valueOf(pos), mf); - if (pos > max) max = pos; + if (pos > maxPosition) maxPosition = pos; } - // Finally do gap correction. - if (max != currentFacets.size()) { + // TODO issue1458: debug what happens + + // Finally do gap correction + // (note that posistions start at 1, not at zero). + if (maxPosition != currentFacets.size()) { int gap = 0; - for (int i = 1; i <= max; i++) { + for (int i = 1; i <= maxPosition; i++) { ManagedFacet mf = mfmap.get(Integer.valueOf(i)); if (mf == null) { gap++; @@ -281,14 +289,27 @@ } // Now add all facets. - for (ManagedFacet oldMF: currentFacets) { - attribute.addFacet(outputName, oldMF); + for (ManagedFacet facet: currentFacets) { + attribute.addFacet(outputName, facet); } return !currentFacets.isEmpty(); } + /** Returns true if a likely clone of facet is + * contained in genuinelyNewFacets, as happens when same facet is defined + * for two outs. */ + private boolean facetInTwoOuts(ManagedFacet facet, List<ManagedFacet> genuinelyNewFacets) { + for (ManagedFacet otherFacet: genuinelyNewFacets) { + if (facet.isSame(otherFacet)) { + return true; + } + } + return false; + } + + /** * Returns the facet to be added to Document. * Return the new facet only if the "same" facet was not present before.
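The renaming of max to maxPosition and the added comment make the gap correction in AttributeWriter easier to follow: facet positions are 1-based, and when deletions leave holes the loop counts the holes seen so far, presumably so that each remaining facet can be pulled forward by that amount. A standalone toy version of that compaction, using plain ints instead of ManagedFacets:

    import java.util.Map;
    import java.util.TreeMap;

    public class GapCorrectionSketch {
        public static void main(String[] args) {
            int[] positions = { 1, 2, 4, 6 };                // holes at 3 and 5
            Map<Integer, Integer> byPos = new TreeMap<Integer, Integer>();
            for (int p: positions) {
                byPos.put(p, p);
            }
            int maxPosition = 6;
            int gap = 0;
            for (int i = 1; i <= maxPosition; i++) {
                Integer p = byPos.get(i);
                if (p == null) {
                    gap++;                                   // one more hole before the rest
                    continue;
                }
                System.out.println(p + " -> " + (p - gap));  // 1->1, 2->2, 4->3, 6->4
            }
        }
    }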
--- a/artifacts/src/main/java/org/dive4elements/river/collections/CollectionAttribute.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/collections/CollectionAttribute.java Thu Sep 12 10:13:09 2013 +0200 @@ -66,6 +66,7 @@ } + /** Remove outputs without facets from outputMap. */ public void cleanEmptyOutputs() { if (outputMap == null) { return; @@ -95,14 +96,14 @@ } if (outputMap == null) { - logger.warn("Tried to add facet but no Outputs are existing yet."); + logger.warn("Tried to add settings but no Outputs are existing yet."); return; } Output output = outputMap.get(outputKey); if (output == null) { - logger.warn("Tried to add facet for unknown Output: " + outputKey); + logger.warn("Tried to add settings for unknown Output: " + outputKey); return; } @@ -112,7 +113,7 @@ public void addFacet(String outputKey, Facet facet) { if (facet == null) { - logger.warn("Tried to add empty facet."); + logger.warn("Tried to add null facet."); return; } @@ -139,6 +140,7 @@ } + /** Empty facets list for outputKey output. */ public void clearFacets(String outputKey) { if (outputKey == null || outputKey.length() == 0) { logger.warn("Tried to clear Facets, but no Output key specified!");
--- a/artifacts/src/main/java/org/dive4elements/river/collections/D4EArtifactCollection.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/collections/D4EArtifactCollection.java Thu Sep 12 10:13:09 2013 +0200 @@ -74,6 +74,22 @@ public static final String XPATH_LOADED_RECOMMENDATIONS = "/art:attribute/art:loaded-recommendations"; + private CallContext context; + + private ArtifactDatabase db; + + protected CallContext getContext() { + return this.context; + } + + protected ArtifactDatabase getArtifactDB() { + return this.db; + } + + protected void setContext(CallContext context) { + this.context = context; + this.db = context.getDatabase(); + } /** * Create and return description Document for this collection. @@ -82,26 +98,26 @@ public Document describe(CallContext context) { log.debug("D4EArtifactCollection.describe: " + identifier); + setContext(context); + CollectionDescriptionHelper helper = new CollectionDescriptionHelper( getName(), identifier(), getCreationTime(), getTTL(), context); - ArtifactDatabase db = context.getDatabase(); Document oldAttrs = getAttribute(); AttributeParser parser = new AttributeParser(oldAttrs); try { - String[] aUUIDs = getArtifactUUIDs(context); + String[] aUUIDs = getArtifactUUIDs(); - oldAttrs = removeAttributes(oldAttrs, context); + oldAttrs = removeAttributes(oldAttrs); parser = new AttributeParser(oldAttrs); - CollectionAttribute newAttr = mergeAttributes( - db, context, parser, aUUIDs); + CollectionAttribute newAttr = mergeAttributes(parser, aUUIDs); - if (checkOutputSettings(newAttr, context)) { - saveCollectionAttribute(db, context, newAttr); + if (checkOutputSettings(newAttr)) { + saveCollectionAttribute(newAttr); } helper.setAttribute(newAttr); @@ -129,13 +145,11 @@ * @param uuids Artifact uuids. */ protected CollectionAttribute mergeAttributes( - ArtifactDatabase db, - CallContext context, AttributeParser oldParser, String[] uuids ) { CollectionAttribute cAttribute = - buildOutAttributes(db, context, oldParser, uuids); + buildOutAttributes(oldParser, uuids); if (cAttribute == null) { log.warn("mergeAttributes: cAttribute == null"); @@ -145,13 +159,19 @@ cAttribute.setLoadedRecommendations( getLoadedRecommendations(oldParser.getAttributeDocument())); - saveCollectionAttribute(db, context, cAttribute); + saveCollectionAttribute(cAttribute); return cAttribute; } - protected Document removeAttributes(Document attrs, CallContext context) { + /** + * Remove those output-elements which have a name that does + * not appear in master artifacts out-list. + * @param attr[in,out] Document to clean and return. + * @return param attr. + */ + protected Document removeAttributes(Document attrs) { Node outs = (Node) XMLUtils.xpath( attrs, "/art:attribute/art:outputs", @@ -167,7 +187,7 @@ if (nodes != null) { for (int i = 0; i < nodes.getLength(); i++) { Element e = (Element)nodes.item(i); - if(!outputExists(e.getAttribute("name"), context)) { + if(!outputExists(e.getAttribute("name"))) { outs.removeChild(e); } } @@ -182,9 +202,9 @@ * @param context current context * @return true if current master artifact has given output. 
*/ - protected boolean outputExists(String name, CallContext context) { - D4EArtifact master = getMasterArtifact(context); - List<Output> outList = master.getOutputs(context); + protected boolean outputExists(String name) { + D4EArtifact master = getMasterArtifact(); + List<Output> outList = master.getOutputs(getContext()); for (Output o : outList) { if (name.equals(o.getName())) { @@ -203,8 +223,6 @@ * @return true, if the transaction was successful, otherwise false. */ protected boolean saveCollectionAttribute( - ArtifactDatabase db, - CallContext context, CollectionAttribute attribute ) { log.info("Save new CollectionAttribute into database."); @@ -213,7 +231,7 @@ try { // Save the merged document into database. - db.setCollectionAttribute(identifier(), context.getMeta(), doc); + getArtifactDB().setCollectionAttribute(identifier(), getContext().getMeta(), doc); log.info("Saving CollectionAttribute was successful."); @@ -254,8 +272,7 @@ * @return true, if the CollectionAttribute was modified, otherwise false. */ protected boolean checkOutputSettings( - CollectionAttribute attribute, - CallContext cc + CollectionAttribute attribute ) { boolean modified = false; @@ -281,7 +298,7 @@ if (settings == null) { log.debug("No Settings set for Output '" + outName + "'."); output.setSettings( - createInitialOutputSettings(cc, attribute, outName)); + createInitialOutputSettings(attribute, outName)); modified = true; } @@ -302,11 +319,10 @@ * @return a default Settings object for the specified Output. */ protected Settings createInitialOutputSettings( - CallContext cc, CollectionAttribute attr, String out ) { - OutGenerator outGen = RiverContext.getOutGenerator(cc, out, null); + OutGenerator outGen = RiverContext.getOutGenerator(getContext(), out, null); if (outGen == null) { return null; @@ -314,13 +330,13 @@ // XXX NOTE: the outGen is not able to process its generate() operation, // because it has no OutputStream set! - outGen.init(XMLUtils.newDocument(), null, cc); - prepareMasterArtifact(outGen, cc); + outGen.init(XMLUtils.newDocument(), null, getContext()); + prepareMasterArtifact(outGen); try { - Document outAttr = getAttribute(cc, attr, out); + Document outAttr = getAttribute(attr, out); OutputHelper helper = new OutputHelper(identifier()); - helper.doOut(outGen, out, out, outAttr, cc); + helper.doOut(outGen, out, out, outAttr, getContext()); } catch (ArtifactDatabaseException adbe) { log.error(adbe, adbe); @@ -343,6 +359,8 @@ { boolean debug = log.isDebugEnabled(); + setContext(context); + long reqBegin = System.currentTimeMillis(); if (debug) { @@ -362,6 +380,8 @@ log.debug("-> Output subtype = " + subtype); } + // If type contains 'chartinfo' use a generator that + // just allow access to width, height etc. OutGenerator generator = null; if (type != null && type.length() > 0 @@ -400,10 +420,10 @@ generator.init(format, out, context); generator.setSettings(settings); generator.setCollection(this); - prepareMasterArtifact(generator, context); + prepareMasterArtifact(generator); try { - Document attr = getAttribute(context, cAttr, name); + Document attr = getAttribute(cAttr, name); OutputHelper helper = new OutputHelper(identifier()); if (name.equals("sq_overview")) { helper.doOut(generator, name, subtype, format, context); @@ -426,12 +446,11 @@ * Sets the master Artifact at the given <i>generator</i>. * * @param generator The generator that gets a master Artifact. - * @param cc The CallContext. 
*/ - protected void prepareMasterArtifact(OutGenerator generator, CallContext cc + protected void prepareMasterArtifact(OutGenerator generator ) { // Get master artifact. - D4EArtifact master = getMasterArtifact(cc); + D4EArtifact master = getMasterArtifact(); if (master != null) { log.debug("Set master Artifact to uuid: " + master.identifier()); generator.setMasterArtifact(master); @@ -445,18 +464,18 @@ /** * @return masterartifact or null if exception/not found. */ - protected D4EArtifact getMasterArtifact(CallContext context) + protected D4EArtifact getMasterArtifact() { try { - ArtifactDatabase db = context.getDatabase(); - CallMeta callMeta = context.getMeta(); + ArtifactDatabase db = getArtifactDB(); + CallMeta callMeta = getContext().getMeta(); Document document = db.getCollectionsMasterArtifact( identifier(), callMeta); String masterUUID = XMLUtils.xpathString( document, XPATH_MASTER_UUID, ArtifactNamespaceContext.INSTANCE); D4EArtifact masterArtifact = - (D4EArtifact) getArtifact(masterUUID, context); + (D4EArtifact) getArtifact(masterUUID); return masterArtifact; } catch (ArtifactDatabaseException ade) { @@ -471,8 +490,6 @@ * @param uuids List of artifact uuids. */ protected CollectionAttribute buildOutAttributes( - ArtifactDatabase db, - CallContext context, AttributeParser aParser, String[] uuids) { @@ -485,14 +502,16 @@ return null; } - D4EArtifact masterArtifact = getMasterArtifact(context); + D4EArtifact masterArtifact = getMasterArtifact(); if (masterArtifact == null) { log.debug("buildOutAttributes: masterArtifact == null"); return null; } - OutputParser oParser = new OutputParser(db, context); + OutputParser oParser = new OutputParser( + getArtifactDB(), + getContext()); if (uuids != null) { for (String uuid: uuids) { @@ -508,7 +527,7 @@ aParser.parse(); AttributeWriter aWriter = new AttributeWriter( - db, + getArtifactDB(), aParser.getCollectionAttribute(), aParser.getOuts(), aParser.getFacets(), @@ -531,7 +550,6 @@ * @return the attribute for the desired output type. */ protected Document getAttribute( - CallContext context, CollectionAttribute cAttr, String output) throws ArtifactDatabaseException @@ -570,13 +588,13 @@ * * @return a list of uuids. */ - protected String[] getArtifactUUIDs(CallContext context) + protected String[] getArtifactUUIDs() throws ArtifactDatabaseException { log.debug("D4EArtifactCollection.getArtifactUUIDs"); - ArtifactDatabase db = context.getDatabase(); - CallMeta meta = context.getMeta(); + ArtifactDatabase db = getArtifactDB(); + CallMeta meta = getContext().getMeta(); Document itemList = db.listCollectionArtifacts(identifier(), meta); NodeList items = (NodeList) XMLUtils.xpath( @@ -617,7 +635,7 @@ * * @return an Artifact. */ - protected Artifact getArtifact(String uuid, CallContext context) + protected Artifact getArtifact(String uuid) throws ArtifactDatabaseException { log.debug("D4EArtifactCollection.getArtifact"); @@ -628,6 +646,44 @@ return persistent != null ? persistent.getArtifact() : null; } + /** + * Returns artifacts with name name. + * + * @param name The Artifact name to search + * @param context The CallContext + * + * @return a list of artifacts matching this name. + */ + public List<Artifact> getArtifactsByName(String name, CallContext context) + { + setContext(context); + return getArtifactsByName(name); + } + + /** + * Returns artifacts with name name. + * + * @param name The Artifact name to search + * + * @return a list of artifacts matching this name. 
+ */ + protected List<Artifact> getArtifactsByName(String name) + { + log.debug("Searching for Artifacts: " + name); + List<Artifact> ret = new ArrayList<Artifact>(); + try { + for (String uuid: getArtifactUUIDs()) { + D4EArtifact subArt = (D4EArtifact) getArtifact(uuid); + if (subArt.getName() != null && subArt.getName().equals(name)) { + ret.add(subArt); + } + } + } catch (ArtifactDatabaseException e) { + log.error("Unexpected Error!", e); + } finally { + return ret; + } + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
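The D4EArtifactCollection refactoring above caches the CallContext and the ArtifactDatabase in fields (setContext()/getContext()/getArtifactDB()), which is why internal helpers such as getArtifactUUIDs(), getMasterArtifact() and saveCollectionAttribute() lost their context parameters, and it adds a public getArtifactsByName() lookup. A minimal caller-side sketch in Java, assuming an existing collection and call context; the class name and the artifact name "winfo" are illustrative and not part of the changeset.

    import java.util.List;

    import org.dive4elements.artifacts.Artifact;
    import org.dive4elements.artifacts.CallContext;
    import org.dive4elements.river.collections.D4EArtifactCollection;

    // Illustration only: look up sibling artifacts of a collection by name.
    public class ArtifactLookupExample {

        /** Returns all artifacts named "winfo"; empty list if none found or on DB errors. */
        public static List<Artifact> findWinfoArtifacts(
            D4EArtifactCollection collection,
            CallContext context
        ) {
            // The public overload stores the context via setContext() and then
            // delegates to the protected getArtifactsByName(String), which walks
            // getArtifactUUIDs() and compares each artifact's name.
            return collection.getArtifactsByName("winfo", context);
        }
    }

Note that the protected variant swallows ArtifactDatabaseException and always returns the (possibly empty) list, so callers never see a null result.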
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ATExporter.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ATExporter.java Thu Sep 12 10:13:09 2013 +0200 @@ -30,6 +30,7 @@ import org.dive4elements.river.model.Gauge; import org.dive4elements.river.model.River; import org.dive4elements.river.model.TimeInterval; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.artifacts.access.RangeAccess; @@ -71,8 +72,8 @@ @Override public void doOut( ArtifactAndFacet artifactf, - Document attr, - boolean visible + ThemeDocument attr, + boolean visible ) { data = (WQ)artifactf.getData(context); }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/AbstractExporter.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/AbstractExporter.java Thu Sep 12 10:13:09 2013 +0200 @@ -32,6 +32,7 @@ import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.collections.D4EArtifactCollection; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.Formatter; @@ -158,7 +159,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactFacet.getFacetName();
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ChartGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ChartGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -33,9 +33,8 @@ import org.dive4elements.river.model.River; import org.dive4elements.river.themes.LineStyle; import org.dive4elements.river.themes.TextStyle; -import org.dive4elements.river.themes.ThemeAccess; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; -import org.dive4elements.river.utils.ThemeUtil; import java.awt.BasicStroke; import java.awt.Color; @@ -198,7 +197,7 @@ ChartArea area, LineStyle lineStyle, TextStyle textStyle, - Document theme + ThemeDocument theme ) { // OPTIMIZE pre-calculate area-related values final float TEXT_OFF = 0.03f; @@ -249,7 +248,7 @@ area2, annotation.getPos(), lineStyle); if (!Float.isNaN(annotation.getHitPoint()) && theme != null) { // New line annotation to hit curve. - if (ThemeUtil.parseShowVerticalLine(theme)) { + if (theme.parseShowVerticalLine()) { XYLineAnnotation hitLineAnnotation = createStickyLineAnnotation( StickyAxisAnnotation.SimpleAxis.X_AXIS, @@ -258,7 +257,7 @@ plot.getRenderer(rendererIndex).addAnnotation(hitLineAnnotation, org.jfree.ui.Layer.BACKGROUND); } - if (ThemeUtil.parseShowHorizontalLine(theme)) { + if (theme.parseShowHorizontalLine()) { XYLineAnnotation lineBackAnnotation = createStickyLineAnnotation( StickyAxisAnnotation.SimpleAxis.Y_AXIS2, @@ -277,7 +276,7 @@ lineAnnotation = createLeftStickAnnotation(area, annotation.getPos(), lineStyle); if (!Float.isNaN(annotation.getHitPoint()) && theme != null) { // New line annotation to hit curve. - if (ThemeUtil.parseShowHorizontalLine(theme)) { + if (theme.parseShowHorizontalLine()) { XYLineAnnotation hitLineAnnotation = createStickyLineAnnotation( StickyAxisAnnotation.SimpleAxis.Y_AXIS, @@ -286,7 +285,7 @@ plot.getRenderer(rendererIndex).addAnnotation(hitLineAnnotation, org.jfree.ui.Layer.BACKGROUND); } - if (ThemeUtil.parseShowVerticalLine(theme)) { + if (theme.parseShowVerticalLine()) { XYLineAnnotation lineBackAnnotation = createStickyLineAnnotation( StickyAxisAnnotation.SimpleAxis.X_AXIS, @@ -452,15 +451,14 @@ for (RiverAnnotation fa: annotations) { // Access text styling, if any. - Document theme = fa.getTheme(); + ThemeDocument theme = fa.getTheme(); TextStyle textStyle = null; LineStyle lineStyle = null; // Get Themeing information and add legend item. if (theme != null) { - ThemeAccess themeAccess = new ThemeAccess(theme); - textStyle = themeAccess.parseTextStyle(); - lineStyle = themeAccess.parseLineStyle(); + textStyle = theme.parseComplexTextStyle(); + lineStyle = theme.parseComplexLineStyle(); if (fa.getLabel() != null) { LegendItemCollection lic = new LegendItemCollection(); LegendItemCollection old = plot.getFixedLegendItems(); @@ -509,7 +507,7 @@ @Override public abstract void doOut( ArtifactAndFacet bundle, - Document attr, + ThemeDocument attr, boolean visible); @@ -619,10 +617,10 @@ * @param theme Theme document for given annotations. * @param visible The visibility of the annotations. */ - protected void doAnnotations( + public void doAnnotations( RiverAnnotation annotations, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ){ logger.debug("doAnnotations"); @@ -1834,11 +1832,14 @@ * * @return a new LegendItem instance. 
*/ - public LegendItem createLegendItem(Document theme, String name) { + public LegendItem createLegendItem(ThemeDocument theme, String name) { // OPTIMIZE Pass font, parsed Theme items. - ThemeAccess themeAccess = new ThemeAccess(theme); - Color color = themeAccess.parseLineColorField(); + Color color = theme.parseLineColorField(); + if (color == null) { + color = Color.BLACK; + } + LegendItem legendItem = new LegendItem(name, color); legendItem.setLabelFont(createLegendLabelFont());
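With ThemeAccess and ThemeUtil dropped from ChartGenerator, every styling lookup is now an instance call on ThemeDocument (parseShowVerticalLine(), parseComplexTextStyle(), parseComplexLineStyle(), parseLineColorField()), and createLegendItem() now guards against a theme without a line color. A small sketch of that defensive pattern, using only calls visible in the diff; the helper class itself is illustrative.

    import java.awt.Color;

    import org.jfree.chart.LegendItem;

    import org.dive4elements.river.themes.ThemeDocument;

    // Illustration only: legend styling read straight from the ThemeDocument.
    public class LegendStyleSketch {

        /** Build a legend item, falling back to black if no line color is themed. */
        public static LegendItem legendItemFor(ThemeDocument theme, String name) {
            Color color = theme.parseLineColorField();   // may return null
            if (color == null) {
                color = Color.BLACK;                     // same fallback as createLegendItem()
            }
            return new LegendItem(name, color);
        }
    }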
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ChartInfoGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ChartInfoGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,6 +10,7 @@ import org.dive4elements.river.collections.D4EArtifactCollection; import org.dive4elements.river.java2d.NOPGraphics2D; +import org.dive4elements.river.themes.ThemeDocument; import java.io.IOException; import java.io.OutputStream; @@ -106,9 +107,10 @@ /** * Dispatches the operation to the instantiated generator. */ + @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { generator.doOut(artifactFacet, attr, visible);
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ComputedDischargeCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ComputedDischargeCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -16,10 +16,12 @@ import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.WKms; import org.dive4elements.river.artifacts.model.WQKms; +import org.dive4elements.river.exports.process.DischargeProcessor; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StickyAxisAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import java.awt.Font; @@ -32,11 +34,10 @@ import org.apache.log4j.Logger; -import org.w3c.dom.Document; - /** - * An OutGenerator that generates discharge curves. + * An OutGenerator that generates discharge curves, also at locations + * not at a gauge. * * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a> */ @@ -104,6 +105,7 @@ * Create Y (range) axis for given index, here with a special axis * that depends on other axis (does translation and scaling for * special case at gauge in cm). + * @return A NumberAxis, possibly scaled. */ @Override protected NumberAxis createYAxis(int index) { @@ -138,7 +140,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactFacet.getFacetName(); @@ -150,9 +152,11 @@ return; } - //XXX DEAD CODE // Facet facet = artifactFacet.getFacet(); - - if (name.equals(COMPUTED_DISCHARGE_Q)) { + DischargeProcessor dProcessor = new DischargeProcessor(getRange()[0]); + if (dProcessor.canHandle(name)) { + dProcessor.doOut(this, artifactFacet, attr, visible, YAXIS.W.idx); + } + else if (name.equals(COMPUTED_DISCHARGE_Q)) { doDischargeQOut((WQKms) artifactFacet.getData(context), artifactFacet, attr, visible); } else if (name.equals(STATIC_WQ)) { @@ -165,18 +169,9 @@ attr, visible); } - else if (name.equals(COMPUTED_DISCHARGE_MAINVALUES_Q) - || name.equals(MAINVALUES_Q) - || name.equals(COMPUTED_DISCHARGE_MAINVALUES_W) - || name.equals(MAINVALUES_W) - ) { - RiverAnnotation mainValues = (RiverAnnotation) artifactFacet.getData(context); - translateRiverAnnotation(mainValues); - doAnnotations( - mainValues, - artifactFacet, attr, visible); - } - else if (name.equals(STATIC_WKMS_INTERPOL) || name.equals(HEIGHTMARKS_POINTS)) { + else if (STATIC_WKMS_INTERPOL.equals(name) || + HEIGHTMARKS_POINTS.equals(name) || + STATIC_WQKMS_W.equals(name)) { doWAnnotations( artifactFacet.getData(context), artifactFacet, @@ -212,16 +207,16 @@ /** * Add WQ Data to plot. 
- * @param wqkms data as double[][] + * @param wq data as double[][] */ protected void doWQOut( - Object wqkms, + Object wq, ArtifactAndFacet aaf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("ComputedDischargeCurveGenerator: doWQOut"); - double [][] data = (double [][]) wqkms; + double [][] data = (double [][]) wq; XYSeries series = new StyledXYSeries(aaf.getFacetDescription(), theme); StyledSeriesBuilder.addPoints(series, data, true); @@ -238,7 +233,7 @@ protected void doDischargeQOut( WQKms wqkms, ArtifactAndFacet aaf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("ComputedDischargeCurveGenerator: doDischargeQOut"); @@ -270,7 +265,7 @@ protected void doQOut( WQKms wqkms, ArtifactAndFacet aaf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("ComputedDischargeCurveGenerator: doQOut (add W/Q data)."); @@ -289,7 +284,7 @@ protected void doWQAnnotations( Object wqkms, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { List<StickyAxisAnnotation> xy = new ArrayList<StickyAxisAnnotation>(); @@ -315,7 +310,7 @@ protected void doWAnnotations( Object wqkms, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { Facet facet = aandf.getFacet();
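The main functional change in ComputedDischargeCurveGenerator.doOut() is the delegation of main-value facets to the new DischargeProcessor, constructed with the current station (getRange()[0]) and consulted via canHandle()/doOut(). A sketch of that dispatch protocol, assuming only the call shapes visible in the diff; the exact generator parameter type of Processor.doOut() is not shown there, so the helper below simply passes the concrete generator as the diff's call site does.

    import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
    import org.dive4elements.river.exports.ComputedDischargeCurveGenerator;
    import org.dive4elements.river.exports.process.DischargeProcessor;
    import org.dive4elements.river.themes.ThemeDocument;

    // Illustration only: the canHandle()/doOut() protocol of the new processors.
    public class DischargeDispatchSketch {

        /** Returns true if the processor accepted the facet, false otherwise. */
        public static boolean tryDischargeProcessor(
            ComputedDischargeCurveGenerator generator,
            ArtifactAndFacet artifactFacet,
            ThemeDocument attr,
            boolean visible,
            double stationKm,   // getRange()[0] in the generator
            int wAxisIndex      // YAXIS.W.idx in the generator
        ) {
            DischargeProcessor processor = new DischargeProcessor(stationKm);
            if (processor.canHandle(artifactFacet.getFacetName())) {
                processor.doOut(generator, artifactFacet, attr, visible, wAxisIndex);
                return true;
            }
            return false;   // caller falls back to its facet-name branches
        }
    }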
--- a/artifacts/src/main/java/org/dive4elements/river/exports/CrossSectionGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/CrossSectionGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -21,7 +21,6 @@ import org.jfree.chart.annotations.XYTextAnnotation; import org.jfree.chart.plot.XYPlot; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.artifacts.DataProvider; @@ -36,10 +35,9 @@ import org.dive4elements.river.model.FastCrossSectionLine; import org.dive4elements.river.themes.LineStyle; import org.dive4elements.river.themes.TextStyle; -import org.dive4elements.river.themes.ThemeAccess; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.utils.Formatter; -import org.dive4elements.river.utils.ThemeUtil; /** @@ -185,14 +183,13 @@ for(RiverAnnotation fa : this.annotations) { // Access text styling, if any. - Document theme = fa.getTheme(); + ThemeDocument theme = fa.getTheme(); TextStyle textStyle = null; // XXX: DEAD CODE // LineStyle lineStyle = null; // Get Themeing information and add legend item. if (theme != null) { - ThemeAccess themeAccess = new ThemeAccess(theme); - textStyle = themeAccess.parseTextStyle(); + textStyle = theme.parseComplexTextStyle(); // XXX: DEAD CODE // lineStyle = themeAccess.parseLineStyle(); if (fa.getLabel() != null) { LegendItemCollection lic = new LegendItemCollection(); @@ -258,7 +255,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactFacet.getFacetName(); @@ -332,7 +329,7 @@ protected void doCrossSectionWaterLineOut( Object o, String seriesName, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("CrossSectionGenerator.doCrossSectionWaterLineOut"); @@ -341,10 +338,10 @@ // DO NOT SORT DATA! This destroys the gaps indicated by NaNs. StyledXYSeries series = new StyledXYSeries(seriesName, false, theme); - if (!ThemeUtil.parseShowLineLabel(theme)) { + if (!theme.parseShowLineLabel()) { series.setLabel(""); } - if (ThemeUtil.parseShowWidth(theme)) { + if (theme.parseShowWidth()) { NumberFormat nf = Formatter.getMeterFormat(this.context); String labelAdd = "b=" + nf.format(lines.width) + "m"; if (series.getLabel().length() == 0) { @@ -354,7 +351,7 @@ series.setLabel(series.getLabel() + ", " + labelAdd); } } - if (ThemeUtil.parseShowLevel(theme) && lines.points.length > 1 + if (theme.parseShowLevel() && lines.points.length > 1 && lines.points[1].length > 0) { NumberFormat nf = Formatter.getMeterFormat(this.context); D4EArtifact flys = (D4EArtifact) master; @@ -369,7 +366,7 @@ series.setLabel(series.getLabel() + ", " + labelAdd); } } - if (ThemeUtil.parseShowMiddleHeight(theme) && lines.width != 0) { + if (theme.parseShowMiddleHeight() && lines.width != 0) { NumberFormat nf = Formatter.getMeterFormat(this.context); String labelAdd = "T=" + nf.format(lines.area / lines.width) + "m"; // : " + lines.area + "/" + lines.width); @@ -391,7 +388,7 @@ protected void doHyk( Object o, String seriesName, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("CrossSectionGenerator.doHyk"); @@ -419,7 +416,7 @@ protected void doCrossSectionOut( Object o, String seriesName, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("CrossSectionGenerator.doCrossSectionOut");
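The CrossSectionGenerator changes are mostly the same ThemeDocument migration, but they also show how the waterline label is assembled from theme flags: parseShowWidth() appends "b=<width>m" and parseShowMiddleHeight() appends "T=<area/width>m". A loose, self-contained rendering of that composition; the locale, the sample numbers and the simplified string handling are invented (the real code uses Formatter.getMeterFormat(context) and also handles the level label).

    import java.text.NumberFormat;
    import java.util.Locale;

    // Illustration only: label composition driven by theme flags.
    public class WaterlineLabelSketch {

        static String label(String base, boolean showWidth, boolean showMiddleHeight,
                            double width, double area) {
            NumberFormat nf = NumberFormat.getInstance(Locale.GERMANY);
            nf.setMaximumFractionDigits(2);
            StringBuilder label = new StringBuilder(base);
            if (showWidth) {
                label.append(", b=").append(nf.format(width)).append("m");
            }
            if (showMiddleHeight && width != 0) {
                // "T" is the quotient of area and width, i.e. the mean depth.
                label.append(", T=").append(nf.format(area / width)).append("m");
            }
            return label.toString();
        }

        public static void main(String[] args) {
            // Prints: Wasserstand, b=120,5m, T=2,07m
            System.out.println(label("Wasserstand", true, true, 120.5, 250.0));
        }
    }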
--- a/artifacts/src/main/java/org/dive4elements/river/exports/DischargeCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/DischargeCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -8,16 +8,22 @@ package org.dive4elements.river.exports; +import java.util.ArrayList; +import java.util.List; + import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.river.artifacts.D4EArtifact; import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.WQKms; +import org.dive4elements.river.exports.process.DischargeProcessor; +import org.dive4elements.river.jfree.CollisionFreeXYTextAnnotation; import org.dive4elements.river.jfree.Bounds; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StickyAxisAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; import org.dive4elements.river.model.Gauge; import org.dive4elements.river.model.River; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import org.apache.log4j.Logger; @@ -26,7 +32,6 @@ import org.jfree.chart.plot.XYPlot; import org.jfree.data.Range; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; /** @@ -38,6 +43,7 @@ extends XYChartGenerator implements FacetTypes { + /** Beware, in this implementation, the W axis is also in cm! */ public static enum YAXIS { WCm(0), W(1); @@ -72,9 +78,16 @@ * Returns the PNP (Datum) of gauge, if at gauge, 0 otherwise. */ public static double getCurrentGaugeDatum(double km, D4EArtifact artifact, double tolerance) { - // Code borrowed from FixATWriter. - Gauge gauge = RiverUtils.getGauge(artifact); + // Look if there is a gauge at chosen km: + // Get gauge which is defined for km + Gauge gauge = + RiverUtils.getRiver(artifact).determineGauge(km-0.1d, km+0.1d); + if (gauge == null) { + logger.error("No Gauge could be found at station " + km + "!"); + return 0d; + } double subtractPNP = 0d; + // Compare to km. if (Math.abs(km - gauge.getStation().doubleValue()) < tolerance) { subtractPNP = gauge.getDatum().doubleValue(); } @@ -82,12 +95,24 @@ } + /** Get the current Gauge datum with default distance tolerance. */ public double getCurrentGaugeDatum() { return getCurrentGaugeDatum(getRange()[0], (D4EArtifact) getMaster(), 1e-4); } + /** Overriden to show second axis also if no visible data present. */ + @Override + protected void adjustAxes(XYPlot plot) { + super.adjustAxes(plot); + if (getCurrentGaugeDatum() != 0d) { + // Show the W[*m] axis even if there is no data. + plot.setRangeAxis(1, createYAxis(YAXIS.W.idx)); + } + } + + public DischargeCurveGenerator() { super(); } @@ -142,6 +167,7 @@ return zoomin; } + /** Translate River annotations if a gauge. */ public void translateRiverAnnotation(RiverAnnotation riverAnnotation) { if (getCurrentGaugeDatum() == 0d) { return; @@ -163,13 +189,18 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document theme, + ThemeDocument theme, boolean visible ) { String name = artifactFacet.getFacetName(); logger.debug("DischargeCurveGenerator.doOut: " + name); - if (name.equals(DISCHARGE_CURVE) + DischargeProcessor dProcessor = new DischargeProcessor(getRange()[0]); + if (dProcessor.canHandle(name)) { + // In Base DischargeCurveGenerator, always at gauge, use WCm axis. 
+ dProcessor.doOut(this, artifactFacet, theme, visible, YAXIS.WCm.idx); + } + else if (name.equals(DISCHARGE_CURVE) || name.equals(GAUGE_DISCHARGE_CURVE)) { doDischargeOut( (D4EArtifact)artifactFacet.getArtifact(), @@ -178,22 +209,17 @@ theme, visible); } - else if (name.equals(COMPUTED_DISCHARGE_MAINVALUES_Q) - || name.equals(MAINVALUES_Q) - || name.equals(COMPUTED_DISCHARGE_MAINVALUES_W) - || name.equals(MAINVALUES_W)) - { - RiverAnnotation mainValues = (RiverAnnotation) artifactFacet.getData(context); - translateRiverAnnotation(mainValues); - doAnnotations( - mainValues, - artifactFacet, theme, visible); - } else if (FacetTypes.IS.MANUALPOINTS(name)) { doPoints(artifactFacet.getData(context), artifactFacet, theme, visible, YAXIS.W.idx); } + else if (STATIC_WQ.equals(name)) { + doWQOut(artifactFacet.getData(context), + artifactFacet, + theme, + visible); + } else { logger.warn("DischargeCurveGenerator.doOut: Unknown facet name: " + name); return; @@ -208,9 +234,10 @@ D4EArtifact artifact, Object o, String description, - Document theme, + ThemeDocument theme, boolean visible) { + logger.debug("DischargeCurveGenerator.doDischargeOut"); WQKms wqkms = (WQKms) o; String gaugeName = wqkms.getName(); @@ -235,5 +262,134 @@ addAxisSeries(series, YAXIS.W.idx, visible); } + + /** + * Add W/Q-Series to plot. + * @param wqkms actual data + * @param theme theme to use. + */ + protected void doQOut( + Object wqkms, + ArtifactAndFacet aaf, + ThemeDocument theme, + boolean visible + ) { + logger.debug("DischargeCurveGenerator: doQOut (add W/Q data)."); + XYSeries series = new StyledXYSeries(aaf.getFacetDescription(), theme); + + StyledSeriesBuilder.addPointsQW(series, (WQKms) wqkms); + + addAxisSeries(series, YAXIS.W.idx, visible); + } + + + /** Add a point annotation at given x and y coordinates. */ + protected void addPointTextAnnotation( + String title, + double x, + double y, + ThemeDocument theme + ) { + List<XYTextAnnotation> textAnnos = + new ArrayList<XYTextAnnotation>(); + XYTextAnnotation anno = new CollisionFreeXYTextAnnotation( + title, + x, + y); + textAnnos.add(anno); + RiverAnnotation flysAnno = new RiverAnnotation( + null, null, null, theme); + flysAnno.setTextAnnotations(textAnnos); + addAnnotations(flysAnno); + } + + + /** + * Return true if all values in data[0] are smaller than zero + * (in imported data they are set to -1 symbolically). + * Return false if data is null or empty + */ + private boolean hasNoDischarge(double[][] data) { + if (data == null || data.length == 0) { + return false; + } + + double[] qs = data[0]; + for (double q: qs) { + if (q > 0d) { + return false; + } + } + + return true; + } + + + /** + * Add WQ Data to plot. + * @param wq data as double[][] + */ + protected void doWQOut( + Object wq, + ArtifactAndFacet aaf, + ThemeDocument theme, + boolean visible + ) { + logger.debug("DischargeCurveGenerator: doWQOut"); + double [][] data = (double [][]) wq; + String title = aaf.getFacetDescription(); + + double translate = getCurrentGaugeDatum(); + + // If no Q values (i.e. all -1) found, add annotations. + if (hasNoDischarge(data)) { + List<StickyAxisAnnotation> xy = new ArrayList<StickyAxisAnnotation>(); + + for (double y: data[1]) { + if (translate != 0d) { + y = (y-translate)*100d; + } + + xy.add(new StickyAxisAnnotation( + title, + (float) y, + StickyAxisAnnotation.SimpleAxis.Y_AXIS)); + } + + doAnnotations( + new RiverAnnotation(title, xy), + aaf, theme, visible); + return; + } + + // Otherwise add points. 
+ XYSeries series = new StyledXYSeries(title, theme); + + if (translate != 0d) { + StyledSeriesBuilder.addPointsQW(series, data, -translate, 100d); + addAxisSeries(series, YAXIS.W.idx, visible); + } + else { + StyledSeriesBuilder.addPoints(series, data, true); + addAxisSeries(series, YAXIS.W.idx, visible); + } + + if (visible && theme.parseShowPointLabel() + && data != null && data.length != 0) { + + double[] xs = data[0]; + double[] ys = data[1]; + for (int i = 0; i < xs.length; i++) { + double x = xs[i]; + double y = ys[i]; + + if (translate != 0d) { + y = (y-translate)*100d; + } + + addPointTextAnnotation(title, x, y, theme); + } + } + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
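DischargeCurveGenerator now resolves the gauge by station (determineGauge(km-0.1, km+0.1)) instead of via RiverUtils.getGauge(artifact), and doWQOut() shifts W values onto the centimetre axis relative to the gauge datum (PNP): (y - translate) * 100 for annotations, addPointsQW(series, data, -translate, 100d) for point series. A tiny self-contained arithmetic sketch of that translation; the numbers are invented.

    // Illustration only: W in m (absolute) -> W in cm above the gauge datum,
    // the same (y - translate) * 100 step used in doWQOut().
    public class GaugeDatumSketch {

        static double toCmAtGauge(double wMeters, double gaugeDatumMeters) {
            return (wMeters - gaugeDatumMeters) * 100d;
        }

        public static void main(String[] args) {
            double datum = 8.5;    // hypothetical gauge datum (PNP) in m
            double w     = 9.75;   // hypothetical waterlevel in m

            // 9.75 m - 8.5 m = 1.25 m above datum -> 125.0 cm
            System.out.println(toCmAtGauge(w, datum));   // prints 125.0
        }
    }

When no gauge lies within the tolerance, getCurrentGaugeDatum() returns 0 and the series is added untranslated.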
--- a/artifacts/src/main/java/org/dive4elements/river/exports/DischargeLongitudinalSectionGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/DischargeLongitudinalSectionGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -12,19 +12,18 @@ import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; - import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.artifactdatabase.state.Facet; import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.WQCKms; -import org.dive4elements.river.artifacts.model.WQKms; import org.dive4elements.river.exports.process.Processor; +import org.dive4elements.river.exports.process.QOutProcessor; import org.dive4elements.river.exports.process.WOutProcessor; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; @@ -49,7 +48,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { logger.debug("DischargeLongitudinalSectionGenerator.doOut"); @@ -62,14 +61,7 @@ Facet facet = artifactFacet.getFacet(); - if (name.contains(DISCHARGE_LONGITUDINAL_Q)) { - doQOut( - (WQKms) artifactFacet.getData(context), - artifactFacet, - attr, - visible); - } - else if (name.equals(DISCHARGE_LONGITUDINAL_C)) { + if (name.equals(DISCHARGE_LONGITUDINAL_C)) { doCorrectedWOut( (WQCKms) artifactFacet.getData(context), facet, @@ -85,18 +77,15 @@ artifactFacet, attr, visible, YAXIS.W.idx); } - else if (name.equals(STATIC_WQKMS_Q)) { - doQOut( - (WQKms) artifactFacet.getData(context), - artifactFacet, - attr, - visible); - } else { Processor processor = new WOutProcessor(); + Processor qProcessor = new QOutProcessor(); if (processor.canHandle(name)) { processor.doOut(this, artifactFacet, attr, visible, YAXIS.W.idx); } + else if (qProcessor.canHandle(name)) { + qProcessor.doOut(this, artifactFacet, attr, visible, YAXIS.Q.idx); + } else { logger.warn("Unknown facet name: " + name); } @@ -111,10 +100,10 @@ * @param theme The theme that contains styling information. */ protected void doCorrectedWOut( - WQCKms wqckms, - Facet facet, - Document theme, - boolean visible + WQCKms wqckms, + Facet facet, + ThemeDocument theme, + boolean visible ) { logger.debug("DischargeLongitudinalSectionGenerator.doCorrectedWOut");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/DurationCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/DurationCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -14,6 +14,7 @@ import org.dive4elements.river.jfree.Bounds; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import java.awt.Font; import java.awt.geom.Point2D; @@ -24,7 +25,6 @@ import org.jfree.chart.plot.XYPlot; import org.jfree.data.Range; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; /** @@ -173,7 +173,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactFacet.getFacetName(); @@ -238,7 +238,7 @@ protected void doWOut( WQDay wqdays, ArtifactAndFacet aaf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("DurationCurveGenerator.doWOut"); @@ -257,10 +257,10 @@ } protected void doPointOut( - Point2D point, + Point2D point, ArtifactAndFacet aandf, - Document theme, - boolean visible + ThemeDocument theme, + boolean visible ){ logger.debug("DurationCurveGenerator.doPointOut"); @@ -281,7 +281,7 @@ protected void doQOut( WQDay wqdays, ArtifactAndFacet aaf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("DurationCurveGenerator.doQOut");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/FlowVelocityGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/FlowVelocityGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -14,8 +14,6 @@ import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; - import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.artifactdatabase.state.Facet; @@ -33,6 +31,7 @@ import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; @@ -209,7 +208,7 @@ */ public void doOut( ArtifactAndFacet artifactAndFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactAndFacet.getFacetName(); @@ -370,7 +369,7 @@ private void doBedQualityLoadDiameter( BedloadDiameterResult data, ArtifactAndFacet aandf, - Document attr, + ThemeDocument attr, boolean visible) { XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), attr); StyledSeriesBuilder.addPoints(series, data.getDiameterData(), true); @@ -382,7 +381,7 @@ private void doBedQualityTopLayerOut( BedDiameterResult data, ArtifactAndFacet aandf, - Document attr, + ThemeDocument attr, boolean visible) { XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), attr); StyledSeriesBuilder.addPoints(series, data.getDiameterSubData(), true); @@ -393,7 +392,7 @@ private void doBedQualitySubLayerOut( BedDiameterResult data, ArtifactAndFacet aandf, - Document attr, + ThemeDocument attr, boolean visible ) { logger.debug("Do beddiametersubout"); @@ -416,7 +415,7 @@ protected void doMainChannelOut( FlowVelocityData data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("FlowVelocityGenerator.doMainChannelOut"); @@ -433,7 +432,7 @@ protected void doVPointOut ( Object data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("FlowVelocityGenerator.doVPointOut"); @@ -456,7 +455,7 @@ protected void doTotalChannelOut( FlowVelocityData data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("FlowVelocityGenerator.doTotalChannelOut"); @@ -486,10 +485,10 @@ protected void doQOut( FlowVelocityData data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { - logger.debug("FlowVelocityGenerator.doTauOut"); + logger.debug("FlowVelocityGenerator.doQOut"); XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme); @@ -509,7 +508,7 @@ protected void doTauOut( FlowVelocityData data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("FlowVelocityGenerator.doTauOut"); @@ -543,10 +542,10 @@ * @param visible whether or not visible. */ protected void doArea( - Object o, + Object o, ArtifactAndFacet aandf, - Document theme, - boolean visible + ThemeDocument theme, + boolean visible ) { logger.debug("FlowVelocityGenerator.doArea"); logger.warn("TODO: Implement FlowVelocityGenerator.doArea");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -25,6 +25,7 @@ import org.dive4elements.river.jfree.StyledTimeSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import org.jfree.chart.plot.XYPlot; @@ -36,8 +37,6 @@ import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesCollection; -import org.w3c.dom.Document; - /** * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a> @@ -127,7 +126,7 @@ } @Override - public void doOut(ArtifactAndFacet artifactFacet, Document theme, + public void doOut(ArtifactAndFacet artifactFacet, ThemeDocument theme, boolean visible) { String name = artifactFacet.getFacetName(); logger.debug("HistoricalDischargeCurveGenerator.doOut: " + name); @@ -169,7 +168,7 @@ } protected void doHistoricalDischargeOutQ(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { logger.debug("doHistoricalDischargeOut(): description = " + desc); WQTimerange wqt = (WQTimerange) data; @@ -181,7 +180,7 @@ } protected void doHistoricalDischargeOutW(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { logger.debug("doHistoricalDischargeOut(): description = " + desc); WQTimerange wqt = (WQTimerange) data; @@ -193,7 +192,7 @@ } protected void doHistoricalDischargeDifferenceOutQ(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { logger.debug("doHistoricalDischargeDifferenceOut: desc = " + desc); HistoricalWQTimerange wqt = (HistoricalWQTimerange) data; @@ -205,7 +204,7 @@ } protected void doHistoricalDischargeDifferenceOutW(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { logger.debug("doHistoricalDischargeDifferenceOut: desc = " + desc); HistoricalWQTimerange wqt = (HistoricalWQTimerange) data; @@ -223,7 +222,7 @@ * looks like a "step chart". */ protected TimeSeriesCollection newTimeSeriesCollection( - Timerange[] timeranges, double[] values, Document theme, String desc) { + Timerange[] timeranges, double[] values, ThemeDocument theme, String desc) { logger.debug("Create new TimeSeriesCollection for: " + desc); TimeSeriesCollection tsc = new TimeSeriesCollection();
--- a/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/HistoricalDischargeWQCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,7 +10,6 @@ import org.apache.log4j.Logger; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.river.artifacts.D4EArtifact; @@ -22,6 +21,7 @@ import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledValueMarker; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; @@ -105,7 +105,7 @@ } @Override - public void doOut(ArtifactAndFacet artifactFacet, Document theme, + public void doOut(ArtifactAndFacet artifactFacet, ThemeDocument theme, boolean visible) { String name = artifactFacet.getFacetName(); logger.debug("HistoricalDischargeWQCurveGenerator.doOut: " + name); @@ -124,7 +124,9 @@ artifactFacet.getData(context), artifactFacet.getFacetDescription(), theme, visible); } - else if (name.equals(HISTORICAL_DISCHARGE_WQ_CURVE)) { + else if (name.equals(HISTORICAL_DISCHARGE_WQ_CURVE) || + name.equals(DISCHARGE_CURVE) || + name.equals(GAUGE_DISCHARGE_CURVE)) { doHistoricalDischargeCurveOut( (D4EArtifact) artifactFacet.getArtifact(), artifactFacet.getData(context), @@ -134,11 +136,10 @@ doPoints(artifactFacet.getData(context), artifactFacet, theme, visible, YAXIS.W.idx); } - else if (HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name)) { - doAnnotations((RiverAnnotation) - artifactFacet.getData(context), artifactFacet, theme, visible); - } - else if (HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) { + else if (name.equals(MAINVALUES_W) || + name.equals(MAINVALUES_Q) || + HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name) || + HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) { doAnnotations((RiverAnnotation) artifactFacet.getData(context), artifactFacet, theme, visible); } @@ -149,19 +150,19 @@ } protected void doHistoricalDischargeOutQ(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { double value = Double.valueOf(data.toString()); addDomainMarker(new StyledValueMarker(value, theme), visible); } protected void doHistoricalDischargeOutW(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { double value = Double.valueOf(data.toString()); addValueMarker(new StyledValueMarker(value, theme), visible); } protected void doHistoricalDischargeCurveOut(D4EArtifact artifact, - Object data, String desc, Document theme, boolean visible) { + Object data, String desc, ThemeDocument theme, boolean visible) { XYSeries series = new StyledXYSeries(desc, theme); StyledSeriesBuilder.addPointsQW(series, (WQKms) data);
--- a/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/LongitudinalSectionGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -16,10 +16,18 @@ import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.WKms; import org.dive4elements.river.artifacts.model.WQKms; + +import org.dive4elements.river.exports.process.Processor; +import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor; +import org.dive4elements.river.exports.process.BedDiffYearProcessor; +import org.dive4elements.river.exports.process.BedheightProcessor; +import org.dive4elements.river.exports.process.QOutProcessor; import org.dive4elements.river.exports.process.WOutProcessor; + import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledAreaSeriesCollection; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.DataUtil; import org.dive4elements.river.utils.RiverUtils; import org.apache.log4j.Logger; @@ -27,7 +35,6 @@ import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.plot.XYPlot; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; /** @@ -315,7 +322,7 @@ @Override public void doOut( ArtifactAndFacet artifactAndFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactAndFacet.getFacetName(); @@ -333,16 +340,26 @@ return; } - WOutProcessor processor = new WOutProcessor(); - if (processor.canHandle(name)) { - processor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); + WOutProcessor wProcessor = new WOutProcessor(); + QOutProcessor qProcessor = new QOutProcessor(); + Processor bedp = new BedheightProcessor(); + Processor bdyProcessor = new BedDiffYearProcessor(); + Processor bdhyProcessor = new BedDiffHeightYearProcessor(); + + if (wProcessor.canHandle(name)) { + wProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); } - else if (name.equals(LONGITUDINAL_Q)) { - doQOut( - (WQKms) artifactAndFacet.getData(context), - artifactAndFacet, - attr, - visible); + if (qProcessor.canHandle(name)) { + qProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.Q.idx); + } + else if (bedp.canHandle(name)) { + bedp.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); + } + else if (bdyProcessor.canHandle(name)) { + bdyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); + } + else if (bdhyProcessor.canHandle(name)) { + bdhyProcessor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); } else if (name.equals(LONGITUDINAL_ANNOTATION)) { doAnnotations( @@ -351,13 +368,6 @@ attr, visible); } - else if (name.equals(STATIC_WQKMS_Q)) { - doQOut( - (WQKms) artifactAndFacet.getData(context), - artifactAndFacet, - attr, - visible); - } else if (name.equals(W_DIFFERENCES)) { doWDifferencesOut( (WKms) artifactAndFacet.getData(context), @@ -392,7 +402,7 @@ protected void doWDifferencesOut( WKms wkms, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("WDifferencesCurveGenerator.doWDifferencesOut"); @@ -422,35 +432,6 @@ /** - * Process the output for Q facets in a longitudinal section curve. - * - * @param wqkms An array of WQKms values. - * @param aandf The facet and artifact. This facet does NOT support any data objects. 
Use - * D4EArtifact.getNativeFacet() instead to retrieve a Facet which supports - * data. - * @param theme The theme that contains styling information. - * @param visible The visibility of the curve. - */ - protected void doQOut( - WQKms wqkms, - ArtifactAndFacet aandf, - Document theme, - boolean visible - ) { - logger.debug("LongitudinalSectionGenerator.doQOut"); - - XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme); - - StyledSeriesBuilder.addStepPointsKmQ(series, wqkms); - - addAxisSeries(series, YAXIS.Q.idx, visible); - - if (needInvertAxis(wqkms)) { - setInverted(true); - } - } - - /** * This method determines - taking JFreeCharts auto x value ordering into * account - if the x axis need to be inverted. Waterlines in these charts * should decrease. @@ -517,7 +498,7 @@ protected void doArea( Object o, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("LongitudinalSectionGenerator.doArea");
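LongitudinalSectionGenerator.doOut() now hands W, Q, bed-height and bed-height-difference facets to dedicated Processor implementations and drops its local doQOut(). A sketch of how that dispatch could be kept flat with a processor/axis table; this loop is an editorial illustration, not code from the changeset, and it relies only on the Processor calls visible above. Note that it stops at the first match, whereas the code above checks the W and the Q processor independently of each other.

    import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
    import org.dive4elements.river.exports.LongitudinalSectionGenerator;
    import org.dive4elements.river.exports.process.BedDiffHeightYearProcessor;
    import org.dive4elements.river.exports.process.BedDiffYearProcessor;
    import org.dive4elements.river.exports.process.BedheightProcessor;
    import org.dive4elements.river.exports.process.Processor;
    import org.dive4elements.river.exports.process.QOutProcessor;
    import org.dive4elements.river.exports.process.WOutProcessor;
    import org.dive4elements.river.themes.ThemeDocument;

    // Illustration only: table-driven variant of the processor dispatch.
    public class ProcessorTableSketch {

        /** Pairs a processor with the axis index its series should land on. */
        private static final class Entry {
            final Processor processor;
            final int axisIdx;
            Entry(Processor processor, int axisIdx) {
                this.processor = processor;
                this.axisIdx = axisIdx;
            }
        }

        /** Returns true if one of the processors accepted the facet. */
        public static boolean dispatch(
            LongitudinalSectionGenerator generator,
            ArtifactAndFacet artifactAndFacet,
            ThemeDocument attr,
            boolean visible,
            int wAxis,   // YAXIS.W.idx
            int qAxis    // YAXIS.Q.idx
        ) {
            Entry[] entries = {
                new Entry(new WOutProcessor(), wAxis),
                new Entry(new QOutProcessor(), qAxis),
                new Entry(new BedheightProcessor(), wAxis),
                new Entry(new BedDiffYearProcessor(), wAxis),
                new Entry(new BedDiffHeightYearProcessor(), wAxis)
            };
            String name = artifactAndFacet.getFacetName();
            for (Entry entry: entries) {
                if (entry.processor.canHandle(name)) {
                    entry.processor.doOut(
                        generator, artifactAndFacet, attr, visible, entry.axisIdx);
                    return true;
                }
            }
            return false;   // fall back to the explicit facet-name branches
        }
    }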
--- a/artifacts/src/main/java/org/dive4elements/river/exports/MapGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/MapGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -24,9 +24,9 @@ import org.dive4elements.river.artifacts.model.map.WSPLGENLayerFacet; import org.dive4elements.river.artifacts.states.WaterlevelGroundDifferences; import org.dive4elements.river.collections.D4EArtifactCollection; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.ArtifactMapfileGenerator; import org.dive4elements.river.utils.GeometryUtils; -import org.dive4elements.river.utils.ThemeUtil; import java.io.File; import java.io.FileNotFoundException; @@ -96,7 +96,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible) { String name = artifactFacet.getFacetName(); @@ -120,9 +120,11 @@ setInitialExtent(extent); createWSPLGENLayer(flys, wms, attr); } - else if (FLOODMAP_USERSHAPE.equals(name)) { - createUserShapeLayer(flys, wms); - } + // FIXME: Already generated by HWSBarrierState + // wms has a wrong SRID which would break that layer + //else if (FLOODMAP_USERSHAPE.equals(name)) { + // createUserShapeLayer(flys, wms); + //} else { logger.debug("doOut: createDatabaseLayer for facet name: " + name); createDatabaseLayer(flys, wms, attr); @@ -135,9 +137,9 @@ protected void createWSPLGENLayer( - D4EArtifact flys, + D4EArtifact flys, WMSLayerFacet wms, - Document attr + ThemeDocument attr ) { try { if(wms instanceof WSPLGENLayerFacet) { @@ -158,7 +160,8 @@ mfg.createUeskLayer( flys, (WSPLGENLayerFacet) wms, - ThemeUtil.createDynamicMapserverStyle(attr, from, to, step, context.getMeta()), + attr.createDynamicMapserverStyle( + from, to, step, context.getMeta()), context); } else { @@ -188,9 +191,9 @@ protected void createDatabaseLayer( - D4EArtifact flys, + D4EArtifact flys, WMSLayerFacet wms, - Document attr + ThemeDocument attr ) { logger.debug("createDatabaseLayer for facet: " + wms.getName()); @@ -209,7 +212,7 @@ mfg.createDatabaseLayer( flys, (WMSDBLayerFacet) wms, - ThemeUtil.createMapserverStyle(attr)); + attr.createMapserverStyle()); } else { logger.warn("Cannot create DB layer from: " + wms.getClass());
--- a/artifacts/src/main/java/org/dive4elements/river/exports/MiddleBedHeightGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/MiddleBedHeightGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -10,19 +10,23 @@ import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; import org.dive4elements.artifactdatabase.state.Facet; + import org.dive4elements.river.artifacts.D4EArtifact; import org.dive4elements.river.artifacts.model.FacetTypes; import org.dive4elements.river.artifacts.model.MiddleBedHeightData; +import org.dive4elements.river.artifacts.model.WQKms; + import org.dive4elements.river.exports.process.BedheightProcessor; import org.dive4elements.river.exports.process.Processor; +import org.dive4elements.river.exports.process.QOutProcessor; import org.dive4elements.river.exports.process.WOutProcessor; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.RiverUtils; import org.apache.log4j.Logger; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; // TODO Move class to org.dive4elements.river.exports.minfo /** @@ -35,7 +39,7 @@ implements FacetTypes { public enum YAXIS { - H(0), W(1), P(2); + H(0), W(1), P(2), Q(3); protected int idx; private YAXIS(int c) { idx = c; @@ -73,6 +77,9 @@ public static final String I18N_P_YAXIS_LABEL_DEFAULT = "Gepeilte Breite [m]"; public static final String I18N_P_YAXIS_LABEL = "chart.bedheight_middle.sounding.yaxis.label"; + public static final String I18N_Q_YAXIS_LABEL_DEFAULT = "Q [m\u00b3/s]"; + public static final String I18N_Q_YAXIS_LABEL = + "chart.longitudinal.section.yaxis.second.label"; @Override protected YAxisWalker getYAxisWalker() { @@ -139,6 +146,9 @@ else if (index == YAXIS.P.idx) { label = msg(I18N_P_YAXIS_LABEL, I18N_P_YAXIS_LABEL_DEFAULT); } + else if (index == YAXIS.Q.idx) { + label = msg(I18N_Q_YAXIS_LABEL, I18N_Q_YAXIS_LABEL_DEFAULT); + } return label; } @@ -166,7 +176,7 @@ @Override public void doOut( ArtifactAndFacet artifactAndFacet, - Document attr, + ThemeDocument attr, boolean visible ) { String name = artifactAndFacet.getFacetName(); @@ -186,11 +196,14 @@ Processor woutp = new WOutProcessor(); Processor bedp = new BedheightProcessor(); - WOutProcessor processor = new WOutProcessor(); - if (processor.canHandle(name)) { - processor.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); + Processor qoutp = new QOutProcessor(); + if (woutp.canHandle(name)) { + woutp.doOut(this, artifactAndFacet, attr, visible, YAXIS.W.idx); } - if (name.equals(MIDDLE_BED_HEIGHT_SINGLE) || name.equals(MIDDLE_BED_HEIGHT_EPOCH)) { + else if (qoutp.canHandle(name)) { + qoutp.doOut(this, artifactAndFacet, attr, visible, YAXIS.Q.idx); + } + else if (name.equals(MIDDLE_BED_HEIGHT_SINGLE) || name.equals(MIDDLE_BED_HEIGHT_EPOCH)) { doHeightOut( (MiddleBedHeightData) artifactAndFacet.getData(context), artifactAndFacet, @@ -226,6 +239,13 @@ attr, visible); } + else if (name.equals(STATIC_WQKMS_Q)) { + doQOut( + (WQKms) artifactAndFacet.getData(context), + artifactAndFacet, + attr, + visible); + } else if (bedp.canHandle(name)) { bedp.doOut(this, artifactAndFacet, attr, visible, YAXIS.P.idx); } @@ -249,7 +269,7 @@ protected void doHeightOut( MiddleBedHeightData data, ArtifactAndFacet aandf, - Document theme, + ThemeDocument theme, boolean visible ) { logger.debug("MiddleBedHeightGenerator.doMainChannelOut"); @@ -283,13 +303,47 @@ * @param 
visible whether or not visible. */ protected void doArea( - Object o, + Object o, ArtifactAndFacet aandf, - Document theme, - boolean visible + ThemeDocument theme, + boolean visible ) { logger.debug("FlowVelocityGenerator.doArea"); logger.warn("TODO: Implement FlowVelocityGenerator.doArea"); } + + + // TODO method borrowed from LongitudinalSectionGenerator. + // Extract in an Processor + /** + * Process the output for Q facets in a longitudinal section curve. + * + * @param wqkms An array of WQKms values. + * @param aandf The facet and artifact. This facet does NOT support any data objects. Use + * D4EArtifact.getNativeFacet() instead to retrieve a Facet which supports + * data. + * @param theme The theme that contains styling information. + * @param visible The visibility of the curve. + */ + protected void doQOut( + WQKms wqkms, + ArtifactAndFacet aandf, + ThemeDocument theme, + boolean visible + ) { + logger.debug("MiddleBedHeightGenerator.doQOut"); + + XYSeries series = new StyledXYSeries(aandf.getFacetDescription(), theme); + + StyledSeriesBuilder.addStepPointsKmQ(series, wqkms); + + addAxisSeries(series, YAXIS.Q.idx, visible); + + /* + if (needInvertAxis(wqkms)) { + setInverted(true); + } + */ + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/exports/OutGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/OutGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -18,6 +18,7 @@ import org.dive4elements.artifacts.Artifact; import org.dive4elements.artifacts.CallContext; import org.dive4elements.river.collections.D4EArtifactCollection; +import org.dive4elements.river.themes.ThemeDocument; /** @@ -63,7 +64,7 @@ * producing the output. * @param visible Specifies, if this output should be visible or not. */ - void doOut(ArtifactAndFacet bundle, Document attr, boolean visible); + void doOut(ArtifactAndFacet bundle, ThemeDocument attr, boolean visible); /** * Writes the collected output of all artifacts specified in the
--- a/artifacts/src/main/java/org/dive4elements/river/exports/OutputHelper.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/OutputHelper.java Thu Sep 12 10:13:09 2013 +0200 @@ -40,6 +40,7 @@ import org.dive4elements.river.artifacts.model.ManagedDomFacet; import org.dive4elements.river.artifacts.model.ManagedFacet; import org.dive4elements.river.themes.Theme; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.themes.ThemeFactory; public class OutputHelper { @@ -71,11 +72,13 @@ boolean debug = log.isDebugEnabled(); if (debug) { - log.debug("D4EArtifactCollection.doOut: " + outName); + log.debug("OutputHelper.doOut: " + outName); } ThemeList themeList = new ThemeList(attributes); + ThemeDocument themeDoc = new ThemeDocument(attributes); + List<ArtifactAndFacet> dataProviders = doBlackboardPass(themeList, context, outName); @@ -109,7 +112,7 @@ if (outName.equals("sq_overview")) { generator.doOut( dataProviders.get(i), - attributes, + themeDoc, theme.getActive() == 1); } else { @@ -141,7 +144,7 @@ * * @return an attribute in form of a document. */ - protected Document getFacetThemeFromAttribute( + protected ThemeDocument getFacetThemeFromAttribute( String uuid, String outName, String facet, @@ -154,7 +157,7 @@ if (debug) { log.debug( - "D4EArtifactCollection.getFacetThemeFromAttribute(facet=" + "OutputHelper.getFacetThemeFromAttribute(facet=" + facet + ", index=" + index + ")"); } @@ -219,7 +222,7 @@ Document doc = XMLUtils.newDocument(); doc.appendChild(doc.importNode(theme, true)); - return doc; + return new ThemeDocument(doc); } /** * Adds the theme of a facet to a CollectionItem's attribute. @@ -235,7 +238,7 @@ Theme t, CallContext context) { - log.debug("D4EArtifactCollection.addThemeToAttribute: " + uuid); + log.debug("OutputHelper.addThemeToAttribute: " + uuid); if (t == null) { log.warn("Theme is empty - cancel adding it to attribute!"); @@ -380,7 +383,7 @@ protected Artifact getArtifact(String uuid, CallContext context) throws ArtifactDatabaseException { - log.debug("D4EArtifactCollection.getArtifact"); + log.debug("OutputHelper.getArtifact"); Backend backend = Backend.getInstance(); PersistentArtifact persistent = backend.getArtifact(uuid); @@ -409,7 +412,7 @@ boolean debug = log.isDebugEnabled(); if (debug) { - log.debug("D4EArtifactCollection.initItemAttribute"); + log.debug("OutputHelper.initItemAttribute"); } Theme t = getThemeForFacet(uuid, facet, pattern, index, outName, context); @@ -447,7 +450,7 @@ String outName, CallContext context) { - log.info("D4EArtifactCollection.getThemeForFacet: " + facet); + log.info("OutputHelper.getThemeForFacet: " + facet); RiverContext flysContext = context instanceof RiverContext ? (RiverContext) context
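OutputHelper is the caller-side counterpart of the OutGenerator interface change: the collection attribute DOM is wrapped into a ThemeDocument (and getFacetThemeFromAttribute() now returns a ThemeDocument as well) before being handed to the generator's doOut(). A minimal sketch of that hand-off, assuming only the constructor and the interface method shown in the diff; the helper class is illustrative.

    import org.w3c.dom.Document;

    import org.dive4elements.artifactdatabase.state.ArtifactAndFacet;
    import org.dive4elements.river.exports.OutGenerator;
    import org.dive4elements.river.themes.ThemeDocument;

    // Illustration only: wrap the raw attribute DOM and pass it to the generator.
    public class ThemeHandOffSketch {

        public static void renderFacet(
            OutGenerator generator,
            ArtifactAndFacet bundle,
            Document attributes,   // collection attribute document, as before
            boolean visible
        ) {
            ThemeDocument themeDoc = new ThemeDocument(attributes);
            generator.doOut(bundle, themeDoc, visible);
        }
    }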
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ReferenceCurveGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ReferenceCurveGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -15,6 +15,7 @@ import org.dive4elements.river.artifacts.model.WWAxisTypes; import org.dive4elements.river.jfree.RiverAnnotation; import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; import org.dive4elements.river.utils.Formatter; import java.awt.geom.Point2D; @@ -25,7 +26,6 @@ import org.jfree.chart.axis.TickUnits; import org.jfree.chart.axis.ValueAxis; import org.jfree.data.xy.XYSeries; -import org.w3c.dom.Document; /** * An OutGenerator that generates reference curves. @@ -137,7 +137,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document theme, + ThemeDocument theme, boolean visible ) { String name = artifactFacet.getFacetName(); @@ -187,9 +187,9 @@ /** Register DataSeries with (maybe transformed) points. */ public void doReferenceOut( - Object data, - Document theme, - boolean visible + Object data, + ThemeDocument theme, + boolean visible ) { WW ww = (WW)data; @@ -216,10 +216,10 @@ // TODO resolve duplicate in DurationCurveGenerator protected void doPointOut( - Point2D point, + Point2D point, ArtifactAndFacet aandf, - Document theme, - boolean visible + ThemeDocument theme, + boolean visible ){ logger.debug("ReferenceCurveGenerator.doPointOut");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ReportGenerator.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ReportGenerator.java Thu Sep 12 10:13:09 2013 +0200 @@ -24,6 +24,7 @@ import org.dive4elements.river.artifacts.model.Calculation; import org.dive4elements.river.collections.D4EArtifactCollection; +import org.dive4elements.river.themes.ThemeDocument; import org.w3c.dom.Document; @@ -60,7 +61,7 @@ @Override public void doOut( ArtifactAndFacet artifactFacet, - Document attr, + ThemeDocument attr, boolean visible ) { logger.debug("doOut");
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ShapeExporter.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ShapeExporter.java Thu Sep 12 10:13:09 2013 +0200 @@ -14,6 +14,7 @@ import org.dive4elements.artifacts.common.utils.FileTools; import org.dive4elements.artifacts.common.utils.XMLUtils; import org.dive4elements.river.collections.D4EArtifactCollection; +import org.dive4elements.river.themes.ThemeDocument; import org.w3c.dom.Document; @@ -47,7 +48,7 @@ } @Override - public void doOut(ArtifactAndFacet bundle, Document attr, boolean visible) { + public void doOut(ArtifactAndFacet bundle, ThemeDocument attr, boolean visible) { String name = bundle.getFacetName(); if (!isFacetValid(name)) {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/StyledSeriesBuilder.java Sat Jun 29 00:20:58 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/StyledSeriesBuilder.java Thu Sep 12 10:13:09 2013 +0200 @@ -40,14 +40,63 @@ /** + * Add points to series, create gaps if certain distance between + * points is met and scale the Y value. + * + * @param series Series to add points to. + * @param points Points to add to series, points[0] to 1st dim, points[1] + * to 2nd dim. + * @param skipNANs if true, skip NAN values in points parameter. Otherwise, + * the NaNs lead to gaps in graph. + * @param distance if two consecutive entries in points[0] are more + * than distance apart, create a NaN value to skip in display. + * Still, create a line segment. + * @param factor Factor by which to scale the y value (points[1]). + */ + public static void addPointsFactorY(XYSeries series, + double[][] points, + boolean skipNANs, + double distance, + double factor + ) { + if (points == null || points.length <= 1) { + return; + } + double [] xPoints = points[0]; + double [] yPoints = points[1]; + for (int i = 0; i < xPoints.length; i++) { + if (skipNANs && + (Double.isNaN(xPoints[i]) || Double.isNaN(yPoints[i]))) { + logger.warn ("Skipping NaN in StyledSeriesBuilder."); + continue; + } + // Create gap if distance >= distance. + if (i != 0 && Math.abs(xPoints[i-1] - xPoints[i]) >= distance) { + // Create at least a small segment for last point. + if (!Double.isNaN(yPoints[i-1])) { + series.add(xPoints[i-1]+0.99d*(distance)/2.d, yPoints[i-1]*factor, false); + } + + if (!Double.isNaN(yPoints[i-1]) && !Double.isNaN(yPoints[i])) { + series.add((xPoints[i-1]+xPoints[i])/2.d, Double.NaN, false); + } + } + series.add(xPoints[i], yPoints[i]*factor, false); + } + } + + + /** * Add points to series, create gaps if certain distance between points is met. * * @param series Series to add points to. * @param points Points to add to series, points[0] to 1st dim, points[1] * to 2nd dim. - * @param skipNANs if true, skip NAN values in points parameter. + * @param skipNANs if true, skip NAN values in points parameter. Otherwise, + * the NaNs lead to gaps in graph. * @param distance if two consecutive entries in points[0] are more * than distance apart, create a NaN value to skip in display. + * Still, create a line segment. */ public static void addPoints(XYSeries series, double[][] points, boolean skipNANs, double distance) { if (points == null || points.length <= 1) { @@ -63,6 +112,11 @@ } // Create gap if distance >= distance. if (i != 0 && Math.abs(xPoints[i-1] - xPoints[i]) >= distance) { + // Create at least a small segment for last point. + if (!Double.isNaN(yPoints[i-1])) { + series.add(xPoints[i-1]+0.99d*(distance)/2.d, yPoints[i-1], false); + } + if (!Double.isNaN(yPoints[i-1]) && !Double.isNaN(yPoints[i])) { series.add((xPoints[i-1]+xPoints[i])/2.d, Double.NaN, false); } @@ -250,6 +304,27 @@ * W values and scaling it with wScale. * * @param series Series to add points to. + * @param qws to add to series. + * @param wAdd Value to add to each Q while adding to series. + * @param wScale multiply with + */