changeset 9021:efc4d84a59e4
Merge
author | gernotbelger |
---|---|
date | Mon, 23 Apr 2018 16:18:39 +0200 |
parents | e4a423b3e94e (current diff) 5974bba60e4e (diff) |
children | ea3908c3cbb6 |
diffstat | 24 files changed, 729 insertions(+), 63 deletions(-) |
--- a/artifacts/doc/conf/conf.xml Mon Apr 23 13:00:41 2018 +0200
+++ b/artifacts/doc/conf/conf.xml Mon Apr 23 16:18:39 2018 +0200
@@ -181,6 +181,10 @@
                           ttl="3600000"
                           artifact="org.dive4elements.river.artifacts.uinfo.UINFOArtifact">org.dive4elements.artifactdatabase.DefaultArtifactFactory</artifact-factory>
+        <artifact-factory name="tkhxf" description="Factory to create an artifact used in sinfo datacage."
+                          ttl="3600000"
+                          artifact="org.dive4elements.river.artifacts.sinfo.predefinedtkh.PredefinedTkhArtifact">org.dive4elements.artifactdatabase.DefaultArtifactFactory</artifact-factory>
+
 
     </artifact-factories>
 
     <user-factory name="default" description="Factory to create new users">org.dive4elements.artifactdatabase.DefaultUserFactory</user-factory>
--- a/artifacts/doc/conf/generators/longitudinal-diagram-defaults.xml Mon Apr 23 13:00:41 2018 +0200
+++ b/artifacts/doc/conf/generators/longitudinal-diagram-defaults.xml Mon Apr 23 16:18:39 2018 +0200
@@ -55,6 +55,8 @@
     <processor class="org.dive4elements.river.artifacts.sinfo.common.VelocityProcessor" axis="Velocity"/>
     <processor class="org.dive4elements.river.artifacts.sinfo.common.FlowDepthDevelopmentProcessor" axis="flowdepthDevelopmentAxis"/>
     <processor class="org.dive4elements.river.artifacts.sinfo.common.FlowDepthDevelopmentPerYearProcessor" axis="flowdepthDevelopmentPerYearAxis"/>
+
+    <processor class="org.dive4elements.river.artifacts.sinfo.common.PredefinedTkhProcessor" axis="tkhAxis"/>
 
     <chartextender class="org.dive4elements.river.artifacts.sinfo.flowdepth.FlowDepthChartExtender" />
 </longitudinal-defaults>
\ No newline at end of file
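The processor registered here only becomes active for facets it declares itself responsible for; the axis attribute ("tkhAxis") tells the diagram generator where to draw them. A minimal sketch of that dispatch pattern, assuming simplified class names rather than the real D4E base classes (only the facet-type string is taken from the code below):

```java
// Sketch of how a diagram generator picks a processor for a facet: each
// processor announces the facet types it handles, and the generator only hands
// it matching facets on the configured axis ("tkhAxis" above). All names except
// the facet-type constant are illustrative assumptions.
import java.util.Collections;
import java.util.Set;

abstract class FacetProcessorSketch {
    private final Set<String> handledFacetTypes;

    protected FacetProcessorSketch(final Set<String> handledFacetTypes) {
        this.handledFacetTypes = handledFacetTypes;
    }

    /** Called by the generator for every facet of the diagram. */
    public final boolean canProcess(final String facetType) {
        return this.handledFacetTypes.contains(facetType);
    }
}

final class PredefinedTkhProcessorSketch extends FacetProcessorSketch {
    static final String FACET_PREDEFINED_TKH = "sinfo_facet_predefined_tkh";

    PredefinedTkhProcessorSketch() {
        super(Collections.singleton(FACET_PREDEFINED_TKH));
    }
}
```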
--- a/artifacts/doc/conf/meta-data.xml Mon Apr 23 13:00:41 2018 +0200 +++ b/artifacts/doc/conf/meta-data.xml Mon Apr 23 16:18:39 2018 +0200 @@ -279,14 +279,14 @@ <dc:call-macro name="basedata_7_waterlevels"/> </fixanalysis> </minfo> - <dc:call-macro name="basedata_3_officials"/> + <dc:call-macro name="basedata_3_officials"/> <dc:call-macro name="basedata_2_fixations"/> <sinfo> <sinfo_additional_ls> <dc:call-macro name="basedata_1_additionals-sinfo-with-q"/> <dc:call-macro name="basedata_1_additionals-sinfo-without-q"/> - </sinfo_additional_ls> - </sinfo> + </sinfo_additional_ls> + </sinfo> </dc:when> <dc:when test="$out = 'sinfo_flow_depth'"> @@ -1307,50 +1307,50 @@ </dc:call-macro> </dc:macro> - <dc:macro name="basedata_1_additionals-sinfo-with-q_macro"> - <dc:filter expr="$kind=1 and $sinfo_selection='Q'"> - <dc:if test="dc:has-result()"> - <sinfo_additional_ls_withQ> - <dc:group expr="dc:replace($wst_description, 'Zus.Längsschnitte/', '')"> - <additional name="{dc:group-key()}"> - <dc:for-each> - <dc:macro-body/> - </dc:for-each> - </additional> - </dc:group> - </sinfo_additional_ls_withQ> - </dc:if> - </dc:filter> - </dc:macro> - - <dc:macro name="basedata_1_additionals-sinfo-with-q"> - <dc:call-macro name="basedata_1_additionals-sinfo-with-q_macro"> - <column name="{$wst_column_name}" ids="additionals-wstv-{$wst_column_position}-{$wst_id}" factory="staticwqkms" target_out="{$out}" info="{$info} [km {$deffrom} - {$defto}]"/> - </dc:call-macro> - </dc:macro> - - <dc:macro name="basedata_1_additionals-sinfo-without-q_macro"> - <dc:filter expr="$kind=1 and $sinfo_selection='W'"> - <dc:if test="dc:has-result()"> - <sinfo_additional_ls_withoutQ> - <dc:group expr="dc:replace($wst_description, 'Zus.Längsschnitte/', '')"> - <additional name="{dc:group-key()}"> - <dc:for-each> - <dc:macro-body/> - </dc:for-each> - </additional> - </dc:group> - </sinfo_additional_ls_withoutQ> - </dc:if> - </dc:filter> - </dc:macro> - - <dc:macro name="basedata_1_additionals-sinfo-without-q"> - <dc:call-macro name="basedata_1_additionals-sinfo-without-q_macro"> - <column name="{$wst_column_name}" ids="additionals-wstv-{$wst_column_position}-{$wst_id}" factory="staticwkms" target_out="{$out}" info="{$info} [km {$deffrom} - {$defto}]"/> - </dc:call-macro> - </dc:macro> - + <dc:macro name="basedata_1_additionals-sinfo-with-q_macro"> + <dc:filter expr="$kind=1 and $sinfo_selection='Q'"> + <dc:if test="dc:has-result()"> + <sinfo_additional_ls_withQ> + <dc:group expr="dc:replace($wst_description, 'Zus.Längsschnitte/', '')"> + <additional name="{dc:group-key()}"> + <dc:for-each> + <dc:macro-body/> + </dc:for-each> + </additional> + </dc:group> + </sinfo_additional_ls_withQ> + </dc:if> + </dc:filter> + </dc:macro> + + <dc:macro name="basedata_1_additionals-sinfo-with-q"> + <dc:call-macro name="basedata_1_additionals-sinfo-with-q_macro"> + <column name="{$wst_column_name}" ids="additionals-wstv-{$wst_column_position}-{$wst_id}" factory="staticwqkms" target_out="{$out}" info="{$info} [km {$deffrom} - {$defto}]"/> + </dc:call-macro> + </dc:macro> + + <dc:macro name="basedata_1_additionals-sinfo-without-q_macro"> + <dc:filter expr="$kind=1 and $sinfo_selection='W'"> + <dc:if test="dc:has-result()"> + <sinfo_additional_ls_withoutQ> + <dc:group expr="dc:replace($wst_description, 'Zus.Längsschnitte/', '')"> + <additional name="{dc:group-key()}"> + <dc:for-each> + <dc:macro-body/> + </dc:for-each> + </additional> + </dc:group> + </sinfo_additional_ls_withoutQ> + </dc:if> + </dc:filter> + </dc:macro> + + <dc:macro 
name="basedata_1_additionals-sinfo-without-q"> + <dc:call-macro name="basedata_1_additionals-sinfo-without-q_macro"> + <column name="{$wst_column_name}" ids="additionals-wstv-{$wst_column_position}-{$wst_id}" factory="staticwkms" target_out="{$out}" info="{$info} [km {$deffrom} - {$defto}]"/> + </dc:call-macro> + </dc:macro> + <dc:macro name="basedata_1_additionals_wq"> <dc:call-macro name="basedata_1_additionals_macro"> <column name="{$wst_column_name}" ids="base_data-wstv-{$wst_column_position}-{$wst_id}" factory="wqinterpol" target_out="{$out}" info="{$info} [km {$deffrom} - {$defto}]"/> @@ -1626,6 +1626,10 @@ <dc:call-macro name="minfo-heights-max"/> <dc:call-macro name="minfo-heights-min"/> </bed_heights_minmax> + + <sinfo_predefined_tkh> + <dc:call-macro name="sinfo_predefined_tkh"/> + </sinfo_predefined_tkh> </sinfo> </dc:macro> @@ -3460,5 +3464,30 @@ </dc:filter> </dc:macro> + <!-- tkh imported from CSV-files for S-INFO --> + <dc:macro name="sinfo_predefined_tkh"> + <dc:context> + <dc:statement> + SELECT tc.tkh_id AS tkh_id, tc.id AS tkh_col_id, tc.name AS tkh_col_name, + MIN(t.filename) AS tkh_name, MIN(tv.station) AS km_min, MAX(tv.station) AS km_max, + MIN(t.filename) || '<br />' || MIN(t.sounding_info) || '<br />' || MIN(t.evaluation_by) AS info + FROM tkh_column tc INNER JOIN tkh t ON tc.tkh_id=t.id + INNER JOIN tkh_values tv ON tc.id=tv.tkh_column_id + WHERE t.river_id=${river_id} + GROUP BY tc.tkh_id, tc.id + ORDER BY tc.id + </dc:statement> + <dc:if test="dc:has-result()"> + <dc:group expr="$tkh_name"> + <tkhx name="{dc:group-key()}"> + <dc:for-each> + <tkhcolumn name="{$tkh_col_name}" ids="tkhx-{$tkh_col_id}-{$tkh_id}" factory="tkhxf" target_out="{$out}" info="{$tkh_name}<br />[km {$km_min} - {$km_max}]"/> + </dc:for-each> + </tkhx> + </dc:group> + </dc:if> + </dc:context> + </dc:macro> + </datacage> </dc:template> \ No newline at end of file
--- a/artifacts/doc/conf/themes.xml Mon Apr 23 13:00:41 2018 +0200
+++ b/artifacts/doc/conf/themes.xml Mon Apr 23 16:18:39 2018 +0200
@@ -440,5 +440,7 @@
         <mapping from="sinfo_facet_flow_depth_development_per_year.filtered" to="SInfoFlowDepthDevelopmentPerYear" />
         <mapping from="sinfo_facet_waterlevel_difference.filtered" to="SInfoWaterlevelDifference" />
         <mapping from="sinfo_facet_bedheight_difference.filtered" to="SInfoBedHeightDifference" />
+
+        <mapping from="sinfo_facet_predefined_tkh" to="SInfoPredefinedTkh" />
     </mappings>
 </themes>
\ No newline at end of file
--- a/artifacts/doc/conf/themes/default.xml Mon Apr 23 13:00:41 2018 +0200 +++ b/artifacts/doc/conf/themes/default.xml Mon Apr 23 16:18:39 2018 +0200 @@ -2956,7 +2956,7 @@ <field name="showarea" type="boolean" display="Flaeche anzeigen" default="true" /> <field name="areabgcolor" type="Color" default="245, 245, 220" display="Füllfarbe" /> <field name="areabgpattern" type="areapattern" default="patternFill" display="Flächentyp" /> - <field name="areatransparency" type="int" default="10" /> + <field name="areatransparency" type="int" default="20" /> <field name="showarealabel" type="boolean" display="Flächenbeschriftung anzeigen" default="false" hint="hidden"/> </fields> @@ -2992,5 +2992,16 @@ <fields> <field name="linecolor" type="Color" display="Linienfarbe" default="255, 0, 0" /> </fields> - </theme> + </theme> + <theme name="SInfoPredefinedTkh"> + <inherits> + <inherit from="SInfoTkh" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Linienfarbe" default="128, 64, 10" /> + <field name="areabgcolor" type="Color" default="255, 240, 224" display="Füllfarbe" /> + <field name="areabgpattern" type="areapattern" default="patternFill" display="Flächentyp" /> + <field name="areatransparency" type="int" default="20" /> + </fields> + </theme> </themegroup> \ No newline at end of file
--- a/artifacts/doc/conf/themes/second.xml Mon Apr 23 13:00:41 2018 +0200 +++ b/artifacts/doc/conf/themes/second.xml Mon Apr 23 16:18:39 2018 +0200 @@ -2981,4 +2981,15 @@ <field name="linecolor" type="Color" display="Linienfarbe" default="255, 0, 0" /> </fields> </theme> + <theme name="SInfoPredefinedTkh"> + <inherits> + <inherit from="SInfoTkh" /> + </inherits> + <fields> + <field name="linecolor" type="Color" display="Linienfarbe" default="128, 64, 10" /> + <field name="areabgcolor" type="Color" default="255, 240, 224" display="Füllfarbe" /> + <field name="areabgpattern" type="areapattern" default="patternFill" display="Flächentyp" /> + <field name="areatransparency" type="int" default="20" /> + </fields> + </theme> </themegroup> \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/PredefinedTkhProcessor.java Mon Apr 23 16:18:39 2018 +0200 @@ -0,0 +1,118 @@ +/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde + * Software engineering by + * Björnsen Beratende Ingenieure GmbH + * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.artifacts.sinfo.common; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.log4j.Logger; +import org.dive4elements.artifactdatabase.state.ArtifactAndFacet; +import org.dive4elements.artifacts.CallContext; +import org.dive4elements.river.exports.DiagramGenerator; +import org.dive4elements.river.exports.StyledSeriesBuilder; +import org.dive4elements.river.jfree.StyledAreaSeriesCollection; +import org.dive4elements.river.jfree.StyledXYSeries; +import org.dive4elements.river.themes.ThemeDocument; + +/** + * Processor to generate a data series for tkh data loaded from the database + * + * @author Matthias Schäfer + * + */ +public class PredefinedTkhProcessor extends AbstractSInfoProcessor { + + private final static Logger log = Logger.getLogger(PredefinedTkhProcessor.class); + + public static final String FACET_PREDEFINED_TKH = "sinfo_facet_predefined_tkh"; + + private static final String I18N_AXIS_LABEL = "sinfo.chart.tkh.section.yaxis.label"; + + private static final String I18N_FACET_TKH_DESCRIPTION = "sinfo.facet.tkh.description"; + + private static final String SINFO_CHART_TKH_YAXIS_LABEL = "sinfo.chart.tkh.yaxis.label"; + + private static final Set<String> HANDLED_FACET_TYPES = new HashSet<>(); + + static { + HANDLED_FACET_TYPES.add(FACET_PREDEFINED_TKH); + } + + public PredefinedTkhProcessor() { + super(I18N_AXIS_LABEL, HANDLED_FACET_TYPES); + } + + protected String generateSeries1(final DiagramGenerator generator, final ArtifactAndFacet bundle, final ThemeDocument theme, final boolean visible) { + final CallContext context = generator.getCallContext(); + final Map<String, String> metaData = bundle.getFacet().getMetaData(bundle.getArtifact(), context); + final StyledXYSeries series = new StyledXYSeries(bundle.getFacetDescription(), theme); + series.putMetaData(metaData, bundle.getArtifact(), context); + + final Object raw = bundle.getData(context); + if (raw == null) { + return null; + } + if (!(raw instanceof double[][])) { + log.error("Unkown datatype: " + raw.getClass().getName()); + return null; + } + final double[][] data = (double[][]) raw; + StyledSeriesBuilder.addPoints(series, data, true); + + // REMARK: we add " " because the description is misused as id, which must be unique. 
+ final StyledXYSeries seriesDown = new StyledXYSeries(bundle.getFacetDescription() + " ", theme); + // final double[][] pointsDown = data.getTkhDownPoints(); + final double[][] datadown = (double[][]) raw; + for (int i = 0; i <= datadown[0].length - 1; i++) + datadown[1][i] = 0; + StyledSeriesBuilder.addPoints(seriesDown, datadown, true); + + final StyledAreaSeriesCollection area = new StyledAreaSeriesCollection(theme); + area.setMode(StyledAreaSeriesCollection.FILL_MODE.BETWEEN); + area.addSeries(series); + area.addSeries(seriesDown); + generator.addAreaSeries(area, getAxisName(), visible); + // generator.addAxisSeries(series, getAxisName(), visible); + return null; + } + + @Override + protected String generateSeries(final DiagramGenerator generator, final ArtifactAndFacet bundle, final ThemeDocument theme, final boolean visible) { + final CallContext context = generator.getCallContext(); + + final String facetName = bundle.getFacetName(); + final AbstractTkhCalculationResult data = (AbstractTkhCalculationResult) bundle.getData(context); + if (data == null) { + // Check has been here before so we keep it for security reasons + // this should never happen though. + throw new IllegalStateException("Data is null for facet: " + facetName); + } + + final StyledXYSeries seriesUp = new StyledXYSeries(bundle.getFacetDescription(), theme); + final double[][] pointsUp = data.getTkhUpPoints(); + StyledSeriesBuilder.addPoints(seriesUp, pointsUp, true); + + // REMARK: we add " " because the description is misused as id, which must be unique. + final StyledXYSeries seriesDown = new StyledXYSeries(bundle.getFacetDescription() + " ", theme); + final double[][] pointsDown = data.getTkhDownPoints(); + StyledSeriesBuilder.addPoints(seriesDown, pointsDown, true); + + final StyledAreaSeriesCollection area = new StyledAreaSeriesCollection(theme); + area.setMode(StyledAreaSeriesCollection.FILL_MODE.BETWEEN); + area.addSeries(seriesUp); + area.addSeries(seriesDown); + + generator.addAreaSeries(area, getAxisName(), visible); + + return null; + } +} \ No newline at end of file
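generateSeries() renders the transport-body height as a band: an upper and a lower series are added to a StyledAreaSeriesCollection with FILL_MODE.BETWEEN. The same idea with stock JFreeChart classes, which D4E builds on, looks roughly like the sketch below; it is an illustration of the banding concept, not the actual D4E rendering path:

```java
// Band between two series, sketched with plain JFreeChart instead of the
// D4E StyledXYSeries / StyledAreaSeriesCollection wrappers. The example data
// (a 20 cm tkh split into +10/-10 cm around zero) mirrors the mobile-bed case.
import org.jfree.chart.ChartFactory;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.XYDifferenceRenderer;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;

public class TkhBandChartSketch {
    public static void main(String[] args) {
        final XYSeries up = new XYSeries("tkh up");
        final XYSeries down = new XYSeries("tkh down");
        for (double km = 10.0; km <= 12.0; km += 0.5) {
            up.add(km, 10.0);
            down.add(km, -10.0);
        }
        final XYSeriesCollection dataset = new XYSeriesCollection();
        dataset.addSeries(up);
        dataset.addSeries(down);
        final JFreeChart chart = ChartFactory.createXYLineChart("TKH band", "km", "tkh [cm]",
                dataset, PlotOrientation.VERTICAL, true, false, false);
        final XYPlot plot = chart.getXYPlot();
        plot.setRenderer(new XYDifferenceRenderer()); // fills the area between series 0 and 1
    }
}
```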
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/RiverInfoProvider.java Mon Apr 23 13:00:41 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/RiverInfoProvider.java Mon Apr 23 16:18:39 2018 +0200
@@ -100,7 +100,7 @@
         if (this.showAllGauges)
             return this.gaugeIndex.findGauge(km);
 
-        if (this.refGauge.getRange().contains(km))
+        if ((this.refGauge != null) && this.refGauge.getRange().contains(km))
             return this.refGauge;
 
         return null;
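The added null check covers artifacts that have no reference gauge configured; findGauge() should then simply report "no gauge" instead of failing. A trimmed-down sketch with stand-in Gauge and Range classes, not the real backend types:

```java
// Simplified stand-in for RiverInfoProvider.findGauge(): without the added null
// check a missing reference gauge causes a NullPointerException instead of
// simply yielding "no gauge".
final class GaugeLookupSketch {
    static final class Range {
        final double from, to;
        Range(final double from, final double to) { this.from = from; this.to = to; }
        boolean contains(final double km) { return km >= this.from && km <= this.to; }
    }
    static final class Gauge {
        final Range range;
        Gauge(final Range range) { this.range = range; }
        Range getRange() { return this.range; }
    }

    private final Gauge refGauge; // may legitimately be null

    GaugeLookupSketch(final Gauge refGauge) { this.refGauge = refGauge; }

    Gauge findGauge(final double km) {
        if ((this.refGauge != null) && this.refGauge.getRange().contains(km))
            return this.refGauge;
        return null;
    }

    public static void main(final String[] args) {
        System.out.println(new GaugeLookupSketch(null).findGauge(100.0)); // prints null, no NPE
    }
}
```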
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/predefinedtkh/PredefinedTkhArtifact.java Mon Apr 23 16:18:39 2018 +0200 @@ -0,0 +1,129 @@ +/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde + * Software engineering by + * Björnsen Beratende Ingenieure GmbH + * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.artifacts.sinfo.predefinedtkh; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.log4j.Logger; +import org.dive4elements.artifactdatabase.state.DefaultOutput; +import org.dive4elements.artifactdatabase.state.Facet; +import org.dive4elements.artifactdatabase.state.FacetActivity; +import org.dive4elements.artifacts.Artifact; +import org.dive4elements.artifacts.ArtifactFactory; +import org.dive4elements.artifacts.CallMeta; +import org.dive4elements.artifacts.common.utils.XMLUtils; +import org.dive4elements.river.artifacts.AbstractStaticStateArtifact; +import org.dive4elements.river.artifacts.D4EArtifact; +import org.dive4elements.river.artifacts.model.FacetTypes; +import org.dive4elements.river.artifacts.sinfo.common.PredefinedTkhProcessor; +import org.dive4elements.river.artifacts.states.StaticState; +import org.dive4elements.river.model.sinfo.TkhColumn; +import org.w3c.dom.Document; + +/** + * Display of a tkh data series loaded from database + * + * @author Matthias Schäfer + * + */ +public class PredefinedTkhArtifact extends AbstractStaticStateArtifact implements FacetTypes { + + /** The log for this class. */ + private static Logger log = Logger.getLogger(PredefinedTkhArtifact.class); + + /** Artifact name. */ + private static final String NAME = "tkhx"; + + static { + // TODO: Move to configuration. + FacetActivity.Registry.getInstance().register(NAME, FacetActivity.INACTIVE); + } + + public static final String STATIC_STATE_NAME = "state.predefined_tkh.static"; + + /** + * Trivial Constructor. + */ + public PredefinedTkhArtifact() { + log.debug("new PredefinedTkhArtifact"); + } + + @Override + public String getName() { + return NAME; + } + + /** + * Gets called from factory, to set things up. 
+ */ + @Override + public void setup(final String identifier, final ArtifactFactory factory, final Object context, final CallMeta callMeta, final Document data, + final List<Class> loadFacets) { + + log.debug("PredefinedTkhArtifact.setup"); + + if (log.isDebugEnabled()) { + log.debug(XMLUtils.toString(data)); + } + + final String code = getDatacageIDValue(data); + + createFacets(callMeta, code); + + super.setup(identifier, factory, context, callMeta, data, loadFacets); + } + + private void createFacets(final CallMeta callMeta, final String code) { + if (code == null) + return; + final String[] parts = code.split("-"); + if (parts.length < 3) { + log.error("Invalid datacage ID '" + code + "'"); + return; + } + addStringData("tkh_col_id", parts[1]); + addStringData("tkh_id", parts[2]); + final TkhColumn tkhcol = TkhColumn.getTkhColumnById(Integer.parseInt(parts[1])); + + final ArrayList<Facet> facets = new ArrayList<>(1); + facets.add(new PredefinedTkhFacet(PredefinedTkhProcessor.FACET_PREDEFINED_TKH, tkhcol.getName() + " (" + tkhcol.getTkh().getFilename() + ")")); + addFacets(STATIC_STATE_NAME, facets); + } + + @Override + protected void initStaticState() { + + log.debug("initStaticState " + getName() + " " + identifier()); + + final StaticState state = new StaticState(STATIC_STATE_NAME); + final DefaultOutput output = new DefaultOutput("general", "general", "image/png", "chart"); + + final List<Facet> facets = getFacets(STATIC_STATE_NAME); + output.addFacets(facets); + state.addOutput(output); + + setStaticState(state); + } + + @Override + protected void initialize(final Artifact artifact, final Object context, final CallMeta meta) { + // do not clone facets, etc. from master artifact + + log.debug("initialize"); + importData((D4EArtifact) artifact, "river"); + importData((D4EArtifact) artifact, "ld_from"); + importData((D4EArtifact) artifact, "ld_to"); + + log.debug("ld_from " + getDataAsString("ld_from")); + log.debug("ld_to " + getDataAsString("ld_to")); + } +} \ No newline at end of file
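createFacets() relies on the ids attribute built by the datacage macro in meta-data.xml ("tkhx-{tkh_col_id}-{tkh_id}"). A plain-Java sketch of that round trip, without any artifact or database involved:

```java
// Round trip between the datacage id built in meta-data.xml
// (ids="tkhx-{tkh_col_id}-{tkh_id}") and the parsing done in createFacets().
public final class DatacageIdSketch {

    static String buildId(final int tkhColId, final int tkhId) {
        return "tkhx-" + tkhColId + "-" + tkhId;
    }

    /** Mirrors PredefinedTkhArtifact.createFacets(): parts[1] = column id, parts[2] = series id. */
    static int[] parseId(final String code) {
        final String[] parts = code.split("-");
        if (parts.length < 3)
            throw new IllegalArgumentException("Invalid datacage ID '" + code + "'");
        return new int[] { Integer.parseInt(parts[1]), Integer.parseInt(parts[2]) };
    }

    public static void main(final String[] args) {
        final String id = buildId(17, 4);                  // "tkhx-17-4"
        final int[] parsed = parseId(id);
        System.out.println(parsed[0] + " / " + parsed[1]); // 17 / 4
    }
}
```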
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/predefinedtkh/PredefinedTkhColumnAccess.java Mon Apr 23 16:18:39 2018 +0200
@@ -0,0 +1,68 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.artifacts.sinfo.predefinedtkh;
+
+import org.apache.commons.lang.math.DoubleRange;
+import org.dive4elements.river.artifacts.D4EArtifact;
+import org.dive4elements.river.artifacts.access.RangeAccess;
+
+/**
+ * Access to the database loaded tkh column artifact data
+ *
+ * @author Matthias Schäfer
+ */
+final class PredefinedTkhColumnAccess extends RangeAccess {
+
+    /***** FIELDS *****/
+
+    private Integer columnId;
+
+    private Integer tkhId;
+
+    private String name;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public PredefinedTkhColumnAccess(final D4EArtifact artifact) {
+        super(artifact);
+    }
+
+
+    /***** METHODS *****/
+
+    public DoubleRange getRange() {
+        final double from = getFrom();
+        final double to = getTo();
+        return new DoubleRange(from, to);
+    }
+
+    public Integer getTkhId() {
+        if (this.tkhId == null) {
+            this.tkhId = getInteger("tkh_id");
+        }
+        return this.tkhId;
+    }
+
+    public Integer getColumnId() {
+        if (this.columnId == null) {
+            this.columnId = getInteger("tkh_col_id");
+        }
+        return this.columnId;
+    }
+
+    public String getName() {
+        if (this.name == null) {
+            this.name = getString("name");
+        }
+        return this.name;
+    }
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/predefinedtkh/PredefinedTkhFacet.java Mon Apr 23 16:18:39 2018 +0200 @@ -0,0 +1,111 @@ +/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde + * Software engineering by + * Björnsen Beratende Ingenieure GmbH + * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.artifacts.sinfo.predefinedtkh; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.dive4elements.artifacts.Artifact; +import org.dive4elements.artifacts.CallContext; +import org.dive4elements.river.artifacts.D4EArtifact; +import org.dive4elements.river.artifacts.common.GeneralResultType; +import org.dive4elements.river.artifacts.common.ResultRow; +import org.dive4elements.river.artifacts.model.BlackboardDataFacet; +import org.dive4elements.river.artifacts.model.Calculation; +import org.dive4elements.river.artifacts.model.FacetTypes; +import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType; +import org.dive4elements.river.artifacts.sinfo.tkhcalculation.SoilKind; +import org.dive4elements.river.artifacts.sinfo.tkhcalculation.SoilKindKmValueFinder; +import org.dive4elements.river.model.sinfo.TkhColumn; +import org.dive4elements.river.model.sinfo.TkhValue; + +/** + * Facet for a tkh value series loaded from the database + * + * @author Matthias Schäfer + */ +public class PredefinedTkhFacet extends BlackboardDataFacet implements FacetTypes { + + private static final long serialVersionUID = 5508868967789155306L; + + public PredefinedTkhFacet(final String name, final String description) { + super(0, name, description); + + this.metaData.put("X", "chart.longitudinal.section.xaxis.label"); + this.metaData.put("Y", "chart.tkh.section.yaxis.label"); + } + + /** + * Returns the data this facet requires. + * + * @param artifact + * the owner artifact. + * @param context + * the CallContext (ignored). 
+ * + * @return + * the data as PredefinedTkhQueryCalculationResult + */ + @Override + public Object getData(final Artifact artifact, final CallContext context) { + + final PredefinedTkhColumnAccess access = new PredefinedTkhColumnAccess((D4EArtifact) artifact); + final TkhColumn series = TkhColumn.getTkhColumnById(access.getColumnId()); + final List<TkhValue> tkhValues = TkhValue.getTkhValues(series, access.getFrom(true), access.getTo(true)); + final Calculation problems = new Calculation(); + final SoilKindKmValueFinder soilKindFinder = SoilKindKmValueFinder.loadValues(problems, access.getRiver(), access.getRange()); + + final Collection<ResultRow> rows = new ArrayList<>(); + SoilKind bedMobility = SoilKind.mobil; + for (final TkhValue tkhValue : tkhValues) { + final ResultRow row = ResultRow.create(); + row.putValue(GeneralResultType.station, tkhValue.getStation().doubleValue()); + if (soilKindFinder != null) + bedMobility = soilKindFinder.findSoilKind(tkhValue.getStation().doubleValue()); + row.putValue(SInfoResultType.soilkind, bedMobility); + final double tkh = tkhValue(tkhValue.getTkheight()) * 100; + row.putValue(SInfoResultType.tkh, tkh); + switch (bedMobility) { + case starr: + row.putValue(SInfoResultType.tkhup, tkh); + row.putValue(SInfoResultType.tkhdown, 0.0); + break; + case mobil: + default: + row.putValue(SInfoResultType.tkhup, tkh / 2); + row.putValue(SInfoResultType.tkhdown, -tkh / 2); + break; + } + rows.add(row); + } + return new PredefinedTkhQueryCalculationResult(series.getName(), rows); + } + + private double tkhValue(final Double value) { + if (value != null) + return value.doubleValue(); + else + return Double.NaN; + } + + /** + * Create a deep copy of this Facet. + * + * @return a deep copy. + */ + @Override + public PredefinedTkhFacet deepCopy() { + final PredefinedTkhFacet copy = new PredefinedTkhFacet(this.name, this.description); + copy.set(this); + return copy; + } +} \ No newline at end of file
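getData() converts the stored tkheight (metres) to centimetres and splits it into the upper and lower band value depending on the soil kind. The per-station rule, isolated here as a pure function with SoilKind mirrored as a local enum:

```java
// Per-station computation from PredefinedTkhFacet.getData(), isolated: the
// tkheight [m] from the database becomes a tkh value in cm, and the up/down
// band depends on whether the bed is rigid ("starr") or mobile ("mobil").
public final class TkhSplitSketch {

    enum SoilKind { starr, mobil } // mirrors org.dive4elements...tkhcalculation.SoilKind

    /** @return {tkh, tkhup, tkhdown} in cm for one station */
    static double[] split(final Double tkheightMetres, final SoilKind kind) {
        final double tkh = (tkheightMetres != null) ? tkheightMetres.doubleValue() * 100 : Double.NaN;
        switch (kind) {
        case starr:
            return new double[] { tkh, tkh, 0.0 };
        case mobil:
        default:
            return new double[] { tkh, tkh / 2, -tkh / 2 };
        }
    }

    public static void main(final String[] args) {
        System.out.println(java.util.Arrays.toString(split(0.36, SoilKind.mobil))); // [36.0, 18.0, -18.0]
        System.out.println(java.util.Arrays.toString(split(0.36, SoilKind.starr))); // [36.0, 36.0, 0.0]
    }
}
```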
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/predefinedtkh/PredefinedTkhQueryCalculationResult.java Mon Apr 23 16:18:39 2018 +0200
@@ -0,0 +1,29 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.sinfo.predefinedtkh;
+
+import java.util.Collection;
+
+import org.dive4elements.river.artifacts.common.ResultRow;
+import org.dive4elements.river.artifacts.sinfo.common.AbstractTkhCalculationResult;
+
+/**
+ * Contains the results of a database query of a tkh series
+ *
+ * @author Matthias Schäfer
+ */
+final class PredefinedTkhQueryCalculationResult extends AbstractTkhCalculationResult {
+
+    private static final long serialVersionUID = 1L;
+
+    public PredefinedTkhQueryCalculationResult(final String label, final Collection<ResultRow> rows) {
+        super(label, null, true, rows);
+    }
+}
\ No newline at end of file
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/BedQualityD50KmValueFinder.java Mon Apr 23 13:00:41 2018 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/BedQualityD50KmValueFinder.java Mon Apr 23 16:18:39 2018 +0200 @@ -56,11 +56,16 @@ * If PostgreSQL would support a median aggregate function like Oracle does, the aggregation could be placed into this * query. */ - private static final String SQL_BED_D50_SUBLAYER_MEASUREMENT = "SELECT t.km, t.datum, p.tiefevon, p.tiefebis, a.d50" - + " FROM sohltest t INNER JOIN station s ON t.stationid = s.stationid" + " INNER JOIN gewaesser g ON s.gewaesserid = g.gewaesserid" - + " INNER JOIN sohlprobe p ON t.sohltestid = p.sohltestid" + " INNER JOIN siebanalyse a ON p.sohlprobeid = a.sohlprobeid" - + " WHERE (g.name = :name) AND (s.km BETWEEN :fromkm - 0.0001 AND :tokm + 0.0001)" + " AND (p.tiefevon > 0.0) AND (p.tiefebis <= 0.5)" - + " AND (t.datum BETWEEN :fromdate AND :todate)" + " ORDER BY t.km ASC, a.d50 ASC"; + private static final String SQL_BED_D50_SUBLAYER_MEASUREMENT = // + "SELECT t.km, t.datum, p.tiefevon, p.tiefebis, a.d50" // + + " FROM sohltest t INNER JOIN station s ON t.stationid = s.stationid" // + + " INNER JOIN gewaesser g ON s.gewaesserid = g.gewaesserid" // + + " INNER JOIN sohlprobe p ON t.sohltestid = p.sohltestid" // + + " INNER JOIN siebanalyse a ON p.sohlprobeid = a.sohlprobeid" // + + " WHERE (g.name = :name) AND (s.km BETWEEN :fromkm - 0.0001 AND :tokm + 0.0001)" // + + " AND (p.tiefevon > 0.0) AND (p.tiefebis <= 0.5)" // + + " AND (t.datum BETWEEN :fromdate AND :todate)" // + + " ORDER BY t.km ASC, a.d50 ASC"; private Calculation problems; @@ -116,16 +121,22 @@ final TDoubleArrayList values = new TDoubleArrayList(); final TDoubleArrayList kmd50s = new TDoubleArrayList(); + // Median aggregate d50 values for each km for (int i = 0; i <= rows.size() - 1; i++) { kmd50s.add((double) rows.get(i)[4]); if (((i == rows.size() - 1) || !Utils.epsilonEquals((double) rows.get(i)[0], (double) rows.get(i + 1)[0], 0.0001))) { final int k = kmd50s.size() / 2; values.add(((k + k < kmd50s.size()) ? kmd50s.get(k) : (kmd50s.get(k - 1) + kmd50s.get(k)) / 2) / 1000); kms.add((double) rows.get(i)[0]); - log.debug(String.format("loadValues km %.3f d50(mm) %.1f count %d", kms.get(kms.size() - 1), values.get(values.size() - 1), kmd50s.size())); + log.debug(String.format("loadValues km %.3f d50(mm) %.1f count %d", kms.get(kms.size() - 1), values.get(values.size() - 1) * 1000, + kmd50s.size())); kmd50s.clear(); } } + if (kms.size() >= 1) + log.debug(String.format("loadValues: %d kms found from %.3f to %.3f", kms.size(), kms.get(0), kms.get(kms.size() - 1))); + else + log.debug("loadValues: no kms found"); if (kms.size() < 2 || values.size() < 2) { problems.addProblem("bedqualityd50kmvaluefinder.empty", soundingYear);
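The loop above reduces the d50 samples of each km, which arrive sorted ascending thanks to the ORDER BY, to a median and converts millimetres to metres. The selection rule in isolation:

```java
// The median rule from BedQualityD50KmValueFinder.loadValues(), extracted into a
// pure function: the d50 samples of one km arrive sorted ascending (the SQL ends
// with "ORDER BY t.km ASC, a.d50 ASC"); odd counts take the middle element, even
// counts average the two middle ones, and mm are converted to m.
public final class D50MedianSketch {

    static double medianMmToM(final double[] sortedD50Mm) {
        final int k = sortedD50Mm.length / 2;
        final double medianMm = (k + k < sortedD50Mm.length)
                ? sortedD50Mm[k]
                : (sortedD50Mm[k - 1] + sortedD50Mm[k]) / 2;
        return medianMm / 1000;
    }

    public static void main(final String[] args) {
        System.out.println(medianMmToM(new double[] { 1.0, 2.0, 8.0 }));      // 0.002
        System.out.println(medianMmToM(new double[] { 1.0, 2.0, 4.0, 8.0 })); // 0.003
    }
}
```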
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/FlowVelocityModelKmValueFinder.java Mon Apr 23 13:00:41 2018 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/FlowVelocityModelKmValueFinder.java Mon Apr 23 16:18:39 2018 +0200 @@ -318,13 +318,14 @@ if (this.leftIndexFound == this.rightIndexFound) { // Exact km match final double qfound = getLeftValues().findQ(q); - log.debug(String.format("findKmQValues km %.3f q %.0f = %.0f (%d)", km, q, qfound, this.leftIndexFound)); + log.debug(String.format("findKmQValues km %.3f q %.0f = %.0f (%d) v = %.3f, tau = %.3f", km, q, qfound, this.leftIndexFound, + this.getFindVmainFound(), this.getFindTauFound())); return !Double.isNaN(qfound); } final double[] qfound = { getLeftValues().findQ(q), getRightValues().findQ(q) }; - log.debug(String.format("findKmQValues km %.3f q %.0f = %.0f (%d, %.3f) - %.0f (%d, %.3f)", km, q, qfound[0], this.leftIndexFound, - getLeftValues().getKm(), qfound[1], this.rightIndexFound, getRightValues().getKm())); + log.debug(String.format("findKmQValues km %.3f q %.0f = %.0f (%d, %.3f) - %.0f (%d, %.3f) v = %.3f, tau = %.3f", km, q, qfound[0], this.leftIndexFound, + getLeftValues().getKm(), qfound[1], this.rightIndexFound, getRightValues().getKm(), this.getFindVmainFound(), this.getFindTauFound())); return !Double.isNaN(qfound[0]) && !Double.isNaN(qfound[1]); }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/SoilKindKmValueFinder.java Mon Apr 23 13:00:41 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhcalculation/SoilKindKmValueFinder.java Mon Apr 23 16:18:39 2018 +0200
@@ -26,7 +26,7 @@
 
 /**
  * @author Matthias Schäfer
  */
-final class SoilKindKmValueFinder {
+public final class SoilKindKmValueFinder {
 
     /***** FIELDS *****/
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/doc/schema/oracle-drop-sinfo-uinfo.sql Mon Apr 23 16:18:39 2018 +0200 @@ -0,0 +1,53 @@ +SET AUTOCOMMIT ON; + +DROP TABLE bed_mobility CASCADE CONSTRAINTS PURGE; +DROP TABLE bed_mobility_values CASCADE CONSTRAINTS PURGE; +DROP TABLE infrastructure CASCADE CONSTRAINTS PURGE; +DROP TABLE infrastructure_values CASCADE CONSTRAINTS PURGE; +DROP TABLE channel CASCADE CONSTRAINTS PURGE; +DROP TABLE channel_values CASCADE CONSTRAINTS PURGE; +DROP TABLE collision_type CASCADE CONSTRAINTS PURGE; +DROP TABLE collision CASCADE CONSTRAINTS PURGE; +DROP TABLE collision_values CASCADE CONSTRAINTS PURGE; +DROP TABLE tkh CASCADE CONSTRAINTS PURGE; +DROP TABLE tkh_column CASCADE CONSTRAINTS PURGE; +DROP TABLE tkh_values CASCADE CONSTRAINTS PURGE; +DROP TABLE depth_evolution CASCADE CONSTRAINTS PURGE; +DROP TABLE depth_evolution_values CASCADE CONSTRAINTS PURGE; +DROP TABLE daily_discharge CASCADE CONSTRAINTS PURGE; +DROP TABLE daily_discharge_values CASCADE CONSTRAINTS PURGE; +DROP TABLE salix CASCADE CONSTRAINTS PURGE; +DROP TABLE salix_values CASCADE CONSTRAINTS PURGE; +DROP TABLE salix_rank CASCADE CONSTRAINTS PURGE; +DROP TABLE vegetation_type CASCADE CONSTRAINTS PURGE; +DROP TABLE vegetation CASCADE CONSTRAINTS PURGE; +DROP TABLE vegetation_zone CASCADE CONSTRAINTS PURGE; +DROP TABLE flow_depth CASCADE CONSTRAINTS PURGE; +DROP TABLE flow_depth_column CASCADE CONSTRAINTS PURGE; +DROP TABLE flow_depth_values CASCADE CONSTRAINTS PURGE; + +DROP SEQUENCE BED_MOBILITY_ID_SEQ; +DROP SEQUENCE BED_MOBILITY_VALUES_ID_SEQ; +DROP SEQUENCE INFRASTRUCTURE_ID_SEQ; +DROP SEQUENCE INFRASTRUCTURE_VALUES_ID_SEQ; +DROP SEQUENCE CHANNEL_ID_SEQ; +DROP SEQUENCE CHANNEL_VALUES_ID_SEQ; +DROP SEQUENCE COLLISION_TYPE_ID_SEQ; +DROP SEQUENCE COLLISION_ID_SEQ; +DROP SEQUENCE COLLISION_VALUES_ID_SEQ; +DROP SEQUENCE TKH_ID_SEQ; +DROP SEQUENCE TKH_COLUMN_ID_SEQ; +DROP SEQUENCE TKH_VALUES_ID_SEQ; +DROP SEQUENCE DEPTH_EVOLUTION_ID_SEQ; +DROP SEQUENCE DEPTH_EVOLUTION_VALUES_ID_SEQ; +DROP SEQUENCE DAILY_DISCHARGE_ID_SEQ; +DROP SEQUENCE DAILY_DISCHARGE_VALUES_ID_SEQ; +DROP SEQUENCE SALIX_ID_SEQ; +DROP SEQUENCE SALIX_VALUES_ID_SEQ; +DROP SEQUENCE SALIX_RANK_ID_SEQ; +DROP SEQUENCE VEGETATION_TYPE_ID_SEQ; +DROP SEQUENCE VEGETATION_ID_SEQ; +DROP SEQUENCE VEGETATION_ZONE_ID_SEQ; +DROP SEQUENCE FLOW_DEPTH_ID_SEQ; +DROP SEQUENCE FLOW_DEPTH_COLUMN_ID_SEQ; +DROP SEQUENCE FLOW_DEPTH_VALUES_ID_SEQ;
--- a/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobility.java Mon Apr 23 13:00:41 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobility.java Mon Apr 23 16:18:39 2018 +0200
@@ -25,7 +25,10 @@
 import javax.persistence.SequenceGenerator;
 import javax.persistence.Table;
 
+import org.dive4elements.river.backend.SessionHolder;
 import org.dive4elements.river.model.River;
+import org.hibernate.Query;
+import org.hibernate.Session;
 
 /**
  * Hibernate binding for the DB table bed_mobility
@@ -134,4 +137,18 @@
     public void addValue(final BedMobilityValue value) {
         this.values.add(value);
     }
+
+    /**
+     * Selects a bed mobility series of a river from the database
+     */
+    public static BedMobility queryForRiver(final River river) {
+        final Session session = SessionHolder.HOLDER.get();
+        final Query query = session.createQuery("FROM BedMobility WHERE river=:river");
+        query.setParameter("river", river);
+        final List<BedMobility> rows = query.list();
+        if (rows != null)
+            return rows.get(0);
+        else
+            return null;
+    }
 }
\ No newline at end of file
--- a/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobilityValue.java Mon Apr 23 13:00:41 2018 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobilityValue.java Mon Apr 23 16:18:39 2018 +0200 @@ -11,6 +11,7 @@ package org.dive4elements.river.model.sinfo; import java.io.Serializable; +import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; @@ -22,6 +23,10 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import org.dive4elements.river.backend.SessionHolder; +import org.hibernate.Query; +import org.hibernate.Session; + /** * Hibernate binding for the DB table bed_mobility_values @@ -106,4 +111,17 @@ public void setMoving(final int bedMoving) { this.moving = bedMoving; } + + /** + * Selects the bed mobility of a series in a km range from the database + */ + public static List<BedMobilityValue> getValues(final BedMobility parent, final double kmLo, final double kmHi) { + final Session session = SessionHolder.HOLDER.get(); + final Query query = session.createQuery("FROM BedMobilityValue WHERE (bedMobility=:parent)" + + " AND (station >= :kmLo - 0.0001) AND (station <= :kmHi + 0.0001)"); + query.setParameter("parent", parent); + query.setParameter("kmLo", new Double(kmLo)); + query.setParameter("kmHi", new Double(kmHi)); + return query.list(); + } }
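The new queryForRiver() and getValues() helpers are meant to be used together: fetch the river's bed-mobility series, then its values for a km range. A hedged usage sketch; like the rest of the backend it assumes a Hibernate session is already bound via SessionHolder, and the surrounding class is illustrative:

```java
// Example use of the two query helpers added above. It assumes what the rest of
// the backend assumes: a Hibernate session bound to SessionHolder and a River
// instance loaded elsewhere.
import java.util.List;

import org.dive4elements.river.model.River;
import org.dive4elements.river.model.sinfo.BedMobility;
import org.dive4elements.river.model.sinfo.BedMobilityValue;

public final class BedMobilityLookupExample {

    /** Reports how many bed-mobility values exist between two stations of a river. */
    public static void dumpBedMobility(final River river, final double kmFrom, final double kmTo) {
        final BedMobility series = BedMobility.queryForRiver(river);
        if (series == null)
            return;
        final List<BedMobilityValue> values = BedMobilityValue.getValues(series, kmFrom, kmTo);
        System.out.println(values.size() + " bed mobility values between km " + kmFrom + " and " + kmTo);
    }
}
```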
--- a/backend/src/main/java/org/dive4elements/river/model/sinfo/Tkh.java Mon Apr 23 13:00:41 2018 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/Tkh.java Mon Apr 23 16:18:39 2018 +0200 @@ -26,7 +26,10 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import org.dive4elements.river.backend.SessionHolder; import org.dive4elements.river.model.River; +import org.hibernate.Query; +import org.hibernate.Session; /** * Hibernate binding for the DB table tkh @@ -171,4 +174,15 @@ public void addColumn(final TkhColumn value) { this.columns.add(value); } + + /** + * Selects a tkh series by id from the database + */ + public static Tkh getTkhById(final int id) { + final Session session = SessionHolder.HOLDER.get(); + final Query query = session.createQuery("FROM Tkh WHERE id=:id"); + query.setParameter("id", id); + final List<Tkh> rows = query.list(); + return (rows != null) ? rows.get(0) : null; + } } \ No newline at end of file
--- a/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhColumn.java Mon Apr 23 13:00:41 2018 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhColumn.java Mon Apr 23 16:18:39 2018 +0200 @@ -24,6 +24,10 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import org.dive4elements.river.backend.SessionHolder; +import org.hibernate.Query; +import org.hibernate.Session; + /** * Hibernate binding for the DB table tkh_column * @@ -103,4 +107,15 @@ public void addValue(final TkhValue value) { this.values.add(value); } + + /** + * Selects a tkh series column by id from the database + */ + public static TkhColumn getTkhColumnById(final int id) { + final Session session = SessionHolder.HOLDER.get(); + final Query query = session.createQuery("FROM TkhColumn WHERE id=:id"); + query.setParameter("id", id); + final List<TkhColumn> rows = query.list(); + return (rows != null) ? rows.get(0) : null; + } }
--- a/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhValue.java Mon Apr 23 13:00:41 2018 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhValue.java Mon Apr 23 16:18:39 2018 +0200 @@ -11,6 +11,7 @@ package org.dive4elements.river.model.sinfo; import java.io.Serializable; +import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; @@ -22,6 +23,9 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import org.dive4elements.river.backend.SessionHolder; +import org.hibernate.Query; +import org.hibernate.Session; /** * Hibernate binding for the DB table tkh_values @@ -109,4 +113,17 @@ public void setTkheight(final Double tkheight) { this.tkheight = tkheight; } + + /** + * Selects the tkh values of a tkh series column in a km range from the database + */ + public static List<TkhValue> getTkhValues(final TkhColumn parent, final double kmLo, final double kmHi) { + final Session session = SessionHolder.HOLDER.get(); + final Query query = session.createQuery("FROM TkhValue WHERE (tkhColumn=:parent)" + + " AND (station >= :kmLo - 0.0001) AND (station <= :kmHi + 0.0001)"); + query.setParameter("parent", parent); + query.setParameter("kmLo", new Double(kmLo)); + query.setParameter("kmHi", new Double(kmHi)); + return query.list(); + } }
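These are the helpers PredefinedTkhFacet.getData() calls: resolve the tkh column by id, then load its values for the artifact's km range. A condensed usage sketch under the same session assumption as above:

```java
// Condensed version of what PredefinedTkhFacet.getData() does with the helpers
// added above. A Hibernate session bound to SessionHolder is assumed, as
// elsewhere in the backend; the wrapping class is illustrative.
import java.util.List;

import org.dive4elements.river.model.sinfo.TkhColumn;
import org.dive4elements.river.model.sinfo.TkhValue;

public final class TkhColumnLookupExample {

    public static void dumpTkh(final int tkhColumnId, final double kmFrom, final double kmTo) {
        final TkhColumn column = TkhColumn.getTkhColumnById(tkhColumnId);
        if (column == null)
            return;
        final List<TkhValue> values = TkhValue.getTkhValues(column, kmFrom, kmTo);
        for (final TkhValue value : values)
            System.out.println(value.getStation() + " -> tkh [m] = " + value.getTkheight());
    }
}
```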
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java Mon Apr 23 13:00:41 2018 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.java Mon Apr 23 16:18:39 2018 +0200
@@ -1485,12 +1485,14 @@
     String sinfo_additional_ls_withQ();
 
     String sinfo_additional_ls_withoutQ();
-    
+
+    String sinfo_predefined_tkh();
+
     String uinfo();
-    
+
     String uinfo_salix_line_export();
 
     String uinfo_vegetation_zones_export();
-    
+
     String uinfo_inundation_duration_export();
 }
\ No newline at end of file
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties Mon Apr 23 13:00:41 2018 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants.properties Mon Apr 23 16:18:39 2018 +0200
@@ -792,6 +792,8 @@
 sinfo_additional_ls_withQ = mit Abfluss
 sinfo_additional_ls_withoutQ = ohne Abfluss
 
+sinfo_predefined_tkh = Transportk\u00f6rperh\u00f6hen
+
 uinfo = U-INFO
 uinfo_inundation_duration_export = \u00dcberflutungsdauern Export
 uinfo_salix_line_export = Salix-Linie Export
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties Mon Apr 23 13:00:41 2018 +0200
+++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/FLYSConstants_de.properties Mon Apr 23 16:18:39 2018 +0200
@@ -792,6 +792,8 @@
 sinfo_additional_ls_withQ = mit Abfluss
 sinfo_additional_ls_withoutQ = ohne Abfluss
 
+sinfo_predefined_tkh = Transportk\u00f6rperh\u00f6hen
+
 uinfo = U-INFO
 uinfo_inundation_duration_export = \u00dcberflutungsdauern Export
 uinfo_salix_line_export = Salix-Linie Export
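On the GWT client the new key is reached through the sinfo_predefined_tkh() method added to FLYSConstants; GWT's deferred binding wires the method to the property of the same name in the locale-specific .properties files, so the datacage tree node created by the sinfo_predefined_tkh macro gets a localized label. An illustrative snippet, not actual client code:

```java
// How the new key is consumed on the GWT client: the sinfo_predefined_tkh()
// method added to FLYSConstants is backed by the property of the same name in
// FLYSConstants.properties / FLYSConstants_de.properties. The surrounding class
// and method are illustrative assumptions.
import com.google.gwt.core.client.GWT;

import org.dive4elements.river.client.client.FLYSConstants;

public class DatacageLabelSketch {

    private final FLYSConstants MSG = GWT.create(FLYSConstants.class);

    public String labelForPredefinedTkh() {
        return MSG.sinfo_predefined_tkh(); // "Transportkörperhöhen" in the German locale
    }
}
```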