diff artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java @ 8951:322b0e6298ea

Work on SINFO FlowDepth-Development
author gernotbelger
date Fri, 16 Mar 2018 18:08:38 +0100
parents
children c40db8e8dcae
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java	Fri Mar 16 18:08:38 2018 +0100
@@ -0,0 +1,161 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.artifacts.sinfo.flowdepthdev;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
+import org.dive4elements.river.artifacts.sinfo.common.SInfoResultRow;
+import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType;
+import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
+import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
+
+import au.com.bytecode.opencsv.CSVWriter;
+
+/**
+ * Generates different output formats (csv, pdf) of data that resulted from a flow depth development computation.
+ *
+ * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
+ * @author Gernot Belger
+ */
+// REMARK: must be public because it is registered in generators.xml
+public class FlowDepthDevelopmentExporter extends AbstractSInfoExporter<FlowDepthDevelopmentCalculationResult, FlowDepthDevelopmentCalculationResults> {
+
+    /** The log used in this exporter. */
+    private static Logger log = Logger.getLogger(FlowDepthDevelopmentExporter.class);
+
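+    // REMARK: still refers to the flow depth min/max report template; presumably a development-specific
+    // template is still missing here (assumption, not verified against the jasper resources)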
+    private static final String JASPER_FILE = "/jasper/sinfo.flowdepthminmax.jasper";
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    @Override
+    protected void writeCSVResultMetadata(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results,
+            final FlowDepthDevelopmentCalculationResult result) {
+
+        // FIXME: distinguish header labels
+        writeCSVSoundingMetadata(writer, result.getCurrentSounding());
+        writeCSVWaterlevelMetadata(writer, result.getCurrentWst());
+
+        // FIXME: distinguish header labels
+        writeCSVSoundingMetadata(writer, result.getHistoricalSounding());
+        writeCSVWaterlevelMetadata(writer, result.getHistoricalWst());
+    }
+
+    @Override
+    protected void writeCSVGlobalMetadata(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results) {
+
+        super.writeCSVGlobalMetadataDefaults(writer, results);
+
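+        // write an empty row, presumably as a separator between the global and the per-result metadata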
+        writer.writeNext(new String[] { "" });
+    }
+
+    /**
+     * Writes the CSV header row.
+     *
+     * @param writer the target CSV writer
+     * @param results the overall calculation results
+     * @param river info about the river of the calculation
+     */
+    @Override
+    protected void writeCSVHeader(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results, final RiverInfo river) {
+        log.info("FlowDepthDevelopmentExporter.writeCSVHeader");
+
+        final Collection<String> header = new ArrayList<>(11);
+
+        header.add(msg(SInfoResultType.station.getCsvHeader()));
+        header.add(msgUnit(SInfoResultType.flowdepthDevelopment.getCsvHeader(), SInfoResultType.flowdepthDevelopment.getUnit()));
+        header.add(msgUnit(SInfoResultType.flowdepthDevelopmentPerYear.getCsvHeader(), SInfoResultType.flowdepthDevelopmentPerYear.getUnit()));
+
+        // FIXME: add data-labels in header
+        header.add(msgUnit(SInfoResultType.waterlevelDifference.getCsvHeader(), SInfoResultType.waterlevelDifference.getUnit()));
+        header.add(msgUnit(SInfoResultType.bedHeightDifference.getCsvHeader(), SInfoResultType.bedHeightDifference.getUnit()));
+
+        header.add(msgUnit(SInfoResultType.flowdepthCurrent.getCsvHeader(), SInfoResultType.flowdepthCurrent.getUnit()));
+        header.add(msgUnit(SInfoResultType.flowdepthHistorical.getCsvHeader(), SInfoResultType.flowdepthHistorical.getUnit()));
+
+        header.add(msg(SInfoResultType.location.getCsvHeader()));
+
+        writer.writeNext(header.toArray(new String[header.size()]));
+    }
+
+    @Override
+    protected String[] formatCSVRow(final FlowDepthDevelopmentCalculationResults results, final FlowDepthDevelopmentCalculationResult result,
+            final SInfoResultRow row) {
+        return formatRow(result, row);
+    }
+
+    /**
+     * Formats a row of a flow depth development result into an array of strings, used for both the csv and the pdf export.
+     *
+     * @param result the calculation result the row belongs to; may be null for the pdf export
+     * @param row the result row to format
+     */
+    private String[] formatRow(final FlowDepthDevelopmentCalculationResult result, final SInfoResultRow row) {
+
+        final Collection<String> lines = new ArrayList<>(10);
+
+        lines.add(row.exportValue(this.context, SInfoResultType.station));
+
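+        // NOTE: the columns below still mirror the flow depth min/max exporter and do not yet match the
+        // CSV header written by writeCSVHeader above; presumably still to be reworked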
+        // REMARK: null check because the pdf export calls this with a null result, in which case all
+        // columns are shown (to avoid multiple jasper templates)
+        // FIXME: does not work like this: we may have several pairs of min/max, so we need to look at all of them?
+        // if (result == null || result.getMinSounding() != null)
+        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthmin));
+        // if (result == null || result.getMaxSounding() != null)
+        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthmax));
+
+        lines.add(row.exportValue(this.context, SInfoResultType.waterlevel));
+        lines.add(row.exportValue(this.context, SInfoResultType.discharge));
+        lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight));
+        lines.add(row.exportValue(this.context, SInfoResultType.soundingLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.location));
+
+        return lines.toArray(new String[lines.size()]);
+    }
+
+    @Override
+    protected final String getJasperFile() {
+        return JASPER_FILE;
+    }
+
+    @Override
+    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthDevelopmentCalculationResults results) {
+
+        /* general metadata */
+        super.addJRMetaDataDefaults(source, results);
+
+        /* column headings */
+        // FIXME
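+        // (the headings below still follow the min/max exporter and its jasper template; presumably
+        // still to be adapted to the development-specific columns)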
+        source.addMetaData("station_header", SInfoResultType.station.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("flowdepthmin_header", SInfoResultType.flowdepthmin.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("flowdepthmax_header", SInfoResultType.flowdepthmax.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("waterlevel_name_header", SInfoResultType.waterlevelLabel.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("gauge_header", SInfoResultType.gaugeLabel.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("sounding_name_header", SInfoResultType.soundingLabel.getPdfHeader(this.context.getMeta()));
+        source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta()));
+    }
+
+    @Override
+    protected String[] formatPDFRow(final FlowDepthDevelopmentCalculationResults results, final SInfoResultRow row) {
+        return formatRow(null, row);
+    }
+}
\ No newline at end of file
