diff artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java @ 8996:fb9430250899

Work on uinfo
author gernotbelger
date Thu, 12 Apr 2018 19:13:39 +0200
parents
children 7134a4c7d1b6
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java	Thu Apr 12 19:13:39 2018 +0200
@@ -0,0 +1,182 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.uinfo.salix;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.artifacts.common.GeneralResultType;
+import org.dive4elements.river.artifacts.common.ResultRow;
+import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
+import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
+import org.dive4elements.river.artifacts.uinfo.commons.AbstractUInfoExporter;
+import org.dive4elements.river.artifacts.uinfo.commons.UInfoResultType;
+
+import au.com.bytecode.opencsv.CSVWriter;
+
+/**
+ * @author Domenico Nardi Tironi
+ *
+ */
+public class SalixLineExporter extends AbstractUInfoExporter<SalixLineCalculationResult, SalixLineCalculationResults> {
+
+    /** The log used in this exporter. */
+    private static final Logger log = Logger.getLogger(SalixLineExporter.class);
+
+    /** Jasper report template used for the PDF export of the salix line results. */
+    private static final String JASPER_FILE = "/jasper/uinfo.salixline.jasper";
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Writes the global (once per file) CSV metadata: the framework defaults plus
+     * evaluator, reference gauge and waterlevel year/period entries.
+     */
+    @Override
+    protected void writeCSVGlobalMetadata(final CSVWriter writer, final SalixLineCalculationResults results) {
+        log.info("SalixLineExporter.writeCSVGlobalMetadata");
+
+        super.writeCSVGlobalMetadataDefaults(writer, results);
+
+        // Add Auswerter, Bezugspegel, Jahr/Zeitraum der Wasserspiegellage
+
+        // NOTE(review): the entries below still emit literal placeholder strings
+        // (e.g. "sounding.getEvaluationBy()") instead of the actual values; the
+        // real data sources must be wired up before this export is usable.
+
+        // "# Auswerter: "
+        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.evaluator", "sounding.getEvaluationBy()");
+        // "# Bezugspegel: "
+        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.gauge", "wst.getGauge()");
+
+        // "# Jahr/Zeitraum der Wasserspiegellage: "
+        // final int year = wst.getYear();
+        // if (year > 0)
+        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.year", "Integer.toString(year)");
+    }
+
+    /** Writes the "##Regional wirkende Eingriffe" section header plus the common scenario entries. */
+    private void writeRegionalEffectsCSVMetadata(final CSVWriter writer) {
+        final String main = "uinfo.export.csv.meta.header.salix.regional";
+        // "##Regional wirkende Eingriffe"
+        writeCSVMetaEntry(writer, main);
+        writeRegionalCommonCSVMeta(writer);
+    }
+
+    /** Writes the metadata entries shared by the regional and extended-regional sections. */
+    private void writeRegionalCommonCSVMeta(final CSVWriter writer) {
+        final String main = "uinfo.export.csv.meta.header.salix";
+        // "# Szenariotyp: "
+        writeCSVMetaEntry(writer, main + ".szenariotyp");
+        // "# Teilabschnitt: "
+        writeCSVMetaEntry(writer, main + ".teilabschnitt");
+        // "# Mittelwasserspiegellagenänderung: "
+        writeCSVMetaEntry(writer, main + ".mwspiegellaenderung");
+    }
+
+    /** Writes the "##Überregional wirkende Eingriffe" section header plus the common scenario entries. */
+    private void writeExtendedRegionalEffectsCSVMetadata(final CSVWriter writer) {
+        final String main = "uinfo.export.csv.meta.header.salix.regionalextended";
+        // "##Überregional wirkende Eingriffe"
+        writeCSVMetaEntry(writer, main);
+
+        writeRegionalCommonCSVMeta(writer);
+    }
+
+    /** Writes the "##Historische Betrachtung" section with its scenario and period entries. */
+    private void writeHistoricalViewCSVMetadata(final CSVWriter writer) {
+        final String main = "uinfo.export.csv.meta.header.salix.historical";
+        // "##Historische Betrachtung"
+        writeCSVMetaEntry(writer, main);
+
+        final String mainSub = "uinfo.export.csv.meta.header.salix";
+        // "# Szenariotyp: "
+        writeCSVMetaEntry(writer, mainSub + ".szenariotyp");
+        // "# Teilabschnitt: "
+        writeCSVMetaEntry(writer, mainSub + ".teilabschnitt");
+
+        // "# Art des Zeitraums: "
+        writeCSVMetaEntry(writer, main + ".zeitart");
+        // "# Historischer Zeitpunkt: "
+        writeCSVMetaEntry(writer, main + ".zeitpunkt");
+    }
+
+    /**
+     * Writes the per-result CSV metadata: the regional, extended-regional and
+     * historical sections, separated by blank lines.
+     */
+    @Override
+    protected void writeCSVResultMetadata(final CSVWriter writer, final SalixLineCalculationResults results, final SalixLineCalculationResult result) {
+        writeRegionalEffectsCSVMetadata(writer);
+        writer.writeNext(new String[] { "" }); // break line
+        writeExtendedRegionalEffectsCSVMetadata(writer);
+        writer.writeNext(new String[] { "" }); // break line
+        writeHistoricalViewCSVMetadata(writer);
+    }
+
+    /**
+     * Write the header, with different headings depending on whether at a
+     * gauge or at a location.
+     *
+     * @param river
+     * @param useTkh
+     */
+    @Override
+    protected void writeCSVHeader(final CSVWriter writer, final SalixLineCalculationResults results, final RiverInfo river) {
+        // fixed: previously logged "FlowDepthExporter.writeCSVHeader" (copy-paste from SInfo exporter)
+        log.info("SalixLineExporter.writeCSVHeader");
+
+        final Collection<String> header = new ArrayList<>(4);
+
+        header.add(msg(GeneralResultType.station.getCsvHeader()));
+        // header.add(msgUnit(SInfoResultType.flowdepth.getCsvHeader(), SInfoResultType.flowdepth.getUnit()));
+
+        header.add(msg(UInfoResultType.salixline.getCsvHeader()));
+        // wenn "historisch" gewählt wurde, nur "historisch" anzeigen; sonst für jeden scen-wert ne neue Spalte und "hist"
+        // ausblenden!...!..!!
+        header.add(msg(UInfoResultType.salixlinehist.getCsvHeader()));
+        header.add(msg(UInfoResultType.salixlinescen.getCsvHeader()));
+        writer.writeNext(header.toArray(new String[header.size()]));
+    }
+
+    /**
+     * Format a row of a salix line result into an array of string, both used by csv and pdf
+     *
+     * @param pdf
+     *
+     * @param useTkh
+     */
+    @Override
+    protected String[] formatRow(final SalixLineCalculationResults results, final ResultRow row, final ExportMode mode) {
+
+        // fixed: capacity was 3 but four values are added below
+        final Collection<String> lines = new ArrayList<>(4);
+
+        lines.add(row.exportValue(this.context, GeneralResultType.station));
+        lines.add(row.exportValue(this.context, UInfoResultType.salixline));
+
+        // wenn "historisch" gewählt wurde, nur "historisch" anzeigen; sonst für jeden scen-wert ne neue Spalte und "hist"
+        // ausblenden!...!..!!
+        lines.add(row.exportValue(this.context, UInfoResultType.salixlinehist));
+        lines.add(row.exportValue(this.context, UInfoResultType.salixlinescen));
+        return lines.toArray(new String[lines.size()]);
+    }
+
+    @Override
+    protected final String getJasperFile() {
+        return JASPER_FILE;
+    }
+
+    /**
+     * Adds the default JR metadata plus the additional column headings for the
+     * salix line report.
+     */
+    @Override
+    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final SalixLineCalculationResults results) {
+
+        super.addJRMetaData(source, results);
+
+        /* additional column headings */
+        source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("flowdepth_header", SInfoResultType.flowdepth.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("flowdepth_tkh_header", SInfoResultType.flowdepthtkh.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("tkh_header", SInfoResultType.tkh.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("waterlevel_name_header", SInfoResultType.waterlevelLabel.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("gauge_header", SInfoResultType.gaugeLabel.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("sounding_name_header", SInfoResultType.soundingLabel.getPdfHeader(this.context.getMeta()));
+        // source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta()));
+    }
+
+}
\ No newline at end of file

http://dive4elements.wald.intevation.org