diff artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java @ 9150:23945061daec

gigantic refactoring: exporter, result, results to support multiple jaspers -> collisions
author gernotbelger
date Thu, 14 Jun 2018 16:56:31 +0200
parents 41f4bc83aa7a
children a4121ec450d6
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java	Thu Jun 14 15:12:25 2018 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java	Thu Jun 14 16:56:31 2018 +0200
@@ -10,18 +10,14 @@
 package org.dive4elements.river.artifacts.uinfo.salix;
 
 import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
 
-import org.apache.log4j.Logger;
-import org.dive4elements.river.artifacts.common.GeneralResultType;
+import org.dive4elements.river.artifacts.common.AbstractCalculationExportableResult;
+import org.dive4elements.river.artifacts.common.AbstractCommonExporter;
+import org.dive4elements.river.artifacts.common.ExportContextCSV;
 import org.dive4elements.river.artifacts.common.I18NStrings;
 import org.dive4elements.river.artifacts.common.JasperReporter;
 import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource;
-import org.dive4elements.river.artifacts.common.ResultRow;
 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
-import org.dive4elements.river.artifacts.uinfo.commons.AbstractUInfoExporter;
-import org.dive4elements.river.artifacts.uinfo.commons.UInfoResultType;
 
 import au.com.bytecode.opencsv.CSVWriter;
 import net.sf.jasperreports.engine.JRException;
@@ -30,166 +26,75 @@
  * @author Domenico Nardi Tironi
  *
  */
-public class SalixLineExporter extends AbstractUInfoExporter<SalixLineCalculationResult, SalixLineCalculationResults> {
-
-    /** The log used in this exporter. */
-    private static Logger log = Logger.getLogger(SalixLineExporter.class);
-
-    private static final String JASPER_FILE = "/jasper/templates/uinfo.salixline.jrxml";
-
-    @Override
-    protected Logger getLog() {
-        return log;
-    }
+public class SalixLineExporter extends AbstractCommonExporter<SalixLineCalculationResults> {
 
     @Override
-    protected void writeCSVGlobalMetadata(final CSVWriter writer, final SalixLineCalculationResults results) {
-        log.info("SalixLineExporter.writeCSVMeta");
-
-        super.writeCSVGlobalMetadataDefaults(writer, results);
-
-        // "# Höhensystem des Flusses: "
-        writeCSVMetaEntry(writer, I18NStrings.CSV_META_HEIGHT_UNIT_RIVER, results.getRiver().getWstUnit());
-
-        // Add evaluator (Auswerter), reference gauge (Bezugspegel) and year/period of the waterlevel
-
-        // "# Auswerter: "
-        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.evaluator", "sounding.getEvaluationBy()");
-        // "# Bezugspegel: "
-        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.gauge", "wst.getGauge()");
-
-        // "# Jahr/Zeitraum der Wasserspiegellage: "
-        // final int year = wst.getYear();
-        // if (year > 0)
-        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.year", "Integer.toString(year)");
-
-    }
-
-    private void writeRegionalEffectsCSVMetadata(final CSVWriter writer) {
-        final String main = "uinfo.export.csv.meta.header.salix.regional";
-        // "##Regional wirkende Eingriffe"
-        writeCSVMetaEntry(writer, main);
-        writeRegionalCommonCSVMeta(writer);
-    }
-
-    private void writeRegionalCommonCSVMeta(final CSVWriter writer) {
-        final String main = "uinfo.export.csv.meta.header.salix";
-        // "# Szenariotyp: "
-        writeCSVMetaEntry(writer, main + ".szenariotyp");
-        // "# Teilabschnitt: "
-        writeCSVMetaEntry(writer, main + ".teilabschnitt");
-        // "# Mittelwasserspiegellagenänderung: "
-        writeCSVMetaEntry(writer, main + ".mwspiegellaenderung");
-    }
-
-    private void writeExtendedRegionalEffectsCSVMetadata(final CSVWriter writer) {
-        final String main = "uinfo.export.csv.meta.header.salix.regionalextended";
-        // "##Überregional wirkende Eingriffe"
-        writeCSVMetaEntry(writer, main);
-
-        writeRegionalCommonCSVMeta(writer);
-    }
-
-    private void writeHistoricalViewCSVMetadata(final CSVWriter writer) {
-        final String main = "uinfo.export.csv.meta.header.salix.historical";
-        // "##Historische Betrachtung"
-        writeCSVMetaEntry(writer, main);
-
-        final String mainSub = "uinfo.export.csv.meta.header.salix";
-        // "# Szenariotyp: "
-        writeCSVMetaEntry(writer, mainSub + ".szenariotyp");
-        // "# Teilabschnitt: "
-        writeCSVMetaEntry(writer, mainSub + ".teilabschnitt");
-
-        // "# Art des Zeitraums: "
-        writeCSVMetaEntry(writer, main + ".zeitart");
-        // "# Historischer Zeitpunkt: "
-        writeCSVMetaEntry(writer, main + ".zeitpunkt");
-    }
-
-    @Override
-    protected void writeCSVResultMetadata(final CSVWriter writer, final SalixLineCalculationResults results, final SalixLineCalculationResult result) {
-        writeRegionalEffectsCSVMetadata(writer);
-        writer.writeNext(new String[] { "" }); // break line
-        writeExtendedRegionalEffectsCSVMetadata(writer);
-        writer.writeNext(new String[] { "" }); // break line
-        writeHistoricalViewCSVMetadata(writer);
-    }
-
-    /**
-     * Write the header, with different headings depending on whether at a
-     * gauge or at a location.
-     *
-     * @param river
-     * @param useTkh
-     */
-    @Override
-    protected void writeCSVHeader(final CSVWriter writer, final SalixLineCalculationResults results, final RiverInfo river) {
-        log.info("FlowDepthExporter.writeCSVHeader");
-
-        final Collection<String> header = new ArrayList<>(4);
-
-        header.add(msg(GeneralResultType.station.getCsvHeader()));
-        // header.add(msgUnit(SInfoResultType.flowdepth.getCsvHeader(), SInfoResultType.flowdepth.getUnit()));
-
-        header.add(msg(UInfoResultType.salixline.getCsvHeader()));
-        // if "historical" was selected, show only "historical"; otherwise add a new column
-        // per scenario value and hide "hist"
-        header.add(msg(UInfoResultType.salixlinehist.getCsvHeader()));
-        header.add(msg(UInfoResultType.salixlinescen.getCsvHeader()));
-        writer.writeNext(header.toArray(new String[header.size()]));
-    }
-
-    /**
-     * Format a row of a salix line result into an array of strings, used by both CSV and PDF export.
-     *
-     * @param pdf
-     *
-     * @param useTkh
-     */
-    @Override
-    protected String[] formatRow(final SalixLineCalculationResults results, final ResultRow row, final ExportMode mode) {
-
-        final Collection<String> lines = new ArrayList<>(3);
-
-        lines.add(row.exportValue(this.context, GeneralResultType.station));
-        lines.add(row.exportValue(this.context, UInfoResultType.salixline));
-
-        // if "historical" was selected, show only "historical"; otherwise add a new column
-        // per scenario value and hide "hist"
-        lines.add(row.exportValue(this.context, UInfoResultType.salixlinehist));
-        lines.add(row.exportValue(this.context, UInfoResultType.salixlinescen));
-        return lines.toArray(new String[lines.size()]);
-    }
-
-    @Override
-    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final SalixLineCalculationResults results) {
-
-        super.addJRMetaData(source, results);
-
-        /* additional column headings */
-        source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta()));
-
-        source.addMetaData("salix_line", UInfoResultType.salixline.getPdfHeader(this.context.getMeta()));
-        // if "historical" was selected, show only "historical"; otherwise add a new column
-        // per scenario value and hide "hist"
-        source.addMetaData("salix_line_hist", UInfoResultType.salixlinehist.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("salix_line_scen", UInfoResultType.salixlinescen.getPdfHeader(this.context.getMeta()));
-    }
-
-    @Override
-    protected void writePDF(final OutputStream out) {
+    protected void doWritePdf(final OutputStream out, final SalixLineCalculationResults results) {
+        // TODO: Move to super
         try {
-            final MetaAndTableJRDataSource source = createJRData(this.data);
+            final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, null);
 
             final JasperReporter reporter = new JasperReporter();
-            reporter.addReport(JASPER_FILE, source);
+
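+            // one report per exportable result: each result registers its own
+            // jasper template with the reporter, which combines them into a
+            // single PDF (replaces the former single JASPER_FILE approach)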
+            for (final AbstractCalculationExportableResult<SalixLineCalculationResults> result : results.getResults()) {
+                final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
+                getHelper().addJRMetaDataUSINFO(source, results);
+
+                result.addReport(exportContextCSV, results, reporter, source);
+            }
+
             reporter.exportPDF(out);
         }
         catch (final JRException je) {
             getLog().warn("Error generating PDF Report!", je);
         }
+    }
 
+    @Override
+    protected void doWriteCSVData(final CSVWriter writer, final SalixLineCalculationResults results) {
+    // TODO: move this flow to super? - the handling of the global metadata differs slightly
+
+        // TODO: move results into context?
+        final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, writer);
+
+        getLog().info("writeCSVData");
+
+        /* write as csv */
+        exportContextCSV.writeCSVGlobalMetadataDefaults(results); // possibly extract within this class
+
+        // writer.writeNext(new String[] { "" }); // break line HERE to avoid redundancy
+
+        // "# Höhensystem des Flusses: "
+        exportContextCSV.writeCSVMetaEntry(I18NStrings.CSV_META_HEIGHT_UNIT_RIVER, results.getRiver().getWstUnit());
+
+        // Add evaluator (Auswerter), reference gauge (Bezugspegel) and year/period of the waterlevel
+
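+        // NOTE: the values passed below ("sounding.getEvaluationBy()" etc.) are
+        // placeholder string literals, not evaluated expressions yet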
+        // "# Auswerter: "
+        exportContextCSV.writeCSVMetaEntry("uinfo.export.salix_line.csv.meta.header.evaluator", "sounding.getEvaluationBy()");
+        // "# Bezugspegel: "
+        exportContextCSV.writeCSVMetaEntry("uinfo.export.salix_line.csv.meta.header.waterlevel.gauge", "wst.getGauge()");
+
+        // "# Jahr/Zeitraum der Wasserspiegellage: "
+        // final int year = wst.getYear();
+        // if (year > 0)
+        exportContextCSV.writeCSVMetaEntry("uinfo.export.salix_line.csv.meta.header.waterlevel.year", "Integer.toString(year)");
+
+        final RiverInfo river = results.getRiver();
+
+        Class<?> lastResultType = null;
+
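+        // group consecutive results by concrete type: write a fresh header block
+        // when the type changes, otherwise only a '#' separator line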
+        for (final AbstractCalculationExportableResult<SalixLineCalculationResults> result : results.getResults()) {
+
+            final Class<?> resultType = result.getClass();
+            if (lastResultType == null || lastResultType != resultType) {
+                exportContextCSV.writeBlankLine();
+                result.writeCSVHeader(exportContextCSV, results, river);
+                exportContextCSV.writeBlankLine();
+            } else
+                exportContextCSV.writeCSVLine(new String[] { "#" });
+
+            result.writeCsv(exportContextCSV, results);
+
+            lastResultType = resultType;
+        }
     }
 
 }
\ No newline at end of file
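
The grouping in doWriteCSVData is easier to read outside the diff. Below is a minimal,
self-contained Java sketch of that pattern; ExportableResult, TypeA and TypeB are
hypothetical stand-ins (not part of the dive4elements codebase) for
AbstractCalculationExportableResult and its concrete subclasses. A new header block is
written whenever the concrete result type changes; consecutive results of the same type
are separated by a single "#" line.

import java.util.Arrays;
import java.util.List;

public class GroupedCsvSketch {

    // stand-in for AbstractCalculationExportableResult
    interface ExportableResult {
        String csvHeader();
        String csvRow();
    }

    static class TypeA implements ExportableResult {
        public String csvHeader() { return "# station; salix line"; }
        public String csvRow()    { return "10.0; 1.23"; }
    }

    static class TypeB implements ExportableResult {
        public String csvHeader() { return "# station; salix line (scenario)"; }
        public String csvRow()    { return "10.0; 1.05"; }
    }

    public static void main(final String[] args) {
        final List<ExportableResult> results = Arrays.asList(new TypeA(), new TypeA(), new TypeB());

        Class<?> lastResultType = null; // deliberately not final: updated every iteration

        for (final ExportableResult result : results) {
            final Class<?> resultType = result.getClass();

            if (lastResultType == null || lastResultType != resultType) {
                // type changed: start a new block with its own header
                System.out.println();
                System.out.println(result.csvHeader());
            } else {
                // same type as the previous result: just a separator line
                System.out.println("#");
            }

            System.out.println(result.csvRow());

            lastResultType = resultType; // remember the type for the next comparison
        }
    }
}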
