Mercurial > dive4elements > river
view artifacts/src/main/java/org/dive4elements/river/artifacts/uinfo/salix/SalixLineExporter.java @ 9039:13b5b515c61f
Can't use lambda in Java 7 code
author | gernotbelger |
---|---|
date | Wed, 02 May 2018 12:16:47 +0200 |
parents | 7134a4c7d1b6 |
children | 41f4bc83aa7a |
line wrap: on
line source
/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
 * Software engineering by
 *  Björnsen Beratende Ingenieure GmbH
 *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */
package org.dive4elements.river.artifacts.uinfo.salix;

import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;

import org.apache.log4j.Logger;

import org.dive4elements.river.artifacts.common.GeneralResultType;
import org.dive4elements.river.artifacts.common.JasperReporter;
import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource;
import org.dive4elements.river.artifacts.common.ResultRow;
import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
import org.dive4elements.river.artifacts.uinfo.commons.AbstractUInfoExporter;
import org.dive4elements.river.artifacts.uinfo.commons.UInfoResultType;

import au.com.bytecode.opencsv.CSVWriter;
import net.sf.jasperreports.engine.JRException;

/**
 * Exports salix ("Salix-Linie") calculation results to CSV and to PDF (via a Jasper report template).
 *
 * @author Domenico Nardi Tironi
 */
public class SalixLineExporter extends AbstractUInfoExporter<SalixLineCalculationResult, SalixLineCalculationResults> {

    /** The log used in this exporter. */
    private static final Logger log = Logger.getLogger(SalixLineExporter.class);

    /** Classpath location of the Jasper template used for the PDF export. */
    private static final String JASPER_FILE = "/jasper/templates/uinfo.salixline.jrxml";

    @Override
    protected Logger getLog() {
        return log;
    }

    /**
     * Writes the global (result-set wide) CSV metadata header entries.
     */
    @Override
    protected void writeCSVGlobalMetadata(final CSVWriter writer, final SalixLineCalculationResults results) {
        log.info("SalixLineExporter.writeCSVGlobalMetadata");

        super.writeCSVGlobalMetadataDefaults(writer, results);

        // Add Auswerter, Bezugspegel, Jahr/Zeitraum der Wasserspiegellage
        // TODO(review): the three values below are hard-coded placeholder strings, not real data.
        // Replace them with the actual sounding/wst lookups once those objects are available here.

        // "# Auswerter: "
        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.evaluator", "sounding.getEvaluationBy()");

        // "# Bezugspegel: "
        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.gauge", "wst.getGauge()");

        // "# Jahr/Zeitraum der Wasserspiegellage: "
        // final int year = wst.getYear();
        // if (year > 0)
        writeCSVMetaEntry(writer, "uinfo.export.salix_line.csv.meta.header.waterlevel.year", "Integer.toString(year)");
    }

    /** Writes the metadata section for regionally effective interventions. */
    private void writeRegionalEffectsCSVMetadata(final CSVWriter writer) {
        final String main = "uinfo.export.csv.meta.header.salix.regional";
        // "##Regional wirkende Eingriffe"
        writeCSVMetaEntry(writer, main);

        writeRegionalCommonCSVMeta(writer);
    }

    /** Writes the metadata entries shared by the regional and supra-regional sections. */
    private void writeRegionalCommonCSVMeta(final CSVWriter writer) {
        final String main = "uinfo.export.csv.meta.header.salix";
        // "# Szenariotyp: "
        writeCSVMetaEntry(writer, main + ".szenariotyp");
        // "# Teilabschnitt: "
        writeCSVMetaEntry(writer, main + ".teilabschnitt");
        // "# Mittelwasserspiegellagenänderung: "
        writeCSVMetaEntry(writer, main + ".mwspiegellaenderung");
    }

    /** Writes the metadata section for supra-regionally effective interventions. */
    private void writeExtendedRegionalEffectsCSVMetadata(final CSVWriter writer) {
        final String main = "uinfo.export.csv.meta.header.salix.regionalextended";
        // "##Überregional wirkende Eingriffe"
        writeCSVMetaEntry(writer, main);

        writeRegionalCommonCSVMeta(writer);
    }

    /** Writes the metadata section for the historical view. */
    private void writeHistoricalViewCSVMetadata(final CSVWriter writer) {
        final String main = "uinfo.export.csv.meta.header.salix.historical";
        // "##Historische Betrachtung"
        writeCSVMetaEntry(writer, main);

        final String mainSub = "uinfo.export.csv.meta.header.salix";
        // "# Szenariotyp: "
        writeCSVMetaEntry(writer, mainSub + ".szenariotyp");
        // "# Teilabschnitt: "
        writeCSVMetaEntry(writer, mainSub + ".teilabschnitt");
        // "# Art des Zeitraums: "
        writeCSVMetaEntry(writer, main + ".zeitart");
        // "# Historischer Zeitpunkt: "
        writeCSVMetaEntry(writer, main + ".zeitpunkt");
    }

    /**
     * Writes the per-result CSV metadata: regional, supra-regional and historical sections,
     * separated by empty lines.
     */
    @Override
    protected void writeCSVResultMetadata(final CSVWriter writer, final SalixLineCalculationResults results, final SalixLineCalculationResult result) {
        writeRegionalEffectsCSVMetadata(writer);
        writer.writeNext(new String[] { "" }); // break line

        writeExtendedRegionalEffectsCSVMetadata(writer);
        writer.writeNext(new String[] { "" }); // break line

        writeHistoricalViewCSVMetadata(writer);
    }

    /**
     * Writes the CSV column header row.
     *
     * @param river river info, currently unused here but part of the template-method contract
     */
    @Override
    protected void writeCSVHeader(final CSVWriter writer, final SalixLineCalculationResults results, final RiverInfo river) {
        // FIX: was logging "FlowDepthExporter.writeCSVHeader" (copy-paste from FlowDepthExporter)
        log.info("SalixLineExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(4);

        header.add(msg(GeneralResultType.station.getCsvHeader()));
        // header.add(msgUnit(SInfoResultType.flowdepth.getCsvHeader(), SInfoResultType.flowdepth.getUnit()));
        header.add(msg(UInfoResultType.salixline.getCsvHeader()));
        // TODO(review): if "historical" was selected, show only the historical column; otherwise
        // add one column per scenario value and hide the historical one.
        header.add(msg(UInfoResultType.salixlinehist.getCsvHeader()));
        header.add(msg(UInfoResultType.salixlinescen.getCsvHeader()));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

    /**
     * Formats one result row into an array of strings; used by both the CSV and the PDF export.
     */
    @Override
    protected String[] formatRow(final SalixLineCalculationResults results, final ResultRow row, final ExportMode mode) {

        // FIX: capacity was 3 although four values are added
        final Collection<String> lines = new ArrayList<>(4);

        lines.add(row.exportValue(this.context, GeneralResultType.station));
        lines.add(row.exportValue(this.context, UInfoResultType.salixline));
        // TODO(review): if "historical" was selected, show only the historical column; otherwise
        // add one column per scenario value and hide the historical one.
        lines.add(row.exportValue(this.context, UInfoResultType.salixlinehist));
        lines.add(row.exportValue(this.context, UInfoResultType.salixlinescen));

        return lines.toArray(new String[lines.size()]);
    }

    /**
     * Adds the column headings used by the Jasper PDF template.
     */
    @Override
    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final SalixLineCalculationResults results) {

        super.addJRMetaData(source, results);

        /* additional column headings */
        source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta()));
        source.addMetaData("salix_line", UInfoResultType.salixline.getPdfHeader(this.context.getMeta()));
        // TODO(review): if "historical" was selected, show only the historical column; otherwise
        // add one column per scenario value and hide the historical one.
        source.addMetaData("salix_line_hist", UInfoResultType.salixlinehist.getPdfHeader(this.context.getMeta()));
        source.addMetaData("salix_line_scen", UInfoResultType.salixlinescen.getPdfHeader(this.context.getMeta()));
    }

    /**
     * Renders the PDF report into the given stream; errors are logged, not rethrown
     * (deliberate best-effort behavior, kept as-is).
     */
    @Override
    protected void writePDF(final OutputStream out) {
        try {
            final MetaAndTableJRDataSource source = createJRData(this.data);

            final JasperReporter reporter = new JasperReporter();
            reporter.addReport(JASPER_FILE, source);
            reporter.exportPDF(out);
        }
        catch (final JRException je) {
            getLog().warn("Error generating PDF Report!", je);
        }
    }
}