Mercurial > dive4elements > river
diff artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthminmax/FlowDepthMinMaxExporter.java @ 9150:23945061daec
gigantic refactoring: exporter, result, results
to support multiple jaspers -> collisions
| field | value |
|---|---|
| author | gernotbelger |
| date | Thu, 14 Jun 2018 16:56:31 +0200 |
| parents | 7134a4c7d1b6 |
| children | a4121ec450d6 |
line wrap: on
line diff
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthminmax/FlowDepthMinMaxExporter.java Thu Jun 14 15:12:25 2018 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthminmax/FlowDepthMinMaxExporter.java Thu Jun 14 16:56:31 2018 +0200 @@ -9,19 +9,13 @@ package org.dive4elements.river.artifacts.sinfo.flowdepthminmax; import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Collection; -import org.apache.log4j.Logger; -import org.dive4elements.river.artifacts.common.GeneralResultType; +import org.dive4elements.river.artifacts.common.AbstractCalculationExportableResult; +import org.dive4elements.river.artifacts.common.AbstractCommonExporter; +import org.dive4elements.river.artifacts.common.ExportContextCSV; import org.dive4elements.river.artifacts.common.JasperReporter; import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource; -import org.dive4elements.river.artifacts.common.ResultRow; -import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter; -import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType; -import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo; import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; -import org.dive4elements.river.artifacts.sinfo.util.WstInfo; import au.com.bytecode.opencsv.CSVWriter; import net.sf.jasperreports.engine.JRException; @@ -33,122 +27,23 @@ * @author Gernot Belger */ // REMARK: must be public because its registered in generators.xml -public class FlowDepthMinMaxExporter extends AbstractSInfoExporter<FlowDepthMinMaxCalculationResult, FlowDepthMinMaxCalculationResults> { - - /** The log used in this exporter. 
*/ - private static Logger log = Logger.getLogger(FlowDepthMinMaxExporter.class); - - private static final String JASPER_FILE = "/jasper/templates/sinfo.flowdepthminmax.jrxml"; - - @Override - protected Logger getLog() { - return log; - } - - @Override - protected void writeCSVResultMetadata(final CSVWriter writer, final FlowDepthMinMaxCalculationResults results, - final FlowDepthMinMaxCalculationResult result) { - - final BedHeightInfo sounding = result.getSounding(); - super.writeCSVSoundingMetadata(writer, sounding); - writer.writeNext(new String[] { "" }); // break line - final WstInfo wst = result.getWst(); - writeCSVWaterlevelMetadata(writer, wst); - writer.writeNext(new String[] { "" }); // break line - } +public class FlowDepthMinMaxExporter extends AbstractCommonExporter<FlowDepthMinMaxCalculationResults> { @Override - protected void writeCSVGlobalMetadata(final CSVWriter writer, final FlowDepthMinMaxCalculationResults results) { - - super.writeCSVGlobalMetadataDefaults(writer, results); - } - - /** - * Write the header, with different headings depending on whether at a - * gauge or at a location. 
- * - * @param river - * @param useTkh - */ - @Override - protected void writeCSVHeader(final CSVWriter writer, final FlowDepthMinMaxCalculationResults results, final RiverInfo river) { - log.info("FlowDepthExporter.writeCSVHeader"); - - final Collection<String> header = new ArrayList<>(11); - - header.add(msg(GeneralResultType.station.getCsvHeader())); - header.add(msgUnit(SInfoResultType.flowdepthmin.getCsvHeader(), SInfoResultType.flowdepthmin.getUnit())); - header.add(msgUnit(SInfoResultType.flowdepthmax.getCsvHeader(), SInfoResultType.flowdepthmax.getUnit())); - header.add(msgUnit(SInfoResultType.waterlevel.getCsvHeader(), river.getWstUnit())); - header.add(msgUnit(SInfoResultType.discharge.getCsvHeader(), SInfoResultType.discharge.getUnit())); - header.add(msg(SInfoResultType.waterlevelLabel.getCsvHeader())); - header.add(msg(SInfoResultType.gaugeLabel.getCsvHeader())); - header.add(msgUnit(SInfoResultType.meanBedHeight.getCsvHeader(), river.getWstUnit())); - header.add(msg(SInfoResultType.soundingLabel.getCsvHeader())); - header.add(msg(SInfoResultType.location.getCsvHeader())); - - writer.writeNext(header.toArray(new String[header.size()])); - } - - /** - * Format a row of a flow depth result into an array of string, both used by csv and pdf - * - * @param result - * - * @param useTkh - */ - @Override - protected String[] formatRow(final FlowDepthMinMaxCalculationResults results, final ResultRow row, final ExportMode mode) { - - final Collection<String> lines = new ArrayList<>(10); - - lines.add(row.exportValue(this.context, GeneralResultType.station)); - - // REMARK: null check as pdf will call this with null and in that case we show all columns (to avoid multiple jasper - // FIXME: does not work like this: we may have several pairs of min/max; so we need to look at all of them? 
- // templates) - // if (result == null || result.getMinSounding() != null) - lines.add(row.exportValue(this.context, SInfoResultType.flowdepthmin)); - // if (result == null || result.getMaxSounding() != null) - lines.add(row.exportValue(this.context, SInfoResultType.flowdepthmax)); - - lines.add(row.exportValue(this.context, SInfoResultType.waterlevel)); - lines.add(row.exportValue(this.context, SInfoResultType.discharge)); - lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel)); - lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel)); - lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight)); - lines.add(row.exportValue(this.context, SInfoResultType.soundingLabel)); - lines.add(row.exportValue(this.context, SInfoResultType.location)); - - return lines.toArray(new String[lines.size()]); - } - - @Override - protected final void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthMinMaxCalculationResults results) { - - /* general metadata */ - super.addJRMetaData(source, results); - - /* column headings */ - source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta())); - source.addMetaData("flowdepthmin_header", SInfoResultType.flowdepthmin.getPdfHeader(this.context.getMeta())); - source.addMetaData("flowdepthmax_header", SInfoResultType.flowdepthmax.getPdfHeader(this.context.getMeta())); - source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta())); - source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta())); - source.addMetaData("waterlevel_name_header", SInfoResultType.waterlevelLabel.getPdfHeader(this.context.getMeta())); - source.addMetaData("gauge_header", SInfoResultType.gaugeLabel.getPdfHeader(this.context.getMeta())); - source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta())); - 
source.addMetaData("sounding_name_header", SInfoResultType.soundingLabel.getPdfHeader(this.context.getMeta())); - source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta())); - } - - @Override - protected void writePDF(final OutputStream out) { + protected void doWritePdf(final OutputStream out, final FlowDepthMinMaxCalculationResults results) { + // TODO: Move to super try { - final MetaAndTableJRDataSource source = createJRData(this.data); + final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, null); final JasperReporter reporter = new JasperReporter(); - reporter.addReport(JASPER_FILE, source); + + for (final AbstractCalculationExportableResult<FlowDepthMinMaxCalculationResults> result : results.getResults()) { + final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource(); + getHelper().addJRMetaDataUSINFO(source, results); + + result.addReport(exportContextCSV, results, reporter, source); + } + reporter.exportPDF(out); } catch (final JRException je) { @@ -156,4 +51,36 @@ } } + @Override + protected void doWriteCSVData(final CSVWriter writer, final FlowDepthMinMaxCalculationResults results) { + // TODO: Diesen Ablauf in super? + + // TODO: move results into context? 
+ final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, writer); + + getLog().info("writeCSVData"); + + /* write as csv */ + exportContextCSV.writeCSVGlobalMetadataDefaults(results); // ggf auslagern innerhalb dieser Klasse + + // writer.writeNext(new String[] { "" }); // break line HERE to avoid redundance + + final RiverInfo river = results.getRiver(); + + final Class<?> lastResultType = null; + + for (final AbstractCalculationExportableResult<FlowDepthMinMaxCalculationResults> result : results.getResults()) { + + final Class<?> resultType = result.getClass(); + if (lastResultType == null || lastResultType != resultType) { + exportContextCSV.writeBlankLine(); + result.writeCSVHeader(exportContextCSV, results, river); + exportContextCSV.writeBlankLine(); + } else + exportContextCSV.writeCSVLine(new String[] { "#" }); + + result.writeCsv(exportContextCSV, results); + } + + } } \ No newline at end of file