Mercurial > dive4elements > river
view artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java @ 8854:7bbfb24e6eec
SINFO - first prototype of BArt Fließtiefen
author | gernotbelger |
---|---|
date | Thu, 18 Jan 2018 18:34:41 +0100 |
parents | |
children | 1009cab0f86b |
line wrap: on
line source
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde * Software engineering by Intevation GmbH * * This file is Free Software under the GNU AGPL (>=v3) * and comes with ABSOLUTELY NO WARRANTY! Check out the * documentation coming with Dive4Elements River for details. */ package org.dive4elements.river.artifacts.sinfo.flowdepth; import java.io.OutputStream; import java.text.DateFormat; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Locale; import java.util.Map; import org.apache.log4j.Logger; import org.dive4elements.artifacts.CallMeta; import org.dive4elements.artifacts.common.utils.Config; import org.dive4elements.river.artifacts.model.CalculationResult; import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource; import org.dive4elements.river.exports.AbstractExporter; import org.dive4elements.river.model.River; import org.dive4elements.river.utils.Formatter; import au.com.bytecode.opencsv.CSVWriter; import net.sf.jasperreports.engine.JRDataSource; import net.sf.jasperreports.engine.JRException; import net.sf.jasperreports.engine.JasperExportManager; import net.sf.jasperreports.engine.JasperFillManager; import net.sf.jasperreports.engine.JasperPrint; /** * Generates different output formats (csv, pdf) of data that resulted from a flow depths computation. 
* @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
 * @author Gernot Belger
 */
// REMARK: must be public because its registered in generators.xml
public class FlowDepthExporter extends AbstractExporter {

    /** The log used in this exporter. */
    private static final Logger log = Logger.getLogger(FlowDepthExporter.class);

    /* i18n keys of the csv column headers */
    private static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km";
    private static final String CSV_FLOWDEPTH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepth";
    private static final String CSV_FLOWDEPTHTKH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepthTkh";
    private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh";
    private static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel";
    private static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge";
    private static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label";
    private static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge";
    private static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height";
    private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding";
    private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";

    /* i18n keys of the csv meta data lines */
    private static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result";
    private static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version";
    private static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user";
    private static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation";
    private static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river";
    private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding";
    private static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel";

    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; //$NON-NLS-1$

    /** The storage that contains the current calculation result; null until a valid result was added. */
    private FlowDepthCalculationResults data = null;

    /* Lazily created number formatters; always access them via their getters below. */
    private NumberFormat meanBedHeightFormatter;

    private NumberFormat tkhFormatter;

    private NumberFormat flowDepthFormatter;

    /** Formatter for the mean bed height column. */
    private NumberFormat getMeanBedHeightFormatter() {
        if (meanBedHeightFormatter == null)
            // FIXME: check if this is right
            meanBedHeightFormatter = Formatter.getMiddleBedHeightHeight(context);
        return meanBedHeightFormatter;
    }

    /** Formatter for the transport body height (TKH) column. */
    private NumberFormat getTkhFormatter() {
        if (tkhFormatter == null)
            // FIXME: check if this is right, probably not, we need one digit
            tkhFormatter = Formatter.getWaterlevelW(context);
        return tkhFormatter;
    }

    /** Formatter for the flow depth columns. */
    private NumberFormat getFlowDepthFormatter() {
        if (flowDepthFormatter == null)
            // FIXME: check if this is right
            flowDepthFormatter = Formatter.getMeterFormat(context);
        return flowDepthFormatter;
    }

    /**
     * Stores the calculation result this exporter will write.
     *
     * @param d expected to be a {@link CalculationResult} whose payload is a {@link FlowDepthCalculationResults}
     */
    @Override
    protected void addData(final Object d) {
        /* reset */
        data = null;

        if (d instanceof CalculationResult) {
            final Object dat = ((CalculationResult) d).getData();
            /* guard the cast: a failed calculation may carry no or foreign payload */
            if (dat instanceof FlowDepthCalculationResults)
                data = (FlowDepthCalculationResults) dat;
        }
    }

    /** Writes the complete csv output: meta data block, column header and one row per result station. */
    @Override
    protected void writeCSVData(final CSVWriter writer) {
        log.info("FlowDepthExporter.writeCSVData");

        /* fetch calculation results */
        final FlowDepthCalculationResults results = data;
        if (results == null) {
            /* no (valid) result was added; write nothing instead of failing with a NullPointerException */
            log.warn("writeCSVData without calculation results");
            return;
        }

        /* write as csv */
        final boolean useTkh = results.isUseTkh();

        writeCSVMeta(writer, results);
        writeCSVHeader(writer, useTkh);

        for (final FlowDepthCalculationResult result : results.getResults())
            writeCSVFlowDepthResult(writer, result, useTkh);
    }

    /** Writes all rows of one calculation result. */
    private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result, final boolean useTkh) {
        final Collection<FlowDepthRow> rows = result.getRows();
        for (final FlowDepthRow flowDepthRow : rows)
            writeCSVFlowDepthRow(writer, flowDepthRow, useTkh);
    }

    /**
     * Writes the meta data block of the csv file, as specified in "Workflow zur Berechnung der Fliesstiefe.pdf".
     * Several entries are not yet available and are marked FIXME below.
     */
    private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
        log.info("FlowDepthExporter.writeCSVMeta");

        final River river = results.getRiver();

        // "##ERGEBNISAUSGABE - <Name des Gewässers> - Fließtiefe"
        writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, river.getName());

        // "# FLYS-Version: "
        // FIXME: fetch the real version
        final String flysVersion = "unbekannt";
        writeCSVMetaEntry(writer, CSV_META_VERSION, flysVersion);

        // "# Bearbeiter: "
        // FIXME: fetch the real user
        final String user = "unbekannt";
        writeCSVMetaEntry(writer, CSV_META_USER, user);

        // "# Datum der Erstellung: "
        final Locale locale = Resources.getLocale(context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        writeCSVMetaEntry(writer, CSV_META_CREATION, df.format(new Date()));

        // "# Gewässer: "
        writeCSVMetaEntry(writer, CSV_META_RIVER, river.getName());

        // FIXME: "# Höhensystem des Flusses: "

        // FIXME: "# Ort/Bereich (km): "
        // TODO: unclear, only one range is entered; something like:
        // final RangeAccess rangeAccess = new RangeAccess(flys);
        // final double[] kms = rangeAccess.getKmRange();
        // writeCSVMetaEntry(writer, CSV_META_RANGE, kms[0], kms[kms.length - 1]);

        // "##METADATEN PEILUNG"
        writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING);
        // FIXME: "# Jahr der Peilung: "
        // FIXME: "# Aufnahmeart: "
        // FIXME: "# Lagesystem: "
        // FIXME: "# Höhensystem: "
        // FIXME: "# ursprüngliches Höhensystem: "

        // "##METADATEN WASSERSPIEGELLAGE"
        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL);
        // FIXME: "# Bezeichnung der Wasserspiegellage: "
        // FIXME: "# Höhensystem der Wasserspiegellage: "
        // FIXME: "# Auswerter: "
        // FIXME: "# Bezugspegel: "
        // FIXME: "# Jahr/Zeitraum der Wasserspiegellage: "
        // "# W/Pegel [cm]: " (only if W at gauge was entered) // TODO: unclear, no W is entered
        // "# Q (m³/s): " (only if Q was entered)              // TODO: unclear, no Q is entered

        /* empty line separates meta data from the table */
        writer.writeNext(new String[] { "" });
    }

    /** Writes one localized meta data line. */
    private void writeCSVMetaEntry(final CSVWriter writer, final String message, final Object... messageArgs) {
        final CallMeta meta = context.getMeta();
        writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) });
    }

    /**
     * Write the column header line.
     *
     * @param useTkh if true, the two extra columns 'flow depth with tkh' and 'tkh' are added
     */
    private void writeCSVHeader(final CSVWriter writer, final boolean useTkh) {
        log.info("FlowDepthExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(11);

        header.add(msg(CSV_KM_HEADER));
        header.add(msg(CSV_FLOWDEPTH_HEADER));
        if (useTkh) {
            header.add(msg(CSV_FLOWDEPTHTKH_HEADER));
            header.add(msg(CSV_TKH_HEADER));
        }
        header.add(msg(CSV_WATERLEVEL_HEADER));
        header.add(msg(CSV_DISCHARGE_HEADER));
        header.add(msg(CSV_LABEL_HEADER));
        header.add(msg(CSV_GAUGE_HEADER));
        header.add(msg(CSV_MEAN_BED_HEIGHT_HEADER));
        header.add(msg(CSV_SOUNDING_HEADER));
        header.add(msg(CSV_LOCATION_HEADER));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

    /**
     * Formats one row of a flow depth result into an array of strings; used by both the csv and the pdf export.
     *
     * @param useTkh if true, the two extra tkh columns are included
     */
    private String[] formatFlowDepthRow(final FlowDepthRow row, final boolean useTkh) {
        final Collection<String> lines = new ArrayList<>(11);

        // Fluss-km
        lines.add(getKmFormatter().format(row.getStation()));

        // Fließtiefe [m]
        lines.add(getFlowDepthFormatter().format(row.getFlowDepth()));

        if (useTkh) {
            // Fließtiefe mit TKH [m]
            lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh()));

            // TKH [cm]
            lines.add(getTkhFormatter().format(row.getTkh()));
        }

        // Wasserstand [NN + m]
        lines.add(getWFormatter().format(row.getWaterlevel()));

        // Q [m³/s]
        lines.add(getQFormatter().format(row.getDischarge()));

        // Bezeichnung
        lines.add(row.getWaterlevelLabel());

        // Bezugspegel
        lines.add(row.getGauge());

        // Mittlere Sohlhöhe [NN + m]
        lines.add(getMeanBedHeightFormatter().format(row.getMeanBedHeight()));

        // Peilung/Epoche
        lines.add(row.getSoundageLabel());

        // Lage
        lines.add(row.getLocation());

        return lines.toArray(new String[lines.size()]);
    }

    /** Writes one csv data row. */
    private void writeCSVFlowDepthRow(final CSVWriter writer, final FlowDepthRow row, final boolean useTkh) {
        log.debug("FlowDepthExporter.writeCSVFlowDepthRow");

        final String[] formattedRow = formatFlowDepthRow(row, useTkh);
        writer.writeNext(formattedRow);
    }

    /** Fills the jasper report template and streams the resulting pdf to the given output stream. */
    @Override
    protected void writePDF(final OutputStream outStream) {
        log.debug("write PDF");

        final JRDataSource source = createJRData();

        final String confPath = Config.getConfigDirectory().toString();

        // FIXME: distinguish between with and without tkh: we need two jasper reports!

        final Map<String, Object> parameters = new HashMap<>();
        parameters.put("ReportTitle", "Exported Data");

        try {
            final JasperPrint print = JasperFillManager.fillReport(confPath + JASPER_FILE, parameters, source);
            JasperExportManager.exportReportToPdfStream(print, outStream);
        }
        catch (final JRException je) {
            log.warn("Error generating PDF Report!", je);
        }
    }

    /** Builds the jasper data source from the current calculation results; empty if no results were added. */
    private JRDataSource createJRData() {
        final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();

        /* fetch calculation results */
        final FlowDepthCalculationResults results = data;
        if (results == null) {
            /* no (valid) result was added; return an empty source instead of failing with a NullPointerException */
            log.warn("createJRData without calculation results");
            return source;
        }

        addJRMetaData(source, results);

        final boolean useTkh = results.isUseTkh();
        for (final FlowDepthCalculationResult result : results.getResults())
            addJRTableData(source, result, useTkh);

        return source;
    }

    /** Adds the meta data entries to the jasper data source; mirrors {@link #writeCSVMeta}. */
    private void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) {

        // FIXME: fetch the real version, see CSV_META_VERSION
        final String flysVersion = "unbekannt";
        source.addMetaData("version", flysVersion);

        // FIXME: fetch the real user, see CSV_META_USER
        final String user = "unbekannt";
        source.addMetaData("user", user);

        final Locale locale = Resources.getLocale(context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        source.addMetaData("date", df.format(new Date()));

        // see CSV_META_RIVER
        source.addMetaData("river", results.getRiver().getName());

        // FIXME: real km range, unclear where it comes from; something like:
        // final RangeAccess rangeAccess = new RangeAccess(flys);
        // final double[] kms = rangeAccess.getKmRange();
        // source.addMetaData("range", kmf.format(kms[0]) + " - " + kmf.format(kms[kms.length - 1]));
        source.addMetaData("range", "FIXME");
    }

    /** Adds all rows of one calculation result to the jasper data source. */
    private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) {
        final Collection<FlowDepthRow> rows = result.getRows();
        for (final FlowDepthRow row : rows) {
            final String[] formattedRow = formatFlowDepthRow(row, useTkh);
            source.addData(formattedRow);
        }
    }
}