Mercurial > dive4elements > river
view artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 8946:5d5d482da3e9
Implementing SINFO - FlowDepthMinMax calculation
author | gernotbelger |
---|---|
date | Tue, 13 Mar 2018 18:49:33 +0100 |
parents | 9c02733a1b3c |
children | a4f1ac81f26d |
line wrap: on
line source
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */
package org.dive4elements.river.artifacts.sinfo.tkhstate;

import java.util.ArrayList;
import java.util.Collection;

import org.apache.log4j.Logger;

import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings;
import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
import org.dive4elements.river.artifacts.sinfo.util.WstInfo;
import org.dive4elements.river.utils.RiverUtils;

import au.com.bytecode.opencsv.CSVWriter;

/**
 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation.
 *
 * @author Gernot Belger
 */
// REMARK: must be public because its registered in generators.xml
public class TkhExporter extends AbstractSInfoExporter<TkhResultRow, TkhCalculationResult, TkhCalculationResults> {

    /** Export target; controls minor per-format differences in {@link #formatRow}. */
    private enum ExportMode {
        pdf, csv
    }

    /** The log used in this exporter. */
    private static final Logger log = Logger.getLogger(TkhExporter.class);

    private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula";

    private static final String CSV_TKH_HEADER = "sinfo.export.tkh.csv.header.tkh";

    private static final String CSV_TKHKIND_HEADER = "sinfo.export.tkh.csv.header.tkhkind";

    /** i18n prefix; the soil-kind enum name is appended to build the full message key. */
    private static final String PREFIX_TKH_KIND = "sinfo.export.tkh.soilkind.";

    // NOTE(review): key lives in the flow_depth namespace — presumably shared with the
    // flow-depth exporter on purpose; confirm before renaming.
    private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";

    // NOTE(review): reuses the flow-depth jasper template (see REMARK in addJRMetaData:
    // column layout is kept identical to avoid a second template) — TODO confirm intended.
    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";

    @Override
    protected Logger getLog() {
        return log;
    }

    /**
     * Writes the global CSV metadata: the common defaults plus the calculation-formula line,
     * followed by an empty separator row.
     */
    @Override
    protected void writeCSVGlobalMetadata(final CSVWriter writer, final TkhCalculationResults results) {
        log.info("TkhExporter.writeCSVGlobalMetadata");

        super.writeCSVGlobalMetadataDefaults(writer, results);

        // "# Berechnungsgrundlage: Gleichung nach GILL (1971)"
        writeCSVMetaEntry(writer, CSV_META_CALCULATION_FORMULA);

        writer.writeNext(new String[] { "" });
    }

    /**
     * Write the header, with different headings depending on whether at a gauge or at a location.
     */
    @Override
    protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) {
        log.info("TkhExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(11);

        header.add(msg(SInfoI18NStrings.CSV_KM_HEADER));
        header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM));
        header.add(msg(CSV_TKHKIND_HEADER));
        header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));
        header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit()));
        header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M));

        // The waterlevel-description column is only present if a header text exists.
        final String descriptionHeader = results.getDescriptionHeader();
        if (descriptionHeader != null)
            header.add(msg(descriptionHeader));

        header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
        header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

    /** Writes the per-result metadata (waterlevel info) preceding each result block. */
    @Override
    // FIXME: rename
    protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) {

        final WstInfo wst = result.getWst();
        super.writeCSVWaterlevelMetadata(writer, wst);

        // TODO:
        // "# W/Pegel [cm]: " (nur bei Eingabe des Wasserstands am Pegel)
        // "# Q (m³/s): " (nur bei Eingabe des Durchflusses)
    }

    @Override
    protected String[] formatCSVRow(final TkhCalculationResults results, final TkhResultRow row) {
        return formatRow(results, row, ExportMode.csv);
    }

    /**
     * Format a row of a flow depth result into an array of string, both used by csv and pdf
     *
     * @param results
     *            calculation results, provide the optional description header
     * @param row
     *            the result row to format
     * @param mode
     *            csv or pdf; pdf always includes the waterlevel-label column
     */
    private String[] formatRow(final TkhCalculationResults results, final TkhResultRow row, final ExportMode mode) {

        final Collection<String> lines = new ArrayList<>(11);

        // Fluss-km
        lines.add(getKmFormatter().format(row.getStation()));

        // TKH [cm]
        lines.add(getTkhFormatter().format(row.getTkh()));

        // Einteilung der Gewässersohle (starr/mobil)
        lines.add(msg(PREFIX_TKH_KIND + row.getTkhKind().name()));

        // Mittlere Sohlhöhe [NN + m]
        lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight()));

        // Wasserstand [NN + m]
        lines.add(getW2Formatter().format(row.getWaterlevel()));

        // Q [m³/s] — discharge is rounded before formatting
        final double discharge = row.getDischarge();
        final double roundedDischarge = RiverUtils.roundQ(discharge);
        lines.add(getQFormatter().format(roundedDischarge));

        // Bezeichnung
        // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates).
        if (results.getDescriptionHeader() != null || mode == ExportMode.pdf)
            lines.add(row.getWaterlevelLabel());

        // Bezugspegel
        lines.add(row.getGauge());

        // Lage
        lines.add(row.getLocation());

        return lines.toArray(new String[lines.size()]);
    }

    @Override
    protected final String getJasperFile() {
        return JASPER_FILE;
    }

    /** Adds the general metadata and the column headings consumed by the jasper template. */
    @Override
    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final TkhCalculationResults results) {

        /* general metadata */
        super.addJRMetaDataDefaults(source, results);

        /* column headings */
        source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER));
        source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
        source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
        source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER));
        source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER));

        // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also
        // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates).
        final String descriptionHeader = results.getDescriptionHeader();
        final String waterlevelNameHeader = descriptionHeader == null ? msg(SInfoI18NStrings.CSV_LABEL_HEADER) : descriptionHeader;
        source.addMetaData("waterlevel_name_header", waterlevelNameHeader);

        source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
        source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
    }

    @Override
    protected String[] formatPDFRow(final TkhCalculationResults results, final TkhResultRow row) {
        return formatRow(results, row, ExportMode.pdf);
    }
}