Mercurial > dive4elements > river
diff artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 8938:9c02733a1b3c
Work on Sinfo-tkh - exports; using same logic for wst-description as winfo
| field    | value                           |
|----------|---------------------------------|
| author   | gernotbelger                    |
| date     | Tue, 06 Mar 2018 17:09:39 +0100 |
| parents  | 791714b92b5c                    |
| children | 5d5d482da3e9                    |
line wrap: on
line diff
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java Tue Mar 06 17:08:51 2018 +0100 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java Tue Mar 06 17:09:39 2018 +0100 @@ -36,6 +36,10 @@ // REMARK: must be public because its registered in generators.xml public class TkhExporter extends AbstractSInfoExporter<TkhResultRow, TkhCalculationResult, TkhCalculationResults> { + private static enum ExportMode { + pdf, csv + } + /** The log used in this exporter. */ private static Logger log = Logger.getLogger(TkhExporter.class); @@ -57,7 +61,7 @@ } @Override - protected final void writeCSVMeta(final CSVWriter writer, final TkhCalculationResults results) { + protected void writeCSVGlobalMetadata(final CSVWriter writer, final TkhCalculationResults results) { log.info("TkhExporter.writeCSVMeta"); final String calcModeLabel = results.getCalcModeLabel(); @@ -95,20 +99,25 @@ /** * Write the header, with different headings depending on whether at a gauge or at a location. 
*/ + @Override - protected final void writeCSVHeader(final CSVWriter writer, final RiverInfo river) { + protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) { log.info("TkhExporter.writeCSVHeader"); final Collection<String> header = new ArrayList<>(11); header.add(msg(SInfoI18NStrings.CSV_KM_HEADER)); header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM)); - header.add(msgUnit(CSV_TKHKIND_HEADER, SInfoI18NStrings.UNIT_CM)); + header.add(msg(CSV_TKHKIND_HEADER)); header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit())); header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit())); header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M)); - header.add(msg(SInfoI18NStrings.CSV_LABEL_HEADER)); + + final String descriptionHeader = results.getDescriptionHeader(); + if (descriptionHeader != null) + header.add(msg(descriptionHeader)); + header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); @@ -116,7 +125,8 @@ } @Override - protected void writeCSVResultHeader(final CSVWriter writer, final TkhCalculationResult result) { + // FIXME: rename + protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) { /* first some specific metadata */ final WstInfo wst = result.getWst(); @@ -127,21 +137,23 @@ writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel()); // "# Bezugspegel: " writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge()); - // "# Jahr/Zeitraum der Wasserspiegellage: " - writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear())); + // // "# Jahr/Zeitraum der Wasserspiegellage: " + // writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, 
Integer.toString(wst.getYear())); } @Override - protected final String[] formatCSVRow(final TkhResultRow row) { - return formatRow(row); + protected String[] formatCSVRow(final TkhCalculationResults results, final TkhResultRow row) { + return formatRow(results, row, ExportMode.csv); } /** * Format a row of a flow depth result into an array of string, both used by csv and pdf * + * @param results + * * @param useTkh */ - private String[] formatRow(final TkhResultRow row) { + private String[] formatRow(final TkhCalculationResults results, final TkhResultRow row, final ExportMode mode) { final Collection<String> lines = new ArrayList<>(11); @@ -166,7 +178,9 @@ lines.add(getQFormatter().format(roundedDischarge)); // Bezeichnung - lines.add(row.getWaterlevelLabel()); + // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates). + if (results.getDescriptionHeader() != null || mode == ExportMode.pdf) + lines.add(row.getWaterlevelLabel()); // Bezugspegel lines.add(row.getGauge()); @@ -219,13 +233,19 @@ source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT)); source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER)); source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER)); - source.addMetaData("waterlevel_name_header", msg(SInfoI18NStrings.CSV_LABEL_HEADER)); + + // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also + // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates). + final String descriptionHeader = results.getDescriptionHeader(); + final String waterlevelNameHeader = descriptionHeader == null ? 
msg(SInfoI18NStrings.CSV_LABEL_HEADER) : descriptionHeader; + source.addMetaData("waterlevel_name_header", waterlevelNameHeader); + source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); } @Override - protected final String[] formatPDFRow(final TkhResultRow row) { - return formatRow(row); + protected String[] formatPDFRow(final TkhCalculationResults results, final TkhResultRow row) { + return formatRow(results, row, ExportMode.pdf); } } \ No newline at end of file