Mercurial > dive4elements > river
comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 8948:a4f1ac81f26d
Work on SINFO-FlowDepthMinMax.
Also rework of result row stuff, in order to reduce abstraction, using result type concept
author | gernotbelger |
---|---|
date | Wed, 14 Mar 2018 14:10:32 +0100 |
parents | 5d5d482da3e9 |
children | 09e4a4909814 |
comparison
equal
deleted
inserted
replaced
8947:86650594f051 | 8948:a4f1ac81f26d |
---|---|
10 | 10 |
11 import java.util.ArrayList; | 11 import java.util.ArrayList; |
12 import java.util.Collection; | 12 import java.util.Collection; |
13 | 13 |
14 import org.apache.log4j.Logger; | 14 import org.apache.log4j.Logger; |
15 import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings; | |
16 import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter; | 15 import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter; |
16 import org.dive4elements.river.artifacts.sinfo.common.SInfoResultRow; | |
17 import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType; | |
17 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource; | 18 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource; |
18 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; | 19 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; |
19 import org.dive4elements.river.artifacts.sinfo.util.WstInfo; | 20 import org.dive4elements.river.artifacts.sinfo.util.WstInfo; |
20 import org.dive4elements.river.utils.RiverUtils; | |
21 | 21 |
22 import au.com.bytecode.opencsv.CSVWriter; | 22 import au.com.bytecode.opencsv.CSVWriter; |
23 | 23 |
24 /** | 24 /** |
25 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation. | 25 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation. |
26 * | 26 * |
27 * @author Gernot Belger | 27 * @author Gernot Belger |
28 */ | 28 */ |
29 // REMARK: must be public because its registered in generators.xml | 29 // REMARK: must be public because its registered in generators.xml |
30 public class TkhExporter extends AbstractSInfoExporter<TkhResultRow, TkhCalculationResult, TkhCalculationResults> { | 30 public class TkhExporter extends AbstractSInfoExporter<TkhCalculationResult, TkhCalculationResults> { |
31 | 31 |
32 private static enum ExportMode { | 32 private static enum ExportMode { |
33 pdf, csv | 33 pdf, csv |
34 } | 34 } |
35 | 35 |
36 /** The log used in this exporter. */ | 36 /** The log used in this exporter. */ |
37 private static Logger log = Logger.getLogger(TkhExporter.class); | 37 private static Logger log = Logger.getLogger(TkhExporter.class); |
38 | 38 |
39 private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula"; | 39 private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula"; |
40 | 40 |
41 private static final String CSV_TKH_HEADER = "sinfo.export.tkh.csv.header.tkh"; | 41 // FIXME: use getPdfHeader instead of getCsvHeader |
42 | |
43 private static final String CSV_TKHKIND_HEADER = "sinfo.export.tkh.csv.header.tkhkind"; | |
44 | |
45 private static final String PREFIX_TKH_KIND = "sinfo.export.tkh.soilkind."; | |
46 | |
47 private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short"; | 42 private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short"; |
48 | 43 |
49 private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; | 44 private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; |
50 | 45 |
51 @Override | 46 @Override |
73 protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) { | 68 protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) { |
74 log.info("TkhExporter.writeCSVHeader"); | 69 log.info("TkhExporter.writeCSVHeader"); |
75 | 70 |
76 final Collection<String> header = new ArrayList<>(11); | 71 final Collection<String> header = new ArrayList<>(11); |
77 | 72 |
78 header.add(msg(SInfoI18NStrings.CSV_KM_HEADER)); | 73 header.add(msg(SInfoResultType.station.getCsvHeader())); |
79 header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM)); | 74 header.add(msgUnit(SInfoResultType.tkh.getCsvHeader(), SInfoResultType.tkh.getUnit())); |
80 header.add(msg(CSV_TKHKIND_HEADER)); | 75 header.add(msg(SInfoResultType.soilkind.getCsvHeader())); |
81 header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit())); | 76 header.add(msgUnit(SInfoResultType.meanBedHeight.getCsvHeader(), river.getWstUnit())); |
82 | 77 header.add(msgUnit(SInfoResultType.waterlevel.getCsvHeader(), river.getWstUnit())); |
83 header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit())); | 78 header.add(msgUnit(SInfoResultType.discharge.getCsvHeader(), SInfoResultType.discharge.getUnit())); |
84 header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M)); | |
85 | 79 |
86 final String descriptionHeader = results.getDescriptionHeader(); | 80 final String descriptionHeader = results.getDescriptionHeader(); |
87 if (descriptionHeader != null) | 81 if (descriptionHeader != null) |
88 header.add(msg(descriptionHeader)); | 82 header.add(msg(descriptionHeader)); |
89 | 83 |
90 header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); | 84 header.add(msg(SInfoResultType.gaugeLabel.getCsvHeader())); |
91 header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); | 85 header.add(msg(SInfoResultType.location.getCsvHeader())); |
92 | 86 |
93 writer.writeNext(header.toArray(new String[header.size()])); | 87 writer.writeNext(header.toArray(new String[header.size()])); |
94 } | 88 } |
95 | 89 |
96 @Override | 90 @Override |
98 protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) { | 92 protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) { |
99 | 93 |
100 final WstInfo wst = result.getWst(); | 94 final WstInfo wst = result.getWst(); |
101 super.writeCSVWaterlevelMetadata(writer, wst); | 95 super.writeCSVWaterlevelMetadata(writer, wst); |
102 | 96 |
103 // TODO: | 97 // FIXME: |
104 // "# W/Pegel [cm]: " (nur bei Eingabe des Wasserstands am Pegel) | 98 // "# W/Pegel [cm]: " (nur bei Eingabe des Wasserstands am Pegel) |
105 // "# Q (m³/s): " (nur bei Eingabe des Durchflusses) | 99 // "# Q (m³/s): " (nur bei Eingabe des Durchflusses) |
106 } | 100 } |
107 | 101 |
108 @Override | 102 @Override |
109 protected String[] formatCSVRow(final TkhCalculationResults results, final TkhResultRow row) { | 103 protected String[] formatCSVRow(final TkhCalculationResults results, final TkhCalculationResult result, final SInfoResultRow row) { |
110 return formatRow(results, row, ExportMode.csv); | 104 return formatRow(results, row, ExportMode.csv); |
111 } | 105 } |
112 | 106 |
113 /** | 107 /** |
114 * Format a row of a flow depth result into an array of string, both used by csv and pdf | 108 * Format a row of a flow depth result into an array of string, both used by csv and pdf |
115 * | 109 * |
116 * @param results | 110 * @param results |
117 * | 111 * |
118 * @param useTkh | 112 * @param useTkh |
119 */ | 113 */ |
120 private String[] formatRow(final TkhCalculationResults results, final TkhResultRow row, final ExportMode mode) { | 114 private String[] formatRow(final TkhCalculationResults results, final SInfoResultRow row, final ExportMode mode) { |
121 | 115 |
122 final Collection<String> lines = new ArrayList<>(11); | 116 final Collection<String> lines = new ArrayList<>(11); |
123 | 117 |
124 // Fluss-km | 118 lines.add(row.exportValue(this.context, SInfoResultType.station)); |
125 lines.add(getKmFormatter().format(row.getStation())); | 119 lines.add(row.exportValue(this.context, SInfoResultType.tkh)); |
120 lines.add(row.exportValue(this.context, SInfoResultType.soilkind)); | |
121 lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight)); | |
122 lines.add(row.exportValue(this.context, SInfoResultType.waterlevel)); | |
123 lines.add(row.exportValue(this.context, SInfoResultType.discharge)); | |
126 | 124 |
127 // TKH [cm] | |
128 lines.add(getTkhFormatter().format(row.getTkh())); | |
129 | |
130 // Einteilung der Gewässersohle (starr/mobil) | |
131 lines.add(msg(PREFIX_TKH_KIND + row.getTkhKind().name())); | |
132 | |
133 // Mittlere Sohlhöhe [NN + m] | |
134 lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight())); | |
135 | |
136 // Wasserstand [NN + m] | |
137 lines.add(getW2Formatter().format(row.getWaterlevel())); | |
138 | |
139 // Q [m³/s] | |
140 final double discharge = row.getDischarge(); | |
141 final double roundedDischarge = RiverUtils.roundQ(discharge); | |
142 lines.add(getQFormatter().format(roundedDischarge)); | |
143 | |
144 // Bezeichnung | |
145 // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates). | 125 // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates). |
146 if (results.getDescriptionHeader() != null || mode == ExportMode.pdf) | 126 if (results.getDescriptionHeader() != null || mode == ExportMode.pdf) |
147 lines.add(row.getWaterlevelLabel()); | 127 lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel)); |
148 | 128 |
149 // Bezugspegel | 129 lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel)); |
150 lines.add(row.getGauge()); | 130 lines.add(row.exportValue(this.context, SInfoResultType.location)); |
151 | |
152 // Lage | |
153 lines.add(row.getLocation()); | |
154 | 131 |
155 return lines.toArray(new String[lines.size()]); | 132 return lines.toArray(new String[lines.size()]); |
156 } | 133 } |
157 | 134 |
158 @Override | 135 @Override |
165 | 142 |
166 /* general metadata */ | 143 /* general metadata */ |
167 super.addJRMetaDataDefaults(source, results); | 144 super.addJRMetaDataDefaults(source, results); |
168 | 145 |
169 /* column headings */ | 146 /* column headings */ |
170 source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER)); | 147 source.addMetaData("station_header", msg( SInfoResultType.station.getCsvHeader())); |
171 source.addMetaData("tkh_header", msg(CSV_TKH_HEADER)); | 148 source.addMetaData("tkh_header", msg(SInfoResultType.tkh.getCsvHeader())); |
172 source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT)); | 149 source.addMetaData("bedheight_header", msg(SInfoResultType.meanBedHeight.getCsvHeader())); |
173 source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER)); | 150 source.addMetaData("waterlevel_header", msg(SInfoResultType.waterlevel.getCsvHeader())); |
174 source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER)); | 151 source.addMetaData("discharge_header", msg(SInfoResultType.discharge.getCsvHeader())); |
175 | 152 |
176 // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also | 153 // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also |
177 // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates). | 154 // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates). |
178 final String descriptionHeader = results.getDescriptionHeader(); | 155 final String descriptionHeader = results.getDescriptionHeader(); |
179 final String waterlevelNameHeader = descriptionHeader == null ? msg(SInfoI18NStrings.CSV_LABEL_HEADER) : descriptionHeader; | 156 final String waterlevelNameHeader = descriptionHeader == null ? msg(SInfoResultType.waterlevelLabel.getCsvHeader()) : descriptionHeader; |
180 source.addMetaData("waterlevel_name_header", waterlevelNameHeader); | 157 source.addMetaData("waterlevel_name_header", waterlevelNameHeader); |
181 | 158 |
182 source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); | 159 source.addMetaData("gauge_header", msg(SInfoResultType.gaugeLabel.getCsvHeader())); |
183 source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); | 160 source.addMetaData("location_header", msg(SInfoResultType.location.getCsvHeader())); |
184 } | 161 } |
185 | 162 |
186 @Override | 163 @Override |
187 protected String[] formatPDFRow(final TkhCalculationResults results, final TkhResultRow row) { | 164 protected String[] formatPDFRow(final TkhCalculationResults results, final SInfoResultRow row) { |
188 return formatRow(results, row, ExportMode.pdf); | 165 return formatRow(results, row, ExportMode.pdf); |
189 } | 166 } |
190 } | 167 } |