Mercurial > dive4elements > river
comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java @ 8928:791714b92b5c
Basically implemented SINFO-Tkh Exports
author | gernotbelger |
---|---|
date | Thu, 01 Mar 2018 18:49:34 +0100 |
parents | a66f2a7c4f84 |
children | 9c02733a1b3c |
comparison
equal
deleted
inserted
replaced
8927:04ad2cfce559 | 8928:791714b92b5c |
---|---|
6 * documentation coming with Dive4Elements River for details. | 6 * documentation coming with Dive4Elements River for details. |
7 */ | 7 */ |
8 | 8 |
9 package org.dive4elements.river.artifacts.sinfo.flowdepth; | 9 package org.dive4elements.river.artifacts.sinfo.flowdepth; |
10 | 10 |
11 import java.io.OutputStream; | |
12 import java.text.DateFormat; | 11 import java.text.DateFormat; |
13 import java.text.NumberFormat; | 12 import java.text.NumberFormat; |
14 import java.util.ArrayList; | 13 import java.util.ArrayList; |
15 import java.util.Collection; | 14 import java.util.Collection; |
16 import java.util.Date; | 15 import java.util.Date; |
17 import java.util.HashMap; | |
18 import java.util.Locale; | 16 import java.util.Locale; |
19 import java.util.Map; | |
20 | 17 |
21 import org.apache.commons.lang.StringUtils; | 18 import org.apache.commons.lang.StringUtils; |
22 import org.apache.commons.lang.math.DoubleRange; | 19 import org.apache.commons.lang.math.DoubleRange; |
23 import org.apache.log4j.Logger; | 20 import org.apache.log4j.Logger; |
24 import org.dive4elements.artifacts.CallMeta; | |
25 import org.dive4elements.artifacts.common.utils.Config; | |
26 import org.dive4elements.river.FLYS; | 21 import org.dive4elements.river.FLYS; |
27 import org.dive4elements.river.artifacts.model.CalculationResult; | |
28 import org.dive4elements.river.artifacts.resources.Resources; | 22 import org.dive4elements.river.artifacts.resources.Resources; |
23 import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings; | |
24 import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter; | |
29 import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo; | 25 import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo; |
30 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource; | 26 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource; |
31 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; | 27 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; |
32 import org.dive4elements.river.artifacts.sinfo.util.WstInfo; | 28 import org.dive4elements.river.artifacts.sinfo.util.WstInfo; |
33 import org.dive4elements.river.exports.AbstractExporter; | |
34 import org.dive4elements.river.utils.RiverUtils; | 29 import org.dive4elements.river.utils.RiverUtils; |
35 | 30 |
36 import au.com.bytecode.opencsv.CSVWriter; | 31 import au.com.bytecode.opencsv.CSVWriter; |
37 import net.sf.jasperreports.engine.JRDataSource; | |
38 import net.sf.jasperreports.engine.JRException; | |
39 import net.sf.jasperreports.engine.JasperExportManager; | |
40 import net.sf.jasperreports.engine.JasperFillManager; | |
41 import net.sf.jasperreports.engine.JasperPrint; | |
42 | 32 |
43 /** | 33 /** |
44 * Generates different output formats (csv, pdf) of data that resulted from a flow depths computation. | 34 * Generates different output formats (csv, pdf) of data that resulted from a flow depths computation. |
45 * | 35 * |
46 * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a> | 36 * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a> |
47 * @author Gernot Belger | 37 * @author Gernot Belger |
48 */ | 38 */ |
49 // REMARK: must be public because its registered in generators.xml | 39 // REMARK: must be public because its registered in generators.xml |
50 public class FlowDepthExporter extends AbstractExporter { | 40 public class FlowDepthExporter extends AbstractSInfoExporter<FlowDepthRow, FlowDepthCalculationResult, FlowDepthCalculationResults> { |
51 | 41 |
52 /** The log used in this exporter. */ | 42 /** The log used in this exporter. */ |
53 private static Logger log = Logger.getLogger(FlowDepthExporter.class); | 43 private static Logger log = Logger.getLogger(FlowDepthExporter.class); |
54 | 44 |
55 private static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km"; | |
56 private static final String CSV_FLOWDEPTH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepth"; | 45 private static final String CSV_FLOWDEPTH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepth"; |
57 private static final String CSV_FLOWDEPTHTKH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepthTkh"; | 46 private static final String CSV_FLOWDEPTHTKH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepthTkh"; |
58 private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh"; | 47 private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh"; |
59 private static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel"; | 48 |
60 private static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge"; | |
61 private static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label"; | |
62 private static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge"; | |
63 private static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height"; | |
64 private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short"; | 49 private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short"; |
65 private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding"; | 50 private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding"; |
66 private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location"; | |
67 | |
68 private static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result"; | |
69 | |
70 private static final String CSV_META_HEADER_RESULT_LABEL = "sinfo.export.flow_depth.csv.meta.header.result.label"; | |
71 | |
72 private static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version"; | |
73 | |
74 private static final String CSV_META_VERSION_LABEL = "sinfo.export.flow_depth.csv.meta.version.label"; | |
75 | |
76 private static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user"; | |
77 | |
78 private static final String CSV_META_USER_LABEL = "sinfo.export.flow_depth.csv.meta.user.label"; | |
79 | |
80 private static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation"; | |
81 | |
82 private static final String CSV_META_CREATION_LABEL = "sinfo.export.flow_depth.csv.meta.creation.label"; | |
83 | |
84 private static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river"; | |
85 | |
86 private static final String CSV_META_RIVER_LABEL = "sinfo.export.flow_depth.csv.meta.river.label"; | |
87 | 51 |
88 private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding"; | 52 private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding"; |
89 | 53 |
90 private static final String CSV_META_HEADER_SOUNDING_YEAR = "sinfo.export.flow_depth.csv.meta.header.sounding.year"; | 54 private static final String CSV_META_HEADER_SOUNDING_YEAR = "sinfo.export.flow_depth.csv.meta.header.sounding.year"; |
91 | 55 |
97 | 61 |
98 private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel"; | 62 private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel"; |
99 | 63 |
100 private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original"; | 64 private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original"; |
101 | 65 |
102 private static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel"; | |
103 | |
104 private static final String CSV_META_HEADER_WATERLEVEL_NAME = "sinfo.export.flow_depth.csv.meta.header.waterlevel.name"; | |
105 | |
106 private static final String CSV_META_HEADER_WATERLEVEL_GAUGE = "sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge"; | |
107 | |
108 private static final String CSV_META_HEADER_WATERLEVEL_YEAR = "sinfo.export.flow_depth.csv.meta.header.waterlevel.year"; | |
109 | |
110 private static final String CSV_META_RANGE = "sinfo.export.flow_depth.csv.meta.range"; | |
111 | |
112 private static final String CSV_META_RANGE_LABEL = "sinfo.export.flow_depth.csv.meta.range.label"; | |
113 | |
114 private static final String CSV_META_HEIGHT_UNIT_RIVER = "sinfo.export.flow_depth.csv.meta.height_unit.river"; | |
115 | |
116 private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; | 66 private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; |
117 | 67 |
118 private static final String UNIT_M = "m"; | 68 @Override |
119 | 69 protected Logger getLog() { |
120 private static final String UNIT_CM = "cm"; | 70 return log; |
121 | 71 } |
122 private static final String UNIT_CUBIC_M = "m³/s"; | 72 |
123 | 73 @Override |
124 /** The storage that contains the current calculation result. */ | 74 protected void writeCSVResultHeader(final CSVWriter writer, final FlowDepthCalculationResult result) { |
125 private FlowDepthCalculationResults data = null; | |
126 | |
127 /** | |
128 * Formats header with unit | |
129 */ | |
130 private String msgUnit(final String key, final String unit) { | |
131 | |
132 final String msg = msg(key); | |
133 return String.format("%s [%s]", msg, unit); | |
134 } | |
135 | |
136 @Override | |
137 protected void addData(final Object d) { | |
138 /* reset */ | |
139 this.data = null; | |
140 | |
141 if (d instanceof CalculationResult) { | |
142 | |
143 final Object dat = ((CalculationResult) d).getData(); | |
144 if (dat != null) | |
145 this.data = (FlowDepthCalculationResults) dat; | |
146 } | |
147 } | |
148 | |
149 @Override | |
150 protected void writeCSVData(final CSVWriter writer) { | |
151 log.info("FlowDepthExporter.writeCSVData"); | |
152 | |
153 /* fetch calculation results */ | |
154 final FlowDepthCalculationResults results = this.data; | |
155 | |
156 final boolean useTkh = results.isUseTkh(); | |
157 final RiverInfo river = results.getRiver(); | |
158 | |
159 /* write as csv */ | |
160 writeCSVMeta(writer, results); | |
161 writeCSVHeader(writer, river, useTkh); | |
162 | |
163 for (final FlowDepthCalculationResult result : results.getResults()) { | |
164 writeCSVFlowDepthResult(writer, result, useTkh); | |
165 } | |
166 } | |
167 | |
168 private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result, final boolean useTkh) { | |
169 | 75 |
170 /* first some specific metadata */ | 76 /* first some specific metadata */ |
171 final BedHeightInfo sounding = result.getSounding(); | 77 final BedHeightInfo sounding = result.getSounding(); |
172 final WstInfo wst = result.getWst(); | 78 final WstInfo wst = result.getWst(); |
173 | 79 |
186 writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL, sounding.getCurElevationModelUnit()); | 92 writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL, sounding.getCurElevationModelUnit()); |
187 // "# ursprüngliches Höhensystem: " | 93 // "# ursprüngliches Höhensystem: " |
188 writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL, sounding.getOldElevationModelUnit()); | 94 writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL, sounding.getOldElevationModelUnit()); |
189 | 95 |
190 // "##METADATEN WASSERSPIEGELLAGE" | 96 // "##METADATEN WASSERSPIEGELLAGE" |
191 writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL); | 97 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL); |
192 // "# Bezeichnung der Wasserspiegellage: " | 98 // "# Bezeichnung der Wasserspiegellage: " |
193 writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel()); | 99 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel()); |
194 // "# Bezugspegel: " | 100 // "# Bezugspegel: " |
195 writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge()); | 101 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge()); |
196 // "# Jahr/Zeitraum der Wasserspiegellage: " | 102 // "# Jahr/Zeitraum der Wasserspiegellage: " |
197 writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear())); | 103 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear())); |
198 | 104 } |
199 /* now the value rows */ | 105 |
200 final Collection<FlowDepthRow> rows = result.getRows(); | 106 @Override |
201 for (final FlowDepthRow flowDepthRow : rows) { | 107 protected final void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) { |
202 writeCSVFlowDepthRow(writer, flowDepthRow, useTkh); | |
203 } | |
204 } | |
205 | |
206 private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) { | |
207 log.info("FlowDepthExporter.writeCSVMeta"); | 108 log.info("FlowDepthExporter.writeCSVMeta"); |
208 | 109 |
209 final String calcModeLabel = results.getCalcModeLabel(); | 110 final String calcModeLabel = results.getCalcModeLabel(); |
210 final RiverInfo river = results.getRiver(); | 111 final RiverInfo river = results.getRiver(); |
211 writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, msg(CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel); | 112 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_RESULT, msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel); |
212 | 113 |
213 // "# FLYS-Version: " | 114 // "# FLYS-Version: " |
214 writeCSVMetaEntry(writer, CSV_META_VERSION, msg(CSV_META_VERSION_LABEL), FLYS.VERSION); | 115 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_VERSION, msg(SInfoI18NStrings.CSV_META_VERSION_LABEL), FLYS.VERSION); |
215 | 116 |
216 // "# Bearbeiter: " | 117 // "# Bearbeiter: " |
217 writeCSVMetaEntry(writer, CSV_META_USER, msg(CSV_META_USER_LABEL), results.getUser()); | 118 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_USER, msg(SInfoI18NStrings.CSV_META_USER_LABEL), results.getUser()); |
218 | 119 |
219 // "# Datum der Erstellung: " | 120 // "# Datum der Erstellung: " |
220 final Locale locale = Resources.getLocale(this.context.getMeta()); | 121 final Locale locale = Resources.getLocale(this.context.getMeta()); |
221 final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); | 122 final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); |
222 writeCSVMetaEntry(writer, CSV_META_CREATION, msg(CSV_META_CREATION_LABEL), df.format(new Date())); | 123 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_CREATION, msg(SInfoI18NStrings.CSV_META_CREATION_LABEL), df.format(new Date())); |
223 | 124 |
224 // "# Gewässer: " | 125 // "# Gewässer: " |
225 writeCSVMetaEntry(writer, CSV_META_RIVER, msg(CSV_META_RIVER_LABEL), river.getName()); | 126 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RIVER, msg(SInfoI18NStrings.CSV_META_RIVER_LABEL), river.getName()); |
226 | 127 |
227 // "# Höhensystem des Flusses: " | 128 // "# Höhensystem des Flusses: " |
228 writeCSVMetaEntry(writer, CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit()); | 129 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit()); |
229 | 130 |
230 // "# Ort/Bereich (km): " | 131 // "# Ort/Bereich (km): " |
231 final DoubleRange calcRange = results.getCalcRange(); | 132 final DoubleRange calcRange = results.getCalcRange(); |
232 writeCSVMetaEntry(writer, CSV_META_RANGE, msg(CSV_META_RANGE_LABEL), getKmFormatter().format(calcRange.getMinimumDouble()), | 133 writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RANGE, msg(SInfoI18NStrings.CSV_META_RANGE_LABEL), |
134 getKmFormatter().format(calcRange.getMinimumDouble()), | |
233 getKmFormatter().format(calcRange.getMaximumDouble())); | 135 getKmFormatter().format(calcRange.getMaximumDouble())); |
234 | 136 |
235 writer.writeNext(new String[] { "" }); | 137 writer.writeNext(new String[] { "" }); |
236 } | |
237 | |
238 private void writeCSVMetaEntry(final CSVWriter writer, final String message, final Object... messageArgs) { | |
239 | |
240 final CallMeta meta = this.context.getMeta(); | |
241 | |
242 writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) }); | |
243 } | 138 } |
244 | 139 |
245 /** | 140 /** |
246 * Write the header, with different headings depending on whether at a | 141 * Write the header, with different headings depending on whether at a |
247 * gauge or at a location. | 142 * gauge or at a location. |
248 * | 143 * |
249 * @param river | 144 * @param river |
250 * @param useTkh | 145 * @param useTkh |
251 */ | 146 */ |
252 private void writeCSVHeader(final CSVWriter writer, final RiverInfo river, final boolean useTkh) { | 147 @Override |
148 protected final void writeCSVHeader(final CSVWriter writer, final RiverInfo river) { | |
253 log.info("FlowDepthExporter.writeCSVHeader"); | 149 log.info("FlowDepthExporter.writeCSVHeader"); |
254 | 150 |
255 final Collection<String> header = new ArrayList<>(11); | 151 final Collection<String> header = new ArrayList<>(11); |
256 | 152 |
257 header.add(msg(CSV_KM_HEADER)); | 153 header.add(msg(SInfoI18NStrings.CSV_KM_HEADER)); |
258 header.add(msgUnit(CSV_FLOWDEPTH_HEADER, UNIT_M)); | 154 header.add(msgUnit(CSV_FLOWDEPTH_HEADER, SInfoI18NStrings.UNIT_M)); |
259 if (useTkh) { | 155 if (getData().isUseTkh()) { |
260 header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, UNIT_M)); | 156 header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, SInfoI18NStrings.UNIT_M)); |
261 header.add(msgUnit(CSV_TKH_HEADER, UNIT_CM)); | 157 header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM)); |
262 } | 158 } |
263 | 159 |
264 header.add(msgUnit(CSV_WATERLEVEL_HEADER, river.getWstUnit())); | 160 header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit())); |
265 header.add(msgUnit(CSV_DISCHARGE_HEADER, UNIT_CUBIC_M)); | 161 header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M)); |
266 header.add(msg(CSV_LABEL_HEADER)); | 162 header.add(msg(SInfoI18NStrings.CSV_LABEL_HEADER)); |
267 header.add(msg(CSV_GAUGE_HEADER)); | 163 header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); |
268 header.add(msgUnit(CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit())); | 164 header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit())); |
269 header.add(msg(CSV_SOUNDING_HEADER)); | 165 header.add(msg(CSV_SOUNDING_HEADER)); |
270 header.add(msg(CSV_LOCATION_HEADER)); | 166 header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); |
271 | 167 |
272 writer.writeNext(header.toArray(new String[header.size()])); | 168 writer.writeNext(header.toArray(new String[header.size()])); |
169 } | |
170 | |
171 @Override | |
172 protected final String[] formatCSVRow(final FlowDepthRow row) { | |
173 return formatFlowDepthRow(row); | |
273 } | 174 } |
274 | 175 |
275 /** | 176 /** |
276 * Format a row of a flow depth result into an array of string, both used by csv and pdf | 177 * Format a row of a flow depth result into an array of string, both used by csv and pdf |
277 * | 178 * |
278 * @param useTkh | 179 * @param useTkh |
279 */ | 180 */ |
280 private String[] formatFlowDepthRow(final FlowDepthRow row, final boolean useTkh) { | 181 private String[] formatFlowDepthRow(final FlowDepthRow row) { |
281 | 182 |
282 final Collection<String> lines = new ArrayList<>(11); | 183 final Collection<String> lines = new ArrayList<>(11); |
283 | 184 |
284 // Fluss-km | 185 // Fluss-km |
285 lines.add(getKmFormatter().format(row.getStation())); | 186 lines.add(getKmFormatter().format(row.getStation())); |
286 | 187 |
287 // Fließtiefe [m] | 188 // Fließtiefe [m] |
288 lines.add(getFlowDepthFormatter().format(row.getFlowDepth())); | 189 lines.add(getFlowDepthFormatter().format(row.getFlowDepth())); |
289 | 190 |
290 if (useTkh) { | 191 if (getData().isUseTkh()) { |
291 // Fließtiefe mit TKH [m] | 192 // Fließtiefe mit TKH [m] |
292 lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh())); | 193 lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh())); |
293 | 194 |
294 // TKH [cm] | 195 // TKH [cm] |
295 lines.add(getTkhFormatter().format(row.getTkh())); | 196 lines.add(getTkhFormatter().format(row.getTkh())); |
323 lines.add(row.getLocation()); | 224 lines.add(row.getLocation()); |
324 | 225 |
325 return lines.toArray(new String[lines.size()]); | 226 return lines.toArray(new String[lines.size()]); |
326 } | 227 } |
327 | 228 |
328 /** | 229 @Override |
329 * Write "rows" of csv data from wqkms with writer. | 230 protected final String getJasperFile() { |
330 * | 231 return JASPER_FILE; |
331 * @param useTkh | 232 } |
332 */ | 233 |
333 private void writeCSVFlowDepthRow(final CSVWriter writer, final FlowDepthRow row, final boolean useTkh) { | 234 @Override |
334 log.debug("FlowDepthExporter.writeCSVFlowDepthRow"); | 235 protected final void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) { |
335 | |
336 final String[] formattedRow = formatFlowDepthRow(row, useTkh); | |
337 writer.writeNext(formattedRow); | |
338 } | |
339 | |
340 @Override | |
341 protected void writePDF(final OutputStream outStream) { | |
342 log.debug("write PDF"); | |
343 | |
344 final JRDataSource source = createJRData(); | |
345 | |
346 final String confPath = Config.getConfigDirectory().toString(); | |
347 | |
348 // FIXME: distinguish between with and without tkh: we need two jasper reports! | |
349 | |
350 final Map<String, Object> parameters = new HashMap<>(); | |
351 parameters.put("ReportTitle", "Exported Data"); | |
352 try { | |
353 final JasperPrint print = JasperFillManager.fillReport(confPath + JASPER_FILE, parameters, source); | |
354 JasperExportManager.exportReportToPdfStream(print, outStream); | |
355 } | |
356 catch (final JRException je) { | |
357 log.warn("Error generating PDF Report!", je); | |
358 } | |
359 } | |
360 | |
361 private JRDataSource createJRData() { | |
362 | |
363 /* fetch calculation results */ | |
364 final FlowDepthCalculationResults results = this.data; | |
365 | |
366 final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource(); | |
367 | |
368 addJRMetaData(source, results); | |
369 | |
370 final boolean useTkh = results.isUseTkh(); | |
371 | |
372 for (final FlowDepthCalculationResult result : results.getResults()) { | |
373 addJRTableData(source, result, useTkh); | |
374 } | |
375 | |
376 return source; | |
377 } | |
378 | |
379 private void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) { | |
380 | 236 |
381 final RiverInfo river = results.getRiver(); | 237 final RiverInfo river = results.getRiver(); |
382 final String wstUnitName = river.getWstUnit(); | 238 final String wstUnitName = river.getWstUnit(); |
383 | 239 |
384 /* general metadata */ | 240 /* general metadata */ |
385 source.addMetaData("header", msg(CSV_META_HEADER_RESULT_LABEL)); | 241 source.addMetaData("header", msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL)); |
386 source.addMetaData("calcMode", results.getCalcModeLabel()); | 242 source.addMetaData("calcMode", results.getCalcModeLabel()); |
387 | 243 |
388 source.addMetaData("version_label", msg(CSV_META_VERSION_LABEL)); | 244 source.addMetaData("version_label", msg(SInfoI18NStrings.CSV_META_VERSION_LABEL)); |
389 source.addMetaData("version", FLYS.VERSION); | 245 source.addMetaData("version", FLYS.VERSION); |
390 | 246 |
391 source.addMetaData("user_label", msg(CSV_META_USER_LABEL)); | 247 source.addMetaData("user_label", msg(SInfoI18NStrings.CSV_META_USER_LABEL)); |
392 source.addMetaData("user", results.getUser()); | 248 source.addMetaData("user", results.getUser()); |
393 | 249 |
394 final Locale locale = Resources.getLocale(this.context.getMeta()); | 250 final Locale locale = Resources.getLocale(this.context.getMeta()); |
395 final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); | 251 final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); |
396 source.addMetaData("date_label", msg(CSV_META_CREATION_LABEL)); | 252 source.addMetaData("date_label", msg(SInfoI18NStrings.CSV_META_CREATION_LABEL)); |
397 source.addMetaData("date", df.format(new Date())); | 253 source.addMetaData("date", df.format(new Date())); |
398 | 254 |
399 source.addMetaData("river_label", msg(CSV_META_RIVER_LABEL)); | 255 source.addMetaData("river_label", msg(SInfoI18NStrings.CSV_META_RIVER_LABEL)); |
400 source.addMetaData("river", river.getName()); | 256 source.addMetaData("river", river.getName()); |
401 | 257 |
402 final DoubleRange calcRange = results.getCalcRange(); | 258 final DoubleRange calcRange = results.getCalcRange(); |
403 final NumberFormat kmFormatter = getKmFormatter(); | 259 final NumberFormat kmFormatter = getKmFormatter(); |
404 final String rangeValue = String.format("%s - %s", kmFormatter.format(calcRange.getMinimumDouble()), kmFormatter.format(calcRange.getMaximumDouble())); | 260 final String rangeValue = String.format("%s - %s", kmFormatter.format(calcRange.getMinimumDouble()), kmFormatter.format(calcRange.getMaximumDouble())); |
405 source.addMetaData("range_label", msg(CSV_META_RANGE_LABEL)); | 261 source.addMetaData("range_label", msg(SInfoI18NStrings.CSV_META_RANGE_LABEL)); |
406 source.addMetaData("range", rangeValue); | 262 source.addMetaData("range", rangeValue); |
407 | 263 |
408 /* column headings */ | 264 /* column headings */ |
409 source.addMetaData("station_header", msg(CSV_KM_HEADER)); | 265 source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER)); |
410 source.addMetaData("flowdepth_header", msg(CSV_FLOWDEPTH_HEADER)); | 266 source.addMetaData("flowdepth_header", msg(CSV_FLOWDEPTH_HEADER)); |
411 source.addMetaData("flowdepth_tkh_header", msg(CSV_FLOWDEPTHTKH_HEADER)); | 267 source.addMetaData("flowdepth_tkh_header", msg(CSV_FLOWDEPTHTKH_HEADER)); |
412 source.addMetaData("tkh_header", msg(CSV_TKH_HEADER)); | 268 source.addMetaData("tkh_header", msg(CSV_TKH_HEADER)); |
413 source.addMetaData("waterlevel_header", msg(CSV_WATERLEVEL_HEADER)); | 269 source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER)); |
414 source.addMetaData("river_unit", wstUnitName); | 270 source.addMetaData("river_unit", wstUnitName); |
415 source.addMetaData("discharge_header", msg(CSV_DISCHARGE_HEADER)); | 271 source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER)); |
416 source.addMetaData("waterlevel_name_header", msg(CSV_LABEL_HEADER)); | 272 source.addMetaData("waterlevel_name_header", msg(SInfoI18NStrings.CSV_LABEL_HEADER)); |
417 source.addMetaData("gauge_header", msg(CSV_GAUGE_HEADER)); | 273 source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER)); |
418 source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT)); | 274 source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT)); |
419 source.addMetaData("sounding_name_header", msg(CSV_SOUNDING_HEADER)); | 275 source.addMetaData("sounding_name_header", msg(CSV_SOUNDING_HEADER)); |
420 source.addMetaData("location_header", msg(CSV_LOCATION_HEADER)); | 276 source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER)); |
421 } | 277 } |
422 | 278 |
423 private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) { | 279 @Override |
424 | 280 protected final String[] formatPDFRow(final FlowDepthRow row) { |
425 final Collection<FlowDepthRow> rows = result.getRows(); | 281 return formatFlowDepthRow(row); |
426 | |
427 for (final FlowDepthRow row : rows) { | |
428 | |
429 final String[] formattedRow = formatFlowDepthRow(row, useTkh); | |
430 source.addData(formattedRow); | |
431 } | |
432 } | 282 } |
433 } | 283 } |