comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java @ 8854:7bbfb24e6eec

SINFO - first prototype of the BArt Fließtiefen (flow depths calculation mode)
author gernotbelger
date Thu, 18 Jan 2018 18:34:41 +0100
parents 8853:8c64617a7991
children 1009cab0f86b
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */

package org.dive4elements.river.artifacts.sinfo.flowdepth;

import java.io.OutputStream;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import org.apache.log4j.Logger;
import org.dive4elements.artifacts.CallMeta;
import org.dive4elements.artifacts.common.utils.Config;
import org.dive4elements.river.artifacts.model.CalculationResult;
import org.dive4elements.river.artifacts.resources.Resources;
import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
import org.dive4elements.river.exports.AbstractExporter;
import org.dive4elements.river.model.River;
import org.dive4elements.river.utils.Formatter;

import au.com.bytecode.opencsv.CSVWriter;
import net.sf.jasperreports.engine.JRDataSource;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperExportManager;
import net.sf.jasperreports.engine.JasperFillManager;
import net.sf.jasperreports.engine.JasperPrint;

/**
 * Generates the different output formats (CSV, PDF) for the data resulting from a flow depths computation.
 *
 * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
 * @author Gernot Belger
 */
// REMARK: must be public because it is registered in generators.xml
public class FlowDepthExporter extends AbstractExporter {

    /** The log used in this exporter. */
    private static Logger log = Logger.getLogger(FlowDepthExporter.class);

    private static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km";
    private static final String CSV_FLOWDEPTH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepth";
    private static final String CSV_FLOWDEPTHTKH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepthTkh";
    private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh";
    private static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel";
    private static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge";
    private static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label";
    private static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge";
    private static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height";
    private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding";
    private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";

    private static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result";

    private static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version";

    private static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user";

    private static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation";

    private static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river";

    private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding";

    private static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel";

    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; //$NON-NLS-1$

    /** The storage that contains the current calculation result. */
    private FlowDepthCalculationResults data = null;

    private NumberFormat meanBedHeightFormatter;

    private NumberFormat tkhFormatter;

    private NumberFormat flowDepthFormatter;

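    /* The number formatters are created lazily on first use and then reused for all exported rows. */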
    private NumberFormat getMeanBedHeightFormatter() {
        if( meanBedHeightFormatter == null )
            // FIXME: check if this is right
            meanBedHeightFormatter = Formatter.getMiddleBedHeightHeight(context);
        return meanBedHeightFormatter;
    }

    private NumberFormat getTkhFormatter() {
        if( tkhFormatter == null )
            // FIXME: check if this is right, probably not, we need one digit
            tkhFormatter = Formatter.getWaterlevelW(context);
        return tkhFormatter;
    }

    private NumberFormat getFlowDepthFormatter() {
        if( flowDepthFormatter == null )
            // FIXME: check if this is right
            flowDepthFormatter = Formatter.getMeterFormat(context);
        return flowDepthFormatter;
    }

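    /** Stores the FlowDepthCalculationResults carried by the given CalculationResult for the subsequent CSV/PDF export. */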
    @Override
    protected void addData(Object d) {
        /* reset */
        data = null;

        if (d instanceof CalculationResult) {

            final Object dat = ((CalculationResult)d).getData();
            if( dat != null )
                data = (FlowDepthCalculationResults)dat;
        }
    }

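    /** Writes the complete CSV output: the meta data block, the column header and one line per flow depth row. */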
    @Override
    protected void writeCSVData(CSVWriter writer) {
        log.info("FlowDepthExporter.writeCSVData");

        /* fetch calculation results */
        final FlowDepthCalculationResults results = data;

        /* write as csv */

        // boolean atGauge = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.WGAUGE;
        // boolean isQ = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.QFREE;
        // RiverUtils.WQ_INPUT input
        //     = RiverUtils.getWQInputMode((D4EArtifact)master);

        final boolean useTkh = results.isUseTkh();

        writeCSVMeta(writer, results);
        writeCSVHeader(writer, useTkh);

        for (final FlowDepthCalculationResult result : results.getResults()) {
            writeCSVFlowDepthResult(writer, result, useTkh);
        }
    }

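    /** Writes all rows of one calculation result to the CSV output. */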
    private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result, final boolean useTkh) {
        final Collection<FlowDepthRow> rows = result.getRows();
        for (final FlowDepthRow flowDepthRow : rows) {
            writeCSVFlowDepthRow(writer, flowDepthRow, useTkh);
        }
    }

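    /** Writes the CSV meta data block; the quoted German comments below list the headings expected by the calculation workflow document. */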
    private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
        log.info("FlowDepthExporter.writeCSVMeta");

        // Workflow zur Berechnung der Fließtiefe.pdf
        // "##ERGEBNISAUSGABE - Name des Gewässers - Fließtiefe"
        final River river = results.getRiver();
        writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, river.getName() );

        // "# FLYS-Version: "
        // FIXME
        final String flysVersion = "unbekannt";
        writeCSVMetaEntry(writer, CSV_META_VERSION, flysVersion );

        // "# Bearbeiter: "
        // FIXME
        final String user = "unbekannt";
        writeCSVMetaEntry(writer, CSV_META_USER, user );

        // "# Datum der Erstellung: "
        final Locale locale = Resources.getLocale(context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        writeCSVMetaEntry(writer, CSV_META_CREATION, df.format(new Date()) );

        // "# Gewässer: "
        writeCSVMetaEntry(writer, CSV_META_RIVER, river.getName() );

        // "# Höhensystem des Flusses: "

        // FIXME

        // "# Ort/Bereich (km): "
        // FIXME
        // TODO: unclear, only a single range is entered
        // RangeAccess rangeAccess = new RangeAccess(flys);
        // double[] kms = rangeAccess.getKmRange();
        // writer.writeNext(new String[] {
        //     Resources.getMsg(
        //         meta,
        //         CSV_META_RANGE,
        //         CSV_META_RANGE,
        //         new Object[] { kms[0], kms[kms.length-1] })
        // });

        // "##METADATEN PEILUNG"
        writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING );

        // "# Jahr der Peilung: "
        // FIXME
        // "# Aufnahmeart: "
        // FIXME
        // "# Lagesystem: "
        // FIXME
        // "# Höhensystem: "
        // FIXME
        // "# ursprüngliches Höhensystem: "
        // FIXME
        // "##METADATEN WASSERSPIEGELLAGE"
        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL );
        // "# Bezeichnung der Wasserspiegellage: "
        // FIXME
        // "# Höhensystem der Wasserspiegellage: "
        // FIXME
        // "# Auswerter: "
        // FIXME
        // "# Bezugspegel: "
        // FIXME
        // "# Jahr/Zeitraum der Wasserspiegellage: "
        // FIXME

        // "# W/Pegel [cm]: " (only if the water level at the gauge was entered)
        // TODO: unclear, no W is entered

        // "# Q (m³/s): " (only if the discharge was entered)
        // TODO: unclear, no Q is entered

        // writer.writeNext(new String[] {
        //     Resources.getMsg(
        //         meta,
        //         CSV_META_GAUGE,
        //         CSV_META_GAUGE,
        //         new Object[] { RiverUtils.getGaugename(flys) })
        // });
        writer.writeNext(new String[] { "" });
    }

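    /** Writes a single meta data line, resolving the given message key (with optional arguments) against the current locale. */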
    private void writeCSVMetaEntry(CSVWriter writer, String message, Object... messageArgs) {

        CallMeta meta = context.getMeta();

        writer.writeNext(new String[] {
            Resources.getMsg(
                meta,
                message,
                message,
                messageArgs)
        });
    }

    /**
     * Write the CSV header line.
     *
     * @param useTkh if true, the columns for the flow depth including TKH and for the TKH itself are added as well
     */
    private void writeCSVHeader(
            final CSVWriter writer,
            final boolean useTkh
    ) {
        log.info("FlowDepthExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(11);

        header.add(msg(CSV_KM_HEADER, CSV_KM_HEADER));
        header.add(msg(CSV_FLOWDEPTH_HEADER));
        if( useTkh )
        {
            header.add(msg(CSV_FLOWDEPTHTKH_HEADER));
            header.add(msg(CSV_TKH_HEADER));
        }
        header.add(msg(CSV_WATERLEVEL_HEADER));
        header.add(msg(CSV_DISCHARGE_HEADER));
        header.add(msg(CSV_LABEL_HEADER));
        header.add(msg(CSV_GAUGE_HEADER));
        header.add(msg(CSV_MEAN_BED_HEIGHT_HEADER));
        header.add(msg(CSV_SOUNDING_HEADER));
        header.add(msg(CSV_LOCATION_HEADER));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

    /**
     * Format a row of a flow depth result into an array of strings, used by both the CSV and the PDF export.
     *
     * @param useTkh if true, the flow depth including TKH and the TKH itself are added as columns
     */
    private String[] formatFlowDepthRow(
            final FlowDepthRow row,
            boolean useTkh ) {

        final Collection<String> lines = new ArrayList<>(11);

        // Fluss-km
        lines.add( getKmFormatter().format( row.getStation() ) );

        // Fließtiefe [m]
        lines.add( getFlowDepthFormatter().format( row.getFlowDepth() ) );

        if( useTkh )
        {
            // Fließtiefe mit TKH [m]
            lines.add( getFlowDepthFormatter().format( row.getFlowDepthWithTkh() ) );

            // TKH [cm]
            lines.add( getTkhFormatter().format( row.getTkh() ) );
        }

        // Wasserstand [NN + m]
        lines.add( getWFormatter().format( row.getWaterlevel() ) );

        // Q [m³/s]
        lines.add( getQFormatter().format( row.getDischarge() ) );

        // Bezeichnung
        lines.add( row.getWaterlevelLabel() );

        // Bezugspegel
        lines.add( row.getGauge() );

        // Mittlere Sohlhöhe [NN + m]
        lines.add( getMeanBedHeightFormatter().format( row.getMeanBedHeight( ) ) );

        // Peilung/Epoche
        lines.add( row.getSoundageLabel() );

        // Lage
        lines.add( row.getLocation() );

        return lines.toArray(new String[lines.size()]);
    }

    /**
     * Write one row of formatted flow depth data with the given CSV writer.
     *
     * @param useTkh if true, the TKH columns are written as well
     */
    private void writeCSVFlowDepthRow(
            final CSVWriter writer,
            final FlowDepthRow row,
            final boolean useTkh
    ) {
        log.debug("FlowDepthExporter.writeCSVFlowDepthRow");

        final String[] formattedRow = formatFlowDepthRow(row, useTkh);
        writer.writeNext( formattedRow );
    }

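    /** Fills the Jasper report template with the calculation results and streams the rendered report as PDF. */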
    @Override
    protected void writePDF(OutputStream outStream) {
        log.debug("write PDF");

        final JRDataSource source = createJRData();

        final String confPath = Config.getConfigDirectory().toString();

        // FIXME: distinguish between with and without tkh: we need two jasper reports!

        final Map<String, Object> parameters = new HashMap<>();
        parameters.put("ReportTitle", "Exported Data");
        try {
            final JasperPrint print = JasperFillManager.fillReport(
                confPath + JASPER_FILE,
                parameters,
                source);
            JasperExportManager.exportReportToPdfStream(print, outStream);
        }
        catch(JRException je) {
            log.warn("Error generating PDF Report!", je);
        }
    }

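    /** Builds the JRDataSource for the PDF report: meta data plus one table row per flow depth row. */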
    private JRDataSource createJRData() {

        /* fetch calculation results */
        final FlowDepthCalculationResults results = data;

        final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();

        addJRMetaData(source, results);

        final boolean useTkh = results.isUseTkh();

        for (final FlowDepthCalculationResult result : results.getResults()) {
            addJRTableData(source, result, useTkh);
        }

        return source;
    }

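    /** Adds the report meta data (version, user, creation date, river, range) to the JR data source. */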
    private void addJRMetaData(final MetaAndTableJRDataSource source, FlowDepthCalculationResults results) {

        // Workflow zur Berechnung der Fließtiefe.pdf
        // "##ERGEBNISAUSGABE - Name des Gewässers - Fließtiefe"
        // writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, inputData.getRiver() );

        // FIXME
        final String flysVersion = "unbekannt";
        // CSV_META_VERSION
        source.addMetaData("version", flysVersion);

        // FIXME
        final String user = "unbekannt";
        // CSV_META_USER
        source.addMetaData("user", user);

        final Locale locale = Resources.getLocale(context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        source.addMetaData("date", df.format(new Date()));

        // CSV_META_RIVER
        source.addMetaData("river", results.getRiver().getName());

        // FIXME
        source.addMetaData("range", "FIXME");
        // "# Ort/Bereich (km): "
        // FIXME
        // TODO: unclear, only a single range is entered
        // RangeAccess rangeAccess = new RangeAccess(flys);
        // double[] kms = rangeAccess.getKmRange();
        // writer.writeNext(new String[] {
        //     Resources.getMsg(
        //         meta,
        //         CSV_META_RANGE,
        //         CSV_META_RANGE,
        //         new Object[] { kms[0], kms[kms.length-1] })
        // });

        // RangeAccess rangeAccess = new RangeAccess(flys);
        // double[] kms = rangeAccess.getKmRange();
        // source.addMetaData("range",
        //     kmf.format(kms[0]) + " - " + kmf.format(kms[kms.length-1]));
    }

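    /** Adds one table row per flow depth row to the JR data source, using the same column formatting as the CSV export. */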
    private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) {

        final Collection<FlowDepthRow> rows = result.getRows();

        for (final FlowDepthRow row : rows) {

            final String[] formattedRow = formatFlowDepthRow(row, useTkh);
            source.addData(formattedRow);
        }
    }
}
