Mercurial > dive4elements > river
comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 9150:23945061daec
gigantic refactoring: exporter, result, results
to support multiple jaspers -> collisions
author | gernotbelger |
---|---|
date | Thu, 14 Jun 2018 16:56:31 +0200 |
parents | ae3565385e6a |
children | a4121ec450d6 |
comparison
equal
deleted
inserted
replaced
9149:5be112fba832 | 9150:23945061daec |
---|---|
7 */ | 7 */ |
8 | 8 |
9 package org.dive4elements.river.artifacts.sinfo.tkhstate; | 9 package org.dive4elements.river.artifacts.sinfo.tkhstate; |
10 | 10 |
11 import java.io.OutputStream; | 11 import java.io.OutputStream; |
12 import java.util.ArrayList; | |
13 import java.util.Collection; | |
14 | 12 |
15 import org.apache.log4j.Logger; | 13 import org.dive4elements.river.artifacts.common.AbstractCalculationExportableResult; |
16 import org.dive4elements.river.artifacts.common.GeneralResultType; | 14 import org.dive4elements.river.artifacts.common.AbstractCommonExporter; |
15 import org.dive4elements.river.artifacts.common.ExportContextCSV; | |
17 import org.dive4elements.river.artifacts.common.JasperReporter; | 16 import org.dive4elements.river.artifacts.common.JasperReporter; |
18 import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource; | 17 import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource; |
19 import org.dive4elements.river.artifacts.common.ResultRow; | |
20 import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter; | |
21 import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType; | |
22 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; | 18 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; |
23 import org.dive4elements.river.artifacts.sinfo.util.WstInfo; | |
24 | 19 |
25 import au.com.bytecode.opencsv.CSVWriter; | 20 import au.com.bytecode.opencsv.CSVWriter; |
26 import net.sf.jasperreports.engine.JRException; | 21 import net.sf.jasperreports.engine.JRException; |
27 | 22 |
28 /** | 23 /** |
29 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation. | 24 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation. |
30 * | 25 * |
31 * @author Gernot Belger | 26 * @author Gernot Belger |
32 */ | 27 */ |
33 // REMARK: must be public because it's registered in generators.xml | 28 // REMARK: must be public because it's registered in generators.xml |
34 public class TkhExporter extends AbstractSInfoExporter<TkhCalculationResult, TkhCalculationResults> { | 29 public class TkhExporter extends AbstractCommonExporter<TkhCalculationResults> { |
35 | |
36 /** The log used in this exporter. */ | |
37 private static Logger log = Logger.getLogger(TkhExporter.class); | |
38 | 30 |
39 private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula"; | 31 private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula"; |
40 | 32 |
41 private static final String JASPER_FILE = "/jasper/templates/sinfo.tkh.jrxml"; | |
42 | |
43 @Override | 33 @Override |
44 protected Logger getLog() { | 34 protected void doWritePdf(final OutputStream out, final TkhCalculationResults results) { |
45 return log; | 35 // TODO: Move to super |
46 } | |
47 | |
48 @Override | |
49 protected void writeCSVGlobalMetadata(final CSVWriter writer, final TkhCalculationResults results) { | |
50 log.info("TkhExporter.writeCSVMeta"); | |
51 | |
52 super.writeCSVGlobalMetadataDefaults(writer, results); | |
53 | |
54 // "# Berechnungsgrundlage: Gleichung nach GILL (1971)" | |
55 writeCSVMetaEntry(writer, CSV_META_CALCULATION_FORMULA); | |
56 } | |
57 | |
58 /** | |
59 * Write the header, with different headings depending on whether at a gauge or at a location. | |
60 */ | |
61 | |
62 @Override | |
63 protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) { | |
64 log.info("TkhExporter.writeCSVHeader"); | |
65 | |
66 final Collection<String> header = new ArrayList<>(11); | |
67 | |
68 header.add(msg(GeneralResultType.station.getCsvHeader())); | |
69 header.add(msgUnit(SInfoResultType.tkh.getCsvHeader(), SInfoResultType.tkh.getUnit())); | |
70 header.add(msg(SInfoResultType.soilkind.getCsvHeader())); | |
71 header.add(msgUnit(SInfoResultType.meanBedHeight.getCsvHeader(), river.getWstUnit())); | |
72 header.add(msgUnit(SInfoResultType.waterlevel.getCsvHeader(), river.getWstUnit())); | |
73 header.add(msgUnit(SInfoResultType.discharge.getCsvHeader(), SInfoResultType.discharge.getUnit())); | |
74 | |
75 final String descriptionHeader = results.getDescriptionHeader(); | |
76 if (descriptionHeader != null) | |
77 header.add(msg(descriptionHeader)); | |
78 | |
79 header.add(msg(SInfoResultType.gaugeLabel.getCsvHeader())); | |
80 header.add(msg(SInfoResultType.location.getCsvHeader())); | |
81 | |
82 writer.writeNext(header.toArray(new String[header.size()])); | |
83 } | |
84 | |
85 @Override | |
86 protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) { | |
87 | |
88 final WstInfo wst = result.getWst(); | |
89 super.writeCSVWaterlevelMetadata(writer, wst); | |
90 | |
91 // REMARK: | |
92 // "# W/Pegel [cm]: " (nur bei Eingabe des Wasserstands am Pegel) | |
93 // "# Q (m³/s): " (nur bei Eingabe des Durchflusses) | |
94 // WaterlevelExporter does this | |
95 // final WaterlevelDescriptionBuilder descBuilder = new WaterlevelDescriptionBuilder((D4EArtifact) this.master, | |
96 // this.context); | |
97 // final String metadata = descBuilder.getMetadata(); | |
98 // BUT: | |
99 // - the WINFO results do not contain this info per wst | |
100 // - the WaterlevelExporter prints this in the global header for all waterlevels, simply based on the input fields | |
101 // - instead we would want this information per waterlevel | |
102 // - the metadata 'Bezeichnung WST' contains exactly the wanted data | |
103 | |
104 writer.writeNext(new String[] { "" }); // break line | |
105 } | |
106 | |
107 /** | |
108 * Format a row of a flow depth result into an array of string, both used by csv and pdf | |
109 * | |
110 * @param results | |
111 * | |
112 * @param useTkh | |
113 */ | |
114 @Override | |
115 protected String[] formatRow(final TkhCalculationResults results, final ResultRow row, final ExportMode mode) { | |
116 | |
117 final Collection<String> lines = new ArrayList<>(11); | |
118 | |
119 lines.add(row.exportValue(this.context, GeneralResultType.station)); | |
120 lines.add(row.exportValue(this.context, SInfoResultType.tkh)); | |
121 lines.add(row.exportValue(this.context, SInfoResultType.soilkind)); | |
122 lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight)); | |
123 lines.add(row.exportValue(this.context, SInfoResultType.waterlevel)); | |
124 lines.add(row.exportValue(this.context, SInfoResultType.discharge)); | |
125 | |
126 // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates). | |
127 if (results.getDescriptionHeader() != null || mode == ExportMode.pdf) | |
128 lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel)); | |
129 | |
130 lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel)); | |
131 lines.add(row.exportValue(this.context, SInfoResultType.location)); | |
132 | |
133 return lines.toArray(new String[lines.size()]); | |
134 } | |
135 | |
136 @Override | |
137 protected final void addJRMetaData(final MetaAndTableJRDataSource source, final TkhCalculationResults results) { | |
138 | |
139 /* general metadata */ | |
140 super.addJRMetaData(source, results); | |
141 | |
142 source.addMetaData("calculation_label", msg("sinfo.export.flow_depth.pdf.meta.calculation.label")); | |
143 source.addMetaData("calculation_name", msg("sinfo.export.flow_depth.pdf.meta.calculation.name")); | |
144 | |
145 /* column headings */ | |
146 source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta())); | |
147 source.addMetaData("tkh_header", SInfoResultType.tkh.getPdfHeader(this.context.getMeta())); | |
148 source.addMetaData("tkhkind_header", SInfoResultType.soilkind.getPdfHeader(this.context.getMeta())); | |
149 source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta())); | |
150 source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta())); | |
151 source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta())); | |
152 | |
153 // REMARK: actually the column makes no sense if description header is null. But (software symmetry...) WINFO also | |
154 // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates). | |
155 final String descriptionHeader = results.getDescriptionHeader(); | |
156 final String waterlevelNameHeader = descriptionHeader == null ? SInfoResultType.waterlevelLabel.getPdfHeader(this.context.getMeta()) | |
157 : descriptionHeader; | |
158 source.addMetaData("waterlevel_name_header", waterlevelNameHeader); | |
159 | |
160 source.addMetaData("gauge_header", SInfoResultType.gaugeLabel.getPdfHeader(this.context.getMeta())); | |
161 source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta())); | |
162 } | |
163 | |
164 @Override | |
165 protected void writePDF(final OutputStream out) { | |
166 try { | 36 try { |
167 final MetaAndTableJRDataSource source = createJRData(this.data); | 37 final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, null); |
168 | 38 |
169 final JasperReporter reporter = new JasperReporter(); | 39 final JasperReporter reporter = new JasperReporter(); |
170 reporter.addReport(JASPER_FILE, source); | 40 |
41 for (final AbstractCalculationExportableResult<TkhCalculationResults> result : results.getResults()) { | |
42 final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource(); | |
43 getHelper().addJRMetaDataUSINFO(source, results); | |
44 | |
45 source.addMetaData("calculation_label", msg("sinfo.export.flow_depth.pdf.meta.calculation.label")); | |
46 source.addMetaData("calculation_name", msg("sinfo.export.flow_depth.pdf.meta.calculation.name")); | |
47 | |
48 result.addReport(exportContextCSV, results, reporter, source); | |
49 } | |
50 | |
171 reporter.exportPDF(out); | 51 reporter.exportPDF(out); |
172 } | 52 } |
173 catch (final JRException je) { | 53 catch (final JRException je) { |
174 getLog().warn("Error generating PDF Report!", je); | 54 getLog().warn("Error generating PDF Report!", je); |
175 } | 55 } |
176 } | 56 } |
57 | |
58 @Override | |
59 protected void doWriteCSVData(final CSVWriter writer, final TkhCalculationResults results) { | |
60 // TODO: Diesen Ablauf in super? _WINZIGE ABWEICHUNG vom Standard... | |
61 | |
62 // TODO: move results into context? | |
63 final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, writer); | |
64 | |
65 getLog().info("writeCSVData"); | |
66 | |
67 /* write as csv */ | |
68 exportContextCSV.writeCSVGlobalMetadataDefaults(results); // ggf auslagern innerhalb dieser Klasse | |
69 | |
70 // "# Berechnungsgrundlage: Gleichung nach GILL (1971)" | |
71 exportContextCSV.writeCSVMetaEntry(CSV_META_CALCULATION_FORMULA); | |
72 | |
73 // writer.writeNext(new String[] { "" }); // break line HERE to avoid redundance | |
74 | |
75 final RiverInfo river = results.getRiver(); | |
76 | |
77 final Class<?> lastResultType = null; | |
78 | |
79 for (final AbstractCalculationExportableResult<TkhCalculationResults> result : results.getResults()) { | |
80 | |
81 final Class<?> resultType = result.getClass(); | |
82 if (lastResultType == null || lastResultType != resultType) { | |
83 exportContextCSV.writeBlankLine(); | |
84 result.writeCSVHeader(exportContextCSV, results, river); | |
85 exportContextCSV.writeBlankLine(); | |
86 } else | |
87 exportContextCSV.writeCSVLine(new String[] { "#" }); | |
88 | |
89 result.writeCsv(exportContextCSV, results); | |
90 } | |
91 } | |
177 } | 92 } |