comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java @ 9150:23945061daec

gigantic refactoring: exporter, result, results to support multiple jaspers -> collisions
author gernotbelger
date Thu, 14 Jun 2018 16:56:31 +0200
parents 7134a4c7d1b6
children a4121ec450d6
--- artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java@9149:5be112fba832
+++ artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepthdev/FlowDepthDevelopmentExporter.java@9150:23945061daec
@@ -7,19 +7,16 @@
  */
 
 package org.dive4elements.river.artifacts.sinfo.flowdepthdev;
 
 import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
 
-import org.apache.log4j.Logger;
-import org.dive4elements.river.artifacts.common.GeneralResultType;
+import org.dive4elements.river.artifacts.common.AbstractCalculationExportableResult;
+import org.dive4elements.river.artifacts.common.AbstractCommonExporter;
+import org.dive4elements.river.artifacts.common.ExportContextCSV;
 import org.dive4elements.river.artifacts.common.JasperReporter;
 import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource;
-import org.dive4elements.river.artifacts.common.ResultRow;
-import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
 import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType;
 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
 
 import au.com.bytecode.opencsv.CSVWriter;
 import net.sf.jasperreports.engine.JRException;
@@ -29,144 +26,65 @@
  *
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  * @author Gernot Belger
  */
 // REMARK: must be public because its registered in generators.xml
-public class FlowDepthDevelopmentExporter extends AbstractSInfoExporter<FlowDepthDevelopmentCalculationResult, FlowDepthDevelopmentCalculationResults> {
-
-    private static final String CSV_META_HEADER_SOUNDING_CURRENT = "sinfo.export.csv.meta.header.sounding.current";
-
-    private static final String CSV_META_HEADER_SOUNDING_HISTORICAL = "sinfo.export.csv.meta.header.sounding.historical";
-
-    private static final String CSV_META_HEADER_WATERLEVEL_CURRENT = "sinfo.export.csv.meta.header.waterlevel.current";
-
-    private static final String CSV_META_HEADER_WATERLEVEL_HISTORICAL = "sinfo.export.csv.meta.header.waterlevel.historical";
-
-    /** The log used in this exporter. */
-    private static Logger log = Logger.getLogger(FlowDepthDevelopmentExporter.class);
-
-    private static final String JASPER_FILE = "/jasper/templates/sinfo.flowdepthdevelopment.jrxml";
+public class FlowDepthDevelopmentExporter extends AbstractCommonExporter<FlowDepthDevelopmentCalculationResults> {
 
     @Override
-    protected Logger getLog() {
-        return log;
+    protected void doWriteCSVData(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results) {
+        // TODO: Diesen Ablauf in super?
+
+        // TODO: move results into context?
+        final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, writer);
+
+        getLog().info("writeCSVData");
+
+        /* write as csv */
+        exportContextCSV.writeCSVGlobalMetadataDefaults(results); // ggf auslagern innerhalb dieser Klasse
+
+        // writer.writeNext(new String[] { "" }); // break line HERE to avoid redundance
+
+        final RiverInfo river = results.getRiver();
+
+        final Class<?> lastResultType = null;
+
+        for (final AbstractCalculationExportableResult<FlowDepthDevelopmentCalculationResults> result : results.getResults()) {
+
+            final Class<?> resultType = result.getClass();
+            if (lastResultType == null || lastResultType != resultType) {
+                exportContextCSV.writeBlankLine();
+                result.writeCSVHeader(exportContextCSV, results, river);
+                exportContextCSV.writeBlankLine();
+            } else
+                exportContextCSV.writeCSVLine(new String[] { "#" });
+
+            result.writeCsv(exportContextCSV, results);
+        }
     }
 
     @Override
-    protected void writeCSVResultMetadata(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results,
-            final FlowDepthDevelopmentCalculationResult result) {
-
-        writeCSVSoundingMetadata(writer, result.getCurrentSounding(), CSV_META_HEADER_SOUNDING_CURRENT);
-        writer.writeNext(new String[] { "" }); // break line
-        writeCSVWaterlevelMetadata(writer, result.getCurrentWst(), CSV_META_HEADER_WATERLEVEL_CURRENT);
-        writer.writeNext(new String[] { "" }); // break line
-        writeCSVSoundingMetadata(writer, result.getHistoricalSounding(), CSV_META_HEADER_SOUNDING_HISTORICAL);
-        writer.writeNext(new String[] { "" }); // break line
-        writeCSVWaterlevelMetadata(writer, result.getHistoricalWst(), CSV_META_HEADER_WATERLEVEL_HISTORICAL);
-        writer.writeNext(new String[] { "" }); // break line
-    }
-
-    @Override
-    protected void writeCSVGlobalMetadata(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results) {
-        super.writeCSVGlobalMetadataDefaults(writer, results);
-    }
-
-    /**
-     * Write the header, with different headings depending on whether at a
-     * gauge or at a location.
-     *
-     * @param river
-     * @param useTkh
-     */
-    @Override
-    protected void writeCSVHeader(final CSVWriter writer, final FlowDepthDevelopmentCalculationResults results, final RiverInfo river) {
-        log.info("FlowDepthExporter.writeCSVHeader");
-
-        final FlowDepthDevelopmentCalculationResult result = results.getResult();
-
-        final Collection<String> header = new ArrayList<>(11);
-
-        header.add(msg(GeneralResultType.station.getCsvHeader()));
-        header.add(msgUnit(SInfoResultType.flowdepthDevelopment.getCsvHeader(), SInfoResultType.flowdepthDevelopment.getUnit()));
-        header.add(msgUnit(SInfoResultType.flowdepthDevelopmentPerYear.getCsvHeader(), SInfoResultType.flowdepthDevelopmentPerYear.getUnit()));
-        header.add(msgUnitLabel(SInfoResultType.waterlevelDifference.getCsvHeader(), SInfoResultType.waterlevelDifference.getUnit(),
-                result.getWaterlevelDifferenceLabel()));
-        header.add(msgUnitLabel(SInfoResultType.bedHeightDifference.getCsvHeader(), SInfoResultType.bedHeightDifference.getUnit(),
-                result.getBedHeightDifferenceLabel()));
-        header.add(
-                msgUnitLabel(SInfoResultType.flowdepthCurrent.getCsvHeader(), SInfoResultType.flowdepthCurrent.getUnit(), result.getFlowDepthCurrentLabel()));
-        header.add(msgUnitLabel(SInfoResultType.flowdepthHistorical.getCsvHeader(), SInfoResultType.flowdepthHistorical.getUnit(),
-                result.getFlowDepthHistoricalLabel()));
-        header.add(msg(SInfoResultType.location.getCsvHeader()));
-
-        writer.writeNext(header.toArray(new String[header.size()]));
-    }
-
-    /**
-     * Format a row of a flow depth result into an array of string, both used by csv and pdf
-     *
-     * @param result
-     *
-     * @param useTkh
-     */
-    @Override
-    protected String[] formatRow(final FlowDepthDevelopmentCalculationResults results, final ResultRow row, final ExportMode mode) {
-
-        final Collection<String> lines = new ArrayList<>(10);
-
-        lines.add(row.exportValue(this.context, GeneralResultType.station));
-        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthDevelopment));
-        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthDevelopmentPerYear));
-        lines.add(row.exportValue(this.context, SInfoResultType.waterlevelDifference));
-        lines.add(row.exportValue(this.context, SInfoResultType.bedHeightDifference));
-        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthCurrent));
-        lines.add(row.exportValue(this.context, SInfoResultType.flowdepthHistorical));
-        lines.add(row.exportValue(this.context, SInfoResultType.location));
-
-        return lines.toArray(new String[lines.size()]);
-    }
-
-    @Override
-    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthDevelopmentCalculationResults results) {
-
-        /* general metadata */
-        super.addJRMetaData(source, results);
-
-        final FlowDepthDevelopmentCalculationResult result = results.getResult();
-        source.addMetaData("flowdepthdevelopment", result.getLabel());
-        source.addMetaData("flowdepthdevelopment_header_label", SInfoResultType.flowdepthDevelopment.getCsvHeader(this.context.getMeta()));// (this.context.getMeta()));
-
-        /* column headings */
-        source.addMetaData("station_header", GeneralResultType.station.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthdevelopment_header", SInfoResultType.flowdepthDevelopment.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthdevelopmentperyear_header", SInfoResultType.flowdepthDevelopmentPerYear.getPdfHeader(this.context.getMeta()));
-
-        source.addMetaData("waterleveldifference_header", SInfoResultType.waterlevelDifference.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("waterleveldifference_header2", result.getWaterlevelDifferenceLabel());
-
-        source.addMetaData("bedheightdifference_header", SInfoResultType.bedHeightDifference.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("bedheightdifference_header2", result.getBedHeightDifferenceLabel());
-
-        source.addMetaData("flowdepthcurrent_header", SInfoResultType.flowdepthCurrent.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthcurrent_header2", result.getFlowDepthCurrentLabel());
-
-        source.addMetaData("flowdepthhistorical_header", SInfoResultType.flowdepthHistorical.getPdfHeader(this.context.getMeta()));
-        source.addMetaData("flowdepthhistorical_header2", result.getFlowDepthHistoricalLabel());
-
-        source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta()));
-    }
-
-    @Override
-    protected void writePDF(final OutputStream out) {
+    protected void doWritePdf(final OutputStream out, final FlowDepthDevelopmentCalculationResults results) {
+        // TODO: Move to super //2 lines different
         try {
-            final MetaAndTableJRDataSource source = createJRData(this.data);
+            final ExportContextCSV exportContextCSV = new ExportContextCSV(this.context, null);
 
             final JasperReporter reporter = new JasperReporter();
-            reporter.addReport(JASPER_FILE, source);
+
+            for (final AbstractCalculationExportableResult<FlowDepthDevelopmentCalculationResults> result : results.getResults()) {
+                final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
+                getHelper().addJRMetaDataUSINFO(source, results);
+
+                // move somewhere? global meta
+                source.addMetaData("flowdepthdevelopment", result.getLabel());
+                source.addMetaData("flowdepthdevelopment_header_label", SInfoResultType.flowdepthDevelopment.getCsvHeader(this.context.getMeta()));// (this.context.getMeta()));
+
+                result.addReport(exportContextCSV, results, reporter, source);
+            }
+
            reporter.exportPDF(out);
         }
         catch (final JRException je) {
             getLog().warn("Error generating PDF Report!", je);
         }
     }
-
 }
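
Note on the new delegation contract (illustrative only): after this refactoring the exporter no longer formats headers, rows or Jasper metadata itself; it just iterates results.getResults() and calls writeCSVHeader, writeCsv and addReport on each result. The sketch below shows how such a per-result hook could look. The class name, method signatures, template path and the assumption that JasperReporter still offers the two-argument addReport(...) seen in the old writePDF() are inferred from the call sites above, not taken from the actual base class in this changeset.

package org.dive4elements.river.artifacts.sinfo.flowdepthdev;

import org.dive4elements.river.artifacts.common.ExportContextCSV;
import org.dive4elements.river.artifacts.common.JasperReporter;
import org.dive4elements.river.artifacts.common.MetaAndTableJRDataSource;
import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;

import net.sf.jasperreports.engine.JRException;

/**
 * HYPOTHETICAL sketch of the per-result hooks the refactored exporter relies on.
 * The real implementation lives in a subclass of
 * AbstractCalculationExportableResult<FlowDepthDevelopmentCalculationResults>,
 * whose exact signatures are not part of this changeset.
 */
final class FlowDepthDevelopmentResultSketch {

    /** Assumed template path; after the refactoring the .jrxml reference belongs to the result, not the exporter. */
    private static final String JASPER_FILE = "/jasper/templates/sinfo.flowdepthdevelopment.jrxml";

    /** Called by doWriteCSVData() whenever the result type changes; writes one column-header line. */
    void writeCSVHeader(final ExportContextCSV exportContext, final FlowDepthDevelopmentCalculationResults results, final RiverInfo river) {
        exportContext.writeCSVLine(new String[] { "# km", "flow depth development", "..." });
    }

    /** Called by doWriteCSVData() for every result; writes this result's data rows to the shared CSV writer. */
    void writeCsv(final ExportContextCSV exportContext, final FlowDepthDevelopmentCalculationResults results) {
        // one CSV line per row of this result
    }

    /**
     * Called by doWritePdf() for every result. Registering one report per result on the shared
     * JasperReporter is what lets a single PDF combine several Jasper reports ("multiple jaspers").
     */
    void addReport(final ExportContextCSV exportContext, final FlowDepthDevelopmentCalculationResults results, final JasperReporter reporter,
            final MetaAndTableJRDataSource source) throws JRException {
        // assumes JasperReporter still exposes the two-argument addReport(...) used by the old writePDF()
        reporter.addReport(JASPER_FILE, source);
    }
}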
