Comparison: artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java @ 8948:a4f1ac81f26d

Work on SINFO-FlowDepthMinMax. Also reworks the result-row handling, reducing abstraction by using the result-type concept.
author:   gernotbelger
date:     Wed, 14 Mar 2018 14:10:32 +0100
parents:  5d5d482da3e9
children: 09e4a4909814
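The "result-type concept" referred to in the description is not part of this file; SInfoResultType and SInfoResultRow come from org.dive4elements.river.artifacts.sinfo.common and their sources are not shown in this changeset. As orientation only, here is a minimal sketch of what such a result type could look like, inferred purely from the call sites in the diff below (getCsvHeader(), getUnit(), exportValue()); the enum name, i18n keys and units are illustrative assumptions, not the actual implementation.

// Illustrative sketch only -- NOT the actual SInfoResultType from
// org.dive4elements.river.artifacts.sinfo.common; the keys and units below are placeholders.
enum ResultTypeSketch {

    station("sinfo.export.flow_depth.csv.header.km", "km"),

    flowdepth("sinfo.export.flow_depth.csv.header.flowdepth", "m"),

    tkh("sinfo.export.flow_depth.csv.header.tkh", "cm");

    private final String csvHeader; // i18n key, resolved by the exporter via msg()/msgUnit()

    private final String unit;      // unit string appended by msgUnit()

    ResultTypeSketch(final String csvHeader, final String unit) {
        this.csvHeader = csvHeader;
        this.unit = unit;
    }

    public String getCsvHeader() {
        return this.csvHeader;
    }

    public String getUnit() {
        return this.unit;
    }
}

With such a type, column headers, units and value formatting are defined once per column instead of being repeated in every exporter.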
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java	(8947:86650594f051)
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java	(8948:a4f1ac81f26d)
@@ -9,41 +9,34 @@
 package org.dive4elements.river.artifacts.sinfo.flowdepth;
 
 import java.util.ArrayList;
 import java.util.Collection;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings;
 import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
+import org.dive4elements.river.artifacts.sinfo.common.SInfoResultRow;
+import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType;
 import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo;
 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
 import org.dive4elements.river.artifacts.sinfo.util.WstInfo;
-import org.dive4elements.river.utils.RiverUtils;
 
 import au.com.bytecode.opencsv.CSVWriter;
 
 /**
  * Generates different output formats (csv, pdf) of data that resulted from a flow depths min/max computation.
  *
  * @author <a href="mailto:ingo.weinzierl@intevation.de">Ingo Weinzierl</a>
  * @author Gernot Belger
  */
 // REMARK: must be public because its registered in generators.xml
-public class FlowDepthExporter extends AbstractSInfoExporter<FlowDepthRow, FlowDepthCalculationResult, FlowDepthCalculationResults> {
+public class FlowDepthExporter extends AbstractSInfoExporter<FlowDepthCalculationResult, FlowDepthCalculationResults> {
 
     /** The log used in this exporter. */
     private static Logger log = Logger.getLogger(FlowDepthExporter.class);
 
-    private static final String CSV_FLOWDEPTHMINMAX_HEADER = "sinfo.export.flow_depth_minmax.csv.header.flowdepthminmax";
-
-    private static final String CSV_FLOWDEPTHTKHMINMAX_HEADER = "sinfo.export.flow_depth_minmax.csv.header.flowdepthTkh";
-
-    private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh";
-
-    private static final String JASPER_FILE = "/jasper/sinfo.flowdepthminmax.jasper";
+    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
 
     @Override
     protected Logger getLog() {
         return log;
     }
@@ -72,89 +65,64 @@
      * gauge or at a location.
      *
      * @param river
      * @param useTkh
      */
-
     @Override
     protected void writeCSVHeader(final CSVWriter writer, final FlowDepthCalculationResults results, final RiverInfo river) {
         log.info("FlowDepthExporter.writeCSVHeader");
 
         final Collection<String> header = new ArrayList<>(11);
 
-        header.add(msg(SInfoI18NStrings.CSV_KM_HEADER));
-        header.add(msgUnit(CSV_FLOWDEPTHMINMAX_HEADER, SInfoI18NStrings.UNIT_M));
-        if (getData().isUseTkh()) {
-            header.add(msgUnit(CSV_FLOWDEPTHTKHMINMAX_HEADER, SInfoI18NStrings.UNIT_M));
-            header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM));
+        header.add(msg(SInfoResultType.station.getCsvHeader()));
+        header.add(msgUnit(SInfoResultType.flowdepth.getCsvHeader(), SInfoResultType.flowdepth.getUnit()));
+
+        if (results.isUseTkh()) {
+            header.add(msgUnit(SInfoResultType.flowdepthtkh.getCsvHeader(), SInfoResultType.flowdepthtkh.getUnit()));
+            header.add(msgUnit(SInfoResultType.tkh.getCsvHeader(), SInfoResultType.tkh.getUnit()));
         }
 
-        header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit()));
-        header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M));
-        header.add(msg(SInfoI18NStrings.CSV_LABEL_HEADER));
-        header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
-        header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));
-        header.add(msg(SInfoI18NStrings.CSV_SOUNDING_HEADER));
-        header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
+        header.add(msgUnit(SInfoResultType.waterlevel.getCsvHeader(), river.getWstUnit()));
+        header.add(msgUnit(SInfoResultType.discharge.getCsvHeader(), SInfoResultType.discharge.getUnit()));
+        header.add(msg(SInfoResultType.waterlevelLabel.getCsvHeader()));
+        header.add(msg(SInfoResultType.gaugeLabel.getCsvHeader()));
+        header.add(msgUnit(SInfoResultType.meanBedHeight.getCsvHeader(), river.getWstUnit()));
+        header.add(msg(SInfoResultType.soundingLabel.getCsvHeader()));
+        header.add(msg(SInfoResultType.location.getCsvHeader()));
 
         writer.writeNext(header.toArray(new String[header.size()]));
     }
 
     @Override
-    protected String[] formatCSVRow(final FlowDepthCalculationResults results, final FlowDepthRow row) {
-        return formatFlowDepthRow(row);
+    protected String[] formatCSVRow(final FlowDepthCalculationResults results, final FlowDepthCalculationResult result, final SInfoResultRow row) {
+        return formatRow(row);
    }
 
     /**
      * Format a row of a flow depth result into an array of string, both used by csv and pdf
      *
      * @param useTkh
      */
-    private String[] formatFlowDepthRow(final FlowDepthRow row) {
+    private String[] formatRow(final SInfoResultRow row) {
 
         final Collection<String> lines = new ArrayList<>(11);
 
-        // Fluss-km
-        lines.add(getKmFormatter().format(row.getStation()));
-
-        // Fließtiefe [m]
-        lines.add(getFlowDepthFormatter().format(row.getFlowDepth()));
+        lines.add(row.exportValue(this.context, SInfoResultType.station));
+        lines.add(row.exportValue(this.context, SInfoResultType.flowdepth));
 
         if (getData().isUseTkh()) {
-            // Fließtiefe mit TKH [m]
-            lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh()));
-
-            // TKH [cm]
-            lines.add(getTkhFormatter().format(row.getTkh()));
+            lines.add(row.exportValue(this.context, SInfoResultType.flowdepthtkh));
+            lines.add(row.exportValue(this.context, SInfoResultType.tkh));
         }
 
-        // Wasserstand [NN + m]
-        lines.add(getW2Formatter().format(row.getWaterlevel()));
-
-        // Q [m³/s]
-        final double discharge = row.getDischarge();
-        if (Double.isNaN(discharge))
-            lines.add(StringUtils.EMPTY);
-        else {
-            final double roundedDischarge = RiverUtils.roundQ(discharge);
-            lines.add(getQFormatter().format(roundedDischarge));
-        }
-
-        // Bezeichnung
-        lines.add(row.getWaterlevelLabel());
-
-        // Bezugspegel
-        lines.add(row.getGauge());
-
-        // Mittlere Sohlhöhe [NN + m]
-        lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight()));
-
-        // Peilung/Epoche
-        lines.add(row.getSoundageLabel());
-
-        // Lage
-        lines.add(row.getLocation());
+        lines.add(row.exportValue(this.context, SInfoResultType.waterlevel));
+        lines.add(row.exportValue(this.context, SInfoResultType.discharge));
+        lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight));
+        lines.add(row.exportValue(this.context, SInfoResultType.soundingLabel));
+        lines.add(row.exportValue(this.context, SInfoResultType.location));
 
         return lines.toArray(new String[lines.size()]);
     }
 
     @Override
@@ -167,23 +135,24 @@
 
         /* general metadata */
         super.addJRMetaDataDefaults(source, results);
 
         /* column headings */
-        source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER));
-        source.addMetaData("flowdepth_header", msg(CSV_FLOWDEPTHMINMAX_HEADER));
-        source.addMetaData("flowdepth_tkh_header", msg(CSV_FLOWDEPTHTKHMINMAX_HEADER));
-        source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
-        source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER));
-        source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER));
-        source.addMetaData("waterlevel_name_header", msg(SInfoI18NStrings.CSV_LABEL_HEADER));
-        source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
-        source.addMetaData("bedheight_header", msg(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
-        source.addMetaData("sounding_name_header", msg(SInfoI18NStrings.CSV_SOUNDING_HEADER));
-        source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
+
+        source.addMetaData("station_header", msg(SInfoResultType.station.getCsvHeader()));
+        source.addMetaData("flowdepth_header", msg(SInfoResultType.flowdepth.getCsvHeader()));
+        source.addMetaData("flowdepth_tkh_header", msg(SInfoResultType.flowdepthtkh.getCsvHeader()));
+        source.addMetaData("tkh_header", msg(SInfoResultType.tkh.getCsvHeader()));
+        source.addMetaData("waterlevel_header", msg(SInfoResultType.waterlevel.getCsvHeader()));
+        source.addMetaData("discharge_header", msg(SInfoResultType.discharge.getCsvHeader()));
+        source.addMetaData("waterlevel_name_header", msg(SInfoResultType.waterlevelLabel.getCsvHeader()));
+        source.addMetaData("gauge_header", msg(SInfoResultType.gaugeLabel.getCsvHeader()));
+        source.addMetaData("bedheight_header", msg(SInfoResultType.meanBedHeight.getCsvHeader()));
+        source.addMetaData("sounding_name_header", msg(SInfoResultType.soundingLabel.getCsvHeader()));
+        source.addMetaData("location_header", msg(SInfoResultType.location.getCsvHeader()));
     }
 
     @Override
-    protected String[] formatPDFRow(final FlowDepthCalculationResults results, final FlowDepthRow row) {
-        return formatFlowDepthRow(row);
+    protected String[] formatPDFRow(final FlowDepthCalculationResults results, final SInfoResultRow row) {
+        return formatRow(row);
     }
 }
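Note how the per-column formatting that used to live in formatFlowDepthRow() (number formatters, the NaN check and RiverUtils.roundQ for the discharge, the German column comments) disappears from the exporter; presumably that logic is now encapsulated behind SInfoResultRow.exportValue(context, type) and the individual SInfoResultType constants, which are not shown here. Below is a self-contained sketch of that idea, mirroring only the removed discharge handling; the class and method names are hypothetical, and a plain rounding step stands in for RiverUtils.roundQ().

import java.text.NumberFormat;
import java.util.Locale;

// Hypothetical stand-in showing how the removed discharge formatting
// (empty cell for NaN, rounding, locale-aware number format) can be hidden
// behind a single exportValue()-style call instead of living in the exporter.
final class DischargeExportSketch {

    private DischargeExportSketch() {
    }

    static String exportDischarge(final double discharge, final NumberFormat formatter) {
        if (Double.isNaN(discharge))
            return ""; // the old exporter added StringUtils.EMPTY for missing discharges

        // The old exporter rounded via RiverUtils.roundQ(); simple rounding stands in here.
        final double rounded = Math.round(discharge * 10.0) / 10.0;
        return formatter.format(rounded);
    }

    public static void main(final String[] args) {
        final NumberFormat qFormat = NumberFormat.getInstance(Locale.GERMANY);
        System.out.println(exportDischarge(123.456, qFormat));                 // prints "123,5"
        System.out.println("[" + exportDischarge(Double.NaN, qFormat) + "]");  // prints "[]"
    }
}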
