comparison artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java @ 8863:1009cab0f86b

Some work on flow depth exporter
author gernotbelger
date Fri, 19 Jan 2018 18:47:53 +0100
parents 7bbfb24e6eec
children 9f7a285b0ee3
comparison: 8862:5b5bdce5a216 vs 8863:1009cab0f86b

--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java
@@ -8,27 +8,27 @@
 
 package org.dive4elements.river.artifacts.sinfo.flowdepth;
 
 import java.io.OutputStream;
 import java.text.DateFormat;
-import java.text.NumberFormat;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
 
 import org.apache.log4j.Logger;
 import org.dive4elements.artifacts.CallMeta;
 import org.dive4elements.artifacts.common.utils.Config;
+import org.dive4elements.river.FLYS;
 import org.dive4elements.river.artifacts.model.CalculationResult;
 import org.dive4elements.river.artifacts.resources.Resources;
 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
 import org.dive4elements.river.exports.AbstractExporter;
 import org.dive4elements.river.model.River;
-import org.dive4elements.river.utils.Formatter;
+import org.dive4elements.river.model.Unit;
 
 import au.com.bytecode.opencsv.CSVWriter;
 import net.sf.jasperreports.engine.JRDataSource;
 import net.sf.jasperreports.engine.JRException;
 import net.sf.jasperreports.engine.JasperExportManager;
@@ -54,65 +54,77 @@
     private static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel";
     private static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge";
     private static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label";
     private static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge";
     private static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height";
+    private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";
     private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding";
     private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";
 
     private static final String CSV_META_HEADER_RESULT =
         "sinfo.export.flow_depth.csv.meta.header.result";
 
+    private static final String CSV_META_HEADER_RESULT_LABEL =
+        "sinfo.export.flow_depth.csv.meta.header.result.label";
+
     private static final String CSV_META_VERSION =
         "sinfo.export.flow_depth.csv.meta.version";
+
+    private static final String CSV_META_VERSION_LABEL =
+        "sinfo.export.flow_depth.csv.meta.version.label";
 
     private static final String CSV_META_USER =
         "sinfo.export.flow_depth.csv.meta.user";
 
+    private static final String CSV_META_USER_LABEL =
+        "sinfo.export.flow_depth.csv.meta.user.label";
+
     private static final String CSV_META_CREATION =
        "sinfo.export.flow_depth.csv.meta.creation";
 
+    private static final String CSV_META_CREATION_LABEL =
+        "sinfo.export.flow_depth.csv.meta.creation.label";
+
     private static final String CSV_META_RIVER =
         "sinfo.export.flow_depth.csv.meta.river";
 
+    private static final String CSV_META_RIVER_LABEL =
+        "sinfo.export.flow_depth.csv.meta.river.label";
+
     private static final String CSV_META_HEADER_SOUNDING =
         "sinfo.export.flow_depth.csv.meta.header.sounding";
 
     private static final String CSV_META_HEADER_WATERLEVEL =
         "sinfo.export.flow_depth.csv.meta.header.waterlevel";
 
-    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper"; //$NON-NLS-1$
+    private static final String CSV_META_RANGE =
+        "sinfo.export.flow_depth.csv.meta.range";
+
+    private static final String CSV_META_RANGE_LABEL =
+        "sinfo.export.flow_depth.csv.meta.range.label";
+
+    private static final String CSV_META_HEIGHT_UNIT_RIVER = "sinfo.export.flow_depth.csv.meta.height_unit.river";
+
+    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
+
+    private static final String UNIT_M = "m";
+
+    private static final String UNIT_CM = "cm";
+
+    private static final String UNIT_CUBIC_M = "m³/s";
 
     /** The storage that contains the current calculation result.*/
     private FlowDepthCalculationResults data = null;
 
-    private NumberFormat meanBedHeightFormatter;
-
-    private NumberFormat tkhFormatter;
-
-    private NumberFormat flowDepthFormatter;
-
-    private NumberFormat getMeanBedHeightFormatter() {
-        if( meanBedHeightFormatter == null )
-            // FIXME: check if this is right
-            meanBedHeightFormatter = Formatter.getMiddleBedHeightHeight(context);
-        return meanBedHeightFormatter;
-    }
-
-    private NumberFormat getTkhFormatter() {
-        if( tkhFormatter == null )
-            // FIXME: check if this is right, probably not, we need one digit
-            tkhFormatter = Formatter.getWaterlevelW(context);
-        return tkhFormatter;
-    }
-
-    private NumberFormat getFlowDepthFormatter() {
-        if( flowDepthFormatter == null )
-            // FIXME: check if this is right
-            flowDepthFormatter = Formatter.getMeterFormat(context);
-        return flowDepthFormatter;
-    }
+    /**
+     * Formats header with unit
+     */
+    private String msgUnit(final String key, String unit ) {
+
+        final String msg = msg(key);
+        return String.format("%s [%s]", msg, unit);
+    }
 
     @Override
     protected void addData(Object d) {
         /* reset */
         data = null;
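The new msgUnit() helper builds a column heading from a localized label and a unit string. A minimal standalone sketch of its effect follows; the message key's resolved German label is an assumption for illustration, since in the exporter the label is looked up by msg(), presumably inherited from AbstractExporter and backed by the i18n resources.

    // Sketch only: mirrors the formatting done by the new msgUnit() helper.
    // The key/label pair below is assumed; the real label is resolved from
    // the resource bundle by msg(key).
    public class MsgUnitSketch {

        private static String msg(final String key) {
            // stand-in for the i18n lookup; "Fließtiefe" is an assumed label
            return "sinfo.export.flow_depth.csv.header.flowdepth".equals(key)
                ? "Fließtiefe" : key;
        }

        private static String msgUnit(final String key, final String unit) {
            final String msg = msg(key);
            return String.format("%s [%s]", msg, unit);
        }

        public static void main(final String[] args) {
            // prints: Fließtiefe [m]
            System.out.println(msgUnit("sinfo.export.flow_depth.csv.header.flowdepth", "m"));
        }
    }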
@@ -122,29 +134,24 @@
             final Object dat = ((CalculationResult)d).getData();
             if( dat != null )
                 data = (FlowDepthCalculationResults)dat;
         }
     }
 
     @Override
     protected void writeCSVData(CSVWriter writer) {
         log.info("FlowDepthExporter.writeCSVData");
 
         /* fetch calculation results */
         final FlowDepthCalculationResults results = data;
 
+        final boolean useTkh = results.isUseTkh();
+        final River river = results.getRiver();
+
         /* write as csv */
-
-        // boolean atGauge = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.WGAUGE;
-        // boolean isQ = mode == WQ_MODE.QGAUGE || mode == WQ_MODE.QFREE;
-        // RiverUtils.WQ_INPUT input
-        //     = RiverUtils.getWQInputMode((D4EArtifact)master);
-
-        final boolean useTkh = results.isUseTkh();
-
         writeCSVMeta(writer, results);
-        writeCSVHeader(writer, useTkh);
+        writeCSVHeader(writer, river, useTkh);
 
         for (final FlowDepthCalculationResult result : results.getResults()) {
             writeCSVFlowDepthResult(writer, result, useTkh);
         }
     }
@@ -157,53 +164,39 @@
     }
 
     private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
         log.info("FlowDepthExporter.writeCSVMeta");
 
-        // Workflow zur Berechnung der Fließtiefe.pdf
-        // "##ERGEBNISAUSGABE - Name des Gewässers - Fließtiefe"
+        final String calcModeLabel = results.getCalcModeLabel();
         final River river = results.getRiver();
-        writeCSVMeataEntry(writer, CSV_META_HEADER_RESULT, river.getName() );
+        writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, msg( CSV_META_HEADER_RESULT_LABEL ), river.getName(), calcModeLabel );
 
         // "# FLYS-Version: "
-        // FIXME
-        final String flysVersion = "unbekannt";
-        writeCSVMeataEntry(writer, CSV_META_VERSION, flysVersion );
+        writeCSVMetaEntry(writer, CSV_META_VERSION, msg( CSV_META_VERSION_LABEL ), FLYS.VERSION );
 
         // "# Bearbeiter: "
-        // FIXME
-        final String user = "unbekannt";
-        writeCSVMeataEntry(writer, CSV_META_USER, user );
+        writeCSVMetaEntry(writer, CSV_META_USER, msg( CSV_META_USER_LABEL ), results.getUser() );
 
         // "# Datum der Erstellung: "
         final Locale locale = Resources.getLocale(context.getMeta());
         final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
-        writeCSVMeataEntry(writer, CSV_META_CREATION, df.format(new Date()) );
+        writeCSVMetaEntry(writer, CSV_META_CREATION, msg( CSV_META_CREATION_LABEL ), df.format(new Date()) );
 
         // "# Gewässer: "
-        writeCSVMeataEntry(writer, CSV_META_RIVER, river.getName() );
+        writeCSVMetaEntry(writer, CSV_META_RIVER, msg( CSV_META_RIVER_LABEL ), river.getName() );
 
         // "# Höhensystem des Flusses: "
-
-        // FIXME
+        final Unit wstUnit = river.getWstUnit();
+        writeCSVMetaEntry(writer, CSV_META_HEIGHT_UNIT_RIVER, wstUnit.getName());
 
         // "# Ort/Bereich (km): "
-        // FIXME
-        // TODO: unklar, es wird nur ein Bereich eingegeben
-        // RangeAccess rangeAccess = new RangeAccess(flys);
-        // double[] kms = rangeAccess.getKmRange();
-        // writer.writeNext(new String[] {
-        //     Resources.getMsg(
-        //         meta,
-        //         CSV_META_RANGE,
-        //         CSV_META_RANGE,
-        //         new Object[] { kms[0], kms[kms.length-1] })
-        // });
+        writeCSVMetaEntry(writer, CSV_META_RANGE, msg( CSV_META_RANGE_LABEL ), getKmFormatter().format(results.getFrom() ), getKmFormatter().format( results.getTo()));
 
         // "##METADATEN PEILUNG"
-        writeCSVMeataEntry(writer, CSV_META_HEADER_SOUNDING );
+        writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING );
 
+        // FIXME: check: macht nicht viel sinn da es mehrere geben kann.. oder immer wieder wiederholen?
         // "# Jahr der Peilung: "
         // FIXME
         // "# Aufnahmeart: "
         // FIXME
         // "# Lagesystem: "
211 // "# Höhensystem: " 204 // "# Höhensystem: "
212 // FIXME 205 // FIXME
213 // "# ursprüngliches Höhensystem: " 206 // "# ursprüngliches Höhensystem: "
214 // FIXME 207 // FIXME
215 // "##METADATEN WASSERSPIEGELLAGE" 208 // "##METADATEN WASSERSPIEGELLAGE"
216 writeCSVMeataEntry(writer, CSV_META_HEADER_WATERLEVEL ); 209 writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL );
217 // "# Bezeichnung der Wasserspiegellage: " 210 // "# Bezeichnung der Wasserspiegellage: "
218 // FIXME 211 // FIXME
219 // "# Höhensystem der Wasserspiegellage: " 212 // "# Höhensystem der Wasserspiegellage: "
220 // FIXME 213 // FIXME
221 // "# Auswerter: " 214 // "# Auswerter: "
@@ -229,15 +222,7 @@
         // TODO: unklar, es wird kein W eingegeben
 
         // "# Q (m³/s): " (nur bei Eingabe des Durchflusses)
         // TODO: unklar, es wird kein Q eingegeben
 
-        // writer.writeNext(new String[] {
-        //     Resources.getMsg(
-        //         meta,
-        //         CSV_META_GAUGE,
-        //         CSV_META_GAUGE,
-        //         new Object[] { RiverUtils.getGaugename(flys) })
-        // });
-
         writer.writeNext(new String[] { "" });
     }
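Each call to writeCSVMetaEntry() ends up as a single-field CSV row whose text is produced by Resources.getMsg() from the message key, the localized label and the values. The following sketch shows how one such row (here "# Bearbeiter: <user>") could be assembled; the "# {0}: {1}" message template, the ';' separator and the unquoted output are assumptions, as neither the resource bundle nor the CSVWriter configuration is shown in this change.

    // Sketch only: one metadata row as writeCSVMetaEntry() would emit it,
    // under assumed template and writer settings.
    import java.io.StringWriter;
    import java.text.MessageFormat;

    import au.com.bytecode.opencsv.CSVWriter;

    public class CsvMetaEntrySketch {

        public static void main(final String[] args) throws Exception {
            final StringWriter out = new StringWriter();
            final CSVWriter writer = new CSVWriter(out, ';', CSVWriter.NO_QUOTE_CHARACTER);

            // assumed template for "sinfo.export.flow_depth.csv.meta.user"
            final String line = MessageFormat.format("# {0}: {1}", "Bearbeiter", "gernotbelger");

            // writeCSVMetaEntry() writes each entry as a one-column row
            writer.writeNext(new String[] { line });
            writer.close();

            System.out.print(out); // # Bearbeiter: gernotbelger
        }
    }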
@@ -244,8 +229,8 @@
 
 
-    private void writeCSVMeataEntry(CSVWriter writer, String message, Object... messageArgs) {
+    private void writeCSVMetaEntry(CSVWriter writer, String message, Object... messageArgs) {
 
         CallMeta meta = context.getMeta();
 
         writer.writeNext(new String[] {
             Resources.getMsg(
@@ -257,32 +242,36 @@
     }
 
     /**
      * Write the header, with different headings depending on whether at a
      * gauge or at a location.
+     * @param river
      * @param useTkh
      */
     private void writeCSVHeader(
         final CSVWriter writer,
+        final River river,
         final boolean useTkh
     ) {
         log.info("FlowDepthExporter.writeCSVHeader");
 
         final Collection<String> header = new ArrayList<>(11);
 
-        header.add(msg(CSV_KM_HEADER,CSV_KM_HEADER));
-        header.add(msg(CSV_FLOWDEPTH_HEADER));
+        header.add(msg(CSV_KM_HEADER));
+        header.add(msgUnit(CSV_FLOWDEPTH_HEADER, UNIT_M));
         if( useTkh )
         {
-            header.add(msg(CSV_FLOWDEPTHTKH_HEADER));
-            header.add(msg(CSV_TKH_HEADER));
+            header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, UNIT_M));
+            header.add(msgUnit(CSV_TKH_HEADER, UNIT_CM));
         }
-        header.add(msg(CSV_WATERLEVEL_HEADER));
-        header.add(msg(CSV_DISCHARGE_HEADER));
+
+        final String wstUnitName = river.getWstUnit().getName();
+        header.add(msgUnit(CSV_WATERLEVEL_HEADER, wstUnitName));
+        header.add(msgUnit(CSV_DISCHARGE_HEADER, UNIT_CUBIC_M));
         header.add(msg(CSV_LABEL_HEADER));
         header.add(msg(CSV_GAUGE_HEADER));
-        header.add(msg(CSV_MEAN_BED_HEIGHT_HEADER));
+        header.add(msgUnit(CSV_MEAN_BED_HEIGHT_HEADER, wstUnitName));
         header.add(msg(CSV_SOUNDING_HEADER));
         header.add(msg(CSV_LOCATION_HEADER));
 
         writer.writeNext(header.toArray(new String[header.size()]));
     }
@@ -311,11 +300,11 @@
             // TKH [cm]
             lines.add( getTkhFormatter().format( row.getTkh() ) );
         }
 
         // Wasserstand [NN + m]
-        lines.add( getWFormatter().format( row.getWaterlevel() ) );
+        lines.add( getW2Formatter().format( row.getWaterlevel() ) );
 
         // Q [m³/s]
         lines.add( getQFormatter().format( row.getDischarge() ) );
 
         // Bezeichnung
@@ -323,11 +312,11 @@
 
         // Bezugspegel
         lines.add( row.getGauge() );
 
         // Mittlere Sohlhöhe [NN + m]
-        lines.add( getMeanBedHeightFormatter().format( row.getMeanBedHeight( ) ) );
+        lines.add( getMeanBedHeighFormatter().format( row.getMeanBedHeight( ) ) );
 
         // Peilung/Epoche
         lines.add( row.getSoundageLabel() );
 
         // Lage
@@ -392,56 +381,54 @@
         return source;
     }
 
     private void addJRMetaData(final MetaAndTableJRDataSource source, FlowDepthCalculationResults results) {
 
-        // Workflow zur Berechnung der Fließtiefe.pdf
-        // "##ERGEBNISAUSGABE - Name des Gewässers - Fließtiefe"
-        // writeCSVMeataEntry(writer, CSV_META_HEADER_RESULT, inputData.getRiver() );
-
-        // FIXME
-        final String flysVersion = "unbekannt";
-        // CSV_META_VERSION
-        source.addMetaData("version", flysVersion);
-
-        // FIXME
-        String user = "unbekannt";
-        // CSV_META_USER
-        source.addMetaData("user", user);
+        final River river = results.getRiver();
+        final String wstUnitName = river.getWstUnit().getName();
+
+        /* general metadata */
+        source.addMetaData("header", msg(CSV_META_HEADER_RESULT_LABEL));
+        source.addMetaData("calcMode", results.getCalcModeLabel());
+
+        source.addMetaData("version_label", msg(CSV_META_VERSION_LABEL));
+        source.addMetaData("version", FLYS.VERSION);
+
+        source.addMetaData("user_label", msg(CSV_META_USER_LABEL));
+        source.addMetaData("user", results.getUser());
 
         final Locale locale = Resources.getLocale(context.getMeta());
         final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
+        source.addMetaData("date_label", msg(CSV_META_CREATION_LABEL));
         source.addMetaData("date", df.format(new Date()));
 
-        // CSV_META_RIVER
-        source.addMetaData("river", results.getRiver().getName());
+        source.addMetaData("river_label", msg(CSV_META_RIVER_LABEL) );
+        source.addMetaData("river", river.getName());
 
-        // FIXME
-        source.addMetaData("range", "FIXME");
-        // "# Ort/Bereich (km): "
-        // FIXME
-        // TODO: unklar, es wird nur ein Bereich eingegeben
-        // RangeAccess rangeAccess = new RangeAccess(flys);
-        // double[] kms = rangeAccess.getKmRange();
-        // writer.writeNext(new String[] {
-        //     Resources.getMsg(
-        //         meta,
-        //         CSV_META_RANGE,
-        //         CSV_META_RANGE,
-        //         new Object[] { kms[0], kms[kms.length-1] })
-        // });
-
-        // RangeAccess rangeAccess = new RangeAccess(flys);
-        // double[] kms = rangeAccess.getKmRange();
-        // source.addMetaData("range",
-        //     kmf.format(kms[0]) + " - " + kmf.format(kms[kms.length-1]));
+        final String rangeValue = String.format( "%s - %s", getKmFormatter().format(results.getFrom() ), getKmFormatter().format( results.getTo()));
+        source.addMetaData("range_label", msg(CSV_META_RANGE_LABEL));
+        source.addMetaData("range", rangeValue);
+
+        /* column headings */
+        source.addMetaData("station_header", msg(CSV_KM_HEADER));
+        source.addMetaData("flowdepth_header", msg(CSV_FLOWDEPTH_HEADER));
+        source.addMetaData("flowdepth_tkh_header", msg(CSV_FLOWDEPTHTKH_HEADER));
+        source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
+        source.addMetaData("waterlevel_header", msg(CSV_WATERLEVEL_HEADER));
+        source.addMetaData("river_unit", wstUnitName);
+        source.addMetaData("discharge_header", msg(CSV_DISCHARGE_HEADER));
+        source.addMetaData("waterlevel_name_header", msg(CSV_LABEL_HEADER));
+        source.addMetaData("gauge_header", msg(CSV_GAUGE_HEADER));
+        source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
+        source.addMetaData("sounding_name_header", msg(CSV_SOUNDING_HEADER));
+        source.addMetaData("location_header", msg(CSV_LOCATION_HEADER));
     }
 
     private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) {
 
         final Collection<FlowDepthRow> rows = result.getRows();
 
         for (final FlowDepthRow row : rows) {
 
             final String[] formattedRow = formatFlowDepthRow(row, useTkh);
             source.addData(formattedRow);
         }
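Both writeCSVMeta() and addJRMetaData() now derive the "Ort/Bereich (km)" value from the calculation results' from/to stations via getKmFormatter(). The following sketch shows the resulting string, assuming the km formatter uses a German locale with three decimal places; the locale, precision and example stations are assumptions, as getKmFormatter() itself is not part of this change.

    // Sketch only: the range value assembled in addJRMetaData(), under an
    // assumed three-decimal German km format and example stations.
    import java.text.NumberFormat;
    import java.util.Locale;

    public class RangeValueSketch {

        public static void main(final String[] args) {
            final NumberFormat kmFormat = NumberFormat.getInstance(Locale.GERMANY);
            kmFormat.setMinimumFractionDigits(3);
            kmFormat.setMaximumFractionDigits(3);

            final double from = 336.2;
            final double to = 845.0;

            // prints: 336,200 - 845,000
            final String rangeValue = String.format("%s - %s",
                kmFormat.format(from), kmFormat.format(to));
            System.out.println(rangeValue);
        }
    }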
