Mercurial > dive4elements > river
comparison flys-backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java @ 5828:dfb26b03b179
Moved directories to org.dive4elements.river
author | Sascha L. Teichmann <teichmann@intevation.de> |
---|---|
date | Thu, 25 Apr 2013 11:53:11 +0200 |
parents | flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java@dba9e1fa233e |
children | 18619c1e7c2a |
comparison
equal
deleted
inserted
replaced
5827:e308d4ecd35a | 5828:dfb26b03b179 |
---|---|
1 package de.intevation.flys.importer; | |
2 | |
3 import de.intevation.artifacts.common.utils.FileTools.HashedFile; | |
4 | |
5 import de.intevation.artifacts.common.utils.FileTools; | |
6 | |
7 import de.intevation.flys.importer.parsers.AnnotationClassifier; | |
8 import de.intevation.flys.importer.parsers.AnnotationsParser; | |
9 import de.intevation.flys.importer.parsers.BedHeightEpochParser; | |
10 import de.intevation.flys.importer.parsers.BedHeightSingleParser; | |
11 import de.intevation.flys.importer.parsers.CrossSectionParser; | |
12 import de.intevation.flys.importer.parsers.DA50Parser; | |
13 import de.intevation.flys.importer.parsers.DA66Parser; | |
14 import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser; | |
15 import de.intevation.flys.importer.parsers.FlowVelocityModelParser; | |
16 import de.intevation.flys.importer.parsers.HYKParser; | |
17 import de.intevation.flys.importer.parsers.MeasurementStationsParser; | |
18 import de.intevation.flys.importer.parsers.MorphologicalWidthParser; | |
19 import de.intevation.flys.importer.parsers.PRFParser; | |
20 import de.intevation.flys.importer.parsers.PegelGltParser; | |
21 import de.intevation.flys.importer.parsers.SQRelationParser; | |
22 import de.intevation.flys.importer.parsers.SedimentDensityParser; | |
23 import de.intevation.flys.importer.parsers.SedimentYieldParser; | |
24 import de.intevation.flys.importer.parsers.W80Parser; | |
25 import de.intevation.flys.importer.parsers.W80CSVParser; | |
26 import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser; | |
27 import de.intevation.flys.importer.parsers.WaterlevelParser; | |
28 import de.intevation.flys.importer.parsers.WstParser; | |
29 | |
30 import de.intevation.flys.model.River; | |
31 import de.intevation.flys.model.Unit; | |
32 | |
33 import java.io.File; | |
34 import java.io.IOException; | |
35 | |
36 import java.util.ArrayList; | |
37 import java.util.Calendar; | |
38 import java.util.Date; | |
39 import java.util.HashSet; | |
40 import java.util.List; | |
41 import java.util.Map; | |
42 import java.util.Set; | |
43 | |
44 import org.apache.log4j.Logger; | |
45 | |
46 import org.hibernate.Query; | |
47 import org.hibernate.Session; | |
48 | |
49 | |
/** Import all river-related data (files) that can be found. */
public class ImportRiver
{
    /** Private logger. */
    private static Logger log = Logger.getLogger(ImportRiver.class);

    // --- Well-known file and directory names inside a river's import tree. ---
    // (German names mirror the on-disk layout delivered by the data provider.)

    /** Gauge overview file. */
    public static final String PEGEL_GLT = "PEGEL.GLT";

    /** Directory with fixation WST files. */
    public static final String FIXATIONS = "Fixierungen";

    /** Directory with extra longitudinal-section WST files. */
    public static final String EXTRA_LONGITUDINALS =
        "Zus.L\u00e4ngsschnitte";

    /** Folders searched for the official-lines WST file. */
    public static final String [] OFFICIAL_LINES_FOLDERS = {
        "Basisdaten",
        "Fixierungen" };

    /** Official-lines WST file name. */
    public static final String OFFICIAL_LINES =
        "Amtl_Linien.wst";

    /** Directory with flood-water ("HW-Marken") WST files. */
    public static final String FLOOD_WATER = "HW-Marken";

    /** Directory with flood-protection WST files. */
    public static final String FLOOD_PROTECTION =
        "HW-Schutzanlagen";

    /** Root of the morphological (MINFO) data. */
    public static final String MINFO_DIR = "Morphologie";

    public static final String BED_HEIGHT_DIR = "Sohlhoehen";

    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";

    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";

    public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";

    public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";

    public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";

    public static final String SEDIMENT_YIELD_DIR = "Fracht";

    public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";

    public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";

    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";

    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";

    public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";

    public static final String MINFO_BASE_DIR = "Basisdaten";

    /** Core data (measurement stations) CSV file. */
    public static final String MINFO_CORE_DATA_FILE = "Stammdaten_Messstellen.csv";

    public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung";

    // --- Identity and entry points of the river's data tree. ---

    /** River name. */
    protected String name;

    /** Official river number (BWASTR); may be null. */
    protected Long officialNumber;

    /** The river's base WST file; parent directories of this file are
     *  used to locate all other data directories. */
    protected File wstFile;

    protected File bbInfoFile;

    // --- Results collected by the parse*() methods. ---

    protected List<ImportGauge> gauges;

    protected List<ImportAnnotation> annotations;

    protected List<ImportHYK> hyks;

    protected List<ImportCrossSection> crossSections;

    protected List<ImportWst> extraWsts;

    protected List<ImportWst> fixations;

    protected List<ImportWst> officialLines;

    protected List<ImportWst> floodWater;

    protected List<ImportWst> floodProtection;

    /** Wst-structures from waterlevel-csv files. */
    protected List<ImportWst> waterlevels;

    /** Wst-structures from waterlevel-difference-csv files. */
    protected List<ImportWst> waterlevelDifferences;

    protected List<ImportBedHeight> bedHeightSingles;

    protected List<ImportBedHeight> bedHeightEpochs;

    protected List<ImportSedimentDensity> sedimentDensities;

    protected List<ImportMorphWidth> morphologicalWidths;

    protected List<ImportFlowVelocityModel> flowVelocityModels;

    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;

    protected List<ImportSedimentYield> sedimentYields;

    protected List<ImportMeasurementStation> measurementStations;

    protected List<ImportSQRelation> sqRelations;

    /** Wst parsed from the river's base WST file. */
    protected ImportWst wst;

    protected ImportUnit wstUnit;

    /** Classifier used while parsing annotations; may be null. */
    protected AnnotationClassifier annotationClassifier;

    /** Database-mapped River instance. */
    protected River peer;
170 | |
171 /** Callback-implementation for CrossSectionParsers: | |
172 * Accept files with different md5(?)sums than what has already been parsed, | |
173 * on successfull parse, add data. */ | |
174 class ImportRiverCrossSectionParserCallback implements CrossSectionParser.Callback { | |
175 Set<HashedFile> files = new HashSet<HashedFile>(); | |
176 String type; | |
177 | |
178 | |
179 /** | |
180 * Create new Callback, given type which is used for logging | |
181 * purposes only. | |
182 */ | |
183 public ImportRiverCrossSectionParserCallback (String type) { | |
184 this.type = type; | |
185 } | |
186 | |
187 | |
188 /** Accept file if not duplicate. */ | |
189 public boolean accept(File file) { | |
190 HashedFile hf = new HashedFile(file); | |
191 boolean success = files.add(hf); | |
192 if (!success) { | |
193 log.warn(type + " file '" + file + "' seems to be a duplicate."); | |
194 } | |
195 return success; | |
196 } | |
197 | |
198 | |
199 /** Add crosssection. */ | |
200 public void parsed(CrossSectionParser parser) { | |
201 log.debug("callback from " + type + " parser"); | |
202 | |
203 addCrossSections(parser); | |
204 } | |
205 } // ImportRiverCrossSectionParserCallback | |
206 | |
207 | |
208 public ImportRiver() { | |
209 hyks = new ArrayList<ImportHYK>(); | |
210 crossSections = new ArrayList<ImportCrossSection>(); | |
211 extraWsts = new ArrayList<ImportWst>(); | |
212 fixations = new ArrayList<ImportWst>(); | |
213 officialLines = new ArrayList<ImportWst>(); | |
214 floodWater = new ArrayList<ImportWst>(); | |
215 waterlevels = new ArrayList<ImportWst>(); | |
216 waterlevelDifferences = new ArrayList<ImportWst>(); | |
217 floodProtection = new ArrayList<ImportWst>(); | |
218 sedimentDensities = new ArrayList<ImportSedimentDensity>(); | |
219 morphologicalWidths = new ArrayList<ImportMorphWidth>(); | |
220 flowVelocityModels = new ArrayList<ImportFlowVelocityModel>(); | |
221 flowVelocityMeasurements = new ArrayList<ImportFlowVelocityMeasurement>(); | |
222 sedimentYields = new ArrayList<ImportSedimentYield>(); | |
223 measurementStations = new ArrayList<ImportMeasurementStation>(); | |
224 sqRelations = new ArrayList<ImportSQRelation>(); | |
225 } | |
226 | |
227 public ImportRiver( | |
228 String name, | |
229 File wstFile, | |
230 File bbInfoFile, | |
231 AnnotationClassifier annotationClassifier | |
232 ) { | |
233 this(); | |
234 this.name = name; | |
235 this.wstFile = wstFile; | |
236 this.bbInfoFile = bbInfoFile; | |
237 this.annotationClassifier = annotationClassifier; | |
238 } | |
239 | |
240 public String getName() { | |
241 return name; | |
242 } | |
243 | |
244 public void setName(String name) { | |
245 this.name = name; | |
246 } | |
247 | |
248 public Long getOfficialNumber() { | |
249 return this.officialNumber; | |
250 } | |
251 | |
252 public void setOfficialNumber(Long officialNumber) { | |
253 this.officialNumber = officialNumber; | |
254 } | |
255 | |
256 public File getWstFile() { | |
257 return wstFile; | |
258 } | |
259 | |
260 public void setWstFile(File wstFile) { | |
261 this.wstFile = wstFile; | |
262 } | |
263 | |
264 public File getBBInfo() { | |
265 return bbInfoFile; | |
266 } | |
267 | |
268 public void setBBInfo(File bbInfoFile) { | |
269 this.bbInfoFile = bbInfoFile; | |
270 } | |
271 | |
272 public ImportWst getWst() { | |
273 return wst; | |
274 } | |
275 | |
276 public void setWst(ImportWst wst) { | |
277 this.wst = wst; | |
278 } | |
279 | |
280 public File getMinfoDir() { | |
281 File riverDir = wstFile.getParentFile().getParentFile().getParentFile(); | |
282 return new File(riverDir, MINFO_DIR); | |
283 } | |
284 | |
    /**
     * Parse all of the river's data files in a fixed order.
     * Each step honors its own skip-flag from {@code Config} and collects
     * its results into this object's fields.
     *
     * @throws IOException if one of the parsers fails reading its files.
     */
    public void parseDependencies() throws IOException {
        // Gauges and annotations.
        parseGauges();
        parseAnnotations();
        // Cross sections (PRF, DA66, DA50, W80, W80-CSV) and HYK zones.
        parsePRFs();
        parseDA66s();
        parseDA50s();
        parseW80s();
        parseW80CSVs();
        parseHYKs();
        // Waterlevel (WST) files of all kinds.
        parseWst();
        parseExtraWsts();
        parseFixations();
        parseOfficialLines();
        parseFloodWater();
        parseFloodProtection();
        // Morphological (MINFO) data.
        parseBedHeight();
        parseSedimentDensity();
        parseMorphologicalWidth();
        parseFlowVelocity();
        parseSedimentYield();
        parseWaterlevels();
        parseWaterlevelDifferences();
        parseMeasurementStations();
        parseSQRelation();
    }
310 | |
311 public void parseFloodProtection() throws IOException { | |
312 if (Config.INSTANCE.skipFloodProtection()) { | |
313 log.info("skip parsing flood protection"); | |
314 return; | |
315 } | |
316 | |
317 log.info("Parse flood protection wst file"); | |
318 | |
319 File riverDir = wstFile.getParentFile().getParentFile(); | |
320 | |
321 File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION)); | |
322 | |
323 if (!dir.isDirectory() || !dir.canRead()) { | |
324 log.info("no directory '" + dir + "' found"); | |
325 return; | |
326 } | |
327 | |
328 File [] files = dir.listFiles(); | |
329 | |
330 if (files == null) { | |
331 log.warn("cannot read '" + dir + "'"); | |
332 return; | |
333 } | |
334 | |
335 for (File file: files) { | |
336 if (!file.isFile() || !file.canRead()) { | |
337 continue; | |
338 } | |
339 String name = file.getName().toLowerCase(); | |
340 if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { | |
341 continue; | |
342 } | |
343 log.info("found file '" + file.getName() + "'"); | |
344 WstParser wstParser = new WstParser(); | |
345 wstParser.parse(file); | |
346 ImportWst iw = wstParser.getWst(); | |
347 iw.setKind(5); | |
348 iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription()); | |
349 floodProtection.add(iw); | |
350 } | |
351 } | |
352 | |
353 public void storeOfficialNumber() { | |
354 if (Config.INSTANCE.skipBWASTR()) { | |
355 log.info("skip storing official number."); | |
356 return; | |
357 } | |
358 getPeer().setOfficialNumber(officialNumber); | |
359 } | |
360 | |
361 public void parseBedHeight() throws IOException { | |
362 File minfoDir = getMinfoDir(); | |
363 File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR); | |
364 File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR); | |
365 File epochDir = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR); | |
366 | |
367 if (Config.INSTANCE.skipBedHeightSingle()) { | |
368 log.info("skip parsing bed height single."); | |
369 } | |
370 else { | |
371 log.info("Parse bed height single."); | |
372 parseBedHeightSingles(singlesDir); | |
373 } | |
374 | |
375 if (Config.INSTANCE.skipBedHeightEpoch()) { | |
376 log.info("skip parsing bed height epochs."); | |
377 } | |
378 else { | |
379 log.info("Parse bed height epochs."); | |
380 parseBedHeightEpochs(epochDir); | |
381 } | |
382 } | |
383 | |
384 | |
385 protected void parseSedimentDensity() throws IOException { | |
386 if (Config.INSTANCE.skipSedimentDensity()) { | |
387 log.info("skip parsing sediment density."); | |
388 return; | |
389 } | |
390 | |
391 log.debug("Parse sediment density"); | |
392 | |
393 File minfoDir = getMinfoDir(); | |
394 File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR); | |
395 | |
396 File[] files = sediment.listFiles(); | |
397 | |
398 if (files == null) { | |
399 log.warn("Cannot read directory '" + sediment + "'"); | |
400 return; | |
401 } | |
402 | |
403 SedimentDensityParser parser = new SedimentDensityParser(); | |
404 | |
405 for (File file: files) { | |
406 parser.parse(file); | |
407 } | |
408 | |
409 sedimentDensities = parser.getSedimentDensities(); | |
410 | |
411 log.info("Parsed " + sedimentDensities.size() + " sediment densities."); | |
412 } | |
413 | |
414 | |
415 protected void parseMorphologicalWidth() throws IOException { | |
416 if (Config.INSTANCE.skipMorphologicalWidth()) { | |
417 log.info("skip parsing morphological width."); | |
418 return; | |
419 } | |
420 | |
421 log.debug("Parse morphological width"); | |
422 | |
423 File minfoDir = getMinfoDir(); | |
424 File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR); | |
425 | |
426 File[] files = morphDir.listFiles(); | |
427 | |
428 if (files == null) { | |
429 log.warn("Cannot read directory '" + morphDir + "'"); | |
430 return; | |
431 } | |
432 | |
433 MorphologicalWidthParser parser = new MorphologicalWidthParser(); | |
434 | |
435 for (File file: files) { | |
436 parser.parse(file); | |
437 } | |
438 | |
439 morphologicalWidths = parser.getMorphologicalWidths(); | |
440 | |
441 log.info("Parsed " + morphologicalWidths.size() + " morph. widths files."); | |
442 } | |
443 | |
444 | |
445 protected void parseFlowVelocity() throws IOException { | |
446 if (Config.INSTANCE.skipFlowVelocity()) { | |
447 log.info("skip parsing flow velocity"); | |
448 return; | |
449 } | |
450 | |
451 log.debug("Parse flow velocity"); | |
452 | |
453 File minfoDir = getMinfoDir(); | |
454 File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR); | |
455 File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL); | |
456 File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS); | |
457 | |
458 File[] modelFiles = modelDir.listFiles(); | |
459 File[] measureFiles = measureDir.listFiles(); | |
460 | |
461 if (modelFiles == null) { | |
462 log.warn("Cannot read directory '" + modelDir + "'"); | |
463 } | |
464 else { | |
465 FlowVelocityModelParser parser = new FlowVelocityModelParser(); | |
466 | |
467 for (File model: modelFiles) { | |
468 log.debug("Parse file '" + model + "'"); | |
469 parser.parse(model); | |
470 } | |
471 | |
472 flowVelocityModels = parser.getModels(); | |
473 } | |
474 | |
475 if (measureFiles == null) { | |
476 log.warn("Cannot read directory '" + measureDir + "'"); | |
477 } | |
478 else { | |
479 FlowVelocityMeasurementParser parser = | |
480 new FlowVelocityMeasurementParser(); | |
481 | |
482 for (File measurement: measureFiles) { | |
483 log.debug("Parse file '" + measurement + "'"); | |
484 parser.parse(measurement); | |
485 } | |
486 | |
487 flowVelocityMeasurements = parser.getMeasurements(); | |
488 } | |
489 } | |
490 | |
491 | |
492 protected void parseSedimentYield() throws IOException { | |
493 if (Config.INSTANCE.skipSedimentYield()) { | |
494 log.info("skip parsing sediment yield data"); | |
495 return; | |
496 } | |
497 | |
498 log.debug("Parse sediment yield data"); | |
499 | |
500 File minfoDir = getMinfoDir(); | |
501 File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR); | |
502 | |
503 File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR); | |
504 File epochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR); | |
505 | |
506 File[] singles = singleDir.listFiles(); | |
507 File[] epochs = epochDir.listFiles(); | |
508 | |
509 SedimentYieldParser parser = new SedimentYieldParser(); | |
510 | |
511 if (singles == null || singles.length == 0) { | |
512 log.warn("Cannot read directory '" + singleDir + "'"); | |
513 } | |
514 else { | |
515 for (File file: singles) { | |
516 if (file.isDirectory()) { | |
517 for (File child: file.listFiles()) { | |
518 parser.parse(child); | |
519 } | |
520 } | |
521 else { | |
522 parser.parse(file); | |
523 } | |
524 } | |
525 } | |
526 | |
527 if (epochs == null || epochs.length == 0) { | |
528 log.warn("Cannot read directory '" + epochDir + "'"); | |
529 } | |
530 else { | |
531 for (File file: epochs) { | |
532 if (file.isDirectory()) { | |
533 for (File child: file.listFiles()) { | |
534 parser.parse(child); | |
535 } | |
536 } | |
537 else { | |
538 parser.parse(file); | |
539 } | |
540 } | |
541 } | |
542 | |
543 sedimentYields = parser.getSedimentYields(); | |
544 } | |
545 | |
546 | |
547 protected void parseWaterlevels() throws IOException { | |
548 if (Config.INSTANCE.skipWaterlevels()) { | |
549 log.info("skip parsing waterlevels"); | |
550 return; | |
551 } | |
552 | |
553 log.info("Parse waterlevels"); | |
554 | |
555 File minfo = getMinfoDir(); | |
556 File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); | |
557 File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); | |
558 | |
559 File[] files = wspDir.listFiles(); | |
560 | |
561 if (files == null) { | |
562 log.warn("Cannot read directory for wl '" + wspDir + "'"); | |
563 return; | |
564 } | |
565 | |
566 WaterlevelParser parser = new WaterlevelParser(); | |
567 | |
568 for (File file: files) { | |
569 parser.parse(file); | |
570 } | |
571 | |
572 // The parsed ImportWaterlevels are converted to | |
573 // 'fixation'-wsts now. | |
574 for(ImportWst iw: parser.getWaterlevels()) { | |
575 iw.setDescription("CSV/" + iw.getDescription()); | |
576 iw.setKind(6); | |
577 waterlevels.add(iw); | |
578 } | |
579 } | |
580 | |
581 protected void parseMeasurementStations() throws IOException { | |
582 if (Config.INSTANCE.skipMeasurementStations()) { | |
583 log.info("skip parsing measurement stations"); | |
584 return; | |
585 } | |
586 | |
587 log.info("Parse measurement stations"); | |
588 | |
589 File minfo = getMinfoDir(); | |
590 File minfoBaseDir = new File(minfo, MINFO_BASE_DIR); | |
591 File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE); | |
592 | |
593 if (coredataFile == null || !coredataFile.exists()) { | |
594 log.warn("No core data file '" + coredataFile.getAbsolutePath() + "' found"); | |
595 return; | |
596 } | |
597 | |
598 MeasurementStationsParser parser = new MeasurementStationsParser(); | |
599 try { | |
600 parser.parse(coredataFile); | |
601 measurementStations = parser.getMeasurementStations(); | |
602 | |
603 log.info("Successfully parsed " + measurementStations.size() + " measurement stations."); | |
604 } | |
605 catch (IOException ioe) { | |
606 log.error("unable to parse file '" + coredataFile.getName() + | |
607 ": " + ioe.getMessage()); | |
608 } | |
609 } | |
610 | |
611 | |
612 protected void parseWaterlevelDifferences() throws IOException { | |
613 if (Config.INSTANCE.skipWaterlevelDifferences()) { | |
614 log.info("skip parsing waterlevel differences"); | |
615 return; | |
616 } | |
617 | |
618 log.info("Parse waterlevel differences"); | |
619 | |
620 File minfo = getMinfoDir(); | |
621 File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); | |
622 File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); | |
623 | |
624 File[] files = diffDir.listFiles(); | |
625 | |
626 if (files == null) { | |
627 log.warn("Cannot read directory '" + diffDir + "'"); | |
628 return; | |
629 } | |
630 | |
631 WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); | |
632 | |
633 for (File file: files) { | |
634 parser.parse(file); | |
635 } | |
636 | |
637 // WaterlevelDifferences become Wsts now. | |
638 for(ImportWst iw: parser.getDifferences()) { | |
639 iw.setDescription("CSV/" + iw.getDescription()); | |
640 iw.setKind(7); | |
641 waterlevelDifferences.add(iw); | |
642 } | |
643 } | |
644 | |
645 | |
646 protected void parseSQRelation() throws IOException { | |
647 if (Config.INSTANCE.skipSQRelation()) { | |
648 log.info("skip parsing sq relation"); | |
649 return; | |
650 } | |
651 | |
652 log.info("Parse sq relations"); | |
653 | |
654 File minfo = getMinfoDir(); | |
655 File sqDir = new File(minfo, MINFO_SQ_DIR); | |
656 | |
657 File[] files = sqDir.listFiles(); | |
658 | |
659 if (files == null) { | |
660 log.warn("Cannot read directory '" + sqDir + "'"); | |
661 return; | |
662 } | |
663 | |
664 SQRelationParser parser = new SQRelationParser(); | |
665 | |
666 for (File file: files) { | |
667 parser.parse(file); | |
668 } | |
669 | |
670 sqRelations = parser.getSQRelations(); | |
671 | |
672 log.debug("Parsed " + sqRelations.size() + " SQ relations."); | |
673 } | |
674 | |
675 | |
676 protected void parseBedHeightSingles(File dir) throws IOException { | |
677 log.debug("Parse bed height singles"); | |
678 | |
679 File[] files = dir.listFiles(); | |
680 | |
681 if (files == null) { | |
682 log.warn("Cannot read directory '" + dir + "'"); | |
683 return; | |
684 } | |
685 | |
686 BedHeightSingleParser parser = new BedHeightSingleParser(); | |
687 | |
688 for (File file: files) { | |
689 parser.parse(file); | |
690 } | |
691 | |
692 bedHeightSingles = parser.getBedHeights(); | |
693 } | |
694 | |
695 | |
696 protected void parseBedHeightEpochs(File dir) throws IOException { | |
697 log.debug("Parse bed height epochs"); | |
698 | |
699 File[] files = dir.listFiles(); | |
700 | |
701 if (files == null) { | |
702 log.warn("Cannot read directory '" + dir + "'"); | |
703 return; | |
704 } | |
705 | |
706 BedHeightEpochParser parser = new BedHeightEpochParser(); | |
707 | |
708 for (File file: files) { | |
709 parser.parse(file); | |
710 } | |
711 | |
712 bedHeightEpochs = parser.getBedHeights(); | |
713 } | |
714 | |
715 | |
716 public void parseFloodWater() throws IOException { | |
717 if (Config.INSTANCE.skipFloodWater()) { | |
718 log.info("skip parsing flod water"); | |
719 return; | |
720 } | |
721 | |
722 log.info("Parse flood water wst file"); | |
723 | |
724 File riverDir = wstFile.getParentFile().getParentFile(); | |
725 | |
726 File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); | |
727 | |
728 if (!dir.isDirectory() || !dir.canRead()) { | |
729 log.info("no directory '" + dir + "' found"); | |
730 return; | |
731 } | |
732 | |
733 File [] files = dir.listFiles(); | |
734 | |
735 if (files == null) { | |
736 log.warn("cannot read '" + dir + "'"); | |
737 return; | |
738 } | |
739 | |
740 for (File file: files) { | |
741 if (!file.isFile() || !file.canRead()) { | |
742 continue; | |
743 } | |
744 String name = file.getName().toLowerCase(); | |
745 if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { | |
746 continue; | |
747 } | |
748 log.info("found file '" + file.getName() + "'"); | |
749 WstParser wstParser = new WstParser(); | |
750 wstParser.parse(file); | |
751 ImportWst iw = wstParser.getWst(); | |
752 iw.setKind(4); | |
753 iw.setDescription(FLOOD_WATER + "/" + iw.getDescription()); | |
754 floodWater.add(iw); | |
755 } | |
756 } | |
757 | |
758 public void parseOfficialLines() throws IOException { | |
759 if (Config.INSTANCE.skipOfficialLines()) { | |
760 log.info("skip parsing official lines"); | |
761 return; | |
762 } | |
763 | |
764 log.info("Parse official wst files"); | |
765 | |
766 File riverDir = wstFile.getParentFile().getParentFile(); | |
767 | |
768 for (String folder: OFFICIAL_LINES_FOLDERS) { | |
769 File dir = FileTools.repair(new File(riverDir, folder)); | |
770 | |
771 if (!dir.isDirectory() || !dir.canRead()) { | |
772 log.info("no directory '" + folder + "' found"); | |
773 continue; | |
774 } | |
775 | |
776 File file = FileTools.repair(new File(dir, OFFICIAL_LINES)); | |
777 if (!file.isFile() || !file.canRead()) { | |
778 log.warn("no official lines wst file found"); | |
779 continue; | |
780 } | |
781 log.debug("Found WST file: " + file); | |
782 | |
783 WstParser wstParser = new WstParser(); | |
784 wstParser.parse(file); | |
785 ImportWst iw = wstParser.getWst(); | |
786 iw.setKind(3); | |
787 iw.setDescription(folder + "/" + iw.getDescription()); | |
788 officialLines.add(iw); | |
789 } // for all folders | |
790 | |
791 } | |
792 | |
793 public void parseFixations() throws IOException { | |
794 if (Config.INSTANCE.skipFixations()) { | |
795 log.info("skip parsing fixations"); | |
796 return; | |
797 } | |
798 | |
799 log.info("Parse fixation wst files"); | |
800 | |
801 File riverDir = wstFile.getParentFile().getParentFile(); | |
802 | |
803 File fixDir = FileTools.repair( | |
804 new File(riverDir, FIXATIONS)); | |
805 | |
806 if (!fixDir.isDirectory() || !fixDir.canRead()) { | |
807 log.info("no fixation wst file directory found"); | |
808 return; | |
809 } | |
810 | |
811 File [] files = fixDir.listFiles(); | |
812 | |
813 if (files == null) { | |
814 log.warn("cannot read fixations wst file directory"); | |
815 return; | |
816 } | |
817 | |
818 for (File file: files) { | |
819 if (!file.isFile() || !file.canRead()) { | |
820 continue; | |
821 } | |
822 String name = file.getName().toLowerCase(); | |
823 if (!name.endsWith(".wst")) { | |
824 continue; | |
825 } | |
826 log.debug("Found WST file: " + file); | |
827 | |
828 WstParser wstParser = new WstParser(); | |
829 wstParser.parse(file); | |
830 ImportWst iw = wstParser.getWst(); | |
831 iw.setKind(2); | |
832 iw.setDescription(FIXATIONS+ "/" + iw.getDescription()); | |
833 fixations.add(iw); | |
834 } | |
835 } | |
836 | |
837 public void parseExtraWsts() throws IOException { | |
838 if (Config.INSTANCE.skipExtraWsts()) { | |
839 log.info("skip parsing extra WST files"); | |
840 return; | |
841 } | |
842 | |
843 log.info("Parse extra longitudinal wst files"); | |
844 | |
845 File riverDir = wstFile.getParentFile().getParentFile(); | |
846 | |
847 File extraDir = FileTools.repair( | |
848 new File(riverDir, EXTRA_LONGITUDINALS)); | |
849 | |
850 if (!extraDir.isDirectory() || !extraDir.canRead()) { | |
851 log.info("no extra longitudinal wst file directory found"); | |
852 return; | |
853 } | |
854 | |
855 File [] files = extraDir.listFiles(); | |
856 | |
857 if (files == null) { | |
858 log.warn("cannot read extra longitudinal wst file directory"); | |
859 return; | |
860 } | |
861 | |
862 for (File file: files) { | |
863 if (!file.isFile() || !file.canRead()) { | |
864 continue; | |
865 } | |
866 String name = file.getName().toLowerCase(); | |
867 if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { | |
868 continue; | |
869 } | |
870 log.debug("Found WST file: " + file); | |
871 | |
872 WstParser wstParser = new WstParser(); | |
873 wstParser.parse(file); | |
874 ImportWst iw = wstParser.getWst(); | |
875 iw.setKind(1); | |
876 iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription()); | |
877 extraWsts.add(iw); | |
878 } | |
879 | |
880 } | |
881 | |
882 public void parseWst() throws IOException { | |
883 if (Config.INSTANCE.skipWst()) { | |
884 log.info("skip parsing WST file"); | |
885 return; | |
886 } | |
887 | |
888 WstParser wstParser = new WstParser(); | |
889 wstParser.parse(wstFile); | |
890 wst = wstParser.getWst(); | |
891 } | |
892 | |
893 public void parseGauges() throws IOException { | |
894 if (Config.INSTANCE.skipGauges()) { | |
895 log.info("skip parsing gauges"); | |
896 return; | |
897 } | |
898 | |
899 File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT); | |
900 gltFile = FileTools.repair(gltFile); | |
901 | |
902 if (!gltFile.isFile() || !gltFile.canRead()) { | |
903 log.warn("cannot read gauges from '" + gltFile + "'"); | |
904 return; | |
905 } | |
906 | |
907 PegelGltParser pgltp = new PegelGltParser(); | |
908 pgltp.parse(gltFile); | |
909 | |
910 gauges = pgltp.getGauges(); | |
911 | |
912 for (ImportGauge gauge: gauges) { | |
913 gauge.parseDependencies(); | |
914 } | |
915 } | |
916 | |
917 public void parseAnnotations() throws IOException { | |
918 if (Config.INSTANCE.skipAnnotations()) { | |
919 log.info("skip parsing annotations"); | |
920 return; | |
921 } | |
922 | |
923 File riverDir = wstFile.getParentFile().getParentFile(); | |
924 AnnotationsParser aparser = | |
925 new AnnotationsParser(annotationClassifier); | |
926 aparser.parse(riverDir); | |
927 | |
928 annotations = aparser.getAnnotations(); | |
929 } | |
930 | |
931 public void parseHYKs() { | |
932 if (Config.INSTANCE.skipHYKs()) { | |
933 log.info("skip parsing HYK files"); | |
934 return; | |
935 } | |
936 | |
937 log.info("looking for HYK files"); | |
938 HYKParser parser = new HYKParser(); | |
939 File riverDir = wstFile | |
940 .getParentFile() // Basisdaten | |
941 .getParentFile() // Hydrologie | |
942 .getParentFile(); // <river> | |
943 | |
944 parser.parseHYKs(riverDir, new HYKParser.Callback() { | |
945 | |
946 Set<HashedFile> hfs = new HashSet<HashedFile>(); | |
947 | |
948 @Override | |
949 public boolean hykAccept(File file) { | |
950 HashedFile hf = new HashedFile(file); | |
951 boolean success = hfs.add(hf); | |
952 if (!success) { | |
953 log.warn("HYK file '" + file + "' seems to be a duplicate."); | |
954 } | |
955 return success; | |
956 } | |
957 | |
958 @Override | |
959 public void hykParsed(HYKParser parser) { | |
960 log.debug("callback from HYK parser"); | |
961 ImportHYK hyk = parser.getHYK(); | |
962 hyk.setRiver(ImportRiver.this); | |
963 hyks.add(hyk); | |
964 } | |
965 }); | |
966 } | |
967 | |
968 | |
969 /** Add cross sections with description, years and lines to | |
970 * store. */ | |
971 private void addCrossSections(CrossSectionParser parser) { | |
972 String description = parser.getDescription(); | |
973 Integer year = parser.getYear(); | |
974 ImportTimeInterval ti = year != null | |
975 ? new ImportTimeInterval(yearToDate(year)) | |
976 : null; | |
977 | |
978 Map<Double, List<XY>> data = parser.getData(); | |
979 | |
980 List<ImportCrossSectionLine> lines = | |
981 new ArrayList<ImportCrossSectionLine>(data.size()); | |
982 | |
983 for (Map.Entry<Double, List<XY>> entry: data.entrySet()) { | |
984 Double km = entry.getKey(); | |
985 List<XY> points = entry.getValue(); | |
986 lines.add(new ImportCrossSectionLine(km, points)); | |
987 } | |
988 | |
989 crossSections.add(new ImportCrossSection( | |
990 ImportRiver.this, description, ti, lines)); | |
991 } | |
992 | |
993 /** Create a W80 Parser and parse w80 files found. */ | |
994 public void parseW80s() { | |
995 if (Config.INSTANCE.skipW80s()) { | |
996 log.info("skip parsing W80s"); | |
997 return; | |
998 } | |
999 W80Parser parser = new W80Parser(); | |
1000 File riverDir = wstFile | |
1001 .getParentFile() // Basisdaten | |
1002 .getParentFile() // Hydrologie | |
1003 .getParentFile(); // <river> | |
1004 | |
1005 ImportRiverCrossSectionParserCallback w80Callback = | |
1006 new ImportRiverCrossSectionParserCallback("w80"); | |
1007 parser.parseW80s(riverDir, w80Callback); | |
1008 } | |
1009 | |
1010 /** Create a W80 Parser and parse w80 files found. */ | |
1011 public void parseW80CSVs() { | |
1012 if (Config.INSTANCE.skipW80CSVs()) { | |
1013 log.info("skip parsing W80 csvs"); | |
1014 return; | |
1015 } | |
1016 W80CSVParser parser = new W80CSVParser(); | |
1017 File riverDir = wstFile | |
1018 .getParentFile() // Basisdaten | |
1019 .getParentFile() // Hydrologie | |
1020 .getParentFile(); // <river> | |
1021 | |
1022 // Construct the Cross-Section-Data path. | |
1023 File csDir = new File(riverDir.getPath() | |
1024 + File.separator + "Geodaesie" | |
1025 + File.separator + "Querprofile" | |
1026 + File.separator + "QP-Daten"); | |
1027 | |
1028 ImportRiverCrossSectionParserCallback w80CSVCallback = | |
1029 new ImportRiverCrossSectionParserCallback("w80-csv"); | |
1030 parser.parseW80CSVs(csDir, w80CSVCallback); | |
1031 } | |
1032 | |
1033 | |
    /**
     * Create and use a DA50Parser, parse the files found, add the
     * cross-sections found.
     */
    public void parseDA50s() {
        if (Config.INSTANCE.skipDA50s()) {
            log.info("skip parsing DA50s");
            return;
        }
        DA50Parser parser = new DA50Parser();
        // The wst file lives in <river>/Hydrologie/Basisdaten,
        // so the river directory is three levels up.
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        // The callback feeds parsed cross sections back into this river.
        ImportRiverCrossSectionParserCallback da50Callback =
            new ImportRiverCrossSectionParserCallback("da50");
        parser.parseDA50s(riverDir, da50Callback);
    }
1053 | |
1054 | |
1055 /** Create a DA66 Parser and parse the da66 files found. */ | |
1056 // TODO this is a copy of parsePRFs, extract interfaces (e.g. CrossSectionParser). | |
1057 public void parseDA66s() { | |
1058 if (Config.INSTANCE.skipDA66s()) { | |
1059 log.info("skip parsing DA66s"); | |
1060 return; | |
1061 } | |
1062 | |
1063 log.info("looking for DA66 files"); | |
1064 DA66Parser parser = new DA66Parser(); | |
1065 File riverDir = wstFile | |
1066 .getParentFile() // Basisdaten | |
1067 .getParentFile() // Hydrologie | |
1068 .getParentFile(); // <river> | |
1069 | |
1070 ImportRiverCrossSectionParserCallback da66Callback = | |
1071 new ImportRiverCrossSectionParserCallback("da66"); | |
1072 parser.parseDA66s(riverDir, da66Callback); | |
1073 } | |
1074 | |
1075 /** Create a PRFParser and let it parse the prf files found. */ | |
1076 public void parsePRFs() { | |
1077 if (Config.INSTANCE.skipPRFs()) { | |
1078 log.info("skip parsing PRFs"); | |
1079 return; | |
1080 } | |
1081 | |
1082 log.info("looking for PRF files"); | |
1083 PRFParser parser = new PRFParser(); | |
1084 File riverDir = wstFile | |
1085 .getParentFile() // Basisdaten | |
1086 .getParentFile() // Hydrologie | |
1087 .getParentFile(); // <river> | |
1088 | |
1089 ImportRiverCrossSectionParserCallback prfCallback = | |
1090 new ImportRiverCrossSectionParserCallback("prf"); | |
1091 parser.parsePRFs(riverDir, prfCallback); | |
1092 } | |
1093 | |
1094 public static Date yearToDate(int year) { | |
1095 Calendar cal = Calendar.getInstance(); | |
1096 cal.set(year, 5, 15, 12, 0, 0); | |
1097 long ms = cal.getTimeInMillis(); | |
1098 cal.setTimeInMillis(ms - ms%1000); | |
1099 return cal.getTime(); | |
1100 } | |
1101 | |
    /**
     * Store the river and everything imported for it.
     * The order matters: storeWstUnit() must run first because it sets
     * wstUnit, which getPeer() reads when it creates the river record;
     * the later store methods all call getPeer().
     */
    public void storeDependencies() {
        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeBedHeight();
        storeSedimentDensity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentYield();
        storeWaterlevels();
        storeWaterlevelDifferences();
        storeMeasurementStations();
        storeSQRelations();
        storeOfficialNumber();
    }
1125 | |
1126 public void storeWstUnit() { | |
1127 if (wst == null) { | |
1128 wstUnit = new ImportUnit("NN + m"); | |
1129 } | |
1130 else { | |
1131 wstUnit = wst.getUnit(); | |
1132 } | |
1133 } | |
1134 | |
1135 public void storeHYKs() { | |
1136 if (!Config.INSTANCE.skipHYKs()) { | |
1137 log.info("store HYKs"); | |
1138 getPeer(); | |
1139 for (ImportHYK hyk: hyks) { | |
1140 hyk.storeDependencies(); | |
1141 } | |
1142 } | |
1143 } | |
1144 | |
1145 public void storeCrossSections() { | |
1146 if (!Config.INSTANCE.skipPRFs() | |
1147 || !Config.INSTANCE.skipDA66s() | |
1148 || !Config.INSTANCE.skipDA50s() | |
1149 || !Config.INSTANCE.skipW80s() | |
1150 || !Config.INSTANCE.skipW80CSVs()) { | |
1151 log.info("store cross sections"); | |
1152 getPeer(); | |
1153 for (ImportCrossSection crossSection: crossSections) { | |
1154 crossSection.storeDependencies(); | |
1155 } | |
1156 } | |
1157 } | |
1158 | |
1159 public void storeWst() { | |
1160 if (!Config.INSTANCE.skipWst()) { | |
1161 River river = getPeer(); | |
1162 wst.storeDependencies(river); | |
1163 } | |
1164 } | |
1165 | |
1166 public void storeFixations() { | |
1167 if (!Config.INSTANCE.skipFixations()) { | |
1168 log.info("store fixation wsts"); | |
1169 River river = getPeer(); | |
1170 for (ImportWst fWst: fixations) { | |
1171 log.debug("Fixation name: " + fWst.getDescription()); | |
1172 fWst.storeDependencies(river); | |
1173 } | |
1174 } | |
1175 } | |
1176 | |
1177 | |
1178 /** Store wsts from waterlevel-csv files. */ | |
1179 public void storeWaterlevels() { | |
1180 if (!Config.INSTANCE.skipWaterlevels()) | |
1181 | |
1182 log.info("store waterlevel wsts from csv"); | |
1183 River river = getPeer(); | |
1184 for (ImportWst wWst: waterlevels) { | |
1185 log.debug("Waterlevel name: " + wWst.getDescription()); | |
1186 wWst.storeDependencies(river); | |
1187 } | |
1188 } | |
1189 | |
1190 | |
1191 /** Store wsts from waterleveldifference-csv files. */ | |
1192 public void storeWaterlevelDifferences() { | |
1193 if (!Config.INSTANCE.skipWaterlevelDifferences()) | |
1194 | |
1195 log.info("store waterleveldifferences wsts from csv"); | |
1196 River river = getPeer(); | |
1197 for (ImportWst dWst: waterlevelDifferences) { | |
1198 log.debug("water.diff.: name " + dWst.getDescription()); | |
1199 dWst.storeDependencies(river); | |
1200 } | |
1201 } | |
1202 | |
1203 | |
1204 public void storeExtraWsts() { | |
1205 if (!Config.INSTANCE.skipExtraWsts()) { | |
1206 log.info("store extra wsts"); | |
1207 River river = getPeer(); | |
1208 for (ImportWst wst: extraWsts) { | |
1209 log.debug("name: " + wst.getDescription()); | |
1210 wst.storeDependencies(river); | |
1211 } | |
1212 } | |
1213 } | |
1214 | |
1215 public void storeOfficialLines() { | |
1216 if (!Config.INSTANCE.skipOfficialLines()) { | |
1217 log.info("store official lines wsts"); | |
1218 River river = getPeer(); | |
1219 for (ImportWst wst: officialLines) { | |
1220 log.debug("name: " + wst.getDescription()); | |
1221 wst.storeDependencies(river); | |
1222 } | |
1223 } | |
1224 } | |
1225 | |
1226 public void storeFloodWater() { | |
1227 if (!Config.INSTANCE.skipFloodWater()) { | |
1228 log.info("store flood water wsts"); | |
1229 River river = getPeer(); | |
1230 for (ImportWst wst: floodWater) { | |
1231 log.debug("name: " + wst.getDescription()); | |
1232 wst.storeDependencies(river); | |
1233 } | |
1234 } | |
1235 } | |
1236 | |
1237 | |
1238 public void storeFloodProtection() { | |
1239 if (!Config.INSTANCE.skipFloodProtection()) { | |
1240 log.info("store flood protection wsts"); | |
1241 River river = getPeer(); | |
1242 for (ImportWst wst: floodProtection) { | |
1243 log.debug("name: " + wst.getDescription()); | |
1244 wst.storeDependencies(river); | |
1245 } | |
1246 } | |
1247 } | |
1248 | |
1249 | |
    /**
     * Store bed heights: singles and epochs are stored independently,
     * each honoring its own skip flag.
     */
    public void storeBedHeight() {
        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("store bed heights single");
            storeBedHeightSingle();
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("store bed height epoch.");
            storeBedHeightEpoch();
        }
    }
1261 | |
1262 | |
1263 private void storeBedHeightSingle() { | |
1264 River river = getPeer(); | |
1265 | |
1266 if (bedHeightSingles != null) { | |
1267 for (ImportBedHeight tmp: bedHeightSingles) { | |
1268 ImportBedHeightSingle single = (ImportBedHeightSingle) tmp; | |
1269 | |
1270 String desc = single.getDescription(); | |
1271 | |
1272 log.debug("name: " + desc); | |
1273 | |
1274 single.storeDependencies(river); | |
1275 | |
1276 } | |
1277 } | |
1278 else { | |
1279 log.info("No single bed heights to store."); | |
1280 } | |
1281 } | |
1282 | |
1283 | |
1284 private void storeBedHeightEpoch() { | |
1285 River river = getPeer(); | |
1286 | |
1287 if (bedHeightEpochs != null) { | |
1288 for (ImportBedHeight tmp: bedHeightEpochs) { | |
1289 ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp; | |
1290 | |
1291 String desc = epoch.getDescription(); | |
1292 | |
1293 log.debug("name: " + desc); | |
1294 | |
1295 epoch.storeDependencies(river); | |
1296 | |
1297 } | |
1298 } | |
1299 else { | |
1300 log.info("No epoch bed heights to store."); | |
1301 } | |
1302 } | |
1303 | |
1304 public void storeSedimentDensity() { | |
1305 if (!Config.INSTANCE.skipSedimentDensity()) { | |
1306 log.info("store sediment density"); | |
1307 | |
1308 River river = getPeer(); | |
1309 | |
1310 for (ImportSedimentDensity density: sedimentDensities) { | |
1311 String desc = density.getDescription(); | |
1312 | |
1313 log.debug("name: " + desc); | |
1314 | |
1315 density.storeDependencies(river); | |
1316 | |
1317 } | |
1318 } | |
1319 } | |
1320 | |
1321 public void storeMorphologicalWidth() { | |
1322 if (!Config.INSTANCE.skipMorphologicalWidth()) { | |
1323 log.info("store morphological width"); | |
1324 | |
1325 River river = getPeer(); | |
1326 | |
1327 for (ImportMorphWidth width: morphologicalWidths) { | |
1328 | |
1329 width.storeDependencies(river); | |
1330 | |
1331 } | |
1332 } | |
1333 } | |
1334 | |
1335 public void storeFlowVelocity() { | |
1336 if (!Config.INSTANCE.skipFlowVelocity()) { | |
1337 log.info("store flow velocity"); | |
1338 | |
1339 River river = getPeer(); | |
1340 | |
1341 for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){ | |
1342 | |
1343 flowVelocityModel.storeDependencies(river); | |
1344 | |
1345 } | |
1346 | |
1347 for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) { | |
1348 | |
1349 m.storeDependencies(river); | |
1350 | |
1351 } | |
1352 } | |
1353 } | |
1354 | |
1355 | |
1356 public void storeSedimentYield() { | |
1357 if (!Config.INSTANCE.skipSedimentYield()) { | |
1358 log.info("store sediment yield data"); | |
1359 | |
1360 River river = getPeer(); | |
1361 | |
1362 for (ImportSedimentYield sedimentYield: sedimentYields) { | |
1363 | |
1364 sedimentYield.storeDependencies(river); | |
1365 | |
1366 } | |
1367 } | |
1368 } | |
1369 | |
1370 | |
1371 public void storeMeasurementStations() { | |
1372 if (!Config.INSTANCE.skipMeasurementStations()) { | |
1373 log.info("store measurement stations"); | |
1374 | |
1375 River river = getPeer(); | |
1376 | |
1377 int count = 0; | |
1378 | |
1379 for (ImportMeasurementStation station: measurementStations) { | |
1380 | |
1381 boolean success = station.storeDependencies(river); | |
1382 if (success) { | |
1383 count++; | |
1384 } | |
1385 | |
1386 } | |
1387 | |
1388 log.info("stored " + count + " measurement stations."); | |
1389 } | |
1390 } | |
1391 | |
1392 | |
1393 public void storeSQRelations() { | |
1394 if (!Config.INSTANCE.skipSQRelation()) { | |
1395 log.info("store sq relations"); | |
1396 | |
1397 River river = getPeer(); | |
1398 | |
1399 int count = 0; | |
1400 | |
1401 for (ImportSQRelation sqRelation: sqRelations) { | |
1402 | |
1403 sqRelation.storeDependencies(river); | |
1404 count++; | |
1405 | |
1406 } | |
1407 | |
1408 log.info("stored " + count + " sq relations."); | |
1409 } | |
1410 } | |
1411 | |
1412 | |
1413 public void storeAnnotations() { | |
1414 if (!Config.INSTANCE.skipAnnotations()) { | |
1415 River river = getPeer(); | |
1416 for (ImportAnnotation annotation: annotations) { | |
1417 annotation.getPeer(river); | |
1418 } | |
1419 } | |
1420 } | |
1421 | |
1422 public void storeGauges() { | |
1423 if (!Config.INSTANCE.skipGauges()) { | |
1424 log.info("store gauges:"); | |
1425 River river = getPeer(); | |
1426 Session session = ImporterSession.getInstance() | |
1427 .getDatabaseSession(); | |
1428 for (ImportGauge gauge: gauges) { | |
1429 log.info("\tgauge: " + gauge.getName()); | |
1430 gauge.storeDependencies(river); | |
1431 ImporterSession.getInstance().getDatabaseSession(); | |
1432 session.flush(); | |
1433 } | |
1434 } | |
1435 } | |
1436 | |
    /**
     * Lazily fetch or create the database peer of this river.
     * On first call, looks the river up by name; if absent, a new
     * River is created (with the unit resolved by storeWstUnit(), if
     * any, and the official number unless BWASTR import is skipped)
     * and saved. Subsequent calls return the cached peer.
     */
    public River getPeer() {
        if (peer == null) {
            Session session = ImporterSession.getInstance().getDatabaseSession();
            Query query = session.createQuery("from River where name=:name");

            // Resolve the unit peer only if storeWstUnit() set one.
            Unit u = null;
            if (wstUnit != null) {
                u = wstUnit.getPeer();
            }

            query.setString("name", name);
            List<River> rivers = query.list();
            if (rivers.isEmpty()) {
                log.info("Store new river '" + name + "'");
                peer = new River(name, u);
                if (!Config.INSTANCE.skipBWASTR()) {
                    peer.setOfficialNumber(officialNumber);
                }
                session.save(peer);
            }
            else {
                // River already exists — reuse it.
                peer = rivers.get(0);
            }
        }
        return peer;
    }
1463 } | |
1464 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 : |