comparison backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java @ 7730:e1b831fe435a slt-simplify-cross-sections

Merged default into slt-simplify-cross-sections branch and updated package and class names.
author Tom Gottfried <tom@intevation.de>
date Mon, 20 Jan 2014 14:04:20 +0100
parents flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java@ca45dd039b54 flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java@d93bb557a94f
children 46273d890da5
comparison
equal deleted inserted replaced
5084:ca45dd039b54 7730:e1b831fe435a
1 /* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
2 * Software engineering by Intevation GmbH
3 *
4 * This file is Free Software under the GNU AGPL (>=v3)
5 * and comes with ABSOLUTELY NO WARRANTY! Check out the
6 * documentation coming with Dive4Elements River for details.
7 */
8
9 package org.dive4elements.river.importer;
10
11 import org.dive4elements.artifacts.common.utils.FileTools.HashedFile;
12
13 import org.dive4elements.artifacts.common.utils.FileTools;
14
15 import org.dive4elements.river.importer.parsers.AnnotationClassifier;
16 import org.dive4elements.river.importer.parsers.AnnotationsParser;
17 import org.dive4elements.river.importer.parsers.BedHeightEpochParser;
18 import org.dive4elements.river.importer.parsers.BedHeightSingleParser;
19 import org.dive4elements.river.importer.parsers.CrossSectionParser;
20 import org.dive4elements.river.importer.parsers.DA50Parser;
21 import org.dive4elements.river.importer.parsers.DA66Parser;
22 import org.dive4elements.river.importer.parsers.FlowVelocityMeasurementParser;
23 import org.dive4elements.river.importer.parsers.FlowVelocityModelParser;
24 import org.dive4elements.river.importer.parsers.HYKParser;
25 import org.dive4elements.river.importer.parsers.MeasurementStationsParser;
26 import org.dive4elements.river.importer.parsers.MorphologicalWidthParser;
27 import org.dive4elements.river.importer.parsers.OfficialLinesConfigParser;
28 import org.dive4elements.river.importer.parsers.PRFParser;
29 import org.dive4elements.river.importer.parsers.PegelGltParser;
30 import org.dive4elements.river.importer.parsers.SQRelationParser;
31 import org.dive4elements.river.importer.parsers.SedimentDensityParser;
32 import org.dive4elements.river.importer.parsers.SedimentYieldParser;
33 import org.dive4elements.river.importer.parsers.W80Parser;
34 import org.dive4elements.river.importer.parsers.W80CSVParser;
35 import org.dive4elements.river.importer.parsers.WaterlevelDifferencesParser;
36 import org.dive4elements.river.importer.parsers.WaterlevelParser;
37 import org.dive4elements.river.importer.parsers.WstParser;
38
39 import org.dive4elements.river.model.River;
40 import org.dive4elements.river.model.Unit;
41
42 import org.dive4elements.river.utils.DouglasPeuker;
43
44 import java.io.File;
45 import java.io.IOException;
46
47 import java.sql.SQLException;
48
49 import java.util.ArrayList;
50 import java.util.Calendar;
51 import java.util.Date;
52 import java.util.HashSet;
53 import java.util.List;
54 import java.util.Map;
55 import java.util.Set;
56 import java.util.Iterator;
57
58 import org.apache.log4j.Logger;
59
60 import org.hibernate.Query;
61 import org.hibernate.Session;
62
63 import org.hibernate.exception.ConstraintViolationException;
64
65
66 /** Import all river-related data (files) that can be found. */
67 public class ImportRiver
68 {
/** Private logger. */
private static Logger log = Logger.getLogger(ImportRiver.class);

// --- Well-known file/directory names inside a river's import tree. ---
// These are matched literally (FileTools.repair is used at the call
// sites to compensate for case differences on the file system).

/** Gauge overview file expected next to the river's WST file. */
public static final String PEGEL_GLT = "PEGEL.GLT";

/** Directory containing fixation WST files. */
public static final String FIXATIONS = "Fixierungen";

/** Directory containing extra longitudinal section WST files. */
public static final String EXTRA_LONGITUDINALS =
    "Zus.L\u00e4ngsschnitte";

/** Folders searched (in order) for the official lines WST file. */
public static final String [] OFFICIAL_LINES_FOLDERS = {
    "Basisdaten",
    "Fixierungen" };

/** Official lines WST file name. */
public static final String OFFICIAL_LINES =
    "Amtl_Linien.wst";

/** Config file mapping official line columns to main value names. */
public static final String OFFICIAL_LINES_CONFIG =
    "Amtl_Linien.config";

/** Directory containing flood water (high water marks) WST files. */
public static final String FLOOD_WATER = "HW-Marken";

/** Directory containing flood protection WST files. */
public static final String FLOOD_PROTECTION =
    "HW-Schutzanlagen";

// --- MINFO (morphology) subtree. ---

public static final String MINFO_DIR = "Morphologie";

public static final String BED_HEIGHT_DIR = "Sohlhoehen";

public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";

public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";

public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";

public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";

public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";

public static final String SEDIMENT_YIELD_DIR = "Fracht";

public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";

public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";

public static final String SEDIMENT_YIELD_OFF_EPOCH_DIR = "amtliche Epochen";

public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";

public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";

public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";

public static final String MINFO_BASE_DIR = "Basisdaten";

public static final String MINFO_CORE_DATA_FILE = "Stammdaten_Messstellen.csv";

public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung";

// --- Parse results, filled by the parse*() methods below. ---

// River name (as used for lookup/creation of the database peer).
protected String name;

// Official waterway number (BWaStr number); may stay null.
protected Long officialNumber;

// The river's main WST file; most paths are resolved relative to it.
protected File wstFile;

protected File bbInfoFile;

protected List<ImportGauge> gauges;

protected List<ImportAnnotation> annotations;

protected List<ImportHYK> hyks;

protected List<ImportCrossSection> crossSections;

protected List<ImportWst> extraWsts;

protected List<ImportWst> fixations;

protected List<ImportWst> officialLines;

protected List<ImportWst> floodWater;

protected List<ImportWst> floodProtection;

/** Wst-structures from waterlevel-csv files. */
protected List<ImportWst> waterlevels;

/** Wst-structures from waterlevel-difference-csv files. */
protected List<ImportWst> waterlevelDifferences;

protected List<ImportBedHeight> bedHeightSingles;

protected List<ImportBedHeight> bedHeightEpochs;

protected List<ImportSedimentDensity> sedimentDensities;

protected List<ImportMorphWidth> morphologicalWidths;

protected List<ImportFlowVelocityModel> flowVelocityModels;

protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;

protected List<ImportSedimentYield> sedimentYields;

protected List<ImportMeasurementStation> measurementStations;

protected List<ImportSQRelation> sqRelations;

// The river's main WST.
protected ImportWst wst;

protected ImportUnit wstUnit;

// Optional classifier used while parsing annotations.
protected AnnotationClassifier annotationClassifier;

/** Database-mapped River instance. */
protected River peer;
190
191
/** Callback-implementation for CrossSectionParsers. */
private class ImportRiverCrossSectionParserCallback
implements CrossSectionParser.Callback {

    // Files seen so far; used for duplicate detection. HashedFile
    // equality presumably compares file content hashes — confirm in
    // FileTools.HashedFile.
    private Set<HashedFile> files = new HashSet<HashedFile>();
    // Parser type label ("prf", "da66", "w80", ...), logging only.
    private String type;

    /**
     * Create new Callback, given type which is used for logging
     * purposes only.
     */
    public ImportRiverCrossSectionParserCallback(String type) {
        this.type = type;
    }


    /** Accept file if not duplicate. */
    @Override
    public boolean accept(File file) {
        HashedFile hf = new HashedFile(file);
        boolean success = files.add(hf);
        if (!success) {
            log.warn(type + " file '" + file + "' seems to be a duplicate.");
        }
        return success;
    }


    /** Add crosssection. */
    @Override
    public void parsed(CrossSectionParser parser) {
        log.debug("callback from " + type + " parser");

        String description = parser.getDescription();
        Integer year = parser.getYear();
        // Only build a time interval if the parser extracted a year.
        ImportTimeInterval ti = year != null
            ? new ImportTimeInterval(yearToDate(year))
            : null;

        // Parsed points, keyed by river km.
        Map<Double, List<XY>> data = parser.getData();

        List<ImportCrossSectionLine> lines =
            new ArrayList<ImportCrossSectionLine>(data.size());

        // If configured, cross section lines are thinned out with the
        // Douglas-Peucker algorithm using this epsilon; null disables it.
        Double simplificationEpsilon =
            Config.INSTANCE.getCrossSectionSimplificationEpsilon();

        long numReadPoints = 0L;
        long numRemainingPoints = 0L;

        for (Map.Entry<Double, List<XY>> entry: data.entrySet()) {
            Double km = entry.getKey();
            List<XY> points = entry.getValue();
            numReadPoints += points.size();
            if (simplificationEpsilon != null) {
                points = DouglasPeuker.simplify(points, simplificationEpsilon);
            }
            numRemainingPoints += points.size();
            lines.add(new ImportCrossSectionLine(km, points));
        }

        ImportRiver.this.addCrossSections(description, ti, lines);

        // Share of points that survived simplification.
        double percent = numReadPoints > 0L
            ? ((double)numRemainingPoints/numReadPoints)*100d
            : 0d;

        // NOTE(review): percent is remaining/read, but the message
        // lists read first ("read / remaining") — consider swapping
        // the arguments so the ratio reads naturally; confirm intent.
        log.info(String.format(
            "Number of points in cross section: %d / %d (%.2f%%)",
            numReadPoints, numRemainingPoints, percent));
    }
} // ImportRiverCrossSectionParserCallback
264
265
266 private void addCrossSections(
267 String description,
268 ImportTimeInterval ti,
269 List<ImportCrossSectionLine> lines
270 ) {
271 crossSections.add(new ImportCrossSection(this, description, ti, lines));
272 }
273
274
/** Creates an empty ImportRiver with all result collections initialized. */
public ImportRiver() {
    hyks = new ArrayList<ImportHYK>();
    crossSections = new ArrayList<ImportCrossSection>();
    extraWsts = new ArrayList<ImportWst>();
    fixations = new ArrayList<ImportWst>();
    officialLines = new ArrayList<ImportWst>();
    floodWater = new ArrayList<ImportWst>();
    waterlevels = new ArrayList<ImportWst>();
    waterlevelDifferences = new ArrayList<ImportWst>();
    floodProtection = new ArrayList<ImportWst>();
    sedimentDensities = new ArrayList<ImportSedimentDensity>();
    morphologicalWidths = new ArrayList<ImportMorphWidth>();
    flowVelocityModels = new ArrayList<ImportFlowVelocityModel>();
    flowVelocityMeasurements = new ArrayList<ImportFlowVelocityMeasurement>();
    sedimentYields = new ArrayList<ImportSedimentYield>();
    measurementStations = new ArrayList<ImportMeasurementStation>();
    sqRelations = new ArrayList<ImportSQRelation>();
}
293
/**
 * Creates an ImportRiver ready for parsing.
 *
 * @param name                 river name.
 * @param wstFile              the river's main WST file; other input
 *                             paths are resolved relative to it.
 * @param bbInfoFile           BB info file.
 * @param annotationClassifier classifier used when parsing annotations.
 */
public ImportRiver(
    String name,
    File wstFile,
    File bbInfoFile,
    AnnotationClassifier annotationClassifier
) {
    this();
    this.name = name;
    this.wstFile = wstFile;
    this.bbInfoFile = bbInfoFile;
    this.annotationClassifier = annotationClassifier;
}
306
/** Returns the river name. */
public String getName() {
    return name;
}

public void setName(String name) {
    this.name = name;
}

/** Returns the official waterway number; may be null. */
public Long getOfficialNumber() {
    return this.officialNumber;
}

public void setOfficialNumber(Long officialNumber) {
    this.officialNumber = officialNumber;
}

/** Returns the river's main WST file. */
public File getWstFile() {
    return wstFile;
}

public void setWstFile(File wstFile) {
    this.wstFile = wstFile;
}

/** Returns the BB info file. */
public File getBBInfo() {
    return bbInfoFile;
}

public void setBBInfo(File bbInfoFile) {
    this.bbInfoFile = bbInfoFile;
}

/** Returns the parsed main WST; null until parseWst() succeeded. */
public ImportWst getWst() {
    return wst;
}

public void setWst(ImportWst wst) {
    this.wst = wst;
}
346
347 public File getMinfoDir() {
348 File riverDir = wstFile.getParentFile().getParentFile().getParentFile();
349 return new File(riverDir, MINFO_DIR);
350 }
351
/**
 * Runs all individual parsers for this river's input files.
 * Each step can be skipped individually via Config; the invocation
 * order below is preserved as-is (several steps resolve their input
 * paths relative to the WST file set at construction).
 *
 * @throws IOException propagated from the individual parsers.
 */
public void parseDependencies() throws IOException {
    parseGauges();
    parseAnnotations();
    parsePRFs();
    parseDA66s();
    parseDA50s();
    parseW80s();
    parseW80CSVs();
    parseHYKs();
    parseWst();
    parseExtraWsts();
    parseFixations();
    parseOfficialLines();
    parseFloodWater();
    parseFloodProtection();
    parseBedHeight();
    parseSedimentDensity();
    parseMorphologicalWidth();
    parseFlowVelocity();
    parseSedimentYield();
    parseWaterlevels();
    parseWaterlevelDifferences();
    parseMeasurementStations();
    parseSQRelation();
}
377
378 public void parseFloodProtection() throws IOException {
379 if (Config.INSTANCE.skipFloodProtection()) {
380 log.info("skip parsing flood protection");
381 return;
382 }
383
384 log.info("Parse flood protection wst file");
385
386 File riverDir = wstFile.getParentFile().getParentFile();
387
388 File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));
389
390 if (!dir.isDirectory() || !dir.canRead()) {
391 log.info("no directory '" + dir + "' found");
392 return;
393 }
394
395 File [] files = dir.listFiles();
396
397 if (files == null) {
398 log.warn("cannot read '" + dir + "'");
399 return;
400 }
401
402 for (File file: files) {
403 if (!file.isFile() || !file.canRead()) {
404 continue;
405 }
406 String name = file.getName().toLowerCase();
407 if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
408 continue;
409 }
410 log.info("found file '" + file.getName() + "'");
411 try {
412 WstParser wstParser = new WstParser();
413 wstParser.parse(file);
414 ImportWst iw = wstParser.getWst();
415 iw.setKind(5);
416 iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription());
417 floodProtection.add(iw);
418 }
419 catch (WstParser.ParseException e) {
420 log.error(e.getMessage());
421 }
422 }
423 }
424
425 public void storeOfficialNumber() {
426 if (Config.INSTANCE.skipBWASTR()) {
427 log.info("skip storing official number.");
428 return;
429 }
430 getPeer().setOfficialNumber(officialNumber);
431 }
432
433 public void parseBedHeight() throws IOException {
434 File minfoDir = getMinfoDir();
435 File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
436 File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
437 File epochDir = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR);
438
439 if (Config.INSTANCE.skipBedHeightSingle()) {
440 log.info("skip parsing bed height single.");
441 }
442 else {
443 log.info("Parse bed height single.");
444 parseBedHeightSingles(singlesDir);
445 }
446
447 if (Config.INSTANCE.skipBedHeightEpoch()) {
448 log.info("skip parsing bed height epochs.");
449 }
450 else {
451 log.info("Parse bed height epochs.");
452 parseBedHeightEpochs(epochDir);
453 }
454 }
455
456
/**
 * Parses all files in the MINFO sediment density directory into
 * {@code sedimentDensities}. An unreadable directory is logged and
 * the method returns without changing state.
 *
 * @throws IOException propagated from the parser.
 */
protected void parseSedimentDensity() throws IOException {
    if (Config.INSTANCE.skipSedimentDensity()) {
        log.info("skip parsing sediment density.");
        return;
    }

    log.debug("Parse sediment density");

    File minfoDir = getMinfoDir();
    File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);

    File[] files = sediment.listFiles();

    // listFiles() is null if the directory is missing or unreadable.
    if (files == null) {
        log.warn("Cannot read directory '" + sediment + "'");
        return;
    }

    SedimentDensityParser parser = new SedimentDensityParser();

    for (File file: files) {
        parser.parse(file);
    }

    sedimentDensities = parser.getSedimentDensities();

    log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
}
485
486
487 protected void parseMorphologicalWidth() throws IOException {
488 if (Config.INSTANCE.skipMorphologicalWidth()) {
489 log.info("skip parsing morphological width.");
490 return;
491 }
492
493 log.debug("Parse morphological width");
494
495 File minfoDir = getMinfoDir();
496 File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);
497
498 File[] files = morphDir.listFiles();
499
500 if (files == null) {
501 log.warn("Cannot read directory '" + morphDir + "'");
502 return;
503 }
504
505 MorphologicalWidthParser parser = new MorphologicalWidthParser();
506
507 for (File file: files) {
508 parser.parse(file);
509 }
510
511 morphologicalWidths = parser.getMorphologicalWidths();
512
513 log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
514 }
515
516
/**
 * Parses flow velocity model files and measurement files from the
 * MINFO flow velocity subtree. The two groups are independent: an
 * unreadable model directory does not prevent measurements from
 * being parsed, and vice versa.
 *
 * @throws IOException propagated from the parsers.
 */
protected void parseFlowVelocity() throws IOException {
    if (Config.INSTANCE.skipFlowVelocity()) {
        log.info("skip parsing flow velocity");
        return;
    }

    log.debug("Parse flow velocity");

    File minfoDir = getMinfoDir();
    File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR);
    File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL);
    File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);

    File[] modelFiles = modelDir.listFiles();
    File[] measureFiles = measureDir.listFiles();

    if (modelFiles == null) {
        log.warn("Cannot read directory '" + modelDir + "'");
    }
    else {
        FlowVelocityModelParser parser = new FlowVelocityModelParser();

        for (File model: modelFiles) {
            log.debug("Parse file '" + model + "'");
            parser.parse(model);
        }

        flowVelocityModels = parser.getModels();
    }

    if (measureFiles == null) {
        log.warn("Cannot read directory '" + measureDir + "'");
    }
    else {
        FlowVelocityMeasurementParser parser =
            new FlowVelocityMeasurementParser();

        for (File measurement: measureFiles) {
            log.debug("Parse file '" + measurement + "'");
            parser.parse(measurement);
        }

        flowVelocityMeasurements = parser.getMeasurements();
    }
}
562
563
564 private void parseSedimentYieldDir(
565 File[] files,
566 SedimentYieldParser parser
567 ) throws IOException {
568 for (File file: files) {
569 if (file.isDirectory()) {
570 for (File child: file.listFiles()) {
571 parser.parse(child);
572 }
573 }
574 else {
575 parser.parse(file);
576 }
577 }
578 }
579
/**
 * Parses sediment yield data from the three MINFO "Fracht"
 * subdirectories (single years, epochs, official epochs) into one
 * shared parser and stores the combined result in
 * {@code sedimentYields}. Missing or empty directories are logged
 * and skipped; the remaining groups are still parsed.
 *
 * @throws IOException propagated from the parser.
 */
protected void parseSedimentYield() throws IOException {
    if (Config.INSTANCE.skipSedimentYield()) {
        log.info("skip parsing sediment yield data");
        return;
    }

    log.debug("Parse sediment yield data");

    File minfoDir = getMinfoDir();
    File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);

    File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
    File epochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);
    File offEpochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_OFF_EPOCH_DIR);

    File[] singles = singleDir.listFiles();
    File[] epochs = epochDir.listFiles();
    File[] offEpochs = offEpochDir.listFiles();

    SedimentYieldParser parser = new SedimentYieldParser();

    if (singles == null || singles.length == 0) {
        log.warn("Cannot read directory '" + singleDir + "'");
    }
    else {
        parseSedimentYieldDir(singles, parser);
    }

    if (epochs == null || epochs.length == 0) {
        log.warn("Cannot read directory '" + epochDir + "'");
    }
    else {
        parseSedimentYieldDir(epochs, parser);
    }

    if (offEpochs == null || offEpochs.length == 0) {
        log.warn("Cannot read directory '" + offEpochDir + "'");
    }
    else {
        parseSedimentYieldDir(offEpochs, parser);
    }

    sedimentYields = parser.getSedimentYields();
}
624
625
/**
 * Parses waterlevel CSV files from the MINFO fixation analysis
 * subtree and converts them to WST structures of kind 7, prefixed
 * with "CSV/" in their description, collected in {@code waterlevels}.
 *
 * @throws IOException propagated from the parser.
 */
protected void parseWaterlevels() throws IOException {
    if (Config.INSTANCE.skipWaterlevels()) {
        log.info("skip parsing waterlevels");
        return;
    }

    log.info("Parse waterlevels");

    File minfo = getMinfoDir();
    File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
    File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);

    File[] files = wspDir.listFiles();

    // listFiles() is null if the directory is missing or unreadable.
    if (files == null) {
        log.warn("Cannot read directory for wl '" + wspDir + "'");
        return;
    }

    WaterlevelParser parser = new WaterlevelParser();

    for (File file: files) {
        parser.parse(file);
    }

    // The parsed ImportWaterlevels are converted to
    // 'fixation'-wsts now.
    for(ImportWst iw: parser.getWaterlevels()) {
        iw.setDescription("CSV/" + iw.getDescription());
        iw.setKind(7);
        waterlevels.add(iw);
    }
}
659
660 protected void parseMeasurementStations() throws IOException {
661 if (Config.INSTANCE.skipMeasurementStations()) {
662 log.info("skip parsing measurement stations");
663 return;
664 }
665
666 log.info("Parse measurement stations");
667
668 File minfo = getMinfoDir();
669 File minfoBaseDir = new File(minfo, MINFO_BASE_DIR);
670 File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE);
671
672 if (coredataFile == null || !coredataFile.exists()) {
673 log.warn("No core data file '" + coredataFile.getAbsolutePath() + "' found");
674 return;
675 }
676
677 MeasurementStationsParser parser = new MeasurementStationsParser();
678 try {
679 parser.parse(coredataFile);
680 measurementStations = parser.getMeasurementStations();
681
682 log.info("Successfully parsed " + measurementStations.size() + " measurement stations.");
683 }
684 catch (IOException ioe) {
685 log.error("unable to parse file '" + coredataFile.getName() +
686 ": " + ioe.getMessage());
687 }
688 }
689
690
/**
 * Parses waterlevel difference CSV files from the MINFO fixation
 * analysis subtree and converts them to WST structures of kind 6,
 * prefixed with "CSV/", collected in {@code waterlevelDifferences}.
 *
 * @throws IOException propagated from the parser.
 */
protected void parseWaterlevelDifferences() throws IOException {
    if (Config.INSTANCE.skipWaterlevelDifferences()) {
        log.info("skip parsing waterlevel differences");
        return;
    }

    log.info("Parse waterlevel differences");

    File minfo = getMinfoDir();
    File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
    File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);

    File[] files = diffDir.listFiles();

    // listFiles() is null if the directory is missing or unreadable.
    if (files == null) {
        log.warn("Cannot read directory '" + diffDir + "'");
        return;
    }

    WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();

    for (File file: files) {
        parser.parse(file);
    }

    // WaterlevelDifferences become Wsts now.
    for(ImportWst iw: parser.getDifferences()) {
        iw.setDescription("CSV/" + iw.getDescription());
        iw.setKind(6);
        waterlevelDifferences.add(iw);
    }
}
723
724
725 protected void parseSQRelation() throws IOException {
726 if (Config.INSTANCE.skipSQRelation()) {
727 log.info("skip parsing sq relation");
728 return;
729 }
730
731 log.info("Parse sq relations");
732
733 File minfo = getMinfoDir();
734 File sqDir = new File(minfo, MINFO_SQ_DIR);
735
736 File[] files = sqDir.listFiles();
737
738 if (files == null) {
739 log.warn("Cannot read directory '" + sqDir + "'");
740 return;
741 }
742
743 SQRelationParser parser = new SQRelationParser();
744
745 for (File file: files) {
746 parser.parse(file);
747 }
748
749 sqRelations = parser.getSQRelations();
750
751 log.debug("Parsed " + sqRelations.size() + " SQ relations.");
752 }
753
754
755 protected void parseBedHeightSingles(File dir) throws IOException {
756 log.debug("Parse bed height singles");
757
758 File[] files = dir.listFiles();
759
760 if (files == null) {
761 log.warn("Cannot read directory '" + dir + "'");
762 return;
763 }
764
765 BedHeightSingleParser parser = new BedHeightSingleParser();
766
767 for (File file: files) {
768 parser.parse(file);
769 }
770
771 bedHeightSingles = parser.getBedHeights();
772 }
773
774
775 protected void parseBedHeightEpochs(File dir) throws IOException {
776 log.debug("Parse bed height epochs");
777
778 File[] files = dir.listFiles();
779
780 if (files == null) {
781 log.warn("Cannot read directory '" + dir + "'");
782 return;
783 }
784
785 BedHeightEpochParser parser = new BedHeightEpochParser();
786
787 for (File file: files) {
788 parser.parse(file);
789 }
790
791 bedHeightEpochs = parser.getBedHeights();
792 }
793
794
795 public void parseFloodWater() throws IOException {
796 if (Config.INSTANCE.skipFloodWater()) {
797 log.info("skip parsing flod water");
798 return;
799 }
800
801 log.info("Parse flood water wst file");
802
803 File riverDir = wstFile.getParentFile().getParentFile();
804
805 File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));
806
807 if (!dir.isDirectory() || !dir.canRead()) {
808 log.info("no directory '" + dir + "' found");
809 return;
810 }
811
812 File [] files = dir.listFiles();
813
814 if (files == null) {
815 log.warn("cannot read '" + dir + "'");
816 return;
817 }
818
819 for (File file: files) {
820 if (!file.isFile() || !file.canRead()) {
821 continue;
822 }
823 String name = file.getName().toLowerCase();
824 if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
825 continue;
826 }
827 log.info("found file '" + file.getName() + "'");
828 try {
829 WstParser wstParser = new WstParser();
830 wstParser.parse(file);
831 ImportWst iw = wstParser.getWst();
832 iw.setKind(4);
833 iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
834 floodWater.add(iw);
835 }
836 catch (WstParser.ParseException e) {
837 log.error(e.getMessage());
838 }
839 }
840 }
841
/**
 * Parses the official lines WST file ("Amtl_Linien.wst") from each
 * candidate folder, tags it kind 3, and pairs its columns with the
 * main value names from the optional "Amtl_Linien.config" file.
 * Results are collected in {@code officialLines}.
 *
 * @throws IOException propagated from directory handling.
 */
public void parseOfficialLines() throws IOException {
    if (Config.INSTANCE.skipOfficialLines()) {
        log.info("skip parsing official lines");
        return;
    }

    log.info("Parse official wst files");

    File riverDir = wstFile.getParentFile().getParentFile();

    for (String folder: OFFICIAL_LINES_FOLDERS) {
        File dir = FileTools.repair(new File(riverDir, folder));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + folder + "' found");
            continue;
        }

        File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
        if (!file.isFile() || !file.canRead()) {
            log.warn("no official lines wst file found");
            continue;
        }
        log.debug("Found WST file: " + file);

        // Use the official-column factory so the WST columns can
        // carry an ImportOfficialLine (see join below).
        ImportWst iw = new ImportWst(ImportOfficialWstColumn.COLUMN_FACTORY);

        WstParser wstParser = new WstParser(iw);
        try {
            wstParser.parse(file);
        }
        catch (WstParser.ParseException e) {
            log.error(e.getMessage());
            continue;
        }

        iw.setKind(3);
        iw.setDescription(folder + "/" + iw.getDescription());

        File configFile = FileTools.repair(new File(dir, OFFICIAL_LINES_CONFIG));
        if (!configFile.isFile() || !configFile.canRead()) {
            log.warn("no config file for official lines found");
        }
        else {
            OfficialLinesConfigParser olcp = new OfficialLinesConfigParser();
            try {
                olcp.parse(configFile);
            }
            catch (IOException ioe) {
                // NOTE(review): after a failed parse the (possibly
                // partial) main value names are still used below —
                // confirm this is intended.
                log.warn("Error reading offical lines config", ioe);
            }
            List<String> mainValueNames = olcp.getMainValueNames();
            if (mainValueNames.isEmpty()) {
                log.warn("config file for offical lines contains no entries");
            }
            else {
                // Join as much as possible.
                // Pair columns with names positionally until either
                // list runs out.
                Iterator<ImportWstColumn> wi = iw.getColumns().iterator();
                Iterator<String> si = olcp.getMainValueNames().iterator();
                while (wi.hasNext() && si.hasNext()) {
                    ImportOfficialWstColumn wc = (ImportOfficialWstColumn)wi.next();
                    String name = si.next();
                    ImportOfficialLine iol = new ImportOfficialLine(name, wc);
                    wc.setOfficialLine(iol);
                }
            }
        }

        officialLines.add(iw);
    } // for all folders

}
914
/**
 * Parses all fixation WST files (*.wst) from the "Fixierungen"
 * directory into {@code fixations} (kind 2). Missing directory and
 * parse failures are logged and skipped.
 *
 * @throws IOException propagated from directory handling.
 */
public void parseFixations() throws IOException {
    if (Config.INSTANCE.skipFixations()) {
        log.info("skip parsing fixations");
        return;
    }

    log.info("Parse fixation wst files");

    File riverDir = wstFile.getParentFile().getParentFile();

    File fixDir = FileTools.repair(
        new File(riverDir, FIXATIONS));

    if (!fixDir.isDirectory() || !fixDir.canRead()) {
        log.info("no fixation wst file directory found");
        return;
    }

    File [] files = fixDir.listFiles();

    if (files == null) {
        log.warn("cannot read fixations wst file directory");
        return;
    }

    for (File file: files) {
        if (!file.isFile() || !file.canRead()) {
            continue;
        }
        String name = file.getName().toLowerCase();
        if (!name.endsWith(".wst")) {
            continue;
        }
        log.debug("Found WST file: " + file);

        try {
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(2);
            iw.setDescription(FIXATIONS+ "/" + iw.getDescription());
            fixations.add(iw);
        }
        catch (WstParser.ParseException e) {
            log.error(e.getMessage());
        }
    }
}
963
/**
 * Parses all extra longitudinal section WST files (*.zus, *.wst)
 * from the "Zus.Längsschnitte" directory into {@code extraWsts}
 * (kind 1). Missing directory and parse failures are logged and
 * skipped.
 *
 * @throws IOException propagated from directory handling.
 */
public void parseExtraWsts() throws IOException {
    if (Config.INSTANCE.skipExtraWsts()) {
        log.info("skip parsing extra WST files");
        return;
    }

    log.info("Parse extra longitudinal wst files");

    File riverDir = wstFile.getParentFile().getParentFile();

    File extraDir = FileTools.repair(
        new File(riverDir, EXTRA_LONGITUDINALS));

    if (!extraDir.isDirectory() || !extraDir.canRead()) {
        log.info("no extra longitudinal wst file directory found");
        return;
    }

    File [] files = extraDir.listFiles();

    if (files == null) {
        log.warn("cannot read extra longitudinal wst file directory");
        return;
    }

    for (File file: files) {
        if (!file.isFile() || !file.canRead()) {
            continue;
        }
        String name = file.getName().toLowerCase();
        if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
            continue;
        }
        log.debug("Found WST file: " + file);

        try {
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(1);
            iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription());
            extraWsts.add(iw);
        }
        catch (WstParser.ParseException e) {
            log.error(e.getMessage());
        }
    }

}
1013
/**
 * Parses the river's main WST file into {@code wst}. A parse error
 * is only logged; in that case {@code wst} keeps its previous value
 * (null on first run).
 *
 * @throws IOException propagated from the parser.
 */
public void parseWst() throws IOException {
    if (Config.INSTANCE.skipWst()) {
        log.info("skip parsing WST file");
        return;
    }

    WstParser wstParser = new WstParser();
    try {
        wstParser.parse(wstFile);
        wst = wstParser.getWst();
        // Derive the km direction from the water level gradient.
        wst.setKmUp(wst.guessWaterLevelIncreasing());
    }
    catch (WstParser.ParseException e) {
        log.error(e.getMessage());
    }
}
1030
1031 public void parseGauges() throws IOException {
1032 if (Config.INSTANCE.skipGauges()) {
1033 log.info("skip parsing gauges");
1034 return;
1035 }
1036
1037 File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
1038 gltFile = FileTools.repair(gltFile);
1039
1040 if (!gltFile.isFile() || !gltFile.canRead()) {
1041 log.warn("cannot read gauges from '" + gltFile + "'");
1042 return;
1043 }
1044
1045 PegelGltParser pgltp = new PegelGltParser();
1046 pgltp.parse(gltFile);
1047
1048 gauges = pgltp.getGauges();
1049
1050 for (ImportGauge gauge: gauges) {
1051 gauge.parseDependencies();
1052 }
1053 }
1054
1055 public void parseAnnotations() throws IOException {
1056 if (Config.INSTANCE.skipAnnotations()) {
1057 log.info("skip parsing annotations");
1058 return;
1059 }
1060
1061 File riverDir = wstFile.getParentFile().getParentFile();
1062 AnnotationsParser aparser =
1063 new AnnotationsParser(annotationClassifier);
1064 aparser.parse(riverDir);
1065
1066 annotations = aparser.getAnnotations();
1067 }
1068
/**
 * Finds and parses all HYK files below the river directory. Parsed
 * HYKs are attached to this river and collected in {@code hyks};
 * duplicate files (by HashedFile equality) are skipped with a warning.
 */
public void parseHYKs() {
    if (Config.INSTANCE.skipHYKs()) {
        log.info("skip parsing HYK files");
        return;
    }

    log.info("looking for HYK files");
    HYKParser parser = new HYKParser();
    File riverDir = wstFile
        .getParentFile() // Basisdaten
        .getParentFile() // Hydrologie
        .getParentFile(); // <river>

    parser.parseHYKs(riverDir, new HYKParser.Callback() {

        // Files seen so far, for duplicate detection across callbacks.
        Set<HashedFile> hfs = new HashSet<HashedFile>();

        /** Accept a HYK file only if it was not seen before. */
        @Override
        public boolean hykAccept(File file) {
            HashedFile hf = new HashedFile(file);
            boolean success = hfs.add(hf);
            if (!success) {
                log.warn("HYK file '" + file + "' seems to be a duplicate.");
            }
            return success;
        }

        /** Attach each parsed HYK to this river and collect it. */
        @Override
        public void hykParsed(HYKParser parser) {
            log.debug("callback from HYK parser");
            ImportHYK hyk = parser.getHYK();
            hyk.setRiver(ImportRiver.this);
            hyks.add(hyk);
        }
    });
}
1105
1106
1107 /** Create a W80 Parser and parse w80 files found. */
1108 public void parseW80s() {
1109 if (Config.INSTANCE.skipW80s()) {
1110 log.info("skip parsing W80s");
1111 return;
1112 }
1113 W80Parser parser = new W80Parser();
1114 File riverDir = wstFile
1115 .getParentFile() // Basisdaten
1116 .getParentFile() // Hydrologie
1117 .getParentFile(); // <river>
1118
1119 ImportRiverCrossSectionParserCallback w80Callback =
1120 new ImportRiverCrossSectionParserCallback("w80");
1121 parser.parseW80s(riverDir, w80Callback);
1122 }
1123
1124 /** Create a W80 Parser and parse w80 files found. */
1125 public void parseW80CSVs() {
1126 if (Config.INSTANCE.skipW80CSVs()) {
1127 log.info("skip parsing W80 csvs");
1128 return;
1129 }
1130 W80CSVParser parser = new W80CSVParser();
1131 File riverDir = wstFile
1132 .getParentFile() // Basisdaten
1133 .getParentFile() // Hydrologie
1134 .getParentFile(); // <river>
1135
1136 // Construct the Cross-Section-Data path.
1137 File csDir = new File(riverDir.getPath()
1138 + File.separator + "Geodaesie"
1139 + File.separator + "Querprofile"
1140 + File.separator + "QP-Daten");
1141
1142 ImportRiverCrossSectionParserCallback w80CSVCallback =
1143 new ImportRiverCrossSectionParserCallback("w80-csv");
1144 parser.parseW80CSVs(csDir, w80CSVCallback);
1145 }
1146
1147
    /**
     * Create and use a DA50Parser, parse the files found, add the
     * cross-sections found.
     */
    public void parseDA50s() {
        if (Config.INSTANCE.skipDA50s()) {
            log.info("skip parsing DA50s");
            return;
        }
        DA50Parser parser = new DA50Parser();
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        // Callback collects parsed cross sections into this river.
        ImportRiverCrossSectionParserCallback da50Callback =
            new ImportRiverCrossSectionParserCallback("da50");

        parser.parseDA50s(riverDir, da50Callback);
    }
1168
1169
1170 /** Create a DA66 Parser and parse the da66 files found. */
1171 // TODO this is a copy of parsePRFs, extract interfaces (e.g. CrossSectionParser).
1172 public void parseDA66s() {
1173 if (Config.INSTANCE.skipDA66s()) {
1174 log.info("skip parsing DA66s");
1175 return;
1176 }
1177
1178 log.info("looking for DA66 files");
1179 DA66Parser parser = new DA66Parser();
1180 File riverDir = wstFile
1181 .getParentFile() // Basisdaten
1182 .getParentFile() // Hydrologie
1183 .getParentFile(); // <river>
1184
1185 ImportRiverCrossSectionParserCallback da66Callback =
1186 new ImportRiverCrossSectionParserCallback("da66");
1187
1188 parser.parseDA66s(riverDir, da66Callback);
1189 }
1190
1191 /** Create a PRFParser and let it parse the prf files found. */
1192 public void parsePRFs() {
1193 if (Config.INSTANCE.skipPRFs()) {
1194 log.info("skip parsing PRFs");
1195 return;
1196 }
1197
1198 log.info("looking for PRF files");
1199 PRFParser parser = new PRFParser();
1200 File riverDir = wstFile
1201 .getParentFile() // Basisdaten
1202 .getParentFile() // Hydrologie
1203 .getParentFile(); // <river>
1204
1205 ImportRiverCrossSectionParserCallback prfCallback =
1206 new ImportRiverCrossSectionParserCallback("prf");
1207 parser.parsePRFs(riverDir, prfCallback);
1208 }
1209
1210 public static Date yearToDate(int year) {
1211 Calendar cal = Calendar.getInstance();
1212 cal.set(year, 5, 15, 12, 0, 0);
1213 long ms = cal.getTimeInMillis();
1214 cal.setTimeInMillis(ms - ms%1000);
1215 return cal.getTime();
1216 }
1217
    /**
     * Store the river and all parsed data into the database.
     * The individual store* methods honor their respective skip flags.
     * NOTE(review): the call order below appears deliberate (e.g.
     * storeWstUnit() before the WST-dependent stores) — do not reorder
     * without checking the dependencies.
     */
    public void storeDependencies() {
        /* test whether river is already in database.
         * Otherwise it makes no sense to skip waterlevel model WST-file
         * because the altitude reference is taken from there. */
        Session session = ImporterSession.getInstance().getDatabaseSession();
        Query query = session.createQuery("from River where name=:name");
        query.setString("name", name);
        List<River> rivers = query.list();
        if (rivers.isEmpty() && Config.INSTANCE.skipWst()){
            log.error("River not yet in database. You cannot skip importing waterlevel model.");
            return;
        }

        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeBedHeight();
        storeSedimentDensity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentYield();
        storeWaterlevels();
        storeWaterlevelDifferences();
        storeMeasurementStations();
        storeSQRelations();
        storeOfficialNumber();
    }
1253
1254 public void storeWstUnit() {
1255 if (wst == null) {
1256 log.warn("No unit given. Waterlevel-model WST-file has to be imported already.");
1257 }
1258 else {
1259 wstUnit = wst.getUnit();
1260 }
1261 }
1262
1263 public void storeHYKs() {
1264 if (!Config.INSTANCE.skipHYKs()) {
1265 log.info("store HYKs");
1266 getPeer();
1267 for (ImportHYK hyk: hyks) {
1268 hyk.storeDependencies();
1269 }
1270 }
1271 }
1272
1273 public void storeCrossSections() {
1274 if (!Config.INSTANCE.skipPRFs()
1275 || !Config.INSTANCE.skipDA66s()
1276 || !Config.INSTANCE.skipDA50s()
1277 || !Config.INSTANCE.skipW80s()
1278 || !Config.INSTANCE.skipW80CSVs()) {
1279 log.info("store cross sections");
1280 getPeer();
1281 for (ImportCrossSection crossSection: crossSections) {
1282 crossSection.storeDependencies();
1283 }
1284 }
1285 }
1286
1287 public void storeWst() {
1288 if (wst != null && !Config.INSTANCE.skipWst()) {
1289 River river = getPeer();
1290 wst.storeDependencies(river);
1291
1292 // The flow direction of the main wst and the corresponding
1293 // waterlevels determine if the river is 'km_up'.
1294 Session session = ImporterSession.getInstance().getDatabaseSession();
1295 river.setKmUp(wst.getKmUp());
1296 session.save(river);
1297 }
1298 }
1299
1300 public void storeFixations() {
1301 if (!Config.INSTANCE.skipFixations()) {
1302 log.info("store fixation wsts");
1303 River river = getPeer();
1304 for (ImportWst fWst: fixations) {
1305 log.debug("Fixation name: " + fWst.getDescription());
1306 fWst.storeDependencies(river);
1307 }
1308 }
1309 }
1310
1311
1312 /** Store wsts from waterlevel-csv files. */
1313 public void storeWaterlevels() {
1314 if (!Config.INSTANCE.skipWaterlevels())
1315
1316 log.info("store waterlevel wsts from csv");
1317 River river = getPeer();
1318 for (ImportWst wWst: waterlevels) {
1319 log.debug("Waterlevel name: " + wWst.getDescription());
1320 wWst.storeDependencies(river);
1321 }
1322 }
1323
1324
1325 /** Store wsts from waterleveldifference-csv files. */
1326 public void storeWaterlevelDifferences() {
1327 if (!Config.INSTANCE.skipWaterlevelDifferences())
1328
1329 log.info("store waterleveldifferences wsts from csv");
1330 River river = getPeer();
1331 for (ImportWst dWst: waterlevelDifferences) {
1332 log.debug("water.diff.: name " + dWst.getDescription());
1333 dWst.storeDependencies(river);
1334 }
1335 }
1336
1337
1338 public void storeExtraWsts() {
1339 if (!Config.INSTANCE.skipExtraWsts()) {
1340 log.info("store extra wsts");
1341 River river = getPeer();
1342 for (ImportWst wst: extraWsts) {
1343 log.debug("name: " + wst.getDescription());
1344 wst.storeDependencies(river);
1345 }
1346 }
1347 }
1348
    /**
     * Store the official lines WSTs and their per-column official
     * line records. Does nothing if skipped or nothing was parsed.
     */
    public void storeOfficialLines() {
        if (Config.INSTANCE.skipOfficialLines() || officialLines.isEmpty()) {
            return;
        }

        log.info("store official lines wsts");
        River river = getPeer();
        for (ImportWst wst: officialLines) {
            log.debug("name: " + wst.getDescription());
            wst.storeDependencies(river);

            // Store the official lines after the columns are stored.
            for (ImportWstColumn wc: wst.getColumns()) {
                // Columns of official-line WSTs are created as
                // ImportOfficialWstColumn, so this cast is safe here.
                ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc;
                ImportOfficialLine ioc = owc.getOfficialLine();
                if (ioc != null) {
                    // getPeer(river) creates/loads the record; null
                    // indicates it could not be stored.
                    if (ioc.getPeer(river) == null) {
                        log.warn("Cannot store official line: " + ioc.getName());
                    }
                }
            }
        }
    }
1372
1373 public void storeFloodWater() {
1374 if (!Config.INSTANCE.skipFloodWater()) {
1375 log.info("store flood water wsts");
1376 River river = getPeer();
1377 for (ImportWst wst: floodWater) {
1378 log.debug("name: " + wst.getDescription());
1379 wst.storeDependencies(river);
1380 }
1381 }
1382 }
1383
1384
1385 public void storeFloodProtection() {
1386 if (!Config.INSTANCE.skipFloodProtection()) {
1387 log.info("store flood protection wsts");
1388 River river = getPeer();
1389 for (ImportWst wst: floodProtection) {
1390 log.debug("name: " + wst.getDescription());
1391 wst.storeDependencies(river);
1392 }
1393 }
1394 }
1395
1396
1397 public void storeBedHeight() {
1398 if (!Config.INSTANCE.skipBedHeightSingle()) {
1399 log.info("store bed heights single");
1400 storeBedHeightSingle();
1401 }
1402
1403 if (!Config.INSTANCE.skipBedHeightEpoch()) {
1404 log.info("store bed height epoch.");
1405 storeBedHeightEpoch();
1406 }
1407 }
1408
1409
1410 private void storeBedHeightSingle() {
1411 River river = getPeer();
1412
1413 if (bedHeightSingles != null) {
1414 for (ImportBedHeight tmp: bedHeightSingles) {
1415 ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;
1416
1417 String desc = single.getDescription();
1418
1419 log.debug("name: " + desc);
1420
1421 single.storeDependencies(river);
1422 }
1423 }
1424 else {
1425 log.info("No single bed heights to store.");
1426 }
1427 }
1428
1429
1430 private void storeBedHeightEpoch() {
1431 River river = getPeer();
1432
1433 if (bedHeightEpochs != null) {
1434 for (ImportBedHeight tmp: bedHeightEpochs) {
1435 ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;
1436
1437 String desc = epoch.getDescription();
1438
1439 log.debug("name: " + desc);
1440
1441 epoch.storeDependencies(river);
1442 }
1443 }
1444 else {
1445 log.info("No epoch bed heights to store.");
1446 }
1447 }
1448
1449 public void storeSedimentDensity() {
1450 if (!Config.INSTANCE.skipSedimentDensity()) {
1451 log.info("store sediment density");
1452
1453 River river = getPeer();
1454
1455 for (ImportSedimentDensity density: sedimentDensities) {
1456 String desc = density.getDescription();
1457
1458 log.debug("name: " + desc);
1459
1460 density.storeDependencies(river);
1461 }
1462 }
1463 }
1464
1465 public void storeMorphologicalWidth() {
1466 if (!Config.INSTANCE.skipMorphologicalWidth()) {
1467 log.info("store morphological width");
1468
1469 River river = getPeer();
1470
1471 for (ImportMorphWidth width: morphologicalWidths) {
1472 width.storeDependencies(river);
1473 }
1474 }
1475 }
1476
1477 public void storeFlowVelocity() {
1478 if (!Config.INSTANCE.skipFlowVelocity()) {
1479 log.info("store flow velocity");
1480
1481 River river = getPeer();
1482
1483 for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){
1484 flowVelocityModel.storeDependencies(river);
1485 }
1486
1487 for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) {
1488 m.storeDependencies(river);
1489 }
1490 }
1491 }
1492
1493
1494 public void storeSedimentYield() {
1495 if (!Config.INSTANCE.skipSedimentYield()) {
1496 log.info("store sediment yield data");
1497
1498 River river = getPeer();
1499
1500 for (ImportSedimentYield sedimentYield: sedimentYields) {
1501 sedimentYield.storeDependencies(river);
1502 }
1503 }
1504 }
1505
1506
1507 public void storeMeasurementStations() {
1508 if (!Config.INSTANCE.skipMeasurementStations()) {
1509 log.info("store measurement stations");
1510
1511 River river = getPeer();
1512
1513 int count = 0;
1514
1515 for (ImportMeasurementStation station: measurementStations) {
1516 boolean success = station.storeDependencies(river);
1517 if (success) {
1518 count++;
1519 }
1520 }
1521
1522 log.info("stored " + count + " measurement stations.");
1523 }
1524 }
1525
1526
1527 public void storeSQRelations() {
1528 if (!Config.INSTANCE.skipSQRelation()) {
1529 log.info("store sq relations");
1530
1531 River river = getPeer();
1532
1533 int count = 0;
1534
1535 for (ImportSQRelation sqRelation: sqRelations) {
1536 sqRelation.storeDependencies(river);
1537 count++;
1538 }
1539
1540 log.info("stored " + count + " sq relations.");
1541 }
1542 }
1543
1544
1545 public void storeAnnotations() {
1546 if (!Config.INSTANCE.skipAnnotations()) {
1547 River river = getPeer();
1548 for (ImportAnnotation annotation: annotations) {
1549 annotation.getPeer(river);
1550 }
1551 }
1552 }
1553
1554 public void storeGauges() {
1555 if (!Config.INSTANCE.skipGauges()) {
1556 log.info("store gauges:");
1557 River river = getPeer();
1558 Session session = ImporterSession.getInstance()
1559 .getDatabaseSession();
1560 for (ImportGauge gauge: gauges) {
1561 log.info("\tgauge: " + gauge.getName());
1562 gauge.storeDependencies(river);
1563 ImporterSession.getInstance().getDatabaseSession();
1564 session.flush();
1565 }
1566 }
1567 }
1568
1569 public River getPeer() {
1570 if (peer == null) {
1571 Session session = ImporterSession.getInstance().getDatabaseSession();
1572 Query query = session.createQuery("from River where name=:name");
1573
1574 Unit u = null;
1575 if (wstUnit != null) {
1576 u = wstUnit.getPeer();
1577 }
1578
1579 query.setString("name", name);
1580 List<River> rivers = query.list();
1581 if (rivers.isEmpty()) {
1582 log.info("Store new river '" + name + "'");
1583 peer = new River(name, u);
1584 if (!Config.INSTANCE.skipBWASTR()) {
1585 peer.setOfficialNumber(officialNumber);
1586 }
1587 session.save(peer);
1588 }
1589 else {
1590 peer = rivers.get(0);
1591 }
1592 }
1593 return peer;
1594 }
1595 }
1596 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :

http://dive4elements.wald.intevation.org