Mercurial > dive4elements > river
diff backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java @ 8971:50416a0df385
Importer for the Schifffahrt (S-INFO) and Oekologie (U-INFO) files
author:   mschaefer
date:     Tue, 03 Apr 2018 10:18:30 +0200
parents:  5e38e2924c07
children: ae76f618d990
line wrap: on
line diff
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Tue Apr 03 10:02:01 2018 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Tue Apr 03 10:18:30 2018 +0200 @@ -8,10 +8,23 @@ package org.dive4elements.river.importer; -import org.dive4elements.artifacts.common.utils.FileTools.HashedFile; +import java.io.File; +import java.io.FilenameFilter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.log4j.Logger; import org.dive4elements.artifacts.common.utils.FileTools; - +import org.dive4elements.artifacts.common.utils.FileTools.HashedFile; +import org.dive4elements.river.backend.utils.DouglasPeuker; +import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser; import org.dive4elements.river.importer.parsers.AnnotationClassifier; import org.dive4elements.river.importer.parsers.AnnotationsParser; import org.dive4elements.river.importer.parsers.BedHeightParser; @@ -29,34 +42,17 @@ import org.dive4elements.river.importer.parsers.PorosityParser; import org.dive4elements.river.importer.parsers.SQRelationParser; import org.dive4elements.river.importer.parsers.SedimentDensityParser; -import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser; import org.dive4elements.river.importer.parsers.SedimentLoadLSParser; import org.dive4elements.river.importer.parsers.SedimentLoadParser; +import org.dive4elements.river.importer.parsers.W80CSVParser; import org.dive4elements.river.importer.parsers.W80Parser; -import org.dive4elements.river.importer.parsers.W80CSVParser; import org.dive4elements.river.importer.parsers.WaterlevelDifferencesParser; import org.dive4elements.river.importer.parsers.WaterlevelParser; import org.dive4elements.river.importer.parsers.WstParser; - +import 
org.dive4elements.river.importer.sinfo.SInfoImporter; +import org.dive4elements.river.importer.uinfo.UInfoImporter; import org.dive4elements.river.model.River; import org.dive4elements.river.model.Unit; - -import org.dive4elements.river.backend.utils.DouglasPeuker; - -import java.io.File; -import java.io.IOException; - -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Iterator; - -import org.apache.log4j.Logger; - import org.hibernate.Query; import org.hibernate.Session; @@ -71,22 +67,22 @@ public static final String FIXATIONS = "Fixierungen"; public static final String EXTRA_LONGITUDINALS = - "Zus.Laengsschnitte"; + "Zus.Laengsschnitte"; public static final String [] OFFICIAL_LINES_FOLDERS = { - "Basisdaten", - "Fixierungen" }; + "Basisdaten", + "Fixierungen" }; public static final String OFFICIAL_LINES = - "Amtl_Linien.wst"; + "Amtl_Linien.wst"; public static final String OFFICIAL_LINES_CONFIG = - "Amtl_Linien.config"; + "Amtl_Linien.config"; public static final String FLOOD_WATER = "HW-Marken"; public static final String FLOOD_PROTECTION = - "HW-Schutzanlagen"; + "HW-Schutzanlagen"; public static final String MINFO_DIR = "Morphologie"; @@ -99,10 +95,10 @@ public static final String POROSITY_DIR = "Porositaet"; public static final String MORPHOLOGICAL_WIDTH_DIR = - "morphologische_Breite"; + "morphologische_Breite"; public static final String FLOW_VELOCITY_DIR = - "Geschwindigkeit_Schubspannung"; + "Geschwindigkeit_Schubspannung"; public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen"; @@ -119,22 +115,22 @@ public static final String SEDIMENT_LOAD_EPOCH_DIR = "Epochen"; public static final String SEDIMENT_LOAD_OFF_EPOCH_DIR = - "amtliche Epochen"; + "amtliche Epochen"; public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse"; public static final String MINFO_WATERLEVELS_DIR = 
"Wasserspiegellagen"; public static final String MINFO_WATERLEVEL_DIFF_DIR = - "Wasserspiegeldifferenzen"; + "Wasserspiegeldifferenzen"; public static final String MINFO_BASE_DIR = "Basisdaten"; public static final String MINFO_CORE_DATA_FILE = - "Stammdaten_Messstellen.csv"; + "Stammdaten_Messstellen.csv"; public static final String MINFO_SQ_DIR = - "Feststofftransport-Abfluss-Beziehung"; + "Feststofftransport-Abfluss-Beziehung"; protected String name; @@ -142,6 +138,9 @@ protected Long officialNumber; + /** + * Path of the river/Hydrologie/Basisdaten/river.wst file from which all other file paths are derived + */ protected File wstFile; protected File bbInfoFile; @@ -199,31 +198,40 @@ /** Database-mapped River instance. */ protected River peer; + /** + * Importer for a river's S-INFO files. + */ + private final SInfoImporter sinfoImporter; + + /** + * Importer for a river's U-INFO files. + */ + private final UInfoImporter uinfoImporter; /** Callback-implementation for CrossSectionParsers. */ private class ImportRiverCrossSectionParserCallback implements CrossSectionParser.Callback { - private Set<HashedFile> files = new HashSet<HashedFile>(); - private String type; + private final Set<HashedFile> files = new HashSet<>(); + private final String type; /** * Create new Callback, given type which is used for logging * purposes only. */ - public ImportRiverCrossSectionParserCallback(String type) { + public ImportRiverCrossSectionParserCallback(final String type) { this.type = type; } /** Accept file if not duplicate. 
*/ @Override - public boolean accept(File file) { - HashedFile hf = new HashedFile(file); - boolean success = files.add(hf); + public boolean accept(final File file) { + final HashedFile hf = new HashedFile(file); + final boolean success = this.files.add(hf); if (!success) { - log.warn(type + " file '" + file - + "' seems to be a duplicate."); + log.warn(this.type + " file '" + file + + "' seems to be a duplicate."); } return success; } @@ -231,92 +239,94 @@ /** Add crosssection. */ @Override - public void parsed(CrossSectionParser parser) { - log.debug("callback from " + type + " parser"); - - String description = parser.getDescription(); - Integer year = parser.getYear(); - ImportTimeInterval ti = year != null - ? new ImportTimeInterval(yearToDate(year)) - : null; - - Map<Double, List<XY>> data = parser.getData(); - - List<ImportCrossSectionLine> lines = - new ArrayList<ImportCrossSectionLine>(data.size()); - - Double simplificationEpsilon = - Config.INSTANCE.getCrossSectionSimplificationEpsilon(); - - long numReadPoints = 0L; - long numRemainingPoints = 0L; + public void parsed(final CrossSectionParser parser) { + log.debug("callback from " + this.type + " parser"); - for (Map.Entry<Double, List<XY>> entry: data.entrySet()) { - Double km = entry.getKey(); - List<XY> points = entry.getValue(); - numReadPoints += points.size(); - if (simplificationEpsilon != null) { - points = DouglasPeuker.simplify( - points, simplificationEpsilon); - } - numRemainingPoints += points.size(); - lines.add(new ImportCrossSectionLine(km, points)); - } + final String description = parser.getDescription(); + final Integer year = parser.getYear(); + final ImportTimeInterval ti = year != null + ? new ImportTimeInterval(yearToDate(year)) + : null; - ImportRiver.this.addCrossSections(description, ti, lines); + final Map<Double, List<XY>> data = parser.getData(); - if (simplificationEpsilon != null) { - double percent = numReadPoints > 0L - ? 
((double)numRemainingPoints/numReadPoints)*100d - : 0d; + final List<ImportCrossSectionLine> lines = + new ArrayList<>(data.size()); - log.info(String.format( - "Number of points in cross section: %d / %d (%.2f%%)", - numReadPoints, numRemainingPoints, percent)); - } + final Double simplificationEpsilon = + Config.INSTANCE.getCrossSectionSimplificationEpsilon(); + + long numReadPoints = 0L; + long numRemainingPoints = 0L; + + for (final Map.Entry<Double, List<XY>> entry: data.entrySet()) { + final Double km = entry.getKey(); + List<XY> points = entry.getValue(); + numReadPoints += points.size(); + if (simplificationEpsilon != null) { + points = DouglasPeuker.simplify( + points, simplificationEpsilon); + } + numRemainingPoints += points.size(); + lines.add(new ImportCrossSectionLine(km, points)); + } + + ImportRiver.this.addCrossSections(description, ti, lines); + + if (simplificationEpsilon != null) { + final double percent = numReadPoints > 0L + ? ((double)numRemainingPoints/numReadPoints)*100d + : 0d; + + log.info(String.format( + "Number of points in cross section: %d / %d (%.2f%%)", + numReadPoints, numRemainingPoints, percent)); + } } } // ImportRiverCrossSectionParserCallback private void addCrossSections( - String description, - ImportTimeInterval ti, - List<ImportCrossSectionLine> lines - ) { - crossSections.add( - new ImportCrossSection(this, description, ti, lines)); + final String description, + final ImportTimeInterval ti, + final List<ImportCrossSectionLine> lines + ) { + this.crossSections.add( + new ImportCrossSection(this, description, ti, lines)); } public ImportRiver() { - hyks = new ArrayList<ImportHYK>(); - crossSections = new ArrayList<ImportCrossSection>(); - extraWsts = new ArrayList<ImportWst>(); - fixations = new ArrayList<ImportWst>(); - officialLines = new ArrayList<ImportWst>(); - floodWater = new ArrayList<ImportWst>(); - waterlevels = new ArrayList<ImportWst>(); - waterlevelDifferences = new ArrayList<ImportWst>(); - floodProtection = 
new ArrayList<ImportWst>(); - sedimentDensities = new ArrayList<ImportSedimentDensity>(); - porosities = new ArrayList<ImportPorosity>(); - morphologicalWidths = new ArrayList<ImportMorphWidth>(); - flowVelocityModels = new ArrayList<ImportFlowVelocityModel>(); - flowVelocityMeasurements = - new ArrayList<ImportFlowVelocityMeasurement>(); - sedimentLoadLSs = new ArrayList<ImportSedimentLoadLS>(); - sedimentLoads = new ArrayList<ImportSedimentLoad>(); - measurementStations = new ArrayList<ImportMeasurementStation>(); - sqRelations = new ArrayList<ImportSQRelation>(); + this.hyks = new ArrayList<>(); + this.crossSections = new ArrayList<>(); + this.extraWsts = new ArrayList<>(); + this.fixations = new ArrayList<>(); + this.officialLines = new ArrayList<>(); + this.floodWater = new ArrayList<>(); + this.waterlevels = new ArrayList<>(); + this.waterlevelDifferences = new ArrayList<>(); + this.floodProtection = new ArrayList<>(); + this.sedimentDensities = new ArrayList<>(); + this.porosities = new ArrayList<>(); + this.morphologicalWidths = new ArrayList<>(); + this.flowVelocityModels = new ArrayList<>(); + this.flowVelocityMeasurements = + new ArrayList<>(); + this.sedimentLoadLSs = new ArrayList<>(); + this.sedimentLoads = new ArrayList<>(); + this.measurementStations = new ArrayList<>(); + this.sqRelations = new ArrayList<>(); + this.sinfoImporter = new SInfoImporter(); + this.uinfoImporter = new UInfoImporter(); } public ImportRiver( - String name, - String modelUuid, - File wstFile, - File bbInfoFile, - AnnotationClassifier annotationClassifier - ) { + final String name, + final String modelUuid, + final File wstFile, + final File bbInfoFile, + final AnnotationClassifier annotationClassifier + ) { this(); this.name = name; this.modelUuid = modelUuid; @@ -326,18 +336,18 @@ } public String getName() { - return name; + return this.name; } - public void setName(String name) { + public void setName(final String name) { this.name = name; } public String getModelUuid() { 
- return modelUuid; + return this.modelUuid; } - public void setModelUuid(String modelUuid) { + public void setModelUuid(final String modelUuid) { this.modelUuid = modelUuid; } @@ -345,41 +355,44 @@ return this.officialNumber; } - public void setOfficialNumber(Long officialNumber) { + public void setOfficialNumber(final Long officialNumber) { this.officialNumber = officialNumber; } public File getWstFile() { - return wstFile; + return this.wstFile; } - public void setWstFile(File wstFile) { + public void setWstFile(final File wstFile) { this.wstFile = wstFile; } public File getBBInfo() { - return bbInfoFile; + return this.bbInfoFile; } - public void setBBInfo(File bbInfoFile) { + public void setBBInfo(final File bbInfoFile) { this.bbInfoFile = bbInfoFile; } public ImportWst getWst() { - return wst; + return this.wst; } - public void setWst(ImportWst wst) { + public void setWst(final ImportWst wst) { this.wst = wst; } - public File getMinfoDir() { - File riverDir = wstFile - .getParentFile().getParentFile().getParentFile(); - return new File(riverDir, MINFO_DIR); + private File getMinfoDir() { + return new File(getRiverDir(), MINFO_DIR); + } + + private File getRiverDir() { + return this.wstFile.getParentFile().getParentFile().getParentFile(); } public void parseDependencies() throws IOException { + log.info("Root dir is '" + getRiverDir() + "'"); parseGauges(); parseAnnotations(); parsePRFs(); @@ -405,6 +418,10 @@ parseWaterlevels(); parseWaterlevelDifferences(); parseSQRelation(); + this.sinfoImporter.setup(getRiverDir(), this); + this.sinfoImporter.parse(); + this.uinfoImporter.setup(getRiverDir(), this); + this.uinfoImporter.parse(); } public void parseFloodProtection() throws IOException { @@ -415,41 +432,41 @@ log.info("Parse flood protection wst file"); - File riverDir = wstFile.getParentFile().getParentFile(); + final File riverDir = this.wstFile.getParentFile().getParentFile(); - File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION)); + final 
File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION)); if (!dir.isDirectory() || !dir.canRead()) { log.info("no directory '" + dir + "' found"); return; } - File [] files = dir.listFiles(); + final File [] files = dir.listFiles(); if (files == null) { log.warn("cannot read '" + dir + "'"); return; } - for (File file: files) { + for (final File file: files) { if (!file.isFile() || !file.canRead()) { continue; } - String name = file.getName().toLowerCase(); + final String name = file.getName().toLowerCase(); if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { continue; } log.info("found file '" + file.getName() + "'"); try { - WstParser wstParser = new WstParser(); + final WstParser wstParser = new WstParser(); wstParser.parse(file); - ImportWst iw = wstParser.getWst(); + final ImportWst iw = wstParser.getWst(); iw.setKind(5); iw.setDescription( - FLOOD_PROTECTION + "/" + iw.getDescription()); - floodProtection.add(iw); + FLOOD_PROTECTION + "/" + iw.getDescription()); + this.floodProtection.add(iw); } - catch (WstParser.ParseException e) { + catch (final WstParser.ParseException e) { log.error(e.getMessage()); } } @@ -460,13 +477,13 @@ log.info("skip storing official number."); return; } - getPeer().setOfficialNumber(officialNumber); + getPeer().setOfficialNumber(this.officialNumber); } public void parseBedHeight() throws IOException { - File minfoDir = getMinfoDir(); - File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR); - File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR); + final File minfoDir = getMinfoDir(); + final File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR); + final File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR); if (Config.INSTANCE.skipBedHeight()) { log.info("skip parsing bed heights."); @@ -486,26 +503,26 @@ log.debug("Parse sediment density"); - File minfoDir = getMinfoDir(); - File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR); + final File minfoDir = getMinfoDir(); + final File sediment = 
new File(minfoDir, SEDIMENT_DENSITY_DIR); - File[] files = sediment.listFiles(); + final File[] files = sediment.listFiles(); if (files == null) { log.warn("Cannot read directory '" + sediment + "'"); return; } - SedimentDensityParser parser = new SedimentDensityParser(); + final SedimentDensityParser parser = new SedimentDensityParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } - sedimentDensities = parser.getSedimentDensities(); + this.sedimentDensities = parser.getSedimentDensities(); - log.info("Parsed " + sedimentDensities.size() - + " sediment densities."); + log.info("Parsed " + this.sedimentDensities.size() + + " sediment densities."); } protected void parsePorosity() throws IOException { @@ -516,25 +533,25 @@ log.debug("Parse porosity"); - File minfoDir = getMinfoDir(); - File porosity = new File(minfoDir, POROSITY_DIR); + final File minfoDir = getMinfoDir(); + final File porosity = new File(minfoDir, POROSITY_DIR); - File[] files = porosity.listFiles(); + final File[] files = porosity.listFiles(); if (files == null) { log.warn("Cannot read directory '" + porosity + "'"); return; } - PorosityParser parser = new PorosityParser(); + final PorosityParser parser = new PorosityParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } - porosities = parser.getPorosities(); + this.porosities = parser.getPorosities(); - log.info("Parsed " + porosities.size() + " porosities."); + log.info("Parsed " + this.porosities.size() + " porosities."); } protected void parseMorphologicalWidth() throws IOException { @@ -545,26 +562,26 @@ log.debug("Parse morphological width"); - File minfoDir = getMinfoDir(); - File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR); + final File minfoDir = getMinfoDir(); + final File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR); - File[] files = morphDir.listFiles(); + final File[] files = morphDir.listFiles(); if (files == null) { log.warn("Cannot read 
directory '" + morphDir + "'"); return; } - MorphologicalWidthParser parser = new MorphologicalWidthParser(); + final MorphologicalWidthParser parser = new MorphologicalWidthParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } - morphologicalWidths = parser.getMorphologicalWidths(); + this.morphologicalWidths = parser.getMorphologicalWidths(); - log.info("Parsed " + morphologicalWidths.size() - + " morph. widths files."); + log.info("Parsed " + this.morphologicalWidths.size() + + " morph. widths files."); } @@ -576,78 +593,78 @@ log.debug("Parse flow velocity"); - File minfoDir = getMinfoDir(); - File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR); - File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL); - File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS); + final File minfoDir = getMinfoDir(); + final File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR); + final File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL); + final File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS); - File[] modelFiles = modelDir.listFiles(); - File[] measureFiles = measureDir.listFiles(); + final File[] modelFiles = modelDir.listFiles(); + final File[] measureFiles = measureDir.listFiles(); if (modelFiles == null) { log.warn("Cannot read directory '" + modelDir + "'"); } else { - FlowVelocityModelParser parser = new FlowVelocityModelParser(); + final FlowVelocityModelParser parser = new FlowVelocityModelParser(); - for (File model: modelFiles) { + for (final File model: modelFiles) { log.debug("Parse file '" + model + "'"); parser.parse(model); } - flowVelocityModels = parser.getModels(); + this.flowVelocityModels = parser.getModels(); } if (measureFiles == null) { log.warn("Cannot read directory '" + measureDir + "'"); } else { - FlowVelocityMeasurementParser parser = - new FlowVelocityMeasurementParser(); + final FlowVelocityMeasurementParser parser = + new FlowVelocityMeasurementParser(); - for (File measurement: 
measureFiles) { + for (final File measurement: measureFiles) { log.debug("Parse file '" + measurement + "'"); parser.parse(measurement); } - flowVelocityMeasurements = parser.getMeasurements(); + this.flowVelocityMeasurements = parser.getMeasurements(); } } private void parseSedimentLoadFiles( - File[] files, - AbstractSedimentLoadParser parser - ) throws IOException { - for (File file: files) { - if (file.isDirectory()) { - for (File child: file.listFiles()) { - parser.parse(child); - } - } - else { - parser.parse(file); - } - } + final File[] files, + final AbstractSedimentLoadParser parser + ) throws IOException { + for (final File file: files) { + if (file.isDirectory()) { + for (final File child: file.listFiles()) { + parser.parse(child); + } + } + else { + parser.parse(file); + } + } } private void parseSedimentLoadDir( - File sedimentLoadDir, - AbstractSedimentLoadParser parser - ) throws IOException { + final File sedimentLoadDir, + final AbstractSedimentLoadParser parser + ) throws IOException { - File[] sedimentLoadSubDirs = { - new File(sedimentLoadDir, - SEDIMENT_LOAD_SINGLE_DIR), - new File(sedimentLoadDir, - SEDIMENT_LOAD_EPOCH_DIR), - new File(sedimentLoadDir, - SEDIMENT_LOAD_OFF_EPOCH_DIR), + final File[] sedimentLoadSubDirs = { + new File(sedimentLoadDir, + SEDIMENT_LOAD_SINGLE_DIR), + new File(sedimentLoadDir, + SEDIMENT_LOAD_EPOCH_DIR), + new File(sedimentLoadDir, + SEDIMENT_LOAD_OFF_EPOCH_DIR), }; - for (File subDir : sedimentLoadSubDirs) { - File[] files = subDir.listFiles(); + for (final File subDir : sedimentLoadSubDirs) { + final File[] files = subDir.listFiles(); if (files == null || files.length == 0) { log.warn("Cannot read directory '" + subDir + "'"); @@ -667,38 +684,38 @@ log.debug("Parse sediment load longitudinal section data"); - SedimentLoadLSParser parser = new SedimentLoadLSParser(); + final SedimentLoadLSParser parser = new SedimentLoadLSParser(); - File minfoDir = getMinfoDir(); - File sedimentLoadDir = new File(minfoDir, 
SEDIMENT_LOAD_DIR); - File sedimentLoadLSDir = new File(sedimentLoadDir, - SEDIMENT_LOAD_LS_DIR); + final File minfoDir = getMinfoDir(); + final File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR); + final File sedimentLoadLSDir = new File(sedimentLoadDir, + SEDIMENT_LOAD_LS_DIR); parseSedimentLoadDir(sedimentLoadLSDir, parser); - sedimentLoadLSs = parser.getSedimentLoadLSs(); + this.sedimentLoadLSs = parser.getSedimentLoadLSs(); } protected void parseSedimentLoad() throws IOException { if (Config.INSTANCE.skipSedimentLoad()) { log.info( - "skip parsing sediment load data at measurement stations"); + "skip parsing sediment load data at measurement stations"); return; } log.debug("Parse sediment load data at measurement stations"); - SedimentLoadParser parser = new SedimentLoadParser(getPeer()); + final SedimentLoadParser parser = new SedimentLoadParser(getPeer()); - File minfoDir = getMinfoDir(); - File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR); - File sedimentLoadMSDir = new File(sedimentLoadDir, - SEDIMENT_LOAD_MS_DIR); + final File minfoDir = getMinfoDir(); + final File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR); + final File sedimentLoadMSDir = new File(sedimentLoadDir, + SEDIMENT_LOAD_MS_DIR); parseSedimentLoadDir(sedimentLoadMSDir, parser); - sedimentLoads = parser.getSedimentLoads(); + this.sedimentLoads = parser.getSedimentLoads(); } @@ -710,29 +727,29 @@ log.info("Parse waterlevels"); - File minfo = getMinfoDir(); - File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); - File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); + final File minfo = getMinfoDir(); + final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); + final File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); - File[] files = wspDir.listFiles(); + final File[] files = wspDir.listFiles(); if (files == null) { log.warn("Cannot read directory for wl '" + wspDir + "'"); return; } - WaterlevelParser parser = new WaterlevelParser(); + final WaterlevelParser 
parser = new WaterlevelParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } // The parsed ImportWaterlevels are converted to // 'fixation'-wsts now. - for(ImportWst iw: parser.getWaterlevels()) { + for(final ImportWst iw: parser.getWaterlevels()) { iw.setDescription("CSV/" + iw.getDescription()); iw.setKind(7); - waterlevels.add(iw); + this.waterlevels.add(iw); } } @@ -744,27 +761,27 @@ log.info("Parse measurement stations"); - File minfo = getMinfoDir(); - File minfoBaseDir = new File(minfo, MINFO_BASE_DIR); - File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE); + final File minfo = getMinfoDir(); + final File minfoBaseDir = new File(minfo, MINFO_BASE_DIR); + final File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE); if (coredataFile == null || !coredataFile.exists()) { log.warn("No core data file '" - + coredataFile.getAbsolutePath() + "' found"); + + coredataFile.getAbsolutePath() + "' found"); return; } - MeasurementStationsParser parser = new MeasurementStationsParser(); + final MeasurementStationsParser parser = new MeasurementStationsParser(); try { parser.parse(coredataFile); - measurementStations = parser.getMeasurementStations(); + this.measurementStations = parser.getMeasurementStations(); - log.info("Successfully parsed " + measurementStations.size() - + " measurement stations."); + log.info("Successfully parsed " + this.measurementStations.size() + + " measurement stations."); } - catch (IOException ioe) { + catch (final IOException ioe) { log.error("unable to parse file '" + coredataFile.getName() + - ": " + ioe.getMessage()); + ": " + ioe.getMessage()); } } @@ -777,28 +794,28 @@ log.info("Parse waterlevel differences"); - File minfo = getMinfoDir(); - File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); - File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); + final File minfo = getMinfoDir(); + final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); + final File diffDir = new 
File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); - File[] files = diffDir.listFiles(); + final File[] files = diffDir.listFiles(); if (files == null) { log.warn("Cannot read directory '" + diffDir + "'"); return; } - WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); + final WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } // WaterlevelDifferences become Wsts now. - for(ImportWst iw: parser.getDifferences()) { + for(final ImportWst iw: parser.getDifferences()) { iw.setDescription("CSV/" + iw.getDescription()); iw.setKind(6); - waterlevelDifferences.add(iw); + this.waterlevelDifferences.add(iw); } } @@ -811,45 +828,50 @@ log.info("Parse sq relations"); - File minfo = getMinfoDir(); - File sqDir = new File(minfo, MINFO_SQ_DIR); + final File minfo = getMinfoDir(); + final File sqDir = new File(minfo, MINFO_SQ_DIR); - File[] files = sqDir.listFiles(); + final File[] files = sqDir.listFiles(); if (files == null) { log.warn("Cannot read directory '" + sqDir + "'"); return; } - SQRelationParser parser = new SQRelationParser(getPeer()); + final SQRelationParser parser = new SQRelationParser(getPeer()); - for (File file: files) { + for (final File file: files) { parser.parse(file); } - sqRelations = parser.getSQRelations(); + this.sqRelations = parser.getSQRelations(); - log.debug("Parsed " + sqRelations.size() + " SQ relations."); + log.debug("Parsed " + this.sqRelations.size() + " SQ relations."); } - protected void parseBedHeights(File dir) throws IOException { + protected void parseBedHeights(final File dir) throws IOException { log.debug("Parse bed height singles"); - File[] files = dir.listFiles(); + final File[] files = dir.listFiles(new FilenameFilter() { + @Override + public boolean accept(final File dir, final String name) { + return name.toLowerCase().endsWith(".csv"); + } + }); if (files == null) { log.warn("Cannot read directory '" + dir + "'"); 
return; } - BedHeightParser parser = new BedHeightParser(); + final BedHeightParser parser = new BedHeightParser(); - for (File file: files) { + for (final File file: files) { parser.parse(file); } - bedHeights = parser.getBedHeights(); + this.bedHeights = parser.getBedHeights(); } public void parseFloodWater() throws IOException { @@ -860,40 +882,40 @@ log.info("Parse flood water wst file"); - File riverDir = wstFile.getParentFile().getParentFile(); + final File riverDir = this.wstFile.getParentFile().getParentFile(); - File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); + final File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); if (!dir.isDirectory() || !dir.canRead()) { log.info("no directory '" + dir + "' found"); return; } - File [] files = dir.listFiles(); + final File [] files = dir.listFiles(); if (files == null) { log.warn("cannot read '" + dir + "'"); return; } - for (File file: files) { + for (final File file: files) { if (!file.isFile() || !file.canRead()) { continue; } - String name = file.getName().toLowerCase(); + final String name = file.getName().toLowerCase(); if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { continue; } log.info("found file '" + file.getName() + "'"); try { - WstParser wstParser = new WstParser(); + final WstParser wstParser = new WstParser(); wstParser.parse(file); - ImportWst iw = wstParser.getWst(); + final ImportWst iw = wstParser.getWst(); iw.setKind(4); iw.setDescription(FLOOD_WATER + "/" + iw.getDescription()); - floodWater.add(iw); + this.floodWater.add(iw); } - catch (WstParser.ParseException e) { + catch (final WstParser.ParseException e) { log.error(e.getMessage()); } } @@ -907,31 +929,31 @@ log.info("Parse official wst files"); - File riverDir = wstFile.getParentFile().getParentFile(); + final File riverDir = this.wstFile.getParentFile().getParentFile(); - for (String folder: OFFICIAL_LINES_FOLDERS) { - File dir = FileTools.repair(new File(riverDir, folder)); + for (final String folder: 
OFFICIAL_LINES_FOLDERS) { + final File dir = FileTools.repair(new File(riverDir, folder)); if (!dir.isDirectory() || !dir.canRead()) { log.info("no directory '" + folder + "' found"); continue; } - File file = FileTools.repair(new File(dir, OFFICIAL_LINES)); + final File file = FileTools.repair(new File(dir, OFFICIAL_LINES)); if (!file.isFile() || !file.canRead()) { log.warn("no official lines wst file found"); continue; } log.debug("Found WST file: " + file); - ImportWst iw = new ImportWst( - ImportOfficialWstColumn.COLUMN_FACTORY); + final ImportWst iw = new ImportWst( + ImportOfficialWstColumn.COLUMN_FACTORY); - WstParser wstParser = new WstParser(iw); + final WstParser wstParser = new WstParser(iw); try { wstParser.parse(file); } - catch (WstParser.ParseException e) { + catch (final WstParser.ParseException e) { log.error(e.getMessage()); continue; } @@ -939,41 +961,41 @@ iw.setKind(3); iw.setDescription(folder + "/" + iw.getDescription()); - File configFile = FileTools.repair( - new File(dir, OFFICIAL_LINES_CONFIG)); + final File configFile = FileTools.repair( + new File(dir, OFFICIAL_LINES_CONFIG)); if (!configFile.isFile() || !configFile.canRead()) { log.warn("no config file for official lines found"); } else { - OfficialLinesConfigParser olcp = - new OfficialLinesConfigParser(); + final OfficialLinesConfigParser olcp = + new OfficialLinesConfigParser(); try { olcp.parse(configFile); } - catch (IOException ioe) { + catch (final IOException ioe) { log.warn("Error reading offical lines config", ioe); } - List<String> mainValueNames = olcp.getMainValueNames(); + final List<String> mainValueNames = olcp.getMainValueNames(); if (mainValueNames.isEmpty()) { log.warn( - "config file for offical lines contains no entries"); + "config file for offical lines contains no entries"); } else { // Join as much as possible. 
- Iterator<ImportWstColumn> wi = iw.getColumns().iterator(); - Iterator<String> si = olcp.getMainValueNames().iterator(); + final Iterator<ImportWstColumn> wi = iw.getColumns().iterator(); + final Iterator<String> si = olcp.getMainValueNames().iterator(); while (wi.hasNext() && si.hasNext()) { - ImportOfficialWstColumn wc = - (ImportOfficialWstColumn)wi.next(); - String name = si.next(); - ImportOfficialLine iol = - new ImportOfficialLine(name, wc); + final ImportOfficialWstColumn wc = + (ImportOfficialWstColumn)wi.next(); + final String name = si.next(); + final ImportOfficialLine iol = + new ImportOfficialLine(name, wc); wc.setOfficialLine(iol); } } } - officialLines.add(iw); + this.officialLines.add(iw); } // for all folders } @@ -986,42 +1008,42 @@ log.info("Parse fixation wst files"); - File riverDir = wstFile.getParentFile().getParentFile(); + final File riverDir = this.wstFile.getParentFile().getParentFile(); - File fixDir = FileTools.repair( - new File(riverDir, FIXATIONS)); + final File fixDir = FileTools.repair( + new File(riverDir, FIXATIONS)); if (!fixDir.isDirectory() || !fixDir.canRead()) { log.info("no fixation wst file directory found"); return; } - File [] files = fixDir.listFiles(); + final File [] files = fixDir.listFiles(); if (files == null) { log.warn("cannot read fixations wst file directory"); return; } - for (File file: files) { + for (final File file: files) { if (!file.isFile() || !file.canRead()) { continue; } - String name = file.getName().toLowerCase(); + final String name = file.getName().toLowerCase(); if (!name.endsWith(".wst")) { continue; } log.debug("Found WST file: " + file); try { - WstParser wstParser = new WstParser(); + final WstParser wstParser = new WstParser(); wstParser.parse(file); - ImportWst iw = wstParser.getWst(); + final ImportWst iw = wstParser.getWst(); iw.setKind(2); iw.setDescription(FIXATIONS+ "/" + iw.getDescription()); - fixations.add(iw); + this.fixations.add(iw); } - catch (WstParser.ParseException e) { + 
catch (final WstParser.ParseException e) { log.error(e.getMessage()); } } @@ -1035,43 +1057,43 @@ log.info("Parse extra longitudinal wst files"); - File riverDir = wstFile.getParentFile().getParentFile(); + final File riverDir = this.wstFile.getParentFile().getParentFile(); - File extraDir = FileTools.repair( - new File(riverDir, EXTRA_LONGITUDINALS)); + final File extraDir = FileTools.repair( + new File(riverDir, EXTRA_LONGITUDINALS)); if (!extraDir.isDirectory() || !extraDir.canRead()) { log.info("no extra longitudinal wst file directory found"); return; } - File [] files = extraDir.listFiles(); + final File [] files = extraDir.listFiles(); if (files == null) { log.warn("cannot read extra longitudinal wst file directory"); return; } - for (File file: files) { + for (final File file: files) { if (!file.isFile() || !file.canRead()) { continue; } - String name = file.getName().toLowerCase(); + final String name = file.getName().toLowerCase(); if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { continue; } log.debug("Found WST file: " + file); try { - WstParser wstParser = new WstParser(); + final WstParser wstParser = new WstParser(); wstParser.parse(file); - ImportWst iw = wstParser.getWst(); + final ImportWst iw = wstParser.getWst(); iw.setKind(1); iw.setDescription( - EXTRA_LONGITUDINALS + "/" + iw.getDescription()); - extraWsts.add(iw); + EXTRA_LONGITUDINALS + "/" + iw.getDescription()); + this.extraWsts.add(iw); } - catch (WstParser.ParseException e) { + catch (final WstParser.ParseException e) { log.error(e.getMessage()); } } @@ -1084,13 +1106,13 @@ return; } - WstParser wstParser = new WstParser(); + final WstParser wstParser = new WstParser(); try { - wstParser.parse(wstFile); - wst = wstParser.getWst(); - wst.setKmUp(wst.guessWaterLevelIncreasing()); + wstParser.parse(this.wstFile); + this.wst = wstParser.getWst(); + this.wst.setKmUp(this.wst.guessWaterLevelIncreasing()); } - catch (WstParser.ParseException e) { + catch (final WstParser.ParseException 
e) { log.error(e.getMessage()); } } @@ -1101,7 +1123,7 @@ return; } - File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT); + File gltFile = new File(this.wstFile.getParentFile(), PEGEL_GLT); gltFile = FileTools.repair(gltFile); if (!gltFile.isFile() || !gltFile.canRead()) { @@ -1109,12 +1131,12 @@ return; } - PegelGltParser pgltp = new PegelGltParser(); + final PegelGltParser pgltp = new PegelGltParser(); pgltp.parse(gltFile); - gauges = pgltp.getGauges(); + this.gauges = pgltp.getGauges(); - for (ImportGauge gauge: gauges) { + for (final ImportGauge gauge: this.gauges) { gauge.parseDependencies(); } } @@ -1125,12 +1147,12 @@ return; } - File riverDir = wstFile.getParentFile().getParentFile(); - AnnotationsParser aparser = - new AnnotationsParser(annotationClassifier); + final File riverDir = this.wstFile.getParentFile().getParentFile(); + final AnnotationsParser aparser = + new AnnotationsParser(this.annotationClassifier); aparser.parse(riverDir); - annotations = aparser.getAnnotations(); + this.annotations = aparser.getAnnotations(); } public void parseHYKs() { @@ -1140,33 +1162,33 @@ } log.info("looking for HYK files"); - HYKParser parser = new HYKParser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final HYKParser parser = new HYKParser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> parser.parseHYKs(riverDir, new HYKParser.Callback() { - Set<HashedFile> hfs = new HashSet<HashedFile>(); + Set<HashedFile> hfs = new HashSet<>(); @Override - public boolean hykAccept(File file) { - HashedFile hf = new HashedFile(file); - boolean success = hfs.add(hf); + public boolean hykAccept(final File file) { + final HashedFile hf = new HashedFile(file); + final boolean success = this.hfs.add(hf); if (!success) { log.warn("HYK file '" + file - + "' seems to be a duplicate."); + + "' seems to be a 
duplicate."); } return success; } @Override - public void hykParsed(HYKParser parser) { + public void hykParsed(final HYKParser parser) { log.debug("callback from HYK parser"); - ImportHYK hyk = parser.getHYK(); + final ImportHYK hyk = parser.getHYK(); hyk.setRiver(ImportRiver.this); - hyks.add(hyk); + ImportRiver.this.hyks.add(hyk); } }); } @@ -1178,14 +1200,14 @@ log.info("skip parsing W80s"); return; } - W80Parser parser = new W80Parser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final W80Parser parser = new W80Parser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> - ImportRiverCrossSectionParserCallback w80Callback = - new ImportRiverCrossSectionParserCallback("w80"); + final ImportRiverCrossSectionParserCallback w80Callback = + new ImportRiverCrossSectionParserCallback("w80"); parser.parseW80s(riverDir, w80Callback); } @@ -1195,20 +1217,20 @@ log.info("skip parsing W80 csvs"); return; } - W80CSVParser parser = new W80CSVParser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final W80CSVParser parser = new W80CSVParser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> // Construct the Cross-Section-Data path. 
- File csDir = new File(riverDir.getPath() - + File.separator + "Geodaesie" - + File.separator + "Querprofile" - + File.separator + "QP-Daten"); + final File csDir = new File(riverDir.getPath() + + File.separator + "Geodaesie" + + File.separator + "Querprofile" + + File.separator + "QP-Daten"); - ImportRiverCrossSectionParserCallback w80CSVCallback = - new ImportRiverCrossSectionParserCallback("w80-csv"); + final ImportRiverCrossSectionParserCallback w80CSVCallback = + new ImportRiverCrossSectionParserCallback("w80-csv"); parser.parseW80CSVs(csDir, w80CSVCallback); } @@ -1222,14 +1244,14 @@ log.info("skip parsing DA50s"); return; } - DA50Parser parser = new DA50Parser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final DA50Parser parser = new DA50Parser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> - ImportRiverCrossSectionParserCallback da50Callback = - new ImportRiverCrossSectionParserCallback("da50"); + final ImportRiverCrossSectionParserCallback da50Callback = + new ImportRiverCrossSectionParserCallback("da50"); parser.parseDA50s(riverDir, da50Callback); } @@ -1245,14 +1267,14 @@ } log.info("looking for DA66 files"); - DA66Parser parser = new DA66Parser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final DA66Parser parser = new DA66Parser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> - ImportRiverCrossSectionParserCallback da66Callback = - new ImportRiverCrossSectionParserCallback("da66"); + final ImportRiverCrossSectionParserCallback da66Callback = + new ImportRiverCrossSectionParserCallback("da66"); parser.parseDA66s(riverDir, da66Callback); } @@ -1265,21 +1287,21 @@ } log.info("looking for PRF files"); - PRFParser 
parser = new PRFParser(); - File riverDir = wstFile - .getParentFile() // Basisdaten - .getParentFile() // Hydrologie - .getParentFile(); // <river> + final PRFParser parser = new PRFParser(); + final File riverDir = this.wstFile + .getParentFile() // Basisdaten + .getParentFile() // Hydrologie + .getParentFile(); // <river> - ImportRiverCrossSectionParserCallback prfCallback = - new ImportRiverCrossSectionParserCallback("prf"); + final ImportRiverCrossSectionParserCallback prfCallback = + new ImportRiverCrossSectionParserCallback("prf"); parser.parsePRFs(riverDir, prfCallback); } - public static Date yearToDate(int year) { - Calendar cal = Calendar.getInstance(); + public static Date yearToDate(final int year) { + final Calendar cal = Calendar.getInstance(); cal.set(year, 5, 15, 12, 0, 0); - long ms = cal.getTimeInMillis(); + final long ms = cal.getTimeInMillis(); cal.setTimeInMillis(ms - ms%1000); return cal.getTime(); } @@ -1288,13 +1310,13 @@ /* test whether river is already in database. * Otherwise it makes no sense to skip waterlevel model WST-file * because the altitude reference is taken from there. */ - Session session = ImporterSession.getInstance().getDatabaseSession(); - Query query = session.createQuery("from River where name=:name"); - query.setString("name", name); - List<River> rivers = query.list(); + final Session session = ImporterSession.getInstance().getDatabaseSession(); + final Query query = session.createQuery("from River where name=:name"); + query.setString("name", this.name); + final List<River> rivers = query.list(); if (rivers.isEmpty() && Config.INSTANCE.skipWst()){ log.error("River not yet in database. 
" - + "You cannot skip importing waterlevel model."); + + "You cannot skip importing waterlevel model."); return; } @@ -1321,15 +1343,17 @@ storeWaterlevelDifferences(); storeSQRelations(); storeOfficialNumber(); + this.sinfoImporter.store(); + this.uinfoImporter.store(); } public void storeWstUnit() { - if (wst == null) { + if (this.wst == null) { log.warn("No unit given. " - + "Waterlevel-model WST-file has to be imported already."); + + "Waterlevel-model WST-file has to be imported already."); } else { - wstUnit = wst.getUnit(); + this.wstUnit = this.wst.getUnit(); } } @@ -1337,7 +1361,7 @@ if (!Config.INSTANCE.skipHYKs()) { log.info("store HYKs"); getPeer(); - for (ImportHYK hyk: hyks) { + for (final ImportHYK hyk: this.hyks) { hyk.storeDependencies(); } } @@ -1345,28 +1369,28 @@ public void storeCrossSections() { if (!Config.INSTANCE.skipPRFs() - || !Config.INSTANCE.skipDA66s() - || !Config.INSTANCE.skipDA50s() - || !Config.INSTANCE.skipW80s() - || !Config.INSTANCE.skipW80CSVs()) { + || !Config.INSTANCE.skipDA66s() + || !Config.INSTANCE.skipDA50s() + || !Config.INSTANCE.skipW80s() + || !Config.INSTANCE.skipW80CSVs()) { log.info("store cross sections"); getPeer(); - for (ImportCrossSection crossSection: crossSections) { + for (final ImportCrossSection crossSection: this.crossSections) { crossSection.storeDependencies(); } } } public void storeWst() { - if (wst != null && !Config.INSTANCE.skipWst()) { - River river = getPeer(); - wst.storeDependencies(river); + if (this.wst != null && !Config.INSTANCE.skipWst()) { + final River river = getPeer(); + this.wst.storeDependencies(river); // The flow direction of the main wst and the corresponding // waterlevels determine if the river is 'km_up'. 
- Session session = ImporterSession.getInstance() - .getDatabaseSession(); - river.setKmUp(wst.getKmUp()); + final Session session = ImporterSession.getInstance() + .getDatabaseSession(); + river.setKmUp(this.wst.getKmUp()); session.save(river); } } @@ -1374,8 +1398,8 @@ public void storeFixations() { if (!Config.INSTANCE.skipFixations()) { log.info("store fixation wsts"); - River river = getPeer(); - for (ImportWst fWst: fixations) { + final River river = getPeer(); + for (final ImportWst fWst: this.fixations) { log.debug("Fixation name: " + fWst.getDescription()); fWst.storeDependencies(river); } @@ -1387,9 +1411,9 @@ public void storeWaterlevels() { if (!Config.INSTANCE.skipWaterlevels()) - log.info("store waterlevel wsts from csv"); - River river = getPeer(); - for (ImportWst wWst: waterlevels) { + log.info("store waterlevel wsts from csv"); + final River river = getPeer(); + for (final ImportWst wWst: this.waterlevels) { log.debug("Waterlevel name: " + wWst.getDescription()); wWst.storeDependencies(river); } @@ -1400,9 +1424,9 @@ public void storeWaterlevelDifferences() { if (!Config.INSTANCE.skipWaterlevelDifferences()) - log.info("store waterleveldifferences wsts from csv"); - River river = getPeer(); - for (ImportWst dWst: waterlevelDifferences) { + log.info("store waterleveldifferences wsts from csv"); + final River river = getPeer(); + for (final ImportWst dWst: this.waterlevelDifferences) { log.debug("water.diff.: name " + dWst.getDescription()); dWst.storeDependencies(river); } @@ -1412,8 +1436,8 @@ public void storeExtraWsts() { if (!Config.INSTANCE.skipExtraWsts()) { log.info("store extra wsts"); - River river = getPeer(); - for (ImportWst wst: extraWsts) { + final River river = getPeer(); + for (final ImportWst wst: this.extraWsts) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); } @@ -1421,24 +1445,24 @@ } public void storeOfficialLines() { - if (Config.INSTANCE.skipOfficialLines() || officialLines.isEmpty()) { + if 
(Config.INSTANCE.skipOfficialLines() || this.officialLines.isEmpty()) { return; } log.info("store official lines wsts"); - River river = getPeer(); - for (ImportWst wst: officialLines) { + final River river = getPeer(); + for (final ImportWst wst: this.officialLines) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); // Store the official lines after the columns are store. - for (ImportWstColumn wc: wst.getColumns()) { - ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc; - ImportOfficialLine ioc = owc.getOfficialLine(); + for (final ImportWstColumn wc: wst.getColumns()) { + final ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc; + final ImportOfficialLine ioc = owc.getOfficialLine(); if (ioc != null) { if (ioc.getPeer(river) == null) { log.warn("Cannot store official line: " - + ioc.getName()); + + ioc.getName()); } } } @@ -1448,8 +1472,8 @@ public void storeFloodWater() { if (!Config.INSTANCE.skipFloodWater()) { log.info("store flood water wsts"); - River river = getPeer(); - for (ImportWst wst: floodWater) { + final River river = getPeer(); + for (final ImportWst wst: this.floodWater) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); } @@ -1460,8 +1484,8 @@ public void storeFloodProtection() { if (!Config.INSTANCE.skipFloodProtection()) { log.info("store flood protection wsts"); - River river = getPeer(); - for (ImportWst wst: floodProtection) { + final River river = getPeer(); + for (final ImportWst wst: this.floodProtection) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); } @@ -1472,13 +1496,13 @@ public void storeBedHeight() { if (!Config.INSTANCE.skipBedHeight()) { log.info("store bed heights"); - River river = getPeer(); + final River river = getPeer(); - if (bedHeights != null) { - for (ImportBedHeight tmp: bedHeights) { - ImportBedHeight single = (ImportBedHeight) tmp; + if (this.bedHeights != null) { + for (final ImportBedHeight tmp: this.bedHeights) { + final 
ImportBedHeight single = tmp; - String desc = single.getDescription(); + final String desc = single.getDescription(); log.debug("name: " + desc); @@ -1496,10 +1520,10 @@ if (!Config.INSTANCE.skipSedimentDensity()) { log.info("store sediment density"); - River river = getPeer(); + final River river = getPeer(); - for (ImportSedimentDensity density: sedimentDensities) { - String desc = density.getDescription(); + for (final ImportSedimentDensity density: this.sedimentDensities) { + final String desc = density.getDescription(); log.debug("name: " + desc); @@ -1512,10 +1536,10 @@ if (!Config.INSTANCE.skipPorosity()) { log.info("store porosity"); - River river = getPeer(); + final River river = getPeer(); - for (ImportPorosity porosity: porosities) { - String desc = porosity.getDescription(); + for (final ImportPorosity porosity: this.porosities) { + final String desc = porosity.getDescription(); log.debug("name: " + desc); @@ -1528,9 +1552,9 @@ if (!Config.INSTANCE.skipMorphologicalWidth()) { log.info("store morphological width"); - River river = getPeer(); + final River river = getPeer(); - for (ImportMorphWidth width: morphologicalWidths) { + for (final ImportMorphWidth width: this.morphologicalWidths) { width.storeDependencies(river); } } @@ -1540,14 +1564,14 @@ if (!Config.INSTANCE.skipFlowVelocity()) { log.info("store flow velocity"); - River river = getPeer(); + final River river = getPeer(); - for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels - ) { + for (final ImportFlowVelocityModel flowVelocityModel: this.flowVelocityModels + ) { flowVelocityModel.storeDependencies(river); } - for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) { + for (final ImportFlowVelocityMeasurement m: this.flowVelocityMeasurements) { m.storeDependencies(river); } } @@ -1558,9 +1582,9 @@ if (!Config.INSTANCE.skipSedimentLoadLS()) { log.info("store sediment load longitudinal section data"); - River river = getPeer(); + final River river = getPeer(); - for 
(ImportSedimentLoadLS sedimentLoadLS: sedimentLoadLSs) { + for (final ImportSedimentLoadLS sedimentLoadLS: this.sedimentLoadLSs) { sedimentLoadLS.storeDependencies(river); } } @@ -1571,7 +1595,7 @@ if (!Config.INSTANCE.skipSedimentLoad()) { log.info("store sediment load data at measurement stations"); - for (ImportSedimentLoad sedimentLoad: sedimentLoads) { + for (final ImportSedimentLoad sedimentLoad: this.sedimentLoads) { sedimentLoad.storeDependencies(); } } @@ -1582,12 +1606,12 @@ if (!Config.INSTANCE.skipMeasurementStations()) { log.info("store measurement stations"); - River river = getPeer(); + final River river = getPeer(); int count = 0; - for (ImportMeasurementStation station: measurementStations) { - boolean success = station.storeDependencies(river); + for (final ImportMeasurementStation station: this.measurementStations) { + final boolean success = station.storeDependencies(river); if (success) { count++; } @@ -1604,7 +1628,7 @@ int count = 0; - for (ImportSQRelation sqRelation: sqRelations) { + for (final ImportSQRelation sqRelation: this.sqRelations) { sqRelation.storeDependencies(); count++; } @@ -1616,8 +1640,8 @@ public void storeAnnotations() { if (!Config.INSTANCE.skipAnnotations()) { - River river = getPeer(); - for (ImportAnnotation annotation: annotations) { + final River river = getPeer(); + for (final ImportAnnotation annotation: this.annotations) { annotation.getPeer(river); } } @@ -1626,10 +1650,10 @@ public void storeGauges() { if (!Config.INSTANCE.skipGauges()) { log.info("store gauges:"); - River river = getPeer(); - Session session = ImporterSession.getInstance() - .getDatabaseSession(); - for (ImportGauge gauge: gauges) { + final River river = getPeer(); + final Session session = ImporterSession.getInstance() + .getDatabaseSession(); + for (final ImportGauge gauge: this.gauges) { log.info("\tgauge: " + gauge.getName()); gauge.storeDependencies(river); ImporterSession.getInstance().getDatabaseSession(); @@ -1639,31 +1663,31 @@ } 
public River getPeer() { - if (peer == null) { - Session session = ImporterSession.getInstance() - .getDatabaseSession(); - Query query = session.createQuery("from River where name=:name"); + if (this.peer == null) { + final Session session = ImporterSession.getInstance() + .getDatabaseSession(); + final Query query = session.createQuery("from River where name=:name"); Unit u = null; - if (wstUnit != null) { - u = wstUnit.getPeer(); + if (this.wstUnit != null) { + u = this.wstUnit.getPeer(); } - query.setString("name", name); - List<River> rivers = query.list(); + query.setString("name", this.name); + final List<River> rivers = query.list(); if (rivers.isEmpty()) { - log.info("Store new river '" + name + "'"); - peer = new River(name, u, modelUuid); + log.info("Store new river '" + this.name + "'"); + this.peer = new River(this.name, u, this.modelUuid); if (!Config.INSTANCE.skipBWASTR()) { - peer.setOfficialNumber(officialNumber); + this.peer.setOfficialNumber(this.officialNumber); } - session.save(peer); + session.save(this.peer); } else { - peer = rivers.get(0); + this.peer = rivers.get(0); } } - return peer; + return this.peer; } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :