Mercurial > dive4elements > river
view backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java @ 9594:225e48df608c
Softwaretests...20181219 6.1: overflow day compute with 365.25 instead of 365.0
author | mschaefer |
---|---|
date | Tue, 05 Feb 2019 15:24:22 +0100 |
parents | ae76f618d990 |
children | a2a42a6bac6b |
line wrap: on
line source
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde * Software engineering by Intevation GmbH * * This file is Free Software under the GNU AGPL (>=v3) * and comes with ABSOLUTELY NO WARRANTY! Check out the * documentation coming with Dive4Elements River for details. */ package org.dive4elements.river.importer; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.dive4elements.artifacts.common.utils.FileTools; import org.dive4elements.artifacts.common.utils.FileTools.HashedFile; import org.dive4elements.river.backend.utils.DouglasPeuker; import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser; import org.dive4elements.river.importer.parsers.AnnotationClassifier; import org.dive4elements.river.importer.parsers.AnnotationsParser; import org.dive4elements.river.importer.parsers.BedHeightParser; import org.dive4elements.river.importer.parsers.CrossSectionParser; import org.dive4elements.river.importer.parsers.DA50Parser; import org.dive4elements.river.importer.parsers.DA66Parser; import org.dive4elements.river.importer.parsers.FlowVelocityMeasurementParser; import org.dive4elements.river.importer.parsers.FlowVelocityModelParser; import org.dive4elements.river.importer.parsers.HYKParser; import org.dive4elements.river.importer.parsers.MeasurementStationsParser; import org.dive4elements.river.importer.parsers.MorphologicalWidthParser; import org.dive4elements.river.importer.parsers.OfficialLinesConfigParser; import org.dive4elements.river.importer.parsers.PRFParser; import org.dive4elements.river.importer.parsers.PegelGltParser; import org.dive4elements.river.importer.parsers.PorosityParser; import org.dive4elements.river.importer.parsers.SQRelationParser; import 
org.dive4elements.river.importer.parsers.SedimentDensityParser;
import org.dive4elements.river.importer.parsers.SedimentLoadLSParser;
import org.dive4elements.river.importer.parsers.SedimentLoadParser;
import org.dive4elements.river.importer.parsers.W80CSVParser;
import org.dive4elements.river.importer.parsers.W80Parser;
import org.dive4elements.river.importer.parsers.WaterlevelDifferencesParser;
import org.dive4elements.river.importer.parsers.WaterlevelParser;
import org.dive4elements.river.importer.parsers.WstParser;
import org.dive4elements.river.importer.sinfo.SInfoImporter;
import org.dive4elements.river.importer.uinfo.UInfoImporter;
import org.dive4elements.river.model.River;
import org.dive4elements.river.model.Unit;
import org.hibernate.Query;
import org.hibernate.Session;

/** Import all river-related data (files) that can be found. */
public class ImportRiver
{
    /** Private log. */
    private static Logger log = Logger.getLogger(ImportRiver.class);

    /** Name of the gauge overview file. */
    public static final String PEGEL_GLT = "PEGEL.GLT";

    /** Directory containing fixation wst files. */
    public static final String FIXATIONS = "Fixierungen";

    /** Directory containing extra longitudinal section wst files. */
    public static final String EXTRA_LONGITUDINALS =
        "Zus.Laengsschnitte";

    /** Folders searched for the official lines wst/config files. */
    public static final String [] OFFICIAL_LINES_FOLDERS = {
        "Basisdaten",
        "Fixierungen" };

    public static final String OFFICIAL_LINES =
        "Amtl_Linien.wst";

    public static final String OFFICIAL_LINES_CONFIG =
        "Amtl_Linien.config";

    public static final String FLOOD_WATER = "HW-Marken";

    public static final String FLOOD_PROTECTION =
        "HW-Schutzanlagen";

    /** Root directory of the morphological (MINFO) data. */
    public static final String MINFO_DIR = "Morphologie";

    public static final String BED_HEIGHT_DIR = "Sohlhoehen";

    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

    public static final String POROSITY_DIR = "Porositaet";

    public static final String MORPHOLOGICAL_WIDTH_DIR =
        "morphologische_Breite";

    public static final String FLOW_VELOCITY_DIR =
        "Geschwindigkeit_Schubspannung";

    public static final String FLOW_VELOCITY_MODEL =
        "Modellrechnungen";

    public static final String FLOW_VELOCITY_MEASUREMENTS =
        "v-Messungen";

    public static final String SEDIMENT_LOAD_DIR = "Fracht";

    public static final String SEDIMENT_LOAD_LS_DIR = "Laengsschnitte";

    public static final String SEDIMENT_LOAD_MS_DIR = "Messstellen";

    public static final String SEDIMENT_LOAD_SINGLE_DIR = "Einzeljahre";

    public static final String SEDIMENT_LOAD_EPOCH_DIR = "Epochen";

    public static final String SEDIMENT_LOAD_OFF_EPOCH_DIR =
        "amtliche Epochen";

    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";

    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";

    public static final String MINFO_WATERLEVEL_DIFF_DIR =
        "Wasserspiegeldifferenzen";

    public static final String MINFO_BASE_DIR = "Basisdaten";

    public static final String MINFO_CORE_DATA_FILE =
        "Stammdaten_Messstellen.csv";

    public static final String MINFO_SQ_DIR =
        "Feststofftransport-Abfluss-Beziehung";

    /** River name. */
    protected String name;

    /** Model UUID of the river (may be null). */
    protected String modelUuid;

    /** Official river number, stored to the River peer. */
    protected Long officialNumber;

    /**
     * Path of the river/Hydrologie/Basisdaten/river.wst file from which
     * all other file paths are derived
     */
    protected File wstFile;

    protected File bbInfoFile;

    protected List<ImportGauge> gauges;

    protected List<ImportAnnotation> annotations;

    protected List<ImportHYK> hyks;

    protected List<ImportCrossSection> crossSections;

    /** Extra longitudinal section wsts (kind 1). */
    protected List<ImportWst> extraWsts;

    /** Fixation wsts (kind 2). */
    protected List<ImportWst> fixations;

    /** Official lines wsts (kind 3). */
    protected List<ImportWst> officialLines;

    /** Flood water wsts (kind 4). */
    protected List<ImportWst> floodWater;

    /** Flood protection wsts (kind 5). */
    protected List<ImportWst> floodProtection;

    /** Wst-structures from waterlevel-csv files. */
    protected List<ImportWst> waterlevels;

    /** Wst-structures from waterlevel-difference-csv files.
     */
    protected List<ImportWst> waterlevelDifferences;

    protected List<ImportBedHeight> bedHeights;

    protected List<ImportSedimentDensity> sedimentDensities;

    protected List<ImportPorosity> porosities;

    protected List<ImportMorphWidth> morphologicalWidths;

    protected List<ImportFlowVelocityModel> flowVelocityModels;

    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;

    protected List<ImportSedimentLoadLS> sedimentLoadLSs;

    protected List<ImportSedimentLoad> sedimentLoads;

    protected List<ImportMeasurementStation> measurementStations;

    protected List<ImportSQRelation> sqRelations;

    /** The waterlevel-model wst parsed from the river.wst file. */
    protected ImportWst wst;

    /** Unit taken from the waterlevel-model wst. */
    protected ImportUnit wstUnit;

    protected AnnotationClassifier annotationClassifier;

    /** Database-mapped River instance. */
    protected River peer;

    /**
     * Importer for a river's S-INFO files.
     */
    private final SInfoImporter sinfoImporter;

    /**
     * Importer for a river's U-INFO files.
     */
    private final UInfoImporter uinfoImporter;

    /** Callback-implementation for CrossSectionParsers. */
    private class ImportRiverCrossSectionParserCallback
        implements CrossSectionParser.Callback {

        /** Hashes of files accepted so far, used to detect duplicates. */
        private final Set<HashedFile> files = new HashSet<>();

        /** Type label (e.g. "prf", "da66") used for logging only. */
        private final String type;

        /**
         * Create new Callback, given type which is used for logging
         * purposes only.
         */
        public ImportRiverCrossSectionParserCallback(final String type) {
            this.type = type;
        }

        /** Accept file if not duplicate. */
        @Override
        public boolean accept(final File file) {
            final HashedFile hf = new HashedFile(file);
            final boolean success = this.files.add(hf);
            if (!success) {
                log.warn(this.type + " file '" + file
                    + "' seems to be a duplicate.");
            }
            return success;
        }

        /** Add crosssection. */
        @Override
        public void parsed(final CrossSectionParser parser) {
            log.debug("callback from " + this.type + " parser");

            final String description = parser.getDescription();
            final Integer year = parser.getYear();
            // A non-null year is wrapped as a time interval
            // (mid-year date, see yearToDate()).
            final ImportTimeInterval ti = year != null
                ? new ImportTimeInterval(yearToDate(year))
                : null;

            final Map<Double, List<XY>> data = parser.getData();

            final List<ImportCrossSectionLine> lines =
                new ArrayList<>(data.size());

            // Optional point reduction; a null epsilon means
            // "keep all points".
            final Double simplificationEpsilon =
                Config.INSTANCE.getCrossSectionSimplificationEpsilon();

            long numReadPoints = 0L;
            long numRemainingPoints = 0L;

            for (final Map.Entry<Double, List<XY>> entry: data.entrySet()) {
                final Double km = entry.getKey();
                List<XY> points = entry.getValue();
                numReadPoints += points.size();
                if (simplificationEpsilon != null) {
                    points = DouglasPeuker.simplify(
                        points, simplificationEpsilon);
                }
                numRemainingPoints += points.size();
                lines.add(new ImportCrossSectionLine(km, points));
            }

            ImportRiver.this.addCrossSections(description, ti, lines);

            if (simplificationEpsilon != null) {
                // Report how strongly the simplification reduced the data.
                final double percent = numReadPoints > 0L
                    ? ((double)numRemainingPoints/numReadPoints)*100d
                    : 0d;
                log.info(String.format(
                    "Number of points in cross section: %d / %d (%.2f%%)",
                    numReadPoints, numRemainingPoints, percent));
            }
        }
    } // ImportRiverCrossSectionParserCallback

    /** Register a new cross section with this river. */
    private void addCrossSections(
        final String description,
        final ImportTimeInterval ti,
        final List<ImportCrossSectionLine> lines
    ) {
        this.crossSections.add(
            new ImportCrossSection(this, description, ti, lines));
    }

    /** Create an ImportRiver with empty result collections. */
    public ImportRiver() {
        this.hyks = new ArrayList<>();
        this.crossSections = new ArrayList<>();
        this.extraWsts = new ArrayList<>();
        this.fixations = new ArrayList<>();
        this.officialLines = new ArrayList<>();
        this.floodWater = new ArrayList<>();
        this.waterlevels = new ArrayList<>();
        this.waterlevelDifferences = new ArrayList<>();
        this.floodProtection = new ArrayList<>();
        this.sedimentDensities = new ArrayList<>();
        this.porosities = new ArrayList<>();
        this.morphologicalWidths = new ArrayList<>();
        this.flowVelocityModels = new ArrayList<>();
        this.flowVelocityMeasurements = new ArrayList<>();
        this.sedimentLoadLSs = new ArrayList<>();
        this.sedimentLoads = new ArrayList<>();
        this.measurementStations = new ArrayList<>();
this.sqRelations = new ArrayList<>(); this.sinfoImporter = new SInfoImporter(); this.uinfoImporter = new UInfoImporter(); } public ImportRiver( final String name, final String modelUuid, final File wstFile, final File bbInfoFile, final AnnotationClassifier annotationClassifier ) { this(); this.name = name; this.modelUuid = modelUuid; this.wstFile = wstFile; this.bbInfoFile = bbInfoFile; this.annotationClassifier = annotationClassifier; } public String getName() { return this.name; } public void setName(final String name) { this.name = name; } public String getModelUuid() { return this.modelUuid; } public void setModelUuid(final String modelUuid) { this.modelUuid = modelUuid; } public Long getOfficialNumber() { return this.officialNumber; } public void setOfficialNumber(final Long officialNumber) { this.officialNumber = officialNumber; } public File getWstFile() { return this.wstFile; } public void setWstFile(final File wstFile) { this.wstFile = wstFile; } public File getBBInfo() { return this.bbInfoFile; } public void setBBInfo(final File bbInfoFile) { this.bbInfoFile = bbInfoFile; } public ImportWst getWst() { return this.wst; } public void setWst(final ImportWst wst) { this.wst = wst; } private File getMinfoDir() { return new File(getRiverDir(), MINFO_DIR); } private File getRiverDir() { return this.wstFile.getParentFile().getParentFile().getParentFile(); } public AnnotationClassifier getAnnotationClassifier() { return this.annotationClassifier; } public void parseDependencies() throws IOException { log.info("Root dir is '" + getRiverDir() + "'"); parseGauges(); parseAnnotations(); parsePRFs(); parseDA66s(); parseDA50s(); parseW80s(); parseW80CSVs(); parseHYKs(); parseWst(); parseExtraWsts(); parseFixations(); parseOfficialLines(); parseFloodWater(); parseFloodProtection(); parseMeasurementStations(); parseBedHeight(); parseSedimentDensity(); parsePorosity(); parseMorphologicalWidth(); parseFlowVelocity(); parseSedimentLoadLS(); parseSedimentLoad(); 
        parseWaterlevels();
        parseWaterlevelDifferences();
        parseSQRelation();

        // S-INFO and U-INFO files are handled by dedicated importers.
        this.sinfoImporter.setup(getRiverDir(), this);
        this.sinfoImporter.parse();

        this.uinfoImporter.setup(getRiverDir(), this);
        this.uinfoImporter.parse();
    }

    /** Parse *.zus/*.wst files from HW-Schutzanlagen (kind 5). */
    public void parseFloodProtection() throws IOException {
        if (Config.INSTANCE.skipFloodProtection()) {
            log.info("skip parsing flood protection");
            return;
        }

        log.info("Parse flood protection wst file");

        final File riverDir = this.wstFile.getParentFile().getParentFile();

        final File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        final File [] files = dir.listFiles();

        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (final File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            // Only *.zus and *.wst files are considered.
            final String name = file.getName().toLowerCase();
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            try {
                final WstParser wstParser = new WstParser();
                wstParser.parse(file);
                final ImportWst iw = wstParser.getWst();
                iw.setKind(5);
                iw.setDescription(
                    FLOOD_PROTECTION + "/" + iw.getDescription());
                this.floodProtection.add(iw);
            }
            catch (final WstParser.ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /** Store the configured official number on the River peer. */
    public void storeOfficialNumber() {
        if (Config.INSTANCE.skipBWASTR()) {
            log.info("skip storing official number.");
            return;
        }
        getPeer().setOfficialNumber(this.officialNumber);
    }

    /** Parse bed height single files (Sohlhoehen/Einzeljahre). */
    public void parseBedHeight() throws IOException {
        final File minfoDir = getMinfoDir();
        final File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
        final File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);

        if (Config.INSTANCE.skipBedHeight()) {
            log.info("skip parsing bed heights.");
        }
        else {
            log.info("Parse bed heights.");
            parseBedHeights(singlesDir);
        }
    }

    /** Parse all files from the Sedimentdichte directory. */
    protected void parseSedimentDensity() throws IOException {
        if (Config.INSTANCE.skipSedimentDensity()) {
            log.info("skip parsing sediment density.");
            return;
        }

        log.debug("Parse sediment density");

        final File minfoDir = getMinfoDir();
        final File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);

        final File[] files = sediment.listFiles();

        if (files == null) {
            log.warn("Cannot read directory '" + sediment + "'");
            return;
        }

        final SedimentDensityParser parser = new SedimentDensityParser();

        for (final File file: files) {
            parser.parse(file);
        }

        this.sedimentDensities = parser.getSedimentDensities();

        log.info("Parsed " + this.sedimentDensities.size()
            + " sediment densities.");
    }

    /** Parse all files from the Porositaet directory. */
    protected void parsePorosity() throws IOException {
        if (Config.INSTANCE.skipPorosity()) {
            log.info("skip parsing porosity.");
            return;
        }

        log.debug("Parse porosity");

        final File minfoDir = getMinfoDir();
        final File porosity = new File(minfoDir, POROSITY_DIR);

        final File[] files = porosity.listFiles();

        if (files == null) {
            log.warn("Cannot read directory '" + porosity + "'");
            return;
        }

        final PorosityParser parser = new PorosityParser();

        for (final File file: files) {
            parser.parse(file);
        }

        this.porosities = parser.getPorosities();

        log.info("Parsed " + this.porosities.size() + " porosities.");
    }

    /** Parse all files from the morphologische_Breite directory. */
    protected void parseMorphologicalWidth() throws IOException {
        if (Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("skip parsing morphological width.");
            return;
        }

        log.debug("Parse morphological width");

        final File minfoDir = getMinfoDir();
        final File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);

        final File[] files = morphDir.listFiles();

        if (files == null) {
            log.warn("Cannot read directory '" + morphDir + "'");
            return;
        }

        final MorphologicalWidthParser parser = new MorphologicalWidthParser();

        for (final File file: files) {
            parser.parse(file);
        }

        this.morphologicalWidths = parser.getMorphologicalWidths();

        log.info("Parsed " + this.morphologicalWidths.size()
            + " morph. widths files.");
    }

    /** Parse flow velocity model and measurement files. */
    protected void parseFlowVelocity() throws IOException {
        if (Config.INSTANCE.skipFlowVelocity()) {
            log.info("skip parsing flow velocity");
            return;
        }

        log.debug("Parse flow velocity");

        final File minfoDir = getMinfoDir();
        final File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR);
        final File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL);
        final File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);

        final File[] modelFiles = modelDir.listFiles();
        final File[] measureFiles = measureDir.listFiles();

        if (modelFiles == null) {
            log.warn("Cannot read directory '" + modelDir + "'");
        }
        else {
            final FlowVelocityModelParser parser =
                new FlowVelocityModelParser();

            for (final File model: modelFiles) {
                log.debug("Parse file '" + model + "'");
                parser.parse(model);
            }

            this.flowVelocityModels = parser.getModels();
        }

        if (measureFiles == null) {
            log.warn("Cannot read directory '" + measureDir + "'");
        }
        else {
            final FlowVelocityMeasurementParser parser =
                new FlowVelocityMeasurementParser();

            for (final File measurement: measureFiles) {
                log.debug("Parse file '" + measurement + "'");
                parser.parse(measurement);
            }

            this.flowVelocityMeasurements = parser.getMeasurements();
        }
    }

    /** Feed the given files (descending one directory level) into the parser. */
    private void parseSedimentLoadFiles(
        final File[] files,
        final AbstractSedimentLoadParser parser
    ) throws IOException {
        for (final File file: files) {
            if (file.isDirectory()) {
                for (final File child: file.listFiles()) {
                    parser.parse(child);
                }
            }
            else {
                parser.parse(file);
            }
        }
    }

    /** Parse the Einzeljahre/Epochen/amtliche-Epochen sub-directories. */
    private void parseSedimentLoadDir(
        final File sedimentLoadDir,
        final AbstractSedimentLoadParser parser
    ) throws IOException {
        final File[] sedimentLoadSubDirs = {
            new File(sedimentLoadDir, SEDIMENT_LOAD_SINGLE_DIR),
            new File(sedimentLoadDir, SEDIMENT_LOAD_EPOCH_DIR),
            new File(sedimentLoadDir, SEDIMENT_LOAD_OFF_EPOCH_DIR),
        };

        for (final File subDir : sedimentLoadSubDirs) {
            final File[] files = subDir.listFiles();

            if (files == null || files.length == 0) {
                log.warn("Cannot read directory '" + subDir + "'");
            }
            else {
parseSedimentLoadFiles(files, parser); } } } protected void parseSedimentLoadLS() throws IOException { if (Config.INSTANCE.skipSedimentLoadLS()) { log.info("skip parsing sediment load longitudinal section data"); return; } log.debug("Parse sediment load longitudinal section data"); final SedimentLoadLSParser parser = new SedimentLoadLSParser(); final File minfoDir = getMinfoDir(); final File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR); final File sedimentLoadLSDir = new File(sedimentLoadDir, SEDIMENT_LOAD_LS_DIR); parseSedimentLoadDir(sedimentLoadLSDir, parser); this.sedimentLoadLSs = parser.getSedimentLoadLSs(); } protected void parseSedimentLoad() throws IOException { if (Config.INSTANCE.skipSedimentLoad()) { log.info( "skip parsing sediment load data at measurement stations"); return; } log.debug("Parse sediment load data at measurement stations"); final SedimentLoadParser parser = new SedimentLoadParser(getPeer()); final File minfoDir = getMinfoDir(); final File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR); final File sedimentLoadMSDir = new File(sedimentLoadDir, SEDIMENT_LOAD_MS_DIR); parseSedimentLoadDir(sedimentLoadMSDir, parser); this.sedimentLoads = parser.getSedimentLoads(); } protected void parseWaterlevels() throws IOException { if (Config.INSTANCE.skipWaterlevels()) { log.info("skip parsing waterlevels"); return; } log.info("Parse waterlevels"); final File minfo = getMinfoDir(); final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); final File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); final File[] files = wspDir.listFiles(); if (files == null) { log.warn("Cannot read directory for wl '" + wspDir + "'"); return; } final WaterlevelParser parser = new WaterlevelParser(); for (final File file: files) { parser.parse(file); } // The parsed ImportWaterlevels are converted to // 'fixation'-wsts now. 
for(final ImportWst iw: parser.getWaterlevels()) { iw.setDescription("CSV/" + iw.getDescription()); iw.setKind(7); this.waterlevels.add(iw); } } protected void parseMeasurementStations() throws IOException { if (Config.INSTANCE.skipMeasurementStations()) { log.info("skip parsing measurement stations"); return; } log.info("Parse measurement stations"); final File minfo = getMinfoDir(); final File minfoBaseDir = new File(minfo, MINFO_BASE_DIR); final File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE); if (coredataFile == null || !coredataFile.exists()) { log.warn("No core data file '" + coredataFile.getAbsolutePath() + "' found"); return; } final MeasurementStationsParser parser = new MeasurementStationsParser(); try { parser.parse(coredataFile); this.measurementStations = parser.getMeasurementStations(); log.info("Successfully parsed " + this.measurementStations.size() + " measurement stations."); } catch (final IOException ioe) { log.error("unable to parse file '" + coredataFile.getName() + ": " + ioe.getMessage()); } } protected void parseWaterlevelDifferences() throws IOException { if (Config.INSTANCE.skipWaterlevelDifferences()) { log.info("skip parsing waterlevel differences"); return; } log.info("Parse waterlevel differences"); final File minfo = getMinfoDir(); final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); final File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); final File[] files = diffDir.listFiles(); if (files == null) { log.warn("Cannot read directory '" + diffDir + "'"); return; } final WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); for (final File file: files) { parser.parse(file); } // WaterlevelDifferences become Wsts now. 
for(final ImportWst iw: parser.getDifferences()) { iw.setDescription("CSV/" + iw.getDescription()); iw.setKind(6); this.waterlevelDifferences.add(iw); } } protected void parseSQRelation() throws IOException { if (Config.INSTANCE.skipSQRelation()) { log.info("skip parsing sq relation"); return; } log.info("Parse sq relations"); final File minfo = getMinfoDir(); final File sqDir = new File(minfo, MINFO_SQ_DIR); final File[] files = sqDir.listFiles(); if (files == null) { log.warn("Cannot read directory '" + sqDir + "'"); return; } final SQRelationParser parser = new SQRelationParser(getPeer()); for (final File file: files) { parser.parse(file); } this.sqRelations = parser.getSQRelations(); log.debug("Parsed " + this.sqRelations.size() + " SQ relations."); } protected void parseBedHeights(final File dir) throws IOException { log.debug("Parse bed height singles"); final File[] files = dir.listFiles(new FilenameFilter() { @Override public boolean accept(final File dir, final String name) { return name.toLowerCase().endsWith(".csv"); } }); if (files == null) { log.warn("Cannot read directory '" + dir + "'"); return; } final BedHeightParser parser = new BedHeightParser(); for (final File file: files) { parser.parse(file); } this.bedHeights = parser.getBedHeights(); } public void parseFloodWater() throws IOException { if (Config.INSTANCE.skipFloodWater()) { log.info("skip parsing flod water"); return; } log.info("Parse flood water wst file"); final File riverDir = this.wstFile.getParentFile().getParentFile(); final File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); if (!dir.isDirectory() || !dir.canRead()) { log.info("no directory '" + dir + "' found"); return; } final File [] files = dir.listFiles(); if (files == null) { log.warn("cannot read '" + dir + "'"); return; } for (final File file: files) { if (!file.isFile() || !file.canRead()) { continue; } final String name = file.getName().toLowerCase(); if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { 
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            try {
                final WstParser wstParser = new WstParser();
                wstParser.parse(file);
                final ImportWst iw = wstParser.getWst();
                iw.setKind(4);
                iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
                this.floodWater.add(iw);
            }
            catch (final WstParser.ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /** Parse the official lines wst and config files (kind 3). */
    public void parseOfficialLines() throws IOException {
        if (Config.INSTANCE.skipOfficialLines()) {
            log.info("skip parsing official lines");
            return;
        }
        log.info("Parse official wst files");

        final File riverDir = this.wstFile.getParentFile().getParentFile();

        for (final String folder: OFFICIAL_LINES_FOLDERS) {
            final File dir = FileTools.repair(new File(riverDir, folder));

            if (!dir.isDirectory() || !dir.canRead()) {
                log.info("no directory '" + folder + "' found");
                continue;
            }

            final File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
            if (!file.isFile() || !file.canRead()) {
                log.warn("no official lines wst file found");
                continue;
            }
            log.debug("Found WST file: " + file);

            final ImportWst iw = new ImportWst(
                ImportOfficialWstColumn.COLUMN_FACTORY);

            final WstParser wstParser = new WstParser(iw);
            try {
                wstParser.parse(file);
            }
            catch (final WstParser.ParseException e) {
                log.error(e.getMessage());
                continue;
            }

            iw.setKind(3);
            iw.setDescription(folder + "/" + iw.getDescription());

            final File configFile = FileTools.repair(
                new File(dir, OFFICIAL_LINES_CONFIG));
            if (!configFile.isFile() || !configFile.canRead()) {
                log.warn("no config file for official lines found");
            }
            else {
                final OfficialLinesConfigParser olcp =
                    new OfficialLinesConfigParser();
                try {
                    olcp.parse(configFile);
                }
                catch (final IOException ioe) {
                    log.warn("Error reading offical lines config", ioe);
                }
                final List<String> mainValueNames = olcp.getMainValueNames();
                if (mainValueNames.isEmpty()) {
                    log.warn(
                        "config file for offical lines contains no entries");
                }
                else {
                    // Join as much as possible.
                    // Pair wst columns with main value names from the
                    // config file, position by position.
                    final Iterator<ImportWstColumn> wi =
                        iw.getColumns().iterator();
                    final Iterator<String> si =
                        olcp.getMainValueNames().iterator();
                    while (wi.hasNext() && si.hasNext()) {
                        final ImportOfficialWstColumn wc =
                            (ImportOfficialWstColumn)wi.next();
                        final String name = si.next();
                        final ImportOfficialLine iol =
                            new ImportOfficialLine(name, wc);
                        wc.setOfficialLine(iol);
                    }
                }
            }
            this.officialLines.add(iw);
        } // for all folders
    }

    /** Parse fixation wst files from the Fixierungen directory (kind 2). */
    public void parseFixations() throws IOException {
        if (Config.INSTANCE.skipFixations()) {
            log.info("skip parsing fixations");
            return;
        }

        log.info("Parse fixation wst files");

        final File riverDir = this.wstFile.getParentFile().getParentFile();

        final File fixDir = FileTools.repair(
            new File(riverDir, FIXATIONS));

        if (!fixDir.isDirectory() || !fixDir.canRead()) {
            log.info("no fixation wst file directory found");
            return;
        }

        final File [] files = fixDir.listFiles();

        if (files == null) {
            log.warn("cannot read fixations wst file directory");
            return;
        }

        for (final File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }

            final String name = file.getName().toLowerCase();

            if (!name.endsWith(".wst")) {
                continue;
            }

            log.debug("Found WST file: " + file);

            try {
                final WstParser wstParser = new WstParser();
                wstParser.parse(file);
                final ImportWst iw = wstParser.getWst();
                iw.setKind(2);
                iw.setDescription(FIXATIONS+ "/" + iw.getDescription());
                this.fixations.add(iw);
            }
            catch (final WstParser.ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /** Parse extra longitudinal wsts (Zus.Laengsschnitte, kind 1). */
    public void parseExtraWsts() throws IOException {
        if (Config.INSTANCE.skipExtraWsts()) {
            log.info("skip parsing extra WST files");
            return;
        }

        log.info("Parse extra longitudinal wst files");

        final File riverDir = this.wstFile.getParentFile().getParentFile();

        final File extraDir = FileTools.repair(
            new File(riverDir, EXTRA_LONGITUDINALS));

        if (!extraDir.isDirectory() || !extraDir.canRead()) {
            log.info("no extra longitudinal wst file directory found");
            return;
        }

        final File [] files = extraDir.listFiles();

        if (files == null) {
            log.warn("cannot read extra longitudinal wst file directory");
            return;
        }

        for (final File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }

            final String name = file.getName().toLowerCase();

            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }

            log.debug("Found WST file: " + file);

            try {
                final WstParser wstParser = new WstParser();
                wstParser.parse(file);
                final ImportWst iw = wstParser.getWst();
                iw.setKind(1);
                iw.setDescription(
                    EXTRA_LONGITUDINALS + "/" + iw.getDescription());
                this.extraWsts.add(iw);
            }
            catch (final WstParser.ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /** Parse the waterlevel-model wst file (river.wst). */
    public void parseWst() throws IOException {
        if (Config.INSTANCE.skipWst()) {
            log.info("skip parsing WST file");
            return;
        }

        final WstParser wstParser = new WstParser();
        try {
            wstParser.parse(this.wstFile);
            this.wst = wstParser.getWst();
            // Derive the flow direction from the parsed waterlevels.
            this.wst.setKmUp(this.wst.guessWaterLevelIncreasing());
        }
        catch (final WstParser.ParseException e) {
            log.error(e.getMessage());
        }
    }

    /** Parse the PEGEL.GLT file and the gauges referenced in it. */
    public void parseGauges() throws IOException {
        if (Config.INSTANCE.skipGauges()) {
            log.info("skip parsing gauges");
            return;
        }

        File gltFile = new File(this.wstFile.getParentFile(), PEGEL_GLT);
        gltFile = FileTools.repair(gltFile);

        if (!gltFile.isFile() || !gltFile.canRead()) {
            log.warn("cannot read gauges from '" + gltFile + "'");
            return;
        }

        final PegelGltParser pgltp = new PegelGltParser();
        pgltp.parse(gltFile);

        this.gauges = pgltp.getGauges();

        for (final ImportGauge gauge: this.gauges) {
            gauge.parseDependencies();
        }
    }

    /** Parse annotations below the river directory. */
    public void parseAnnotations() throws IOException {
        if (Config.INSTANCE.skipAnnotations()) {
            log.info("skip parsing annotations");
            return;
        }

        final File riverDir = this.wstFile.getParentFile().getParentFile();
        final AnnotationsParser aparser =
            new AnnotationsParser(this.annotationClassifier);
        aparser.parse(riverDir);

        this.annotations = aparser.getAnnotations();
    }

    /** Parse HYK files, skipping duplicates. */
    public void parseHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            log.info("skip parsing HYK files");
            return;
        }

        log.info("looking for HYK files");
        final HYKParser parser = new HYKParser();
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        parser.parseHYKs(riverDir, new HYKParser.Callback() {

            // Hashes of files already parsed; used to skip duplicates.
            Set<HashedFile> hfs = new HashSet<>();

            @Override
            public boolean hykAccept(final File file) {
                final HashedFile hf = new HashedFile(file);
                final boolean success = this.hfs.add(hf);
                if (!success) {
                    log.warn("HYK file '" + file
                        + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void hykParsed(final HYKParser parser) {
                log.debug("callback from HYK parser");
                final ImportHYK hyk = parser.getHYK();
                hyk.setRiver(ImportRiver.this);
                ImportRiver.this.hyks.add(hyk);
            }
        });
    }

    /** Create a W80 Parser and parse w80 files found. */
    public void parseW80s() {
        if (Config.INSTANCE.skipW80s()) {
            log.info("skip parsing W80s");
            return;
        }
        final W80Parser parser = new W80Parser();
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        final ImportRiverCrossSectionParserCallback w80Callback =
            new ImportRiverCrossSectionParserCallback("w80");
        parser.parseW80s(riverDir, w80Callback);
    }

    /** Create a W80 Parser and parse w80 files found. */
    public void parseW80CSVs() {
        if (Config.INSTANCE.skipW80CSVs()) {
            log.info("skip parsing W80 csvs");
            return;
        }
        final W80CSVParser parser = new W80CSVParser();
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        // Construct the Cross-Section-Data path.
        final File csDir = new File(riverDir.getPath()
            + File.separator + "Geodaesie"
            + File.separator + "Querprofile"
            + File.separator + "QP-Daten");

        final ImportRiverCrossSectionParserCallback w80CSVCallback =
            new ImportRiverCrossSectionParserCallback("w80-csv");
        parser.parseW80CSVs(csDir, w80CSVCallback);
    }

    /**
     * Create and use a DA50Parser, parse the files found, add the
     * cross-sections found.
     */
    public void parseDA50s() {
        if (Config.INSTANCE.skipDA50s()) {
            log.info("skip parsing DA50s");
            return;
        }
        final DA50Parser parser = new DA50Parser();
        // The river directory is three levels above the wst file.
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        final ImportRiverCrossSectionParserCallback da50Callback =
            new ImportRiverCrossSectionParserCallback("da50");
        parser.parseDA50s(riverDir, da50Callback);
    }

    /** Create a DA66 Parser and parse the da66 files found. */
    // TODO this is a copy of parsePRFs, extract interfaces
    //(e.g. CrossSectionParser).
    public void parseDA66s() {
        if (Config.INSTANCE.skipDA66s()) {
            log.info("skip parsing DA66s");
            return;
        }

        log.info("looking for DA66 files");
        final DA66Parser parser = new DA66Parser();
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        final ImportRiverCrossSectionParserCallback da66Callback =
            new ImportRiverCrossSectionParserCallback("da66");
        parser.parseDA66s(riverDir, da66Callback);
    }

    /** Create a PRFParser and let it parse the prf files found. */
    public void parsePRFs() {
        if (Config.INSTANCE.skipPRFs()) {
            log.info("skip parsing PRFs");
            return;
        }

        log.info("looking for PRF files");

        final PRFParser parser = new PRFParser();
        final File riverDir = this.wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        final ImportRiverCrossSectionParserCallback prfCallback =
            new ImportRiverCrossSectionParserCallback("prf");
        parser.parsePRFs(riverDir, prfCallback);
    }

    /**
     * Convert a year number to a mid-year Date.
     * Calendar months are 0-based, so (year, 5, 15, 12, 0, 0) is
     * June 15th, 12:00; the milliseconds are truncated to full seconds.
     */
    public static Date yearToDate(final int year) {
        final Calendar cal = Calendar.getInstance();
        cal.set(year, 5, 15, 12, 0, 0);
        final long ms = cal.getTimeInMillis();
        cal.setTimeInMillis(ms - ms%1000);
        return cal.getTime();
    }

    /**
     * Store all parsed data. Logs an error and returns early when the
     * river is not yet in the database and the waterlevel model
     * WST-file import was skipped.
     */
    public void storeDependencies() {
        /* test whether river is already in database.
         * Otherwise it makes no sense to skip waterlevel model WST-file
         * because the altitude reference is taken from there.
         */
        final Session session =
            ImporterSession.getInstance().getDatabaseSession();
        final Query query =
            session.createQuery("from River where name=:name");
        query.setString("name", this.name);
        final List<River> rivers = query.list();
        if (rivers.isEmpty() && Config.INSTANCE.skipWst()){
            log.error("River not yet in database. "
                + "You cannot skip importing waterlevel model.");
            return;
        }

        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeMeasurementStations();
        storeBedHeight();
        storeSedimentDensity();
        storePorosity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentLoadLS();
        storeSedimentLoad();
        storeWaterlevels();
        storeWaterlevelDifferences();
        storeSQRelations();
        storeOfficialNumber();

        this.sinfoImporter.store();
        this.uinfoImporter.store();
    }

    /** Remember the unit of the waterlevel-model wst, if available. */
    public void storeWstUnit() {
        if (this.wst == null) {
            log.warn("No unit given. "
                + "Waterlevel-model WST-file has to be imported already.");
        }
        else {
            this.wstUnit = this.wst.getUnit();
        }
    }

    public void storeHYKs() {
        if (!Config.INSTANCE.skipHYKs()) {
            log.info("store HYKs");
            getPeer();
            for (final ImportHYK hyk: this.hyks) {
                hyk.storeDependencies();
            }
        }
    }

    /** Stored when at least one cross section source was not skipped. */
    public void storeCrossSections() {
        if (!Config.INSTANCE.skipPRFs()
            || !Config.INSTANCE.skipDA66s()
            || !Config.INSTANCE.skipDA50s()
            || !Config.INSTANCE.skipW80s()
            || !Config.INSTANCE.skipW80CSVs()) {
            log.info("store cross sections");
            getPeer();
            for (final ImportCrossSection crossSection: this.crossSections) {
                crossSection.storeDependencies();
            }
        }
    }

    public void storeWst() {
        if (this.wst != null && !Config.INSTANCE.skipWst()) {
            final River river = getPeer();
            this.wst.storeDependencies(river);

            // The flow direction of the main wst and the corresponding
            // waterlevels determine if the river is 'km_up'.
final Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            river.setKmUp(this.wst.getKmUp());
            session.save(river);
        }
    }

    /** Store the fixation WSTs. */
    public void storeFixations() {
        if (!Config.INSTANCE.skipFixations()) {
            log.info("store fixation wsts");
            final River river = getPeer();
            for (final ImportWst fWst: this.fixations) {
                log.debug("Fixation name: " + fWst.getDescription());
                fWst.storeDependencies(river);
            }
        }
    }

    /** Store wsts from waterlevel-csv files. */
    public void storeWaterlevels() {
        // BUGFIX: the skip flag previously guarded only the log statement
        // (missing braces), so waterlevels were stored even when skipped.
        if (!Config.INSTANCE.skipWaterlevels()) {
            log.info("store waterlevel wsts from csv");
            final River river = getPeer();
            for (final ImportWst wWst: this.waterlevels) {
                log.debug("Waterlevel name: " + wWst.getDescription());
                wWst.storeDependencies(river);
            }
        }
    }

    /** Store wsts from waterleveldifference-csv files. */
    public void storeWaterlevelDifferences() {
        // BUGFIX: the skip flag previously guarded only the log statement
        // (missing braces), so differences were stored even when skipped.
        if (!Config.INSTANCE.skipWaterlevelDifferences()) {
            log.info("store waterleveldifferences wsts from csv");
            final River river = getPeer();
            for (final ImportWst dWst: this.waterlevelDifferences) {
                log.debug("water.diff.: name " + dWst.getDescription());
                dWst.storeDependencies(river);
            }
        }
    }

    public void storeExtraWsts() {
        if (!Config.INSTANCE.skipExtraWsts()) {
            log.info("store extra wsts");
            final River river = getPeer();
            for (final ImportWst wst: this.extraWsts) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }

    public void storeOfficialLines() {
        if (Config.INSTANCE.skipOfficialLines()
            || this.officialLines.isEmpty()) {
            return;
        }

        log.info("store official lines wsts");
        final River river = getPeer();
        for (final ImportWst wst: this.officialLines) {
            log.debug("name: " + wst.getDescription());
            wst.storeDependencies(river);
            // Store the official lines after the columns are stored.
for (final ImportWstColumn wc: wst.getColumns()) { final ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc; final ImportOfficialLine ioc = owc.getOfficialLine(); if (ioc != null) { if (ioc.getPeer(river) == null) { log.warn("Cannot store official line: " + ioc.getName()); } } } } } public void storeFloodWater() { if (!Config.INSTANCE.skipFloodWater()) { log.info("store flood water wsts"); final River river = getPeer(); for (final ImportWst wst: this.floodWater) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); } } } public void storeFloodProtection() { if (!Config.INSTANCE.skipFloodProtection()) { log.info("store flood protection wsts"); final River river = getPeer(); for (final ImportWst wst: this.floodProtection) { log.debug("name: " + wst.getDescription()); wst.storeDependencies(river); } } } public void storeBedHeight() { if (!Config.INSTANCE.skipBedHeight()) { log.info("store bed heights"); final River river = getPeer(); if (this.bedHeights != null) { for (final ImportBedHeight tmp: this.bedHeights) { final ImportBedHeight single = tmp; final String desc = single.getDescription(); log.debug("name: " + desc); single.storeDependencies(river); } } else { log.info("No bed heights to store."); } } } public void storeSedimentDensity() { if (!Config.INSTANCE.skipSedimentDensity()) { log.info("store sediment density"); final River river = getPeer(); for (final ImportSedimentDensity density: this.sedimentDensities) { final String desc = density.getDescription(); log.debug("name: " + desc); density.storeDependencies(river); } } } public void storePorosity() { if (!Config.INSTANCE.skipPorosity()) { log.info("store porosity"); final River river = getPeer(); for (final ImportPorosity porosity: this.porosities) { final String desc = porosity.getDescription(); log.debug("name: " + desc); porosity.storeDependencies(river); } } } public void storeMorphologicalWidth() { if (!Config.INSTANCE.skipMorphologicalWidth()) { log.info("store 
morphological width"); final River river = getPeer(); for (final ImportMorphWidth width: this.morphologicalWidths) { width.storeDependencies(river); } } } public void storeFlowVelocity() { if (!Config.INSTANCE.skipFlowVelocity()) { log.info("store flow velocity"); final River river = getPeer(); for (final ImportFlowVelocityModel flowVelocityModel: this.flowVelocityModels ) { flowVelocityModel.storeDependencies(river); } for (final ImportFlowVelocityMeasurement m: this.flowVelocityMeasurements) { m.storeDependencies(river); } } } public void storeSedimentLoadLS() { if (!Config.INSTANCE.skipSedimentLoadLS()) { log.info("store sediment load longitudinal section data"); final River river = getPeer(); for (final ImportSedimentLoadLS sedimentLoadLS: this.sedimentLoadLSs) { sedimentLoadLS.storeDependencies(river); } } } public void storeSedimentLoad() { if (!Config.INSTANCE.skipSedimentLoad()) { log.info("store sediment load data at measurement stations"); for (final ImportSedimentLoad sedimentLoad: this.sedimentLoads) { sedimentLoad.storeDependencies(); } } } public void storeMeasurementStations() { if (!Config.INSTANCE.skipMeasurementStations()) { log.info("store measurement stations"); final River river = getPeer(); int count = 0; for (final ImportMeasurementStation station: this.measurementStations) { final boolean success = station.storeDependencies(river); if (success) { count++; } } log.info("stored " + count + " measurement stations."); } } public void storeSQRelations() { if (!Config.INSTANCE.skipSQRelation()) { log.info("store sq relations"); int count = 0; for (final ImportSQRelation sqRelation: this.sqRelations) { sqRelation.storeDependencies(); count++; } log.info("stored " + count + " sq relations."); } } public void storeAnnotations() { if (!Config.INSTANCE.skipAnnotations()) { final River river = getPeer(); for (final ImportAnnotation annotation: this.annotations) { annotation.getPeer(river); } } } public void storeGauges() { if 
(!Config.INSTANCE.skipGauges()) {
            log.info("store gauges:");
            final River river = getPeer();
            final Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            for (final ImportGauge gauge: this.gauges) {
                log.info("\tgauge: " + gauge.getName());
                gauge.storeDependencies(river);
                // BUGFIX: removed a stray
                // 'ImporterSession.getInstance().getDatabaseSession();'
                // here whose result was discarded — a no-op statement.
                session.flush();
            }
        }
    }

    /**
     * Fetch the database peer of this river, creating and saving a new
     * River row (with unit, model UUID and — unless skipped — the official
     * number) if none with this name exists yet. The peer is cached.
     */
    public River getPeer() {
        if (this.peer == null) {
            final Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            final Query query =
                session.createQuery("from River where name=:name");

            Unit u = null;
            if (this.wstUnit != null) {
                u = this.wstUnit.getPeer();
            }

            query.setString("name", this.name);
            final List<River> rivers = query.list();
            if (rivers.isEmpty()) {
                log.info("Store new river '" + this.name + "'");
                this.peer = new River(this.name, u, this.modelUuid);
                if (!Config.INSTANCE.skipBWASTR()) {
                    this.peer.setOfficialNumber(this.officialNumber);
                }
                session.save(this.peer);
            }
            else {
                this.peer = rivers.get(0);
            }
        }
        return this.peer;
    }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :