sascha@177: package de.intevation.flys.importer; sascha@177: sascha@177: import java.io.File; sascha@184: import java.io.IOException; ingo@2809: import java.sql.SQLException; sascha@3320: import java.util.ArrayList; sascha@3320: import java.util.Calendar; sascha@3320: import java.util.Date; sascha@3320: import java.util.HashSet; sascha@3320: import java.util.List; sascha@3320: import java.util.Map; sascha@3320: import java.util.Set; sascha@3320: sascha@184: import org.apache.log4j.Logger; sascha@3320: import org.hibernate.Query; sascha@3320: import org.hibernate.Session; ingo@4193: import org.hibernate.exception.ConstraintViolationException; sascha@177: ingo@4193: import de.intevation.artifacts.common.utils.FileTools; ingo@4193: import de.intevation.artifacts.common.utils.FileTools.HashedFile; ingo@4193: import de.intevation.flys.importer.parsers.AnnotationClassifier; ingo@4193: import de.intevation.flys.importer.parsers.AnnotationsParser; ingo@4193: import de.intevation.flys.importer.parsers.BedHeightEpochParser; ingo@4193: import de.intevation.flys.importer.parsers.BedHeightSingleParser; felix@4716: import de.intevation.flys.importer.parsers.CrossSectionParser; felix@4782: import de.intevation.flys.importer.parsers.DA50Parser; felix@4706: import de.intevation.flys.importer.parsers.DA66Parser; ingo@4193: import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser; ingo@4193: import de.intevation.flys.importer.parsers.FlowVelocityModelParser; ingo@4193: import de.intevation.flys.importer.parsers.HYKParser; ingo@4193: import de.intevation.flys.importer.parsers.MeasurementStationsParser; ingo@4193: import de.intevation.flys.importer.parsers.MorphologicalWidthParser; ingo@4193: import de.intevation.flys.importer.parsers.PRFParser; ingo@4193: import de.intevation.flys.importer.parsers.PegelGltParser; ingo@4193: import de.intevation.flys.importer.parsers.SQRelationParser; ingo@4193: import de.intevation.flys.importer.parsers.SedimentDensityParser; 
ingo@4193: import de.intevation.flys.importer.parsers.SedimentYieldParser; felix@4767: import de.intevation.flys.importer.parsers.W80Parser; ingo@4193: import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser; ingo@4193: import de.intevation.flys.importer.parsers.WaterlevelParser; ingo@4193: import de.intevation.flys.importer.parsers.WstParser; ingo@4193: import de.intevation.flys.model.River; ingo@4193: import de.intevation.flys.model.Unit; sascha@188: felix@4706: felix@4706: /** Import all river-related data (files) that can be found. */ sascha@177: public class ImportRiver sascha@177: { felix@5016: /** Private logger. */ sascha@184: private static Logger log = Logger.getLogger(ImportRiver.class); sascha@184: sascha@184: public static final String PEGEL_GLT = "PEGEL.GLT"; sascha@184: sascha@483: public static final String FIXATIONS = "Fixierungen"; sascha@483: sascha@482: public static final String EXTRA_LONGITUDINALS = sascha@482: "Zus.L\u00e4ngsschnitte"; sascha@482: sascha@484: public static final String [] OFFICIAL_LINES_FOLDERS = { sascha@484: "Basisdaten", sascha@484: "Fixierungen" }; sascha@484: sascha@484: public static final String OFFICIAL_LINES = sascha@484: "Amtl_Linien.wst"; sascha@484: sascha@490: public static final String FLOOD_WATER = "HW-Marken"; sascha@490: sascha@494: public static final String FLOOD_PROTECTION = sascha@494: "HW-Schutzanlagen"; sascha@494: ingo@2806: public static final String MINFO_DIR = "Morphologie"; ingo@2806: ingo@2806: public static final String BED_HEIGHT_DIR = "Sohlhoehen"; ingo@2806: ingo@2806: public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre"; ingo@2806: ingo@2806: public static final String BED_HEIGHT_EPOCH_DIR = "Epochen"; ingo@2806: ingo@2815: public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte"; ingo@2815: ingo@2821: public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite"; ingo@2821: ingo@2826: public static final String FLOW_VELOCITY_DIR = 
"Geschwindigkeit_Schubspannung"; ingo@2826: ingo@2826: public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen"; ingo@2826: ingo@2826: public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen"; ingo@2826: ingo@2839: public static final String SEDIMENT_YIELD_DIR = "Fracht"; ingo@2839: ingo@2839: public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre"; ingo@2839: ingo@2839: public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen"; ingo@2839: ingo@2844: public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse"; ingo@2844: ingo@2844: public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen"; ingo@2844: ingo@2851: public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen"; ingo@2851: ingo@4193: public static final String MINFO_BASE_DIR = "Basisdaten"; ingo@4193: ingo@4193: public static final String MINFO_CORE_DATA_FILE = "Stammdaten_Messstellen.csv"; ingo@4193: ingo@3328: public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung"; ingo@3328: sascha@177: protected String name; sascha@177: felix@5020: protected Long officialNumber; sascha@177: felix@5020: protected File wstFile; felix@5020: felix@5020: protected File bbInfoFile; sascha@177: sascha@184: protected List gauges; sascha@184: sascha@186: protected List annotations; sascha@186: sascha@1220: protected List hyks; sascha@1220: sascha@1204: protected List crossSections; sascha@1204: sascha@482: protected List extraWsts; sascha@482: sascha@483: protected List fixations; sascha@483: sascha@484: protected List officialLines; sascha@484: sascha@490: protected List floodWater; sascha@490: sascha@494: protected List floodProtection; sascha@494: felix@5227: /** Wst-structures from waterlevel-difference-csv files. 
*/ felix@5227: protected List waterlevelDifferences; felix@5227: ingo@2811: protected List bedHeightSingles; ingo@2806: ingo@2811: protected List bedHeightEpochs; ingo@2806: ingo@2815: protected List sedimentDensities; ingo@2815: ingo@2821: protected List morphologicalWidths; ingo@2821: ingo@2827: protected List flowVelocityModels; ingo@2827: ingo@2832: protected List flowVelocityMeasurements; ingo@2832: ingo@2839: protected List sedimentYields; ingo@2839: ingo@2844: protected List waterlevels; ingo@2844: ingo@2851: protected List waterlevelDiffs; ingo@2851: ingo@4193: protected List measurementStations; ingo@4193: ingo@3328: protected List sqRelations; ingo@3328: sascha@201: protected ImportWst wst; sascha@201: ingo@2347: protected ImportUnit wstUnit; ingo@2347: sascha@765: protected AnnotationClassifier annotationClassifier; sascha@765: felix@5017: /** Database-mapped River instance. */ sascha@188: protected River peer; sascha@188: felix@4778: felix@4778: /** Callback-implementation for CrossSectionParsers. */ felix@4778: class ImportRiverCrossSectionParserCallback implements CrossSectionParser.Callback { felix@4778: Set files = new HashSet(); felix@4778: String type; felix@4778: felix@4778: felix@4778: /** felix@4778: * Create new Callback, given type which is used for logging felix@4778: * purposes only. felix@4778: */ felix@4778: public ImportRiverCrossSectionParserCallback (String type) { felix@4778: this.type = type; felix@4778: } felix@4778: felix@4778: felix@4778: /** Accept file if not duplicate. */ felix@4778: public boolean accept(File file) { felix@4778: HashedFile hf = new HashedFile(file); felix@4778: boolean success = files.add(hf); felix@4778: if (!success) { felix@4778: log.warn(type + " file '" + file + "' seems to be a duplicate."); felix@4778: } felix@4778: return success; felix@4778: } felix@4778: felix@4778: felix@4778: /** Add crosssection. 
*/ felix@4778: public void parsed(CrossSectionParser parser) { felix@4778: log.debug("callback from " + type + " parser"); felix@4778: felix@4778: addCrossSections(parser); felix@4778: } teichmann@5149: } // ImportRiverCrossSectionParserCallback felix@4778: felix@4778: sascha@177: public ImportRiver() { ingo@2832: hyks = new ArrayList(); ingo@2832: crossSections = new ArrayList(); ingo@2832: extraWsts = new ArrayList(); ingo@2832: fixations = new ArrayList(); ingo@2832: officialLines = new ArrayList(); ingo@2832: floodWater = new ArrayList(); felix@5227: waterlevelDifferences = new ArrayList(); ingo@2832: floodProtection = new ArrayList(); ingo@2832: sedimentDensities = new ArrayList(); ingo@2832: morphologicalWidths = new ArrayList(); ingo@2832: flowVelocityModels = new ArrayList(); ingo@2832: flowVelocityMeasurements = new ArrayList(); ingo@2839: sedimentYields = new ArrayList(); ingo@2844: waterlevels = new ArrayList(); ingo@2851: waterlevelDiffs = new ArrayList(); ingo@4193: measurementStations = new ArrayList(); ingo@3328: sqRelations = new ArrayList(); sascha@177: } sascha@177: sascha@766: public ImportRiver( sascha@766: String name, sascha@766: File wstFile, sascha@766: File bbInfoFile, sascha@766: AnnotationClassifier annotationClassifier sascha@766: ) { sascha@482: this(); sascha@766: this.name = name; sascha@766: this.wstFile = wstFile; sascha@766: this.bbInfoFile = bbInfoFile; sascha@766: this.annotationClassifier = annotationClassifier; sascha@177: } sascha@177: sascha@177: public String getName() { sascha@177: return name; sascha@177: } sascha@177: sascha@177: public void setName(String name) { sascha@177: this.name = name; sascha@177: } sascha@177: felix@5020: public Long getOfficialNumber() { felix@5020: return this.officialNumber; felix@5020: } felix@5020: felix@5020: public void setOfficialNumber(Long officialNumber) { felix@5020: this.officialNumber = officialNumber; felix@5020: } felix@5020: sascha@177: public File getWstFile() { sascha@177: 
return wstFile; sascha@177: } sascha@177: sascha@177: public void setWstFile(File wstFile) { sascha@177: this.wstFile = wstFile; sascha@177: } sascha@177: sascha@177: public File getBBInfo() { sascha@177: return bbInfoFile; sascha@177: } sascha@177: sascha@177: public void setBBInfo(File bbInfoFile) { sascha@177: this.bbInfoFile = bbInfoFile; sascha@177: } sascha@184: sascha@201: public ImportWst getWst() { sascha@201: return wst; sascha@201: } sascha@201: sascha@201: public void setWst(ImportWst wst) { sascha@201: this.wst = wst; sascha@201: } sascha@201: ingo@2806: public File getMinfoDir() { ingo@2806: File riverDir = wstFile.getParentFile().getParentFile().getParentFile(); ingo@2806: return new File(riverDir, MINFO_DIR); ingo@2806: } ingo@2806: sascha@186: public void parseDependencies() throws IOException { sascha@186: parseGauges(); sascha@186: parseAnnotations(); sascha@1204: parsePRFs(); felix@4712: parseDA66s(); felix@4767: parseDA50s(); felix@4767: parseW80s(); sascha@1220: parseHYKs(); sascha@197: parseWst(); sascha@482: parseExtraWsts(); sascha@483: parseFixations(); sascha@484: parseOfficialLines(); sascha@490: parseFloodWater(); sascha@494: parseFloodProtection(); ingo@2806: parseBedHeight(); ingo@2815: parseSedimentDensity(); ingo@2821: parseMorphologicalWidth(); ingo@2826: parseFlowVelocity(); ingo@2839: parseSedimentYield(); ingo@2844: parseWaterlevels(); ingo@2851: parseWaterlevelDifferences(); ingo@4193: parseMeasurementStations(); ingo@3328: parseSQRelation(); sascha@494: } sascha@494: sascha@494: public void parseFloodProtection() throws IOException { sascha@1223: if (Config.INSTANCE.skipFloodProtection()) { sascha@1223: log.info("skip parsing flood protection"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@494: log.info("Parse flood protection wst file"); sascha@494: sascha@494: File riverDir = wstFile.getParentFile().getParentFile(); sascha@494: sascha@494: File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION)); 
sascha@494: sascha@494: if (!dir.isDirectory() || !dir.canRead()) { sascha@494: log.info("no directory '" + dir + "' found"); sascha@494: return; sascha@494: } sascha@494: sascha@494: File [] files = dir.listFiles(); sascha@494: sascha@494: if (files == null) { sascha@494: log.warn("cannot read '" + dir + "'"); sascha@494: return; sascha@494: } sascha@494: sascha@494: for (File file: files) { sascha@494: if (!file.isFile() || !file.canRead()) { sascha@494: continue; sascha@494: } sascha@494: String name = file.getName().toLowerCase(); sascha@494: if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { sascha@494: continue; sascha@494: } sascha@494: log.info("found file '" + file.getName() + "'"); sascha@494: WstParser wstParser = new WstParser(); sascha@494: wstParser.parse(file); sascha@494: ImportWst iw = wstParser.getWst(); sascha@494: iw.setKind(5); sascha@494: iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription()); sascha@494: floodProtection.add(iw); sascha@494: } sascha@490: } sascha@490: felix@5027: public void storeOfficialNumber() { felix@5027: if (Config.INSTANCE.skipBWASTR()) { felix@5027: log.info("skip storing official number."); felix@5027: return; felix@5027: } felix@5027: getPeer().setOfficialNumber(officialNumber); felix@5027: } ingo@2806: ingo@2806: public void parseBedHeight() throws IOException { ingo@2806: File minfoDir = getMinfoDir(); ingo@2806: File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR); ingo@2806: File singlesDir = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR); ingo@2806: File epochDir = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR); ingo@2806: ingo@2810: if (Config.INSTANCE.skipBedHeightSingle()) { ingo@2810: log.info("skip parsing bed height single."); ingo@2810: } ingo@2810: else { ingo@2844: log.info("Parse bed height single."); ingo@2810: parseBedHeightSingles(singlesDir); ingo@2810: } ingo@2810: ingo@2810: if (Config.INSTANCE.skipBedHeightEpoch()) { ingo@2810: log.info("skip parsing bed height epochs."); 
ingo@2810: } ingo@2810: else { ingo@2844: log.info("Parse bed height epochs."); ingo@2810: parseBedHeightEpochs(epochDir); ingo@2810: } ingo@2806: } ingo@2806: ingo@2806: ingo@2815: protected void parseSedimentDensity() throws IOException { ingo@2815: if (Config.INSTANCE.skipSedimentDensity()) { ingo@2815: log.info("skip parsing sediment density."); ingo@2815: return; ingo@2815: } ingo@2815: ingo@3327: log.debug("Parse sediment density"); ingo@3327: ingo@2815: File minfoDir = getMinfoDir(); ingo@2815: File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR); ingo@2815: ingo@2815: File[] files = sediment.listFiles(); ingo@2815: ingo@2815: if (files == null) { sascha@3660: log.warn("Cannot read directory '" + sediment + "'"); ingo@2815: return; ingo@2815: } ingo@2815: ingo@2815: SedimentDensityParser parser = new SedimentDensityParser(); ingo@2815: ingo@2815: for (File file: files) { ingo@2815: parser.parse(file); ingo@2815: } ingo@2815: ingo@2815: sedimentDensities = parser.getSedimentDensities(); ingo@2815: ingo@2815: log.info("Parsed " + sedimentDensities.size() + " sediment densities."); ingo@2815: } ingo@2815: ingo@2815: ingo@2821: protected void parseMorphologicalWidth() throws IOException { ingo@2821: if (Config.INSTANCE.skipMorphologicalWidth()) { ingo@2821: log.info("skip parsing morphological width."); ingo@2821: return; ingo@2821: } ingo@2821: ingo@3327: log.debug("Parse morphological width"); ingo@3327: ingo@2821: File minfoDir = getMinfoDir(); ingo@2821: File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR); ingo@2821: ingo@2821: File[] files = morphDir.listFiles(); ingo@2821: ingo@2821: if (files == null) { sascha@3660: log.warn("Cannot read directory '" + morphDir + "'"); ingo@2821: return; ingo@2821: } ingo@2821: ingo@2821: MorphologicalWidthParser parser = new MorphologicalWidthParser(); ingo@2821: ingo@2821: for (File file: files) { ingo@2821: parser.parse(file); ingo@2821: } ingo@2821: ingo@2821: morphologicalWidths = 
parser.getMorphologicalWidths(); ingo@2821: ingo@2821: log.info("Parsed " + morphologicalWidths.size() + " morph. widths files."); ingo@2821: } ingo@2821: ingo@2821: ingo@2826: protected void parseFlowVelocity() throws IOException { ingo@2826: if (Config.INSTANCE.skipFlowVelocity()) { ingo@2826: log.info("skip parsing flow velocity"); ingo@2826: return; ingo@2826: } ingo@2826: ingo@3328: log.debug("Parse flow velocity"); ingo@3328: ingo@2826: File minfoDir = getMinfoDir(); ingo@2826: File flowDir = new File(minfoDir, FLOW_VELOCITY_DIR); ingo@2826: File modelDir = new File(flowDir, FLOW_VELOCITY_MODEL); ingo@2826: File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS); ingo@2826: ingo@2826: File[] modelFiles = modelDir.listFiles(); ingo@2826: File[] measureFiles = measureDir.listFiles(); ingo@2826: ingo@2826: if (modelFiles == null) { sascha@3660: log.warn("Cannot read directory '" + modelDir + "'"); ingo@2826: } ingo@2826: else { ingo@2828: FlowVelocityModelParser parser = new FlowVelocityModelParser(); ingo@2828: ingo@2826: for (File model: modelFiles) { ingo@2826: log.debug("Parse file '" + model + "'"); ingo@2828: parser.parse(model); ingo@2826: } ingo@2828: ingo@2828: flowVelocityModels = parser.getModels(); ingo@2826: } ingo@2826: ingo@2826: if (measureFiles == null) { sascha@3660: log.warn("Cannot read directory '" + measureDir + "'"); ingo@2826: } ingo@2826: else { ingo@2832: FlowVelocityMeasurementParser parser = ingo@2832: new FlowVelocityMeasurementParser(); ingo@2832: ingo@2826: for (File measurement: measureFiles) { ingo@2826: log.debug("Parse file '" + measurement + "'"); ingo@2832: parser.parse(measurement); ingo@2826: } ingo@2832: ingo@2832: flowVelocityMeasurements = parser.getMeasurements(); ingo@2826: } ingo@2826: } ingo@2826: ingo@2826: ingo@2839: protected void parseSedimentYield() throws IOException { ingo@2839: if (Config.INSTANCE.skipSedimentYield()) { ingo@2839: log.info("skip parsing sediment yield data"); ingo@2839: return; 
ingo@2839: } ingo@2839: ingo@3327: log.debug("Parse sediment yield data"); ingo@3327: ingo@2839: File minfoDir = getMinfoDir(); ingo@2839: File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR); ingo@2839: ingo@2839: File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR); ingo@2839: File epochDir = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR); ingo@2839: ingo@2839: File[] singles = singleDir.listFiles(); ingo@2839: File[] epochs = epochDir.listFiles(); ingo@2839: ingo@2840: SedimentYieldParser parser = new SedimentYieldParser(); ingo@2840: ingo@2839: if (singles == null || singles.length == 0) { sascha@3660: log.warn("Cannot read directory '" + singleDir + "'"); ingo@2839: } ingo@2839: else { ingo@2840: for (File file: singles) { ingo@2840: if (file.isDirectory()) { ingo@2840: for (File child: file.listFiles()) { ingo@2840: parser.parse(child); ingo@2840: } ingo@2840: } ingo@2840: else { ingo@2840: parser.parse(file); ingo@2840: } ingo@2840: } ingo@2839: } ingo@2839: ingo@2839: if (epochs == null || epochs.length == 0) { sascha@3660: log.warn("Cannot read directory '" + epochDir + "'"); ingo@2839: } ingo@2839: else { ingo@2840: for (File file: epochs) { ingo@2840: if (file.isDirectory()) { ingo@2840: for (File child: file.listFiles()) { ingo@2840: parser.parse(child); ingo@2840: } ingo@2840: } ingo@2840: else { ingo@2840: parser.parse(file); ingo@2840: } ingo@2840: } ingo@2839: } ingo@2840: ingo@2840: sedimentYields = parser.getSedimentYields(); ingo@2839: } ingo@2839: ingo@2839: ingo@2844: protected void parseWaterlevels() throws IOException { ingo@2844: if (Config.INSTANCE.skipWaterlevels()) { ingo@2844: log.info("skip parsing waterlevels"); ingo@2844: return; ingo@2844: } ingo@2844: ingo@2851: log.info("Parse waterlevels"); ingo@2851: ingo@2844: File minfo = getMinfoDir(); ingo@2844: File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); ingo@2845: File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); ingo@2844: ingo@2844: File[] 
files = wspDir.listFiles(); ingo@2844: ingo@2844: if (files == null) { ingo@2844: log.warn("Cannot read directory '" + wspDir + "'"); ingo@2844: return; ingo@2844: } ingo@2844: ingo@2844: WaterlevelParser parser = new WaterlevelParser(); ingo@2844: ingo@2844: for (File file: files) { ingo@2844: parser.parse(file); ingo@2844: } ingo@2844: felix@5227: // TODO use own List for waterlevels felix@5050: // The parsed ImportWaterlevels are converted to felix@5050: // 'fixation'-wsts now. felix@5050: for(ImportWst iw: parser.exportWsts()) { felix@5227: iw.setDescription("CSV/" + iw.getDescription()); felix@5050: fixations.add(iw); felix@5050: } ingo@2851: } ingo@2844: ingo@4193: protected void parseMeasurementStations() throws IOException { ingo@4193: if (Config.INSTANCE.skipMeasurementStations()) { ingo@4193: log.info("skip parsing measurement stations"); ingo@4193: return; ingo@4193: } ingo@4193: ingo@4193: log.info("Parse measurement stations"); ingo@4193: ingo@4193: File minfo = getMinfoDir(); ingo@4193: File minfoBaseDir = new File(minfo, MINFO_BASE_DIR); ingo@4193: File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE); ingo@4193: ingo@4193: if (coredataFile == null || !coredataFile.exists()) { ingo@4193: log.warn("No core data file '" + MINFO_CORE_DATA_FILE + "' found"); ingo@4193: return; ingo@4193: } ingo@4193: ingo@4193: MeasurementStationsParser parser = new MeasurementStationsParser(); ingo@4193: try { ingo@4193: parser.parse(coredataFile); ingo@4193: measurementStations = parser.getMeasurementStations(); ingo@4193: ingo@4193: log.info("Successfully parsed " + measurementStations.size() + " measurement stations."); ingo@4193: } ingo@4193: catch (IOException ioe) { ingo@4193: log.error("unable to parse file '" + coredataFile.getName() + ingo@4193: ": " + ioe.getMessage()); ingo@4193: } ingo@4193: } ingo@4193: ingo@2851: ingo@2851: protected void parseWaterlevelDifferences() throws IOException { ingo@2851: if 
(Config.INSTANCE.skipWaterlevelDifferences()) { ingo@2851: log.info("skip parsing waterlevel differences"); ingo@2851: return; ingo@2851: } ingo@2851: ingo@2851: log.info("Parse waterlevel differences"); ingo@2851: ingo@2851: File minfo = getMinfoDir(); ingo@2851: File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); ingo@2851: File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); ingo@2851: ingo@2851: File[] files = diffDir.listFiles(); ingo@2851: ingo@2851: if (files == null) { ingo@2851: log.warn("Cannot read directory '" + diffDir + "'"); ingo@2851: return; ingo@2851: } ingo@2851: ingo@2851: WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); ingo@2851: ingo@2851: for (File file: files) { ingo@2851: parser.parse(file); ingo@2851: } ingo@2851: ingo@2851: waterlevelDiffs = parser.getDifferences(); ingo@2844: } ingo@2844: ingo@2844: ingo@3328: protected void parseSQRelation() throws IOException { ingo@3328: if (Config.INSTANCE.skipSQRelation()) { ingo@3329: log.info("skip parsing sq relation"); ingo@3328: return; ingo@3328: } ingo@3328: ingo@3328: log.info("Parse sq relations"); ingo@3328: ingo@3328: File minfo = getMinfoDir(); ingo@3328: File sqDir = new File(minfo, MINFO_SQ_DIR); ingo@3328: ingo@3328: File[] files = sqDir.listFiles(); ingo@3328: ingo@3328: if (files == null) { ingo@3328: log.warn("Cannot read directory '" + sqDir + "'"); ingo@3328: return; ingo@3328: } ingo@3328: ingo@3328: SQRelationParser parser = new SQRelationParser(); ingo@3328: ingo@3328: for (File file: files) { ingo@3328: parser.parse(file); ingo@3328: } ingo@3328: ingo@3328: sqRelations = parser.getSQRelations(); ingo@3328: ingo@3328: log.debug("Parsed " + sqRelations.size() + " SQ relations."); ingo@3328: } ingo@3328: ingo@3328: ingo@2806: protected void parseBedHeightSingles(File dir) throws IOException { ingo@2806: log.debug("Parse bed height singles"); ingo@2806: ingo@2806: File[] files = dir.listFiles(); ingo@2806: ingo@2806: if (files == null) { 
sascha@3660: log.warn("Cannot read directory '" + dir + "'"); ingo@2806: return; ingo@2806: } ingo@2806: ingo@2806: BedHeightSingleParser parser = new BedHeightSingleParser(); ingo@2806: ingo@2806: for (File file: files) { ingo@2806: parser.parse(file); ingo@2806: } ingo@2806: ingo@2806: bedHeightSingles = parser.getBedHeights(); ingo@2806: } ingo@2806: ingo@2806: ingo@2806: protected void parseBedHeightEpochs(File dir) throws IOException { ingo@2806: log.debug("Parse bed height epochs"); ingo@2806: ingo@2806: File[] files = dir.listFiles(); ingo@2806: ingo@2806: if (files == null) { sascha@3660: log.warn("Cannot read directory '" + dir + "'"); ingo@2806: return; ingo@2806: } ingo@2806: ingo@2806: BedHeightEpochParser parser = new BedHeightEpochParser(); ingo@2806: ingo@2806: for (File file: files) { ingo@2806: parser.parse(file); ingo@2806: } ingo@2806: ingo@2806: bedHeightEpochs = parser.getBedHeights(); ingo@2806: } ingo@2806: ingo@2806: sascha@490: public void parseFloodWater() throws IOException { sascha@1223: if (Config.INSTANCE.skipFloodWater()) { sascha@1223: log.info("skip parsing flod water"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@490: log.info("Parse flood water wst file"); sascha@490: sascha@490: File riverDir = wstFile.getParentFile().getParentFile(); sascha@490: sascha@490: File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); sascha@490: sascha@490: if (!dir.isDirectory() || !dir.canRead()) { sascha@490: log.info("no directory '" + dir + "' found"); sascha@490: return; sascha@490: } sascha@490: sascha@490: File [] files = dir.listFiles(); sascha@490: sascha@490: if (files == null) { sascha@490: log.warn("cannot read '" + dir + "'"); sascha@490: return; sascha@490: } sascha@490: sascha@490: for (File file: files) { sascha@490: if (!file.isFile() || !file.canRead()) { sascha@490: continue; sascha@490: } sascha@490: String name = file.getName().toLowerCase(); sascha@490: if (!(name.endsWith(".zus") || name.endsWith(".wst"))) { 
sascha@490: continue; sascha@490: } sascha@490: log.info("found file '" + file.getName() + "'"); sascha@490: WstParser wstParser = new WstParser(); sascha@490: wstParser.parse(file); sascha@490: ImportWst iw = wstParser.getWst(); sascha@490: iw.setKind(4); sascha@490: iw.setDescription(FLOOD_WATER + "/" + iw.getDescription()); sascha@490: floodWater.add(iw); sascha@490: } sascha@484: } sascha@484: sascha@484: public void parseOfficialLines() throws IOException { sascha@1223: if (Config.INSTANCE.skipOfficialLines()) { sascha@1223: log.info("skip parsing official lines"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@484: log.info("Parse official wst files"); sascha@484: sascha@484: File riverDir = wstFile.getParentFile().getParentFile(); sascha@484: sascha@484: for (String folder: OFFICIAL_LINES_FOLDERS) { sascha@484: File dir = FileTools.repair(new File(riverDir, folder)); sascha@484: sascha@484: if (!dir.isDirectory() || !dir.canRead()) { sascha@484: log.info("no directory '" + folder + "' found"); sascha@484: continue; sascha@484: } sascha@484: sascha@484: File file = FileTools.repair(new File(dir, OFFICIAL_LINES)); sascha@484: if (!file.isFile() || !file.canRead()) { sascha@484: log.warn("no official lines wst file found"); sascha@484: continue; sascha@484: } sascha@484: log.debug("Found WST file: " + file); sascha@484: sascha@484: WstParser wstParser = new WstParser(); sascha@484: wstParser.parse(file); sascha@484: ImportWst iw = wstParser.getWst(); sascha@484: iw.setKind(3); sascha@484: iw.setDescription(folder + "/" + iw.getDescription()); sascha@484: officialLines.add(iw); sascha@484: } // for all folders sascha@484: sascha@483: } sascha@483: sascha@483: public void parseFixations() throws IOException { sascha@1223: if (Config.INSTANCE.skipFixations()) { sascha@1223: log.info("skip parsing fixations"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@483: log.info("Parse fixation wst files"); sascha@483: sascha@483: File riverDir = 
wstFile.getParentFile().getParentFile(); sascha@483: sascha@483: File fixDir = FileTools.repair( sascha@483: new File(riverDir, FIXATIONS)); sascha@483: sascha@483: if (!fixDir.isDirectory() || !fixDir.canRead()) { sascha@483: log.info("no fixation wst file directory found"); sascha@483: return; sascha@483: } sascha@483: sascha@483: File [] files = fixDir.listFiles(); sascha@483: sascha@483: if (files == null) { sascha@483: log.warn("cannot read fixations wst file directory"); sascha@483: return; sascha@483: } sascha@483: sascha@483: for (File file: files) { sascha@483: if (!file.isFile() || !file.canRead()) { sascha@483: continue; sascha@483: } sascha@483: String name = file.getName().toLowerCase(); sascha@483: if (!name.endsWith(".wst")) { sascha@483: continue; sascha@483: } sascha@483: log.debug("Found WST file: " + file); sascha@483: sascha@483: WstParser wstParser = new WstParser(); sascha@483: wstParser.parse(file); sascha@483: ImportWst iw = wstParser.getWst(); sascha@483: iw.setKind(2); sascha@490: iw.setDescription(FIXATIONS+ "/" + iw.getDescription()); sascha@483: fixations.add(iw); sascha@483: } sascha@482: } sascha@482: sascha@482: public void parseExtraWsts() throws IOException { sascha@1223: if (Config.INSTANCE.skipExtraWsts()) { sascha@1223: log.info("skip parsing extra WST files"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@482: log.info("Parse extra longitudinal wst files"); sascha@482: sascha@482: File riverDir = wstFile.getParentFile().getParentFile(); sascha@482: sascha@482: File extraDir = FileTools.repair( sascha@482: new File(riverDir, EXTRA_LONGITUDINALS)); sascha@482: sascha@482: if (!extraDir.isDirectory() || !extraDir.canRead()) { sascha@482: log.info("no extra longitudinal wst file directory found"); sascha@482: return; sascha@482: } sascha@482: sascha@482: File [] files = extraDir.listFiles(); sascha@482: sascha@482: if (files == null) { sascha@482: log.warn("cannot read extra longitudinal wst file directory"); sascha@482: 
            // (Tail of the extra-longitudinal-sections import that starts
            // above this chunk: nothing to do if the listing failed.)
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            // Only *.zus and *.wst files count as extra longitudinal sections.
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(1); // kind 1 marks extra longitudinal section WSTs
            iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription());
            extraWsts.add(iw);
        }

    }

    /** Parse the river's main WST file, unless skipped by configuration. */
    public void parseWst() throws IOException {
        if (Config.INSTANCE.skipWst()) {
            log.info("skip parsing WST file");
            return;
        }

        WstParser wstParser = new WstParser();
        wstParser.parse(wstFile);
        wst = wstParser.getWst();
    }

    /**
     * Parse the PEGEL.GLT file (located next to the WST file) and the
     * per-gauge files it references.
     */
    public void parseGauges() throws IOException {
        if (Config.INSTANCE.skipGauges()) {
            log.info("skip parsing gauges");
            return;
        }

        File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
        gltFile = FileTools.repair(gltFile);

        if (!gltFile.isFile() || !gltFile.canRead()) {
            log.warn("cannot read gauges from '" + gltFile + "'");
            return;
        }

        PegelGltParser pgltp = new PegelGltParser();
        pgltp.parse(gltFile);

        gauges = pgltp.getGauges();

        // Each gauge references further files (e.g. main values) of its own.
        for (ImportGauge gauge: gauges) {
            gauge.parseDependencies();
        }
    }

    /** Parse annotations found below the river directory. */
    public void parseAnnotations() throws IOException {
        if (Config.INSTANCE.skipAnnotations()) {
            log.info("skip parsing annotations");
            return;
        }

        File riverDir = wstFile.getParentFile().getParentFile();
        AnnotationsParser aparser =
            new AnnotationsParser(annotationClassifier);
        aparser.parse(riverDir);

        annotations = aparser.getAnnotations();
    }

    /** Find and parse HYK files below the river directory,
     * skipping content-identical duplicates. */
    public void parseHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            log.info("skip parsing HYK files");
            return;
        }

        log.info("looking for HYK files");
        HYKParser parser = new HYKParser();
        // River directory is three levels above the WST file:
        // <river>/Hydrologie/Basisdaten/<wst>
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        parser.parseHYKs(riverDir, new HYKParser.Callback() {

            // De-duplicates HYK files by content hash.
            // NOTE(review): type parameters reconstructed — the extraction
            // dropped generics; confirm against the repository.
            Set<HashedFile> hfs = new HashSet<HashedFile>();

            @Override
            public boolean hykAccept(File file) {
                HashedFile hf = new HashedFile(file);
                boolean success = hfs.add(hf);
                if (!success) {
                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void hykParsed(HYKParser parser) {
                log.debug("callback from HYK parser");
                ImportHYK hyk = parser.getHYK();
                hyk.setRiver(ImportRiver.this);
                hyks.add(hyk);
            }
        });
    }


    /** Add cross sections with description, years and lines to
     * store. */
    private void addCrossSections(CrossSectionParser parser) {
        String description = parser.getDescription();
        Integer year = parser.getYear();
        // A year, if known, is mapped to a one-point time interval.
        ImportTimeInterval ti = year != null
            ? new ImportTimeInterval(yearToDate(year))
            : null;

        // NOTE(review): generic type parameters restored from garbled
        // extraction; presumably XY is the parser's point type — confirm.
        Map<Double, List<XY>> data = parser.getData();

        List<ImportCrossSectionLine> lines =
            new ArrayList<ImportCrossSectionLine>(data.size());

        // One cross section line per km.
        for (Map.Entry<Double, List<XY>> entry: data.entrySet()) {
            Double km = entry.getKey();
            List<XY> points = entry.getValue();
            lines.add(new ImportCrossSectionLine(km, points));
        }

        crossSections.add(new ImportCrossSection(
            ImportRiver.this, description, ti, lines));
    }

    /** Create a W80 Parser and parse w80 files found. */
    public void parseW80s() {
        if (Config.INSTANCE.skipW80s()) {
            log.info("skip parsing W80s");
            return;
        }
        W80Parser parser = new W80Parser();
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        ImportRiverCrossSectionParserCallback w80Callback =
            new ImportRiverCrossSectionParserCallback("w80");
        parser.parseW80s(riverDir, w80Callback);
    }


    /**
     * Create and use a DA50Parser, parse the files found, add the
     * cross-sections found.
     */
    public void parseDA50s() {
        if (Config.INSTANCE.skipDA50s()) {
            log.info("skip parsing DA50s");
            return;
        }
        DA50Parser parser = new DA50Parser();
        // River directory is three levels above the WST file.
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        ImportRiverCrossSectionParserCallback da50Callback =
            new ImportRiverCrossSectionParserCallback("da50");
        parser.parseDA50s(riverDir, da50Callback);
    }


    /** Create a DA66 Parser and parse the da66 files found. */
    // TODO this is a copy of parsePRFs, extract interfaces (e.g. CrossSectionParser).
    public void parseDA66s() {
        if (Config.INSTANCE.skipDA66s()) {
            log.info("skip parsing DA66s");
            return;
        }

        log.info("looking for DA66 files");
        DA66Parser parser = new DA66Parser();
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        ImportRiverCrossSectionParserCallback da66Callback =
            new ImportRiverCrossSectionParserCallback("da66");
        parser.parseDA66s(riverDir, da66Callback);
    }

    /** Create a PRFParser and let it parse the prf files found.
*/ sascha@1204: public void parsePRFs() { sascha@1223: if (Config.INSTANCE.skipPRFs()) { sascha@1223: log.info("skip parsing PRFs"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@1204: log.info("looking for PRF files"); sascha@1204: PRFParser parser = new PRFParser(); sascha@1204: File riverDir = wstFile sascha@1204: .getParentFile() // Basisdaten sascha@1204: .getParentFile() // Hydrologie sascha@1204: .getParentFile(); // sascha@1206: felix@4779: ImportRiverCrossSectionParserCallback prfCallback = felix@4779: new ImportRiverCrossSectionParserCallback("prf"); felix@4779: parser.parsePRFs(riverDir, prfCallback); sascha@1204: } sascha@1204: sascha@1204: public static Date yearToDate(int year) { sascha@1204: Calendar cal = Calendar.getInstance(); sascha@1204: cal.set(year, 5, 15, 12, 0, 0); sascha@1204: long ms = cal.getTimeInMillis(); sascha@1204: cal.setTimeInMillis(ms - ms%1000); sascha@1204: return cal.getTime(); sascha@1204: } sascha@1204: sascha@190: public void storeDependencies() { ingo@2347: storeWstUnit(); sascha@190: storeAnnotations(); sascha@1221: storeHYKs(); sascha@1204: storeCrossSections(); sascha@190: storeGauges(); sascha@201: storeWst(); sascha@482: storeExtraWsts(); sascha@483: storeFixations(); sascha@484: storeOfficialLines(); sascha@490: storeFloodWater(); sascha@494: storeFloodProtection(); ingo@2806: storeBedHeight(); ingo@2815: storeSedimentDensity(); ingo@2821: storeMorphologicalWidth(); ingo@2826: storeFlowVelocity(); ingo@2839: storeSedimentYield(); ingo@2844: storeWaterlevels(); ingo@2851: storeWaterlevelDifferences(); ingo@4193: storeMeasurementStations(); ingo@3328: storeSQRelations(); felix@5080: storeOfficialNumber(); sascha@201: } sascha@201: ingo@2347: public void storeWstUnit() { ingo@2347: if (wst == null) { ingo@2347: wstUnit = new ImportUnit("NN + m"); ingo@2347: } ingo@2347: else { ingo@2347: wstUnit = wst.getUnit(); ingo@2347: } ingo@2347: } ingo@2347: sascha@1221: public void storeHYKs() { sascha@1223: if 
(!Config.INSTANCE.skipHYKs()) { sascha@1223: log.info("store HYKs"); sascha@1223: getPeer(); sascha@1223: for (ImportHYK hyk: hyks) { sascha@1223: hyk.storeDependencies(); sascha@1223: } sascha@1221: } sascha@1221: } sascha@1221: sascha@1204: public void storeCrossSections() { felix@4767: if (!Config.INSTANCE.skipPRFs() || !Config.INSTANCE.skipDA66s() || !Config.INSTANCE.skipDA50s() || !Config.INSTANCE.skipW80s()) { sascha@1223: log.info("store cross sections"); sascha@1223: getPeer(); sascha@1223: for (ImportCrossSection crossSection: crossSections) { sascha@1223: crossSection.storeDependencies(); sascha@1223: } sascha@1204: } sascha@1204: } sascha@1204: sascha@201: public void storeWst() { sascha@1223: if (!Config.INSTANCE.skipWst()) { sascha@1223: River river = getPeer(); sascha@483: wst.storeDependencies(river); sascha@483: } sascha@483: } sascha@483: sascha@1223: public void storeFixations() { felix@5051: if (!Config.INSTANCE.skipFixations() || !Config.INSTANCE.skipWaterlevels()) { felix@5051: log.info("store fixation wsts and/or csvs"); sascha@1223: River river = getPeer(); sascha@1223: for (ImportWst wst: fixations) { sascha@1223: log.debug("name: " + wst.getDescription()); sascha@1223: wst.storeDependencies(river); sascha@1223: } sascha@1223: } sascha@1223: } sascha@1223: sascha@482: public void storeExtraWsts() { sascha@1223: if (!Config.INSTANCE.skipExtraWsts()) { sascha@1223: log.info("store extra wsts"); sascha@1223: River river = getPeer(); sascha@1223: for (ImportWst wst: extraWsts) { sascha@1223: log.debug("name: " + wst.getDescription()); sascha@1223: wst.storeDependencies(river); sascha@1223: } sascha@482: } sascha@482: } sascha@482: sascha@484: public void storeOfficialLines() { sascha@1223: if (!Config.INSTANCE.skipOfficialLines()) { sascha@1223: log.info("store official lines wsts"); sascha@1223: River river = getPeer(); sascha@1223: for (ImportWst wst: officialLines) { sascha@1223: log.debug("name: " + wst.getDescription()); sascha@1223: 
wst.storeDependencies(river); sascha@1223: } sascha@484: } sascha@484: } sascha@484: sascha@490: public void storeFloodWater() { sascha@1223: if (!Config.INSTANCE.skipFloodWater()) { sascha@1223: log.info("store flood water wsts"); sascha@1223: River river = getPeer(); sascha@1223: for (ImportWst wst: floodWater) { sascha@1223: log.debug("name: " + wst.getDescription()); sascha@1223: wst.storeDependencies(river); sascha@1223: } sascha@490: } sascha@490: } sascha@490: sascha@494: public void storeFloodProtection() { sascha@1223: if (!Config.INSTANCE.skipFloodProtection()) { sascha@1223: log.info("store flood protection wsts"); sascha@1223: River river = getPeer(); sascha@1223: for (ImportWst wst: floodProtection) { sascha@1223: log.debug("name: " + wst.getDescription()); sascha@1223: wst.storeDependencies(river); sascha@1223: } sascha@494: } sascha@494: } sascha@494: ingo@2806: ingo@2806: public void storeBedHeight() { ingo@2810: if (!Config.INSTANCE.skipBedHeightSingle()) { ingo@2810: log.info("store bed heights single"); ingo@2810: storeBedHeightSingle(); ingo@2810: } ingo@2809: ingo@2810: if (!Config.INSTANCE.skipBedHeightEpoch()) { ingo@2810: log.info("store bed height epoch."); ingo@2810: storeBedHeightEpoch(); ingo@2810: } ingo@2810: } ingo@2809: ingo@2810: ingo@2810: private void storeBedHeightSingle() { ingo@2810: River river = getPeer(); ingo@2810: ingo@2810: if (bedHeightSingles != null) { ingo@2811: for (ImportBedHeight tmp: bedHeightSingles) { ingo@2811: ImportBedHeightSingle single = (ImportBedHeightSingle) tmp; ingo@2811: ingo@2810: String desc = single.getDescription(); ingo@2810: ingo@2810: log.debug("name: " + desc); ingo@2810: ingo@2810: try { ingo@2810: single.storeDependencies(river); ingo@2810: } ingo@2810: catch (SQLException sqle) { ingo@2810: log.error("File '" + desc + "' is broken!"); ingo@2810: } ingo@2810: catch (ConstraintViolationException cve) { ingo@2810: log.error("File '" + desc + "' is broken!"); ingo@2806: } ingo@2806: } 
ingo@2810: } ingo@2810: else { ingo@2810: log.info("No single bed heights to store."); ingo@2810: } ingo@2810: } ingo@2806: ingo@2810: ingo@2810: private void storeBedHeightEpoch() { ingo@2810: River river = getPeer(); ingo@2810: ingo@2810: if (bedHeightEpochs != null) { ingo@2811: for (ImportBedHeight tmp: bedHeightEpochs) { ingo@2811: ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp; ingo@2811: ingo@2810: String desc = epoch.getDescription(); ingo@2810: ingo@2810: log.debug("name: " + desc); ingo@2810: ingo@2810: try { ingo@2806: epoch.storeDependencies(river); ingo@2806: } ingo@2810: catch (SQLException sqle) { ingo@2810: log.error("File '" + desc + "' is broken!"); ingo@2810: } ingo@2810: catch (ConstraintViolationException cve) { ingo@2810: log.error("File '" + desc + "' is broken!"); ingo@2810: } ingo@2806: } ingo@2810: } ingo@2810: else { ingo@2810: log.info("No epoch bed heights to store."); ingo@2806: } ingo@2806: } ingo@2806: ingo@2815: public void storeSedimentDensity() { ingo@2815: if (!Config.INSTANCE.skipSedimentDensity()) { ingo@2815: log.info("store sediment density"); ingo@2815: ingo@2817: River river = getPeer(); ingo@2817: ingo@2817: for (ImportSedimentDensity density: sedimentDensities) { ingo@2817: String desc = density.getDescription(); ingo@2817: ingo@2817: log.debug("name: " + desc); ingo@2817: ingo@2817: try { ingo@2817: density.storeDependencies(river); ingo@2817: } ingo@2817: catch (SQLException sqle) { ingo@2817: log.error("File '" + desc + "' is broken!"); ingo@2817: } ingo@2817: catch (ConstraintViolationException cve) { ingo@2817: log.error("File '" + desc + "' is broken!"); ingo@2817: } ingo@2817: } ingo@2815: } ingo@2815: } ingo@2815: ingo@2821: public void storeMorphologicalWidth() { ingo@2821: if (!Config.INSTANCE.skipMorphologicalWidth()) { ingo@2821: log.info("store morphological width"); ingo@2821: ingo@2821: River river = getPeer(); ingo@2821: ingo@2821: for (ImportMorphWidth width: morphologicalWidths) { ingo@2821: try 
{ ingo@2821: width.storeDependencies(river); ingo@2821: } ingo@2821: catch (SQLException sqle) { sascha@3659: log.error("Error while parsing file for morph. width.", sqle); ingo@2821: } ingo@2821: catch (ConstraintViolationException cve) { sascha@3659: log.error("Error while parsing file for morph. width.", cve); ingo@2821: } ingo@2821: } ingo@2821: } ingo@2821: } ingo@2821: ingo@2826: public void storeFlowVelocity() { ingo@2826: if (!Config.INSTANCE.skipFlowVelocity()) { ingo@2826: log.info("store flow velocity"); ingo@2826: ingo@2827: River river = getPeer(); ingo@2827: ingo@2827: for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){ ingo@2827: try { ingo@2827: flowVelocityModel.storeDependencies(river); ingo@2827: } ingo@2827: catch (SQLException sqle) { ingo@2827: log.error("Error while storing flow velocity model.", sqle); ingo@2827: } ingo@2827: catch (ConstraintViolationException cve) { ingo@2827: log.error("Error while storing flow velocity model.", cve); ingo@2827: } ingo@2827: } ingo@2832: ingo@2832: for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) { ingo@2832: try { ingo@2832: m.storeDependencies(river); ingo@2832: } ingo@2832: catch (SQLException sqle) { ingo@2832: log.error("Error while storing flow velocity measurement.", sqle); ingo@2832: } ingo@2832: catch (ConstraintViolationException cve) { ingo@2832: log.error("Error while storing flow velocity measurement.", cve); ingo@2832: } ingo@2832: } ingo@2826: } ingo@2826: } ingo@2826: ingo@2839: ingo@2839: public void storeSedimentYield() { ingo@2839: if (!Config.INSTANCE.skipSedimentYield()) { ingo@2839: log.info("store sediment yield data"); ingo@2839: ingo@2839: River river = getPeer(); ingo@2839: ingo@2839: for (ImportSedimentYield sedimentYield: sedimentYields) { ingo@2839: try { ingo@2839: sedimentYield.storeDependencies(river); ingo@2839: } ingo@2839: catch (SQLException sqle) { ingo@2839: log.error("Error while storing sediment yield.", sqle); ingo@2839: } ingo@2839: 
catch (ConstraintViolationException cve) { ingo@2839: log.error("Error while storing sediment yield.", cve); ingo@2839: } ingo@2839: } ingo@2839: } ingo@2839: } ingo@2839: ingo@2844: ingo@2844: public void storeWaterlevels() { ingo@2844: if (!Config.INSTANCE.skipWaterlevels()) { ingo@2844: log.info("store waterlevels"); ingo@2844: ingo@2844: River river = getPeer(); ingo@2844: ingo@2844: for (ImportWaterlevel waterlevel: waterlevels) { ingo@2844: waterlevel.storeDependencies(river); ingo@2844: } ingo@2844: } ingo@2844: } ingo@2844: ingo@2844: ingo@2851: public void storeWaterlevelDifferences() { ingo@2851: if (!Config.INSTANCE.skipWaterlevelDifferences()) { ingo@2851: log.info("store waterlevel differences"); ingo@2851: ingo@2851: River river = getPeer(); ingo@2851: ingo@2851: for (ImportWaterlevelDifference diff: waterlevelDiffs) { ingo@2851: try { ingo@2851: diff.storeDependencies(river); ingo@2851: } ingo@2851: catch (SQLException sqle) { ingo@2851: log.error("Error while storing waterlevel diff.", sqle); ingo@2851: } ingo@2851: catch (ConstraintViolationException cve) { ingo@2851: log.error("Error while storing waterlevel diff.", cve); ingo@2851: } ingo@2851: } ingo@2851: } ingo@2851: } ingo@2851: ingo@2851: ingo@4193: public void storeMeasurementStations() { ingo@4193: if (!Config.INSTANCE.skipMeasurementStations()) { ingo@4193: log.info("store measurement stations"); ingo@4193: ingo@4193: River river = getPeer(); ingo@4193: ingo@4193: int count = 0; ingo@4193: ingo@4193: for (ImportMeasurementStation station: measurementStations) { ingo@4193: try { ingo@4193: boolean success = station.storeDependencies(river); ingo@4193: if (success) { ingo@4193: count++; ingo@4193: } ingo@4193: } ingo@4193: catch (SQLException sqle) { ingo@4193: log.error("Error while storing measurement station.", sqle); ingo@4193: } ingo@4193: catch (ConstraintViolationException cve) { ingo@4193: log.error("Error while storing measurement station.", cve); ingo@4193: } ingo@4193: } 
ingo@4193: ingo@4193: log.info("stored " + count + " measurement stations."); ingo@4193: } ingo@4193: } ingo@4193: ingo@4193: ingo@3328: public void storeSQRelations() { ingo@3328: if (!Config.INSTANCE.skipSQRelation()) { ingo@3328: log.info("store sq relations"); ingo@3328: ingo@3329: River river = getPeer(); ingo@3329: ingo@3329: int count = 0; ingo@3329: ingo@3329: for (ImportSQRelation sqRelation: sqRelations) { ingo@3329: try { ingo@3329: sqRelation.storeDependencies(river); ingo@3329: count++; ingo@3329: } ingo@3329: catch (SQLException sqle) { ingo@3329: log.error("Error while storing sq relation.", sqle); ingo@3329: } ingo@3329: catch (ConstraintViolationException cve) { ingo@3329: log.error("Error while storing sq relation.", cve); ingo@3329: } ingo@3329: } ingo@3329: ingo@3329: log.info("stored " + count + " sq relations."); ingo@3328: } ingo@3328: } ingo@3328: ingo@3328: sascha@188: public void storeAnnotations() { sascha@1223: if (!Config.INSTANCE.skipAnnotations()) { sascha@1223: River river = getPeer(); sascha@1223: for (ImportAnnotation annotation: annotations) { sascha@1223: annotation.getPeer(river); sascha@1223: } sascha@188: } sascha@188: } sascha@188: sascha@190: public void storeGauges() { sascha@1223: if (!Config.INSTANCE.skipGauges()) { sascha@1223: log.info("store gauges:"); sascha@1223: River river = getPeer(); sascha@1223: Session session = ImporterSession.getInstance() sascha@1223: .getDatabaseSession(); sascha@1223: for (ImportGauge gauge: gauges) { sascha@1223: log.info("\tgauge: " + gauge.getName()); sascha@1223: gauge.storeDependencies(river); sascha@1223: ImporterSession.getInstance().getDatabaseSession(); sascha@1223: session.flush(); sascha@1223: } sascha@191: } sascha@190: } sascha@190: sascha@188: public River getPeer() { sascha@188: if (peer == null) { sascha@497: Session session = ImporterSession.getInstance().getDatabaseSession(); sascha@188: Query query = session.createQuery("from River where name=:name"); ingo@2347: 
felix@5027: Unit u = null; felix@5027: if (wstUnit != null) { felix@5027: u = wstUnit.getPeer(); felix@5027: } ingo@2347: sascha@188: query.setString("name", name); sascha@188: List rivers = query.list(); sascha@188: if (rivers.isEmpty()) { ingo@2347: log.info("Store new river '" + name + "'"); ingo@2347: peer = new River(name, u); felix@5027: if (!Config.INSTANCE.skipBWASTR()) { felix@5027: peer.setOfficialNumber(officialNumber); felix@5027: } sascha@188: session.save(peer); sascha@188: } sascha@188: else { sascha@188: peer = rivers.get(0); sascha@188: } sascha@188: } sascha@188: return peer; sascha@188: } sascha@177: } sascha@177: // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :