package de.intevation.flys.importer;

import de.intevation.artifacts.common.utils.FileTools.HashedFile;

import de.intevation.artifacts.common.utils.FileTools;

import de.intevation.flys.importer.parsers.AnnotationClassifier;
import de.intevation.flys.importer.parsers.AnnotationsParser;
import de.intevation.flys.importer.parsers.BedHeightEpochParser;
import de.intevation.flys.importer.parsers.BedHeightSingleParser;
import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser;
import de.intevation.flys.importer.parsers.FlowVelocityModelParser;
import de.intevation.flys.importer.parsers.HYKParser;
import de.intevation.flys.importer.parsers.MorphologicalWidthParser;
import de.intevation.flys.importer.parsers.PRFParser;
import de.intevation.flys.importer.parsers.PegelGltParser;
import de.intevation.flys.importer.parsers.SedimentDensityParser;
import de.intevation.flys.importer.parsers.SedimentYieldParser;
import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
import de.intevation.flys.importer.parsers.WaterlevelParser;
import de.intevation.flys.importer.parsers.WstParser;

import de.intevation.flys.model.River;
import de.intevation.flys.model.Unit;

import java.io.File;
import java.io.IOException;

import java.sql.SQLException;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

import org.hibernate.Query;
import org.hibernate.Session;

import org.hibernate.exception.ConstraintViolationException;

/**
 * Collects all data imported for a single river. It knows how to read
 * the data from the file system (parseDependencies()) and how to write
 * it into the backend database (storeDependencies()).
 */
public class ImportRiver
{
    private static Logger log = Logger.getLogger(ImportRiver.class);

    public static final String PEGEL_GLT = "PEGEL.GLT";

    public static final String FIXATIONS = "Fixierungen";

    public static final String EXTRA_LONGITUDINALS =
        "Zus.L\u00e4ngsschnitte";

    public static final String [] OFFICIAL_LINES_FOLDERS = {
        "Basisdaten",
        "Fixierungen" };

    public static final String OFFICIAL_LINES =
        "Amtl_Linien.wst";

    public static final String FLOOD_WATER = "HW-Marken";

    public static final String FLOOD_PROTECTION =
        "HW-Schutzanlagen";

    public static final String MINFO_DIR = "Morphologie";

    public static final String BED_HEIGHT_DIR = "Sohlhoehen";

    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";

    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";

    public static final String FLOW_VELOCITY_DIR =
"Geschwindigkeit_Schubspannung"; ingo@2826: ingo@2826: public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen"; ingo@2826: ingo@2826: public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen"; ingo@2826: ingo@2839: public static final String SEDIMENT_YIELD_DIR = "Fracht"; ingo@2839: ingo@2839: public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre"; ingo@2839: ingo@2839: public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen"; ingo@2839: ingo@2844: public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse"; ingo@2844: ingo@2844: public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen"; ingo@2844: ingo@2851: public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen"; ingo@2851: ingo@2806: sascha@177: protected String name; sascha@177: sascha@177: protected File wstFile; sascha@177: sascha@177: protected File bbInfoFile; sascha@177: sascha@184: protected List gauges; sascha@184: sascha@186: protected List annotations; sascha@186: sascha@1220: protected List hyks; sascha@1220: sascha@1204: protected List crossSections; sascha@1204: sascha@482: protected List extraWsts; sascha@482: sascha@483: protected List fixations; sascha@483: sascha@484: protected List officialLines; sascha@484: sascha@490: protected List floodWater; sascha@490: sascha@494: protected List floodProtection; sascha@494: ingo@2811: protected List bedHeightSingles; ingo@2806: ingo@2811: protected List bedHeightEpochs; ingo@2806: ingo@2815: protected List sedimentDensities; ingo@2815: ingo@2821: protected List morphologicalWidths; ingo@2821: ingo@2827: protected List flowVelocityModels; ingo@2827: ingo@2832: protected List flowVelocityMeasurements; ingo@2832: ingo@2839: protected List sedimentYields; ingo@2839: ingo@2844: protected List waterlevels; ingo@2844: ingo@2851: protected List waterlevelDiffs; ingo@2851: sascha@201: protected ImportWst wst; sascha@201: ingo@2347: protected ImportUnit wstUnit; ingo@2347: sascha@765: protected AnnotationClassifier annotationClassifier; sascha@765: sascha@188: protected River peer; sascha@188: sascha@177: public ImportRiver() { ingo@2832: hyks = new ArrayList(); ingo@2832: crossSections = new ArrayList(); ingo@2832: extraWsts = new ArrayList(); ingo@2832: fixations = new ArrayList(); ingo@2832: officialLines = new ArrayList(); ingo@2832: floodWater = new ArrayList(); ingo@2832: floodProtection = new ArrayList(); ingo@2832: sedimentDensities = new ArrayList(); ingo@2832: morphologicalWidths = new ArrayList(); ingo@2832: flowVelocityModels = new ArrayList(); ingo@2832: flowVelocityMeasurements = new ArrayList(); ingo@2839: sedimentYields = new ArrayList(); ingo@2844: waterlevels = new ArrayList(); ingo@2851: waterlevelDiffs = new ArrayList(); sascha@177: } sascha@177: sascha@766: public ImportRiver( sascha@766: String name, sascha@766: File wstFile, sascha@766: File bbInfoFile, sascha@766: AnnotationClassifier annotationClassifier sascha@766: ) { sascha@482: this(); sascha@766: this.name = name; sascha@766: this.wstFile = wstFile; sascha@766: this.bbInfoFile = bbInfoFile; sascha@766: this.annotationClassifier = annotationClassifier; sascha@177: } sascha@177: sascha@177: public String getName() { sascha@177: return name; sascha@177: } sascha@177: sascha@177: public void setName(String name) { sascha@177: this.name = name; sascha@177: } sascha@177: sascha@177: public File getWstFile() { sascha@177: return wstFile; sascha@177: } sascha@177: sascha@177: public void setWstFile(File wstFile) { sascha@177: 
        this.wstFile = wstFile;
    }

    public File getBBInfo() {
        return bbInfoFile;
    }

    public void setBBInfo(File bbInfoFile) {
        this.bbInfoFile = bbInfoFile;
    }

    public ImportWst getWst() {
        return wst;
    }

    public void setWst(ImportWst wst) {
        this.wst = wst;
    }

    public File getMinfoDir() {
        File riverDir = wstFile.getParentFile().getParentFile().getParentFile();
        return new File(riverDir, MINFO_DIR);
    }

    public void parseDependencies() throws IOException {
        parseGauges();
        parseAnnotations();
        parsePRFs();
        parseHYKs();
        parseWst();
        parseExtraWsts();
        parseFixations();
        parseOfficialLines();
        parseFloodWater();
        parseFloodProtection();
        parseBedHeight();
        parseSedimentDensity();
        parseMorphologicalWidth();
        parseFlowVelocity();
        parseSedimentYield();
        parseWaterlevels();
        parseWaterlevelDifferences();
    }

    public void parseFloodProtection() throws IOException {
        if (Config.INSTANCE.skipFloodProtection()) {
            log.info("skip parsing flood protection");
            return;
        }

        log.info("Parse flood protection wst file");

        File riverDir = wstFile.getParentFile().getParentFile();

        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        File [] files = dir.listFiles();

        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(5);
            iw.setDescription(FLOOD_PROTECTION + "/" + iw.getDescription());
            floodProtection.add(iw);
        }
    }


    public void parseBedHeight() throws IOException {
        File minfoDir     = getMinfoDir();
        File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
        File singlesDir   = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
        File epochDir     = new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR);

        if (Config.INSTANCE.skipBedHeightSingle()) {
            log.info("skip parsing bed height single.");
        }
        else {
            log.info("Parse bed height single.");
            parseBedHeightSingles(singlesDir);
        }

        if (Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("skip parsing bed height epochs.");
        }
        else {
            log.info("Parse bed height epochs.");
            parseBedHeightEpochs(epochDir);
        }
    }


    protected void parseSedimentDensity() throws IOException {
        log.debug("Parse sediment density");

        if (Config.INSTANCE.skipSedimentDensity()) {
            log.info("skip parsing sediment density.");
            return;
        }

        File minfoDir = getMinfoDir();
        File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);

        File[] files = sediment.listFiles();

        if (files == null) {
            log.warn("Cannot parse directory '" + sediment + "'");
            return;
        }

        SedimentDensityParser parser = new SedimentDensityParser();

        for (File file: files) {
            parser.parse(file);
        }

        sedimentDensities = parser.getSedimentDensities();

        log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
    }


    protected void parseMorphologicalWidth() throws IOException {
        log.debug("Parse morphological width");

        if (Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("skip parsing morphological width.");
            return;
        }

        File minfoDir = getMinfoDir();
        File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);

        File[] files = morphDir.listFiles();

        if (files == null) {
            log.warn("Cannot parse directory '" + morphDir + "'");
            return;
        }

        MorphologicalWidthParser parser = new MorphologicalWidthParser();

        for (File file: files) {
            parser.parse(file);
        }

        morphologicalWidths = parser.getMorphologicalWidths();

        log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
    }


    protected void parseFlowVelocity() throws IOException {
        log.debug("Parse flow velocity");

        if (Config.INSTANCE.skipFlowVelocity()) {
            log.info("skip parsing flow velocity");
            return;
        }

        File minfoDir   = getMinfoDir();
        File flowDir    = new File(minfoDir, FLOW_VELOCITY_DIR);
        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);

        File[] modelFiles   = modelDir.listFiles();
        File[] measureFiles = measureDir.listFiles();

        if (modelFiles == null) {
            log.warn("Cannot parse directory '" + modelDir + "'");
        }
        else {
            FlowVelocityModelParser parser = new FlowVelocityModelParser();

            for (File model: modelFiles) {
                log.debug("Parse file '" + model + "'");
                parser.parse(model);
            }

            flowVelocityModels = parser.getModels();
        }

        if (measureFiles == null) {
            log.warn("Cannot parse directory '" + measureDir + "'");
        }
        else {
            FlowVelocityMeasurementParser parser =
                new FlowVelocityMeasurementParser();

            for (File measurement: measureFiles) {
                log.debug("Parse file '" + measurement + "'");
                parser.parse(measurement);
            }

            flowVelocityMeasurements = parser.getMeasurements();
        }
    }


    protected void parseSedimentYield() throws IOException {
        log.debug("Parse sediment yield data");

        if (Config.INSTANCE.skipSedimentYield()) {
            log.info("skip parsing sediment yield data");
            return;
        }

        File minfoDir         = getMinfoDir();
        File sedimentYieldDir = new File(minfoDir, SEDIMENT_YIELD_DIR);

        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);

        File[] singles = singleDir.listFiles();
        File[] epochs  = epochDir.listFiles();

        SedimentYieldParser parser = new SedimentYieldParser();

        if (singles == null || singles.length == 0) {
            log.warn("Cannot parse directory '" + singleDir + "'");
        }
        else {
            for (File file: singles) {
                if (file.isDirectory()) {
                    for (File child: file.listFiles()) {
                        parser.parse(child);
                    }
                }
                else {
                    parser.parse(file);
                }
            }
        }

        if (epochs == null || epochs.length == 0) {
            log.warn("Cannot parse directory '" + epochDir + "'");
        }
        else {
            for (File file: epochs) {
                if (file.isDirectory()) {
                    for (File child: file.listFiles()) {
                        parser.parse(child);
                    }
                }
                else {
                    parser.parse(file);
                }
            }
        }

        sedimentYields = parser.getSedimentYields();
    }


    protected void parseWaterlevels() throws IOException {
        if (Config.INSTANCE.skipWaterlevels()) {
log.info("skip parsing waterlevels"); ingo@2844: return; ingo@2844: } ingo@2844: ingo@2851: log.info("Parse waterlevels"); ingo@2851: ingo@2844: File minfo = getMinfoDir(); ingo@2844: File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); ingo@2845: File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR); ingo@2844: ingo@2844: File[] files = wspDir.listFiles(); ingo@2844: ingo@2844: if (files == null) { ingo@2844: log.warn("Cannot read directory '" + wspDir + "'"); ingo@2844: return; ingo@2844: } ingo@2844: ingo@2844: WaterlevelParser parser = new WaterlevelParser(); ingo@2844: ingo@2844: for (File file: files) { ingo@2844: parser.parse(file); ingo@2844: } ingo@2844: ingo@2851: waterlevels = parser.getWaterlevels(); ingo@2851: } ingo@2844: ingo@2851: ingo@2851: protected void parseWaterlevelDifferences() throws IOException { ingo@2851: if (Config.INSTANCE.skipWaterlevelDifferences()) { ingo@2851: log.info("skip parsing waterlevel differences"); ingo@2851: return; ingo@2851: } ingo@2851: ingo@2851: log.info("Parse waterlevel differences"); ingo@2851: ingo@2851: File minfo = getMinfoDir(); ingo@2851: File fixDir = new File(minfo, MINFO_FIXATIONS_DIR); ingo@2851: File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR); ingo@2851: ingo@2851: File[] files = diffDir.listFiles(); ingo@2851: ingo@2851: if (files == null) { ingo@2851: log.warn("Cannot read directory '" + diffDir + "'"); ingo@2851: return; ingo@2851: } ingo@2851: ingo@2851: WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser(); ingo@2851: ingo@2851: for (File file: files) { ingo@2851: parser.parse(file); ingo@2851: } ingo@2851: ingo@2851: waterlevelDiffs = parser.getDifferences(); ingo@2844: } ingo@2844: ingo@2844: ingo@2806: protected void parseBedHeightSingles(File dir) throws IOException { ingo@2806: log.debug("Parse bed height singles"); ingo@2806: ingo@2806: File[] files = dir.listFiles(); ingo@2806: ingo@2806: if (files == null) { ingo@2806: log.warn("Cannot parse directory '" + dir + "'"); ingo@2806: return; ingo@2806: } ingo@2806: ingo@2806: BedHeightSingleParser parser = new BedHeightSingleParser(); ingo@2806: ingo@2806: for (File file: files) { ingo@2806: parser.parse(file); ingo@2806: } ingo@2806: ingo@2806: bedHeightSingles = parser.getBedHeights(); ingo@2806: } ingo@2806: ingo@2806: ingo@2806: protected void parseBedHeightEpochs(File dir) throws IOException { ingo@2806: log.debug("Parse bed height epochs"); ingo@2806: ingo@2806: File[] files = dir.listFiles(); ingo@2806: ingo@2806: if (files == null) { ingo@2806: log.warn("Cannot parse directory '" + dir + "'"); ingo@2806: return; ingo@2806: } ingo@2806: ingo@2806: BedHeightEpochParser parser = new BedHeightEpochParser(); ingo@2806: ingo@2806: for (File file: files) { ingo@2806: parser.parse(file); ingo@2806: } ingo@2806: ingo@2806: bedHeightEpochs = parser.getBedHeights(); ingo@2806: } ingo@2806: ingo@2806: sascha@490: public void parseFloodWater() throws IOException { sascha@1223: if (Config.INSTANCE.skipFloodWater()) { sascha@1223: log.info("skip parsing flod water"); sascha@1223: return; sascha@1223: } sascha@1223: sascha@490: log.info("Parse flood water wst file"); sascha@490: sascha@490: File riverDir = wstFile.getParentFile().getParentFile(); sascha@490: sascha@490: File dir = FileTools.repair(new File(riverDir, FLOOD_WATER)); sascha@490: sascha@490: if (!dir.isDirectory() || !dir.canRead()) { sascha@490: log.info("no directory '" + dir + "' found"); sascha@490: return; sascha@490: } sascha@490: sascha@490: File [] files = dir.listFiles(); 
        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(4);
            iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
            floodWater.add(iw);
        }
    }

    public void parseOfficialLines() throws IOException {
        if (Config.INSTANCE.skipOfficialLines()) {
            log.info("skip parsing official lines");
            return;
        }

        log.info("Parse official wst files");

        File riverDir = wstFile.getParentFile().getParentFile();

        for (String folder: OFFICIAL_LINES_FOLDERS) {
            File dir = FileTools.repair(new File(riverDir, folder));

            if (!dir.isDirectory() || !dir.canRead()) {
                log.info("no directory '" + folder + "' found");
                continue;
            }

            File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
            if (!file.isFile() || !file.canRead()) {
                log.warn("no official lines wst file found");
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(3);
            iw.setDescription(folder + "/" + iw.getDescription());
            officialLines.add(iw);
        } // for all folders

    }

    public void parseFixations() throws IOException {
        if (Config.INSTANCE.skipFixations()) {
            log.info("skip parsing fixations");
            return;
        }

        log.info("Parse fixation wst files");

        File riverDir = wstFile.getParentFile().getParentFile();

        File fixDir = FileTools.repair(
            new File(riverDir, FIXATIONS));

        if (!fixDir.isDirectory() || !fixDir.canRead()) {
            log.info("no fixation wst file directory found");
            return;
        }

        File [] files = fixDir.listFiles();

        if (files == null) {
            log.warn("cannot read fixations wst file directory");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            if (!name.endsWith(".wst")) {
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(2);
            iw.setDescription(FIXATIONS + "/" + iw.getDescription());
            fixations.add(iw);
        }
    }

    public void parseExtraWsts() throws IOException {
        if (Config.INSTANCE.skipExtraWsts()) {
            log.info("skip parsing extra WST files");
            return;
        }

        log.info("Parse extra longitudinal wst files");

        File riverDir = wstFile.getParentFile().getParentFile();

        File extraDir = FileTools.repair(
            new File(riverDir, EXTRA_LONGITUDINALS));

        if (!extraDir.isDirectory() || !extraDir.canRead()) {
            log.info("no extra longitudinal wst file directory found");
            return;
        }

        File [] files = extraDir.listFiles();

        if (files == null) {
            log.warn("cannot read extra longitudinal wst file directory");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(1);
            iw.setDescription(EXTRA_LONGITUDINALS + "/" + iw.getDescription());
            extraWsts.add(iw);
        }

    }

    public void parseWst() throws IOException {
        if (Config.INSTANCE.skipWst()) {
            log.info("skip parsing WST file");
            return;
        }

        WstParser wstParser = new WstParser();
        wstParser.parse(wstFile);
        wst = wstParser.getWst();
    }

    public void parseGauges() throws IOException {
        if (Config.INSTANCE.skipGauges()) {
            log.info("skip parsing gauges");
            return;
        }

        File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
        gltFile = FileTools.repair(gltFile);

        if (!gltFile.isFile() || !gltFile.canRead()) {
            log.warn("cannot read gauges from '" + gltFile + "'");
            return;
        }

        PegelGltParser pgltp = new PegelGltParser();
        pgltp.parse(gltFile);

        gauges = pgltp.getGauges();

        for (ImportGauge gauge: gauges) {
            gauge.parseDependencies();
        }
    }

    public void parseAnnotations() throws IOException {
        if (Config.INSTANCE.skipAnnotations()) {
            log.info("skip parsing annotations");
            return;
        }

        File riverDir = wstFile.getParentFile().getParentFile();
        AnnotationsParser aparser =
            new AnnotationsParser(annotationClassifier);
        aparser.parse(riverDir);

        annotations = aparser.getAnnotations();
    }

    public void parseHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            log.info("skip parsing HYK files");
            return;
        }

        log.info("looking for HYK files");
        HYKParser parser = new HYKParser();
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        parser.parseHYKs(riverDir, new HYKParser.Callback() {

            Set<HashedFile> hfs = new HashSet<HashedFile>();

            @Override
            public boolean hykAccept(File file) {
                HashedFile hf = new HashedFile(file);
                boolean success = hfs.add(hf);
                if (!success) {
                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void hykParsed(HYKParser parser) {
                log.debug("callback from HYK parser");
                ImportHYK hyk = parser.getHYK();
                hyk.setRiver(ImportRiver.this);
                hyks.add(hyk);
            }
        });
    }

    public void parsePRFs() {
        if (Config.INSTANCE.skipPRFs()) {
            log.info("skip parsing PRFs");
            return;
        }

        log.info("looking for PRF files");
        PRFParser parser = new PRFParser();
        File riverDir = wstFile
            .getParentFile() // Basisdaten
            .getParentFile() // Hydrologie
            .getParentFile(); // <river>

        parser.parsePRFs(riverDir, new PRFParser.Callback() {

            Set<HashedFile> prfs = new HashSet<HashedFile>();

            @Override
            public boolean prfAccept(File file) {
                HashedFile hf = new HashedFile(file);
                boolean success = prfs.add(hf);
                if (!success) {
                    log.warn("PRF file '" + file + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void prfParsed(PRFParser parser) {
                log.debug("callback from PRF parser");

                String description = parser.getDescription();
                Integer year = parser.getYear();
                ImportTimeInterval ti = year != null
                    ? new ImportTimeInterval(yearToDate(year))
                    : null;

                List<ImportCrossSectionLine> lines =
                    new ArrayList<ImportCrossSectionLine>();

                for (Map.Entry<Double, List<XY>> entry: parser.getData().entrySet()) {
                    Double km = entry.getKey();
                    List<XY> points = entry.getValue();
                    lines.add(new ImportCrossSectionLine(km, points));
                }

                crossSections.add(new ImportCrossSection(
                    ImportRiver.this, description, ti, lines));
            }
        });
    }

    public static Date yearToDate(int year) {
        Calendar cal = Calendar.getInstance();
        cal.set(year, 5, 15, 12, 0, 0);
        long ms = cal.getTimeInMillis();
        cal.setTimeInMillis(ms - ms%1000);
        return cal.getTime();
    }

    public void storeDependencies() {
        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeBedHeight();
        storeSedimentDensity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentYield();
        storeWaterlevels();
        storeWaterlevelDifferences();
    }

    public void storeWstUnit() {
        if (wst == null) {
            wstUnit = new ImportUnit("NN + m");
        }
        else {
            wstUnit = wst.getUnit();
        }
    }

    public void storeHYKs() {
        if (!Config.INSTANCE.skipHYKs()) {
            log.info("store HYKs");
            getPeer();
            for (ImportHYK hyk: hyks) {
                hyk.storeDependencies();
            }
        }
    }

    public void storeCrossSections() {
        if (!Config.INSTANCE.skipPRFs()) {
            log.info("store cross sections");
            getPeer();
            for (ImportCrossSection crossSection: crossSections) {
                crossSection.storeDependencies();
            }
        }
    }

    public void storeWst() {
        if (!Config.INSTANCE.skipWst()) {
            River river = getPeer();
            wst.storeDependencies(river);
        }
    }

    public void storeFixations() {
        if (!Config.INSTANCE.skipFixations()) {
            log.info("store fixation wsts");
            River river = getPeer();
            for (ImportWst wst: fixations) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }

    public void storeExtraWsts() {
        if (!Config.INSTANCE.skipExtraWsts()) {
            log.info("store extra wsts");
            River river = getPeer();
            for (ImportWst wst: extraWsts) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }

    public void storeOfficialLines() {
        if (!Config.INSTANCE.skipOfficialLines()) {
            log.info("store official lines wsts");
            River river = getPeer();
            for (ImportWst wst:
                    officialLines) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }

    public void storeFloodWater() {
        if (!Config.INSTANCE.skipFloodWater()) {
            log.info("store flood water wsts");
            River river = getPeer();
            for (ImportWst wst: floodWater) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }

    public void storeFloodProtection() {
        if (!Config.INSTANCE.skipFloodProtection()) {
            log.info("store flood protection wsts");
            River river = getPeer();
            for (ImportWst wst: floodProtection) {
                log.debug("name: " + wst.getDescription());
                wst.storeDependencies(river);
            }
        }
    }


    public void storeBedHeight() {
        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("store bed heights single");
            storeBedHeightSingle();
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("store bed height epoch.");
            storeBedHeightEpoch();
        }
    }


    private void storeBedHeightSingle() {
        River river = getPeer();

        if (bedHeightSingles != null) {
            for (ImportBedHeight tmp: bedHeightSingles) {
                ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;

                String desc = single.getDescription();

                log.debug("name: " + desc);

                try {
                    single.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    log.error("File '" + desc + "' is broken!");
                }
                catch (ConstraintViolationException cve) {
                    log.error("File '" + desc + "' is broken!");
                }
            }
        }
        else {
            log.info("No single bed heights to store.");
        }
    }


    private void storeBedHeightEpoch() {
        River river = getPeer();

        if (bedHeightEpochs != null) {
            for (ImportBedHeight tmp: bedHeightEpochs) {
                ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;

                String desc = epoch.getDescription();

                log.debug("name: " + desc);

                try {
                    epoch.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    log.error("File '" + desc + "' is broken!");
                }
                catch (ConstraintViolationException cve) {
                    log.error("File '" + desc + "' is broken!");
                }
            }
        }
        else {
            log.info("No epoch bed heights to store.");
        }
    }

    public void storeSedimentDensity() {
        if (!Config.INSTANCE.skipSedimentDensity()) {
            log.info("store sediment density");

            River river = getPeer();

            for (ImportSedimentDensity density: sedimentDensities) {
                String desc = density.getDescription();

                log.debug("name: " + desc);

                try {
                    density.storeDependencies(river);
                }
                catch (SQLException sqle) {
log.error("File '" + desc + "' is broken!"); ingo@2817: } ingo@2817: catch (ConstraintViolationException cve) { ingo@2817: log.error("File '" + desc + "' is broken!"); ingo@2817: } ingo@2817: } ingo@2815: } ingo@2815: } ingo@2815: ingo@2821: public void storeMorphologicalWidth() { ingo@2821: if (!Config.INSTANCE.skipMorphologicalWidth()) { ingo@2821: log.info("store morphological width"); ingo@2821: ingo@2821: River river = getPeer(); ingo@2821: ingo@2821: for (ImportMorphWidth width: morphologicalWidths) { ingo@2821: try { ingo@2821: width.storeDependencies(river); ingo@2821: } ingo@2821: catch (SQLException sqle) { ingo@2821: log.error("Error while parsing file for morph. width."); ingo@2821: } ingo@2821: catch (ConstraintViolationException cve) { ingo@2821: log.error("Error while parsing file for morph. width."); ingo@2821: } ingo@2821: } ingo@2821: } ingo@2821: } ingo@2821: ingo@2826: public void storeFlowVelocity() { ingo@2826: if (!Config.INSTANCE.skipFlowVelocity()) { ingo@2826: log.info("store flow velocity"); ingo@2826: ingo@2827: River river = getPeer(); ingo@2827: ingo@2827: for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){ ingo@2827: try { ingo@2827: flowVelocityModel.storeDependencies(river); ingo@2827: } ingo@2827: catch (SQLException sqle) { ingo@2827: log.error("Error while storing flow velocity model.", sqle); ingo@2827: } ingo@2827: catch (ConstraintViolationException cve) { ingo@2827: log.error("Error while storing flow velocity model.", cve); ingo@2827: } ingo@2827: } ingo@2832: ingo@2832: for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) { ingo@2832: try { ingo@2832: m.storeDependencies(river); ingo@2832: } ingo@2832: catch (SQLException sqle) { ingo@2832: log.error("Error while storing flow velocity measurement.", sqle); ingo@2832: } ingo@2832: catch (ConstraintViolationException cve) { ingo@2832: log.error("Error while storing flow velocity measurement.", cve); ingo@2832: } ingo@2832: } ingo@2826: } ingo@2826: } ingo@2826: ingo@2839: ingo@2839: public void storeSedimentYield() { ingo@2839: if (!Config.INSTANCE.skipSedimentYield()) { ingo@2839: log.info("store sediment yield data"); ingo@2839: ingo@2839: River river = getPeer(); ingo@2839: ingo@2839: for (ImportSedimentYield sedimentYield: sedimentYields) { ingo@2839: try { ingo@2839: sedimentYield.storeDependencies(river); ingo@2839: } ingo@2839: catch (SQLException sqle) { ingo@2839: log.error("Error while storing sediment yield.", sqle); ingo@2839: } ingo@2839: catch (ConstraintViolationException cve) { ingo@2839: log.error("Error while storing sediment yield.", cve); ingo@2839: } ingo@2839: } ingo@2839: } ingo@2839: } ingo@2839: ingo@2844: ingo@2844: public void storeWaterlevels() { ingo@2844: if (!Config.INSTANCE.skipWaterlevels()) { ingo@2844: log.info("store waterlevels"); ingo@2844: ingo@2844: River river = getPeer(); ingo@2844: ingo@2844: for (ImportWaterlevel waterlevel: waterlevels) { ingo@2844: waterlevel.storeDependencies(river); ingo@2844: } ingo@2844: } ingo@2844: } ingo@2844: ingo@2844: ingo@2851: public void storeWaterlevelDifferences() { ingo@2851: if (!Config.INSTANCE.skipWaterlevelDifferences()) { ingo@2851: log.info("store waterlevel differences"); ingo@2851: ingo@2851: River river = getPeer(); ingo@2851: ingo@2851: for (ImportWaterlevelDifference diff: waterlevelDiffs) { ingo@2851: try { ingo@2851: diff.storeDependencies(river); ingo@2851: } ingo@2851: catch (SQLException sqle) { ingo@2851: log.error("Error while storing waterlevel diff.", sqle); ingo@2851: } 
                catch (ConstraintViolationException cve) {
                    log.error("Error while storing waterlevel diff.", cve);
                }
            }
        }
    }


    public void storeAnnotations() {
        if (!Config.INSTANCE.skipAnnotations()) {
            River river = getPeer();
            for (ImportAnnotation annotation: annotations) {
                annotation.getPeer(river);
            }
        }
    }

    public void storeGauges() {
        if (!Config.INSTANCE.skipGauges()) {
            log.info("store gauges:");
            River river = getPeer();
            Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            for (ImportGauge gauge: gauges) {
                log.info("\tgauge: " + gauge.getName());
                gauge.storeDependencies(river);
                ImporterSession.getInstance().getDatabaseSession();
                session.flush();
            }
        }
    }

    public River getPeer() {
        if (peer == null) {
            Session session = ImporterSession.getInstance().getDatabaseSession();
            Query query = session.createQuery("from River where name=:name");

            Unit u = wstUnit.getPeer();

            query.setString("name", name);
            List<River> rivers = query.list();
            if (rivers.isEmpty()) {
                log.info("Store new river '" + name + "'");
                peer = new River(name, u);
                session.save(peer);
            }
            else {
                peer = rivers.get(0);
            }
        }
        return peer;
    }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :