view flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java @ 5261:1e403a0efc21

ImportRiver: Light cleanup; waterlevel import more parallel to other imports.
author Felix Wolfsteller <felix.wolfsteller@intevation.de>
date Wed, 13 Mar 2013 09:19:33 +0100
parents 35b78d8c44b8
children 0d9ad1c432b8
line wrap: on
line source
package de.intevation.flys.importer;

import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.exception.ConstraintViolationException;

import de.intevation.artifacts.common.utils.FileTools;
import de.intevation.artifacts.common.utils.FileTools.HashedFile;
import de.intevation.flys.importer.parsers.AnnotationClassifier;
import de.intevation.flys.importer.parsers.AnnotationsParser;
import de.intevation.flys.importer.parsers.BedHeightEpochParser;
import de.intevation.flys.importer.parsers.BedHeightSingleParser;
import de.intevation.flys.importer.parsers.CrossSectionParser;
import de.intevation.flys.importer.parsers.DA50Parser;
import de.intevation.flys.importer.parsers.DA66Parser;
import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser;
import de.intevation.flys.importer.parsers.FlowVelocityModelParser;
import de.intevation.flys.importer.parsers.HYKParser;
import de.intevation.flys.importer.parsers.MeasurementStationsParser;
import de.intevation.flys.importer.parsers.MorphologicalWidthParser;
import de.intevation.flys.importer.parsers.PRFParser;
import de.intevation.flys.importer.parsers.PegelGltParser;
import de.intevation.flys.importer.parsers.SQRelationParser;
import de.intevation.flys.importer.parsers.SedimentDensityParser;
import de.intevation.flys.importer.parsers.SedimentYieldParser;
import de.intevation.flys.importer.parsers.W80Parser;
import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
import de.intevation.flys.importer.parsers.WaterlevelParser;
import de.intevation.flys.importer.parsers.WstParser;
import de.intevation.flys.model.River;
import de.intevation.flys.model.Unit;


/** Import all river-related data (files) that can be found. */
public class ImportRiver
{
    /** Private logger. */
    private static Logger log = Logger.getLogger(ImportRiver.class);

    /** Gauge definition file, expected next to the main wst file. */
    public static final String PEGEL_GLT = "PEGEL.GLT";

    /** Directory containing fixation wst files. */
    public static final String FIXATIONS = "Fixierungen";

    /** Directory containing extra longitudinal section wst files. */
    public static final String EXTRA_LONGITUDINALS =
        "Zus.L\u00e4ngsschnitte";

    /** Folders that are searched for the official lines wst file. */
    public static final String [] OFFICIAL_LINES_FOLDERS = {
        "Basisdaten",
        "Fixierungen" };

    /** File name of official lines wst files. */
    public static final String OFFICIAL_LINES =
        "Amtl_Linien.wst";

    /** Directory containing flood water wst files. */
    public static final String FLOOD_WATER = "HW-Marken";

    /** Directory containing flood protection wst files. */
    public static final String FLOOD_PROTECTION =
        "HW-Schutzanlagen";

    /** Morphology (MINFO) base directory below the river directory. */
    public static final String MINFO_DIR = "Morphologie";

    /** Bed height directory below the MINFO directory. */
    public static final String BED_HEIGHT_DIR = "Sohlhoehen";

    /** Single-year bed height sub-directory. */
    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

    /** Epoch bed height sub-directory. */
    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";

    /** Sediment density directory below the MINFO directory. */
    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

    /** Morphological width directory below the MINFO directory. */
    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";

    /** Flow velocity directory below the MINFO directory. */
    public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";

    /** Flow velocity model calculation sub-directory. */
    public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";

    /** Flow velocity measurement sub-directory. */
    public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";

    /** Sediment yield directory below the MINFO directory. */
    public static final String SEDIMENT_YIELD_DIR = "Fracht";

    /** Single-year sediment yield sub-directory. */
    public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";

    /** Epoch sediment yield sub-directory. */
    public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";

    /** Fixation analysis directory below the MINFO directory. */
    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";

    /** Waterlevel sub-directory of the fixation analysis directory. */
    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";

    /** Waterlevel difference sub-directory of the fixation analysis
     * directory. */
    public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";

    /** Base data directory below the MINFO directory. */
    public static final String MINFO_BASE_DIR = "Basisdaten";

    /** Measurement station core data file in the MINFO base directory. */
    public static final String MINFO_CORE_DATA_FILE = "Stammdaten_Messstellen.csv";

    /** SQ relation directory below the MINFO directory. */
    public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung";

    /** Name of the river. */
    protected String name;

    /** Official number; written to the database river unless skipped. */
    protected Long officialNumber;

    /** The river's main wst file; its parent directories are used to
     * locate all other import files. */
    protected File wstFile;

    /** BB info file; only accessed via getBBInfo()/setBBInfo() here. */
    protected File bbInfoFile;

    /** Gauges parsed from PEGEL.GLT (with their dependencies). */
    protected List<ImportGauge> gauges;

    /** Annotations found below the river directory. */
    protected List<ImportAnnotation> annotations;

    /** HYK structures found below the river directory. */
    protected List<ImportHYK> hyks;

    /** Cross sections collected from PRF/DA66/DA50/W80 parsers. */
    protected List<ImportCrossSection> crossSections;

    /** Wsts from extra longitudinal section files (kind 1). */
    protected List<ImportWst> extraWsts;

    /** Wsts from fixation files (kind 2). */
    protected List<ImportWst> fixations;

    /** Wsts from official lines files (kind 3). */
    protected List<ImportWst> officialLines;

    /** Wsts from flood water files (kind 4). */
    protected List<ImportWst> floodWater;

    /** Wsts from flood protection files (kind 5). */
    protected List<ImportWst> floodProtection;

    /** Wst-structures from waterlevel-csv files (kind 6). */
    protected List<ImportWst> waterlevels;

    /** Wst-structures from waterlevel-difference-csv files (kind 7). */
    protected List<ImportWst> waterlevelDifferences;

    /** Single-year bed heights. */
    protected List<ImportBedHeight> bedHeightSingles;

    /** Epoch bed heights. */
    protected List<ImportBedHeight> bedHeightEpochs;

    /** Sediment densities from the MINFO directory. */
    protected List<ImportSedimentDensity> sedimentDensities;

    /** Morphological widths from the MINFO directory. */
    protected List<ImportMorphWidth> morphologicalWidths;

    /** Flow velocity models. */
    protected List<ImportFlowVelocityModel> flowVelocityModels;

    /** Flow velocity measurements. */
    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;

    /** Sediment yields (single years and epochs). */
    protected List<ImportSedimentYield> sedimentYields;

    /** Measurement stations from the core data file. */
    protected List<ImportMeasurementStation> measurementStations;

    /** SQ relations. */
    protected List<ImportSQRelation> sqRelations;

    /** The main wst of this river. */
    protected ImportWst wst;

    // NOTE(review): presumably the unit of the main wst; not set within
    // this part of the file -- confirm against storeWstUnit().
    protected ImportUnit wstUnit;

    /** Classifier handed to the AnnotationsParser. */
    protected AnnotationClassifier annotationClassifier;

    /** Database-mapped River instance. */
    protected River peer;


    /** Callback-implementation for CrossSectionParsers. */
    class ImportRiverCrossSectionParserCallback implements CrossSectionParser.Callback {

        /** Hashes of all files seen so far, for duplicate detection. */
        Set<HashedFile> files = new HashSet<HashedFile>();

        /** Label used in log output only. */
        String type;

        /**
         * Create new Callback, given type which is used for logging
         * purposes only.
         */
        public ImportRiverCrossSectionParserCallback (String type) {
            this.type = type;
        }

        /** Accept the file unless its content hash was seen before. */
        public boolean accept(File file) {
            boolean isNew = files.add(new HashedFile(file));
            if (!isNew) {
                log.warn(type + " file '" + file + "' seems to be a duplicate.");
            }
            return isNew;
        }

        /** Collect the cross sections of a finished parser run. */
        public void parsed(CrossSectionParser parser) {
            log.debug("callback from " + type + " parser");
            addCrossSections(parser);
        }
    } // ImportRiverCrossSectionParserCallback


    /** Create an ImportRiver with empty collections for all data types. */
    public ImportRiver() {
        extraWsts = new ArrayList<ImportWst>();
        fixations = new ArrayList<ImportWst>();
        officialLines = new ArrayList<ImportWst>();
        floodWater = new ArrayList<ImportWst>();
        floodProtection = new ArrayList<ImportWst>();
        waterlevels = new ArrayList<ImportWst>();
        waterlevelDifferences = new ArrayList<ImportWst>();
        hyks = new ArrayList<ImportHYK>();
        crossSections = new ArrayList<ImportCrossSection>();
        sedimentDensities = new ArrayList<ImportSedimentDensity>();
        morphologicalWidths = new ArrayList<ImportMorphWidth>();
        flowVelocityModels = new ArrayList<ImportFlowVelocityModel>();
        flowVelocityMeasurements = new ArrayList<ImportFlowVelocityMeasurement>();
        sedimentYields = new ArrayList<ImportSedimentYield>();
        measurementStations = new ArrayList<ImportMeasurementStation>();
        sqRelations = new ArrayList<ImportSQRelation>();
    }

    /**
     * Create a named ImportRiver.
     *
     * @param name                 river name.
     * @param wstFile              the river's main wst file.
     * @param bbInfoFile           BB info file.
     * @param annotationClassifier classifier for annotation parsing.
     */
    public ImportRiver(
        String               name,
        File                 wstFile,
        File                 bbInfoFile,
        AnnotationClassifier annotationClassifier
    ) {
        this();
        this.name = name;
        this.wstFile = wstFile;
        this.bbInfoFile = bbInfoFile;
        this.annotationClassifier = annotationClassifier;
    }

    /** Get the river's name. */
    public String getName() {
        return name;
    }

    /** Set the river's name. */
    public void setName(String name) {
        this.name = name;
    }

    /** Get the official number. */
    public Long getOfficialNumber() {
        return this.officialNumber;
    }

    /** Set the official number. */
    public void setOfficialNumber(Long officialNumber) {
        this.officialNumber = officialNumber;
    }

    /** Get the river's main wst file. */
    public File getWstFile() {
        return wstFile;
    }

    /** Set the river's main wst file. */
    public void setWstFile(File wstFile) {
        this.wstFile = wstFile;
    }

    /** Get the BB info file. */
    public File getBBInfo() {
        return bbInfoFile;
    }

    /** Set the BB info file. */
    public void setBBInfo(File bbInfoFile) {
        this.bbInfoFile = bbInfoFile;
    }

    /** Get the main wst. */
    public ImportWst getWst() {
        return wst;
    }

    /** Set the main wst. */
    public void setWst(ImportWst wst) {
        this.wst = wst;
    }

    /** Get the MINFO directory, located in the river directory
     * three levels above the wst file. */
    public File getMinfoDir() {
        File riverDir  = wstFile.getParentFile().getParentFile().getParentFile();
        return new File(riverDir, MINFO_DIR);
    }

    /**
     * Parse all river data (gauges, annotations, cross sections, wsts
     * and MINFO data) found relative to the wst file.
     *
     * Each step can be skipped individually via Config; the call order
     * mirrors the order used in storeDependencies().
     */
    public void parseDependencies() throws IOException {
        parseGauges();
        parseAnnotations();
        parsePRFs();
        parseDA66s();
        parseDA50s();
        parseW80s();
        parseHYKs();
        parseWst();
        parseExtraWsts();
        parseFixations();
        parseOfficialLines();
        parseFloodWater();
        parseFloodProtection();
        parseBedHeight();
        parseSedimentDensity();
        parseMorphologicalWidth();
        parseFlowVelocity();
        parseSedimentYield();
        parseWaterlevels();
        parseWaterlevelDifferences();
        parseMeasurementStations();
        parseSQRelation();
    }

    /** Find and parse flood protection (.zus/.wst) files as kind-5 wsts. */
    public void parseFloodProtection() throws IOException {
        if (Config.INSTANCE.skipFloodProtection()) {
            log.info("skip parsing flood protection");
            return;
        }

        log.info("Parse flood protection wst file");

        File riverDir = wstFile.getParentFile().getParentFile();
        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        File [] entries = dir.listFiles();
        if (entries == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File entry: entries) {
            if (!entry.isFile() || !entry.canRead()) {
                continue;
            }
            // Only .zus and .wst files are flood protection data.
            String fileName = entry.getName().toLowerCase();
            if (!fileName.endsWith(".zus") && !fileName.endsWith(".wst")) {
                continue;
            }
            log.info("found file '" + entry.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(entry);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(5);
            importWst.setDescription(FLOOD_PROTECTION + "/" + importWst.getDescription());
            floodProtection.add(importWst);
        }
    }

    /** Store the official number on the database river, unless skipped. */
    public void storeOfficialNumber() {
        if (Config.INSTANCE.skipBWASTR()) {
            log.info("skip storing official number.");
        }
        else {
            getPeer().setOfficialNumber(officialNumber);
        }
    }

    /** Parse single-year and epoch bed heights below the MINFO dir. */
    public void parseBedHeight() throws IOException {
        File bedHeightDir = new File(getMinfoDir(), BED_HEIGHT_DIR);

        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("Parse bed height single.");
            parseBedHeightSingles(new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR));
        }
        else {
            log.info("skip parsing bed height single.");
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("Parse bed height epochs.");
            parseBedHeightEpochs(new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR));
        }
        else {
            log.info("skip parsing bed height epochs.");
        }
    }


    /** Parse all files in the MINFO sediment density directory. */
    protected void parseSedimentDensity() throws IOException {
        if (Config.INSTANCE.skipSedimentDensity()) {
            log.info("skip parsing sediment density.");
            return;
        }

        log.debug("Parse sediment density");

        File sediment = new File(getMinfoDir(), SEDIMENT_DENSITY_DIR);
        File[] densityFiles = sediment.listFiles();

        if (densityFiles == null) {
            log.warn("Cannot read directory '" + sediment + "'");
            return;
        }

        SedimentDensityParser parser = new SedimentDensityParser();
        for (File densityFile: densityFiles) {
            parser.parse(densityFile);
        }

        sedimentDensities = parser.getSedimentDensities();
        log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
    }


    /** Parse all files in the MINFO morphological width directory. */
    protected void parseMorphologicalWidth() throws IOException {
        if (Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("skip parsing morphological width.");
            return;
        }

        log.debug("Parse morphological width");

        File morphDir = new File(getMinfoDir(), MORPHOLOGICAL_WIDTH_DIR);
        File[] widthFiles = morphDir.listFiles();

        if (widthFiles == null) {
            log.warn("Cannot read directory '" + morphDir + "'");
            return;
        }

        MorphologicalWidthParser parser = new MorphologicalWidthParser();
        for (File widthFile: widthFiles) {
            parser.parse(widthFile);
        }

        morphologicalWidths = parser.getMorphologicalWidths();
        log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
    }


    /** Parse flow velocity model and measurement files. */
    protected void parseFlowVelocity() throws IOException {
        if (Config.INSTANCE.skipFlowVelocity()) {
            log.info("skip parsing flow velocity");
            return;
        }

        log.debug("Parse flow velocity");

        File flowDir    = new File(getMinfoDir(), FLOW_VELOCITY_DIR);
        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);

        File[] modelFiles   = modelDir.listFiles();
        File[] measureFiles = measureDir.listFiles();

        if (modelFiles != null) {
            FlowVelocityModelParser parser = new FlowVelocityModelParser();
            for (File model: modelFiles) {
                log.debug("Parse file '" + model + "'");
                parser.parse(model);
            }
            flowVelocityModels = parser.getModels();
        }
        else {
            log.warn("Cannot read directory '" + modelDir + "'");
        }

        if (measureFiles != null) {
            FlowVelocityMeasurementParser parser =
                new FlowVelocityMeasurementParser();
            for (File measurement: measureFiles) {
                log.debug("Parse file '" + measurement + "'");
                parser.parse(measurement);
            }
            flowVelocityMeasurements = parser.getMeasurements();
        }
        else {
            log.warn("Cannot read directory '" + measureDir + "'");
        }
    }


    /**
     * Parse sediment yield data from the single-year and epoch
     * directories below the MINFO "Fracht" directory.
     *
     * Files directly in those directories are parsed as-is; one level
     * of sub-directories is descended into.
     */
    protected void parseSedimentYield() throws IOException {
        if (Config.INSTANCE.skipSedimentYield()) {
            log.info("skip parsing sediment yield data");
            return;
        }

        log.debug("Parse sediment yield data");

        File sedimentYieldDir = new File(getMinfoDir(), SEDIMENT_YIELD_DIR);

        SedimentYieldParser parser = new SedimentYieldParser();

        parseSedimentYieldDir(
            new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR), parser);
        parseSedimentYieldDir(
            new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR), parser);

        sedimentYields = parser.getSedimentYields();
    }

    /** Feed all files in dir (one sub-directory level deep) to parser. */
    private void parseSedimentYieldDir(
        File dir,
        SedimentYieldParser parser
    ) throws IOException {
        File[] files = dir.listFiles();

        if (files == null || files.length == 0) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (file.isDirectory()) {
                // listFiles() returns null on I/O errors or unreadable
                // directories; guard against the NPE the old code had here.
                File[] children = file.listFiles();
                if (children == null) {
                    log.warn("Cannot read directory '" + file + "'");
                    continue;
                }
                for (File child: children) {
                    parser.parse(child);
                }
            }
            else {
                parser.parse(file);
            }
        }
    }


    /**
     * Parse waterlevel CSV files; results are converted into wsts
     * of kind 6 with a "CSV/" description prefix.
     */
    protected void parseWaterlevels() throws IOException {
        if (Config.INSTANCE.skipWaterlevels()) {
            log.info("skip parsing waterlevels");
            return;
        }

        log.info("Parse waterlevels");

        File wspDir = new File(
            new File(getMinfoDir(), MINFO_FIXATIONS_DIR),
            MINFO_WATERLEVELS_DIR);

        File[] wspFiles = wspDir.listFiles();

        if (wspFiles == null) {
            log.warn("Cannot read directory '" + wspDir + "'");
            return;
        }

        WaterlevelParser parser = new WaterlevelParser();
        for (File wspFile: wspFiles) {
            parser.parse(wspFile);
        }

        // TODO use own List<ImportWst> for waterlevels:
        // the parsed waterlevels are turned into 'fixation'-wsts here.
        for (ImportWst importWst: parser.getWaterlevels()) {
            importWst.setDescription("CSV/" + importWst.getDescription());
            importWst.setKind(6);
            waterlevels.add(importWst);
        }
    }

    /**
     * Parse the measurement station core data file from the MINFO
     * base directory. Parse errors are logged, not propagated.
     */
    protected void parseMeasurementStations() throws IOException {
        if (Config.INSTANCE.skipMeasurementStations()) {
            log.info("skip parsing measurement stations");
            return;
        }

        log.info("Parse measurement stations");

        File minfoBaseDir = new File(getMinfoDir(), MINFO_BASE_DIR);
        File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE);

        // new File(...) never yields null; only the existence check is needed.
        if (!coredataFile.exists()) {
            log.warn("No core data file '" + MINFO_CORE_DATA_FILE + "' found");
            return;
        }

        MeasurementStationsParser parser = new MeasurementStationsParser();
        try {
            parser.parse(coredataFile);
            measurementStations = parser.getMeasurementStations();

            log.info("Successfully parsed " + measurementStations.size() + " measurement stations.");
        }
        catch (IOException ioe) {
            // Was missing the closing quote after the file name.
            log.error("unable to parse file '" + coredataFile.getName() +
                "': " + ioe.getMessage());
        }
    }


    /**
     * Parse waterlevel difference CSV files; results are converted
     * into wsts of kind 7 with a "CSV/" description prefix.
     */
    protected void parseWaterlevelDifferences() throws IOException {
        if (Config.INSTANCE.skipWaterlevelDifferences()) {
            log.info("skip parsing waterlevel differences");
            return;
        }

        log.info("Parse waterlevel differences");

        File diffDir = new File(
            new File(getMinfoDir(), MINFO_FIXATIONS_DIR),
            MINFO_WATERLEVEL_DIFF_DIR);

        File[] diffFiles = diffDir.listFiles();

        if (diffFiles == null) {
            log.warn("Cannot read directory '" + diffDir + "'");
            return;
        }

        WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
        for (File diffFile: diffFiles) {
            parser.parse(diffFile);
        }

        // WaterlevelDifferences become Wsts now.
        for (ImportWst importWst: parser.getDifferences()) {
            importWst.setDescription("CSV/" + importWst.getDescription());
            importWst.setKind(7);
            waterlevelDifferences.add(importWst);
        }
    }


    /** Parse all files in the MINFO SQ relation directory. */
    protected void parseSQRelation() throws IOException {
        if (Config.INSTANCE.skipSQRelation()) {
            log.info("skip parsing sq relation");
            return;
        }

        log.info("Parse sq relations");

        File sqDir = new File(getMinfoDir(), MINFO_SQ_DIR);
        File[] sqFiles = sqDir.listFiles();

        if (sqFiles == null) {
            log.warn("Cannot read directory '" + sqDir + "'");
            return;
        }

        SQRelationParser parser = new SQRelationParser();
        for (File sqFile: sqFiles) {
            parser.parse(sqFile);
        }

        sqRelations = parser.getSQRelations();
        log.debug("Parsed " + sqRelations.size() + " SQ relations.");
    }


    /** Parse all single-year bed height files found in dir. */
    protected void parseBedHeightSingles(File dir) throws IOException {
        log.debug("Parse bed height singles");

        File[] heightFiles = dir.listFiles();

        if (heightFiles == null) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        BedHeightSingleParser parser = new BedHeightSingleParser();
        for (File heightFile: heightFiles) {
            parser.parse(heightFile);
        }

        bedHeightSingles = parser.getBedHeights();
    }


    /** Parse all epoch bed height files found in dir. */
    protected void parseBedHeightEpochs(File dir) throws IOException {
        log.debug("Parse bed height epochs");

        File[] heightFiles = dir.listFiles();

        if (heightFiles == null) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        BedHeightEpochParser parser = new BedHeightEpochParser();
        for (File heightFile: heightFiles) {
            parser.parse(heightFile);
        }

        bedHeightEpochs = parser.getBedHeights();
    }


    /** Find and parse flood water (.zus/.wst) files as kind-4 wsts. */
    public void parseFloodWater() throws IOException {
        if (Config.INSTANCE.skipFloodWater()) {
            // Fixed typo: was "flod water".
            log.info("skip parsing flood water");
            return;
        }

        log.info("Parse flood water wst file");

        File riverDir = wstFile.getParentFile().getParentFile();

        File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        File [] files = dir.listFiles();

        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            // Only .zus and .wst files are flood water data.
            String name = file.getName().toLowerCase();
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(4);
            iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
            floodWater.add(iw);
        }
    }

    /** Parse official lines wst files from the known folders (kind 3). */
    public void parseOfficialLines() throws IOException {
        if (Config.INSTANCE.skipOfficialLines()) {
            log.info("skip parsing official lines");
            return;
        }

        log.info("Parse official wst files");

        File riverDir = wstFile.getParentFile().getParentFile();

        for (String folder: OFFICIAL_LINES_FOLDERS) {
            File dir = FileTools.repair(new File(riverDir, folder));

            if (!dir.isDirectory() || !dir.canRead()) {
                log.info("no directory '" + folder + "' found");
                continue;
            }

            File olFile = FileTools.repair(new File(dir, OFFICIAL_LINES));
            if (!olFile.isFile() || !olFile.canRead()) {
                log.warn("no official lines wst file found");
                continue;
            }
            log.debug("Found WST file: " + olFile);

            WstParser wstParser = new WstParser();
            wstParser.parse(olFile);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(3);
            importWst.setDescription(folder + "/" + importWst.getDescription());
            officialLines.add(importWst);
        }
    }

    /** Parse all fixation wst files (kind 2). */
    public void parseFixations() throws IOException {
        if (Config.INSTANCE.skipFixations()) {
            log.info("skip parsing fixations");
            return;
        }

        log.info("Parse fixation wst files");

        File riverDir = wstFile.getParentFile().getParentFile();
        File fixDir = FileTools.repair(new File(riverDir, FIXATIONS));

        if (!fixDir.isDirectory() || !fixDir.canRead()) {
            log.info("no fixation wst file directory found");
            return;
        }

        File [] entries = fixDir.listFiles();
        if (entries == null) {
            log.warn("cannot read fixations wst file directory");
            return;
        }

        for (File entry: entries) {
            if (!entry.isFile() || !entry.canRead()) {
                continue;
            }
            if (!entry.getName().toLowerCase().endsWith(".wst")) {
                continue;
            }
            log.debug("Found WST file: " + entry);

            WstParser wstParser = new WstParser();
            wstParser.parse(entry);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(2);
            importWst.setDescription(FIXATIONS + "/" + importWst.getDescription());
            fixations.add(importWst);
        }
    }

    /** Parse all extra longitudinal section wst files (kind 1). */
    public void parseExtraWsts() throws IOException {
        if (Config.INSTANCE.skipExtraWsts()) {
            log.info("skip parsing extra WST files");
            return;
        }

        log.info("Parse extra longitudinal wst files");

        File riverDir = wstFile.getParentFile().getParentFile();
        File extraDir = FileTools.repair(new File(riverDir, EXTRA_LONGITUDINALS));

        if (!extraDir.isDirectory() || !extraDir.canRead()) {
            log.info("no extra longitudinal wst file directory found");
            return;
        }

        File [] entries = extraDir.listFiles();
        if (entries == null) {
            log.warn("cannot read extra longitudinal wst file directory");
            return;
        }

        for (File entry: entries) {
            if (!entry.isFile() || !entry.canRead()) {
                continue;
            }
            // Only .zus and .wst files are extra longitudinal sections.
            String fileName = entry.getName().toLowerCase();
            if (!fileName.endsWith(".zus") && !fileName.endsWith(".wst")) {
                continue;
            }
            log.debug("Found WST file: " + entry);

            WstParser wstParser = new WstParser();
            wstParser.parse(entry);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(1);
            importWst.setDescription(EXTRA_LONGITUDINALS + "/" + importWst.getDescription());
            extraWsts.add(importWst);
        }
    }

    /** Parse the river's main wst file. */
    public void parseWst() throws IOException {
        if (Config.INSTANCE.skipWst()) {
            log.info("skip parsing WST file");
            return;
        }

        // Log for consistency with the other parse methods.
        log.info("Parse WST file '" + wstFile + "'");

        WstParser wstParser = new WstParser();
        wstParser.parse(wstFile);
        wst = wstParser.getWst();
    }

    /** Parse PEGEL.GLT and the dependencies of each gauge found. */
    public void parseGauges() throws IOException {
        if (Config.INSTANCE.skipGauges()) {
            log.info("skip parsing gauges");
            return;
        }

        File gltFile = FileTools.repair(
            new File(wstFile.getParentFile(), PEGEL_GLT));

        if (!gltFile.isFile() || !gltFile.canRead()) {
            log.warn("cannot read gauges from '" + gltFile + "'");
            return;
        }

        PegelGltParser pgltp = new PegelGltParser();
        pgltp.parse(gltFile);
        gauges = pgltp.getGauges();

        for (ImportGauge gauge: gauges) {
            gauge.parseDependencies();
        }
    }

    /** Parse annotations found below the river directory. */
    public void parseAnnotations() throws IOException {
        if (Config.INSTANCE.skipAnnotations()) {
            log.info("skip parsing annotations");
            return;
        }

        File riverDir = wstFile.getParentFile().getParentFile();

        AnnotationsParser parser = new AnnotationsParser(annotationClassifier);
        parser.parse(riverDir);
        annotations = parser.getAnnotations();
    }

    /** Search for and parse HYK files below the river directory. */
    public void parseHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            log.info("skip parsing HYK files");
            return;
        }

        log.info("looking for HYK files");

        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        HYKParser parser = new HYKParser();
        parser.parseHYKs(riverDir, new HYKParser.Callback() {

            /** Hashes of files already parsed, to skip duplicates. */
            Set<HashedFile> seen = new HashSet<HashedFile>();

            @Override
            public boolean hykAccept(File file) {
                boolean isNew = seen.add(new HashedFile(file));
                if (!isNew) {
                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
                }
                return isNew;
            }

            @Override
            public void hykParsed(HYKParser parser) {
                log.debug("callback from HYK parser");
                ImportHYK hyk = parser.getHYK();
                hyk.setRiver(ImportRiver.this);
                hyks.add(hyk);
            }
        });
    }


    /**
     * Convert a parser's result into an ImportCrossSection (with
     * description, optional time interval and lines) and store it.
     */
    private void addCrossSections(CrossSectionParser parser) {
        Integer year = parser.getYear();
        ImportTimeInterval ti = (year == null)
            ? null
            : new ImportTimeInterval(yearToDate(year));

        Map<Double, List<XY>> data = parser.getData();

        List<ImportCrossSectionLine> lines =
            new ArrayList<ImportCrossSectionLine>(data.size());

        for (Map.Entry<Double, List<XY>> entry: data.entrySet()) {
            lines.add(
                new ImportCrossSectionLine(entry.getKey(), entry.getValue()));
        }

        crossSections.add(new ImportCrossSection(
            this, parser.getDescription(), ti, lines));
    }

    /** Create a W80 Parser and parse w80 files found. */
    public void parseW80s() {
        if (Config.INSTANCE.skipW80s()) {
            log.info("skip parsing W80s");
            return;
        }

        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        new W80Parser().parseW80s(
            riverDir,
            new ImportRiverCrossSectionParserCallback("w80"));
    }


    /**
     * Create and use a DA50Parser, parse the files found, add the
     * cross-sections found.
     */
    public void parseDA50s() {
        if (Config.INSTANCE.skipDA50s()) {
            log.info("skip parsing DA50s");
            return;
        }

        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        new DA50Parser().parseDA50s(
            riverDir,
            new ImportRiverCrossSectionParserCallback("da50"));
    }


    /** Create a DA66 Parser and parse the da66 files found. */
    // TODO this is a copy of parsePRFs, extract interfaces (e.g. CrossSectionParser).
    public void parseDA66s() {
        if (Config.INSTANCE.skipDA66s()) {
            log.info("skip parsing DA66s");
            return;
        }

        log.info("looking for DA66 files");

        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        new DA66Parser().parseDA66s(
            riverDir,
            new ImportRiverCrossSectionParserCallback("da66"));
    }

    /** Locate and parse all prf files below the river directory. */
    public void parsePRFs() {
        if (Config.INSTANCE.skipPRFs()) {
            log.info("skip parsing PRFs");
            return;
        }

        log.info("looking for PRF files");

        // Walk up .../<river>/Hydrologie/Basisdaten to the river dir.
        File riverDir =
            wstFile.getParentFile().getParentFile().getParentFile();

        PRFParser parser = new PRFParser();
        parser.parsePRFs(
            riverDir,
            new ImportRiverCrossSectionParserCallback("prf"));
    }

    /**
     * Map a plain year to a representative mid-year date:
     * June 15th (month index 5), 12:00:00.000 of that year.
     *
     * @param year the calendar year.
     * @return noon of June 15th of the given year, milliseconds zeroed.
     */
    public static Date yearToDate(int year) {
        Calendar cal = Calendar.getInstance();
        cal.set(year, 5, 15, 12, 0, 0);
        // Calendar.set(int...) leaves the MILLISECOND field at the
        // current time; zero it directly instead of truncating the
        // epoch millis with modulo arithmetic.
        cal.set(Calendar.MILLISECOND, 0);
        return cal.getTime();
    }

    /**
     * Store all parsed data of this river in the database.
     *
     * The order matters: storeWstUnit() must run first, because
     * getPeer() -- called by every following step -- may create the
     * river record using that unit. The remaining steps each check
     * their own skip-flag in Config.
     */
    public void storeDependencies() {
        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeBedHeight();
        storeSedimentDensity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentYield();
        storeWaterlevels();
        storeWaterlevelDifferences();
        storeMeasurementStations();
        storeSQRelations();
        storeOfficialNumber();
    }

    /** Take the unit from the parsed wst, defaulting to "NN + m". */
    public void storeWstUnit() {
        wstUnit = (wst == null)
            ? new ImportUnit("NN + m")
            : wst.getUnit();
    }

    /** Store the parsed HYKs unless skipped via config. */
    public void storeHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            return;
        }
        log.info("store HYKs");
        getPeer();
        for (ImportHYK hyk: hyks) {
            hyk.storeDependencies();
        }
    }

    /** Store cross sections if any cross-section input type was parsed. */
    public void storeCrossSections() {
        boolean allSourcesSkipped = Config.INSTANCE.skipPRFs()
            && Config.INSTANCE.skipDA66s()
            && Config.INSTANCE.skipDA50s()
            && Config.INSTANCE.skipW80s();

        if (allSourcesSkipped) {
            return;
        }

        log.info("store cross sections");
        getPeer();
        for (ImportCrossSection crossSection: crossSections) {
            crossSection.storeDependencies();
        }
    }

    /**
     * Store the main wst and its dependencies.
     * Guards against a missing wst (storeWstUnit() shows that
     * {@code wst} may legitimately be null) instead of throwing a NPE.
     */
    public void storeWst() {
        if (!Config.INSTANCE.skipWst()) {
            if (wst == null) {
                log.warn("No wst to store.");
                return;
            }
            River river = getPeer();
            wst.storeDependencies(river);
        }
    }

    /** Store fixation wsts (also required when only waterlevels run). */
    public void storeFixations() {
        if (Config.INSTANCE.skipFixations()
            && Config.INSTANCE.skipWaterlevels()) {
            return;
        }
        log.info("store fixation wsts and/or csvs");
        River river = getPeer();
        for (ImportWst fixation: fixations) {
            log.debug("Fixation name: " + fixation.getDescription());
            fixation.storeDependencies(river);
        }
    }


    /**
     * Store wsts from waterlevel-csv files.
     * Fixed: the skip-check was a braceless {@code if} guarding only
     * the log statement, so waterlevels were stored even when the
     * skip flag was set.
     */
    public void storeWaterlevels() {
        if (Config.INSTANCE.skipWaterlevels()) {
            return;
        }

        log.info("store waterlevel wsts from csv");
        River river = getPeer();
        for (ImportWst wWst: waterlevels) {
            log.debug("Waterlevel name: " + wWst.getDescription());
            wWst.storeDependencies(river);
        }
    }

    /**
     * Store wsts from waterleveldifference-csv files.
     * Fixed: the skip-check was a braceless {@code if} guarding only
     * the log statement, so differences were stored even when the
     * skip flag was set.
     */
    public void storeWaterlevelDifferences() {
        if (Config.INSTANCE.skipWaterlevelDifferences()) {
            return;
        }

        log.info("store waterleveldifferences wsts from csv");
        River river = getPeer();
        for (ImportWst dWst: waterlevelDifferences) {
            log.debug("water.diff.: name " + dWst.getDescription());
            dWst.storeDependencies(river);
        }
    }

    /** Store the extra wsts and their dependencies. */
    public void storeExtraWsts() {
        if (Config.INSTANCE.skipExtraWsts()) {
            return;
        }
        log.info("store extra wsts");
        River river = getPeer();
        for (ImportWst extraWst: extraWsts) {
            log.debug("name: " + extraWst.getDescription());
            extraWst.storeDependencies(river);
        }
    }

    /** Store the official-lines wsts and their dependencies. */
    public void storeOfficialLines() {
        if (Config.INSTANCE.skipOfficialLines()) {
            return;
        }
        log.info("store official lines wsts");
        River river = getPeer();
        for (ImportWst officialLine: officialLines) {
            log.debug("name: " + officialLine.getDescription());
            officialLine.storeDependencies(river);
        }
    }

    /** Store the flood-water wsts and their dependencies. */
    public void storeFloodWater() {
        if (Config.INSTANCE.skipFloodWater()) {
            return;
        }
        log.info("store flood water wsts");
        River river = getPeer();
        for (ImportWst floodWst: floodWater) {
            log.debug("name: " + floodWst.getDescription());
            floodWst.storeDependencies(river);
        }
    }

    /** Store the flood-protection wsts and their dependencies. */
    public void storeFloodProtection() {
        if (Config.INSTANCE.skipFloodProtection()) {
            return;
        }
        log.info("store flood protection wsts");
        River river = getPeer();
        for (ImportWst protectionWst: floodProtection) {
            log.debug("name: " + protectionWst.getDescription());
            protectionWst.storeDependencies(river);
        }
    }


    /**
     * Store single and epoch bed heights, each honoring its own
     * skip flag from Config.
     */
    public void storeBedHeight() {
        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("store bed heights single");
            storeBedHeightSingle();
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("store bed height epoch.");
            storeBedHeightEpoch();
        }
    }


    /**
     * Store the single bed heights; files causing database errors
     * are logged and skipped.
     * Fixed: the caught exceptions were dropped from the log call,
     * hiding the actual cause of the failure.
     */
    private void storeBedHeightSingle() {
        River river = getPeer();

        if (bedHeightSingles == null) {
            log.info("No single bed heights to store.");
            return;
        }

        for (ImportBedHeight tmp: bedHeightSingles) {
            ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;

            String desc = single.getDescription();

            log.debug("name: " + desc);

            try {
                single.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("File '" + desc + "' is broken!", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("File '" + desc + "' is broken!", cve);
            }
        }
    }


    /**
     * Store the epoch bed heights; files causing database errors
     * are logged and skipped.
     * Fixed: the caught exceptions were dropped from the log call,
     * hiding the actual cause of the failure.
     */
    private void storeBedHeightEpoch() {
        River river = getPeer();

        if (bedHeightEpochs == null) {
            log.info("No epoch bed heights to store.");
            return;
        }

        for (ImportBedHeight tmp: bedHeightEpochs) {
            ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;

            String desc = epoch.getDescription();

            log.debug("name: " + desc);

            try {
                epoch.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("File '" + desc + "' is broken!", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("File '" + desc + "' is broken!", cve);
            }
        }
    }

    /**
     * Store the sediment densities; files causing database errors
     * are logged and skipped.
     * Fixed: the caught exceptions were dropped from the log call,
     * hiding the actual cause of the failure.
     */
    public void storeSedimentDensity() {
        if (!Config.INSTANCE.skipSedimentDensity()) {
            log.info("store sediment density");

            River river = getPeer();

            for (ImportSedimentDensity density: sedimentDensities) {
                String desc = density.getDescription();

                log.debug("name: " + desc);

                try {
                    density.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    log.error("File '" + desc + "' is broken!", sqle);
                }
                catch (ConstraintViolationException cve) {
                    log.error("File '" + desc + "' is broken!", cve);
                }
            }
        }
    }

    /**
     * Store the morphological widths; database errors are logged
     * and the affected width is skipped.
     */
    public void storeMorphologicalWidth() {
        if (!Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("store morphological width");

            River river = getPeer();

            for (ImportMorphWidth width: morphologicalWidths) {
                try {
                    width.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    // Fixed message: this fails while storing, not parsing.
                    log.error("Error while storing morphological width.", sqle);
                }
                catch (ConstraintViolationException cve) {
                    log.error("Error while storing morphological width.", cve);
                }
            }
        }
    }

    /** Store flow velocity models and measurements. */
    public void storeFlowVelocity() {
        if (Config.INSTANCE.skipFlowVelocity()) {
            return;
        }

        log.info("store flow velocity");

        River river = getPeer();

        for (ImportFlowVelocityModel model: flowVelocityModels) {
            try {
                model.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing flow velocity model.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing flow velocity model.", cve);
            }
        }

        for (ImportFlowVelocityMeasurement measurement: flowVelocityMeasurements) {
            try {
                measurement.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing flow velocity measurement.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing flow velocity measurement.", cve);
            }
        }
    }


    /** Store sediment yields; database errors are logged and skipped. */
    public void storeSedimentYield() {
        if (Config.INSTANCE.skipSedimentYield()) {
            return;
        }

        log.info("store sediment yield data");

        River river = getPeer();

        for (ImportSedimentYield yield: sedimentYields) {
            try {
                yield.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing sediment yield.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing sediment yield.", cve);
            }
        }
    }


    /** Store measurement stations, logging how many succeeded. */
    public void storeMeasurementStations() {
        if (Config.INSTANCE.skipMeasurementStations()) {
            return;
        }

        log.info("store measurement stations");

        River river = getPeer();

        int stored = 0;

        for (ImportMeasurementStation station: measurementStations) {
            try {
                if (station.storeDependencies(river)) {
                    stored++;
                }
            }
            catch (SQLException sqle) {
                log.error("Error while storing measurement station.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing measurement station.", cve);
            }
        }

        log.info("stored " + stored + " measurement stations.");
    }


    /** Store sq relations, logging how many succeeded. */
    public void storeSQRelations() {
        if (Config.INSTANCE.skipSQRelation()) {
            return;
        }

        log.info("store sq relations");

        River river = getPeer();

        int stored = 0;

        for (ImportSQRelation relation: sqRelations) {
            try {
                relation.storeDependencies(river);
                stored++;
            }
            catch (SQLException sqle) {
                log.error("Error while storing sq relation.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing sq relation.", cve);
            }
        }

        log.info("stored " + stored + " sq relations.");
    }


    /**
     * Store the annotations by ensuring their database peers exist.
     * Added the info log line every other store*() method emits,
     * for consistent import progress output.
     */
    public void storeAnnotations() {
        if (!Config.INSTANCE.skipAnnotations()) {
            log.info("store annotations");
            River river = getPeer();
            for (ImportAnnotation annotation: annotations) {
                annotation.getPeer(river);
            }
        }
    }

    /**
     * Store the gauges and their dependencies, flushing the session
     * after each gauge.
     * Removed a dead statement that fetched the database session a
     * second time inside the loop and discarded the result.
     */
    public void storeGauges() {
        if (!Config.INSTANCE.skipGauges()) {
            log.info("store gauges:");
            River river = getPeer();
            Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            for (ImportGauge gauge: gauges) {
                log.info("\tgauge: " + gauge.getName());
                gauge.storeDependencies(river);
                // Flush per gauge to keep the session manageable.
                session.flush();
            }
        }
    }

    /**
     * Fetch the river's database peer, creating and persisting it
     * (with the wst unit and, unless skipped, the official number)
     * if no river of this name exists yet.
     */
    public River getPeer() {
        if (peer != null) {
            return peer;
        }

        Session session = ImporterSession.getInstance().getDatabaseSession();
        Query query = session.createQuery("from River where name=:name");
        query.setString("name", name);

        Unit u = (wstUnit != null) ? wstUnit.getPeer() : null;

        List<River> rivers = query.list();
        if (rivers.isEmpty()) {
            log.info("Store new river '" + name + "'");
            peer = new River(name, u);
            if (!Config.INSTANCE.skipBWASTR()) {
                peer.setOfficialNumber(officialNumber);
            }
            session.save(peer);
        }
        else {
            peer = rivers.get(0);
        }
        return peer;
    }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :

// http://dive4elements.wald.intevation.org