view flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java @ 4173:7d4480c0e68e

Allow users to select the current relevant discharge table in the historical discharge table calculation. In addition to this, the discharge tables in the helper panel displayed in the client are ordered in time.
author Ingo Weinzierl <ingo.weinzierl@intevation.de>
date Thu, 18 Oct 2012 12:13:48 +0200
parents 976ead36192d
children f63b39799d2d
line wrap: on
line source
package de.intevation.flys.importer;

import de.intevation.artifacts.common.utils.FileTools.HashedFile;

import de.intevation.artifacts.common.utils.FileTools;

import de.intevation.flys.importer.parsers.AnnotationClassifier;
import de.intevation.flys.importer.parsers.AnnotationsParser;
import de.intevation.flys.importer.parsers.BedHeightEpochParser;
import de.intevation.flys.importer.parsers.BedHeightSingleParser;
import de.intevation.flys.importer.parsers.FlowVelocityMeasurementParser;
import de.intevation.flys.importer.parsers.FlowVelocityModelParser;
import de.intevation.flys.importer.parsers.HYKParser;
import de.intevation.flys.importer.parsers.MorphologicalWidthParser;
import de.intevation.flys.importer.parsers.PRFParser;
import de.intevation.flys.importer.parsers.PegelGltParser;
import de.intevation.flys.importer.parsers.SedimentDensityParser;
import de.intevation.flys.importer.parsers.SedimentYieldParser;
import de.intevation.flys.importer.parsers.SQRelationParser;
import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
import de.intevation.flys.importer.parsers.WaterlevelParser;
import de.intevation.flys.importer.parsers.WstParser;

import de.intevation.flys.model.River;
import de.intevation.flys.model.Unit;

import java.io.File;
import java.io.IOException;

import java.sql.SQLException;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

import org.hibernate.Query;
import org.hibernate.Session;

import org.hibernate.exception.ConstraintViolationException;

public class ImportRiver
{
    private static Logger log = Logger.getLogger(ImportRiver.class);

    public static final String PEGEL_GLT = "PEGEL.GLT";

    public static final String FIXATIONS = "Fixierungen";

    public static final String EXTRA_LONGITUDINALS =
        "Zus.L\u00e4ngsschnitte";

    public static final String [] OFFICIAL_LINES_FOLDERS = {
        "Basisdaten",
        "Fixierungen" };

    public static final String OFFICIAL_LINES =
        "Amtl_Linien.wst";

    public static final String FLOOD_WATER = "HW-Marken";

    public static final String FLOOD_PROTECTION =
        "HW-Schutzanlagen";

    public static final String MINFO_DIR = "Morphologie";

    public static final String BED_HEIGHT_DIR = "Sohlhoehen";

    public static final String BED_HEIGHT_SINGLE_DIR = "Einzeljahre";

    public static final String BED_HEIGHT_EPOCH_DIR = "Epochen";

    public static final String SEDIMENT_DENSITY_DIR = "Sedimentdichte";

    public static final String MORPHOLOGICAL_WIDTH_DIR = "morphologische_Breite";

    public static final String FLOW_VELOCITY_DIR = "Geschwindigkeit_Schubspannung";

    public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";

    public static final String FLOW_VELOCITY_MEASUREMENTS = "v-Messungen";

    public static final String SEDIMENT_YIELD_DIR = "Fracht";

    public static final String SEDIMENT_YIELD_SINGLE_DIR = "Einzeljahre";

    public static final String SEDIMENT_YIELD_EPOCH_DIR = "Epochen";

    public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";

    public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";

    public static final String MINFO_WATERLEVEL_DIFF_DIR = "Wasserspiegeldifferenzen";

    public static final String MINFO_SQ_DIR = "Feststofftransport-Abfluss-Beziehung";


    protected String name;

    protected File   wstFile;

    protected File   bbInfoFile;

    protected List<ImportGauge> gauges;

    protected List<ImportAnnotation> annotations;

    protected List<ImportHYK> hyks;

    protected List<ImportCrossSection> crossSections;

    protected List<ImportWst> extraWsts;

    protected List<ImportWst> fixations;

    protected List<ImportWst> officialLines;

    protected List<ImportWst> floodWater;

    protected List<ImportWst> floodProtection;

    protected List<ImportBedHeight> bedHeightSingles;

    protected List<ImportBedHeight> bedHeightEpochs;

    protected List<ImportSedimentDensity> sedimentDensities;

    protected List<ImportMorphWidth> morphologicalWidths;

    protected List<ImportFlowVelocityModel> flowVelocityModels;

    protected List<ImportFlowVelocityMeasurement> flowVelocityMeasurements;

    protected List<ImportSedimentYield> sedimentYields;

    protected List<ImportWaterlevel> waterlevels;

    protected List<ImportWaterlevelDifference> waterlevelDiffs;

    protected List<ImportSQRelation> sqRelations;

    protected ImportWst wst;

    protected ImportUnit wstUnit;

    protected AnnotationClassifier annotationClassifier;

    protected River peer;

    /**
     * Creates an empty import river. All collection valued attributes are
     * initialized with empty lists; everything else stays null until it is
     * set explicitly or filled by one of the parse*() methods.
     */
    public ImportRiver() {
        hyks = new ArrayList<ImportHYK>();
        crossSections = new ArrayList<ImportCrossSection>();
        extraWsts = new ArrayList<ImportWst>();
        fixations = new ArrayList<ImportWst>();
        officialLines = new ArrayList<ImportWst>();
        floodWater = new ArrayList<ImportWst>();
        floodProtection = new ArrayList<ImportWst>();
        sedimentDensities = new ArrayList<ImportSedimentDensity>();
        morphologicalWidths = new ArrayList<ImportMorphWidth>();
        flowVelocityModels = new ArrayList<ImportFlowVelocityModel>();
        flowVelocityMeasurements = new ArrayList<ImportFlowVelocityMeasurement>();
        sedimentYields = new ArrayList<ImportSedimentYield>();
        waterlevels = new ArrayList<ImportWaterlevel>();
        waterlevelDiffs = new ArrayList<ImportWaterlevelDifference>();
        sqRelations = new ArrayList<ImportSQRelation>();
    }

    /**
     * Creates an import river with its basic configuration.
     *
     * @param name                 the river's name.
     * @param wstFile              the river's main WST file.
     * @param bbInfoFile           the BB info file.
     * @param annotationClassifier classifier used while parsing annotations.
     */
    public ImportRiver(
        String               name,
        File                 wstFile,
        File                 bbInfoFile,
        AnnotationClassifier annotationClassifier
    ) {
        this();
        this.name = name;
        this.wstFile = wstFile;
        this.bbInfoFile = bbInfoFile;
        this.annotationClassifier = annotationClassifier;
    }

    /** Returns the river's name. */
    public String getName() {
        return name;
    }

    /** Sets the river's name. */
    public void setName(String name) {
        this.name = name;
    }

    /** Returns the river's main WST file. */
    public File getWstFile() {
        return wstFile;
    }

    /** Sets the river's main WST file. */
    public void setWstFile(File wstFile) {
        this.wstFile = wstFile;
    }

    /** Returns the BB info file. */
    public File getBBInfo() {
        return bbInfoFile;
    }

    /** Sets the BB info file. */
    public void setBBInfo(File bbInfoFile) {
        this.bbInfoFile = bbInfoFile;
    }

    /** Returns the parsed main WST, or null if not parsed yet. */
    public ImportWst getWst() {
        return wst;
    }

    /** Sets the river's main WST. */
    public void setWst(ImportWst wst) {
        this.wst = wst;
    }

    /**
     * Returns the river's MINFO directory ("Morphologie"), which lives three
     * directory levels above the main WST file.
     */
    public File getMinfoDir() {
        File riverDir = wstFile
            .getParentFile()
            .getParentFile()
            .getParentFile();
        return new File(riverDir, MINFO_DIR);
    }

    /**
     * Parses all data files belonging to this river. Each step honors its
     * own skip flag from {@code Config}. Note that the order matters: e.g.
     * gauges and the main WST are parsed before the dependent WST variants.
     *
     * @throws IOException if one of the files could not be read.
     */
    public void parseDependencies() throws IOException {
        parseGauges();
        parseAnnotations();
        parsePRFs();
        parseHYKs();
        parseWst();
        parseExtraWsts();
        parseFixations();
        parseOfficialLines();
        parseFloodWater();
        parseFloodProtection();
        parseBedHeight();
        parseSedimentDensity();
        parseMorphologicalWidth();
        parseFlowVelocity();
        parseSedimentYield();
        parseWaterlevels();
        parseWaterlevelDifferences();
        parseSQRelation();
    }

    /**
     * Parses the flood protection ("HW-Schutzanlagen") WST/ZUS files of the
     * river and collects them in {@link #floodProtection} with kind 5.
     *
     * @throws IOException if a WST file could not be read.
     */
    public void parseFloodProtection() throws IOException {
        if (Config.INSTANCE.skipFloodProtection()) {
            log.info("skip parsing flood protection");
            return;
        }

        log.info("Parse flood protection wst file");

        File riverDir = wstFile.getParentFile().getParentFile();
        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        File [] files = dir.listFiles();
        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String fileName = file.getName().toLowerCase();
            // Only *.zus and *.wst files carry flood protection data.
            if (!(fileName.endsWith(".zus") || fileName.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(5);
            importWst.setDescription(
                FLOOD_PROTECTION + "/" + importWst.getDescription());
            floodProtection.add(importWst);
        }
    }


    /**
     * Parses the bed height data below the MINFO directory: single years
     * ("Einzeljahre") and epochs ("Epochen"), each honoring its skip flag.
     *
     * @throws IOException if one of the files could not be read.
     */
    public void parseBedHeight() throws IOException {
        File bedHeightDir = new File(getMinfoDir(), BED_HEIGHT_DIR);

        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("Parse bed height single.");
            parseBedHeightSingles(new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR));
        }
        else {
            log.info("skip parsing bed height single.");
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("Parse bed height epochs.");
            parseBedHeightEpochs(new File(bedHeightDir, BED_HEIGHT_EPOCH_DIR));
        }
        else {
            log.info("skip parsing bed height epochs.");
        }
    }


    /**
     * Parses all sediment density files from MINFO's "Sedimentdichte"
     * directory into {@link #sedimentDensities}.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseSedimentDensity() throws IOException {
        if (Config.INSTANCE.skipSedimentDensity()) {
            log.info("skip parsing sediment density.");
            return;
        }

        log.debug("Parse sediment density");

        File sediment = new File(getMinfoDir(), SEDIMENT_DENSITY_DIR);
        File[] files = sediment.listFiles();

        if (files == null) {
            log.warn("Cannot read directory '" + sediment + "'");
            return;
        }

        SedimentDensityParser parser = new SedimentDensityParser();
        for (File file: files) {
            parser.parse(file);
        }

        sedimentDensities = parser.getSedimentDensities();
        log.info("Parsed " + sedimentDensities.size() + " sediment densities.");
    }


    /**
     * Parses all morphological width files from MINFO's
     * "morphologische_Breite" directory into {@link #morphologicalWidths}.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseMorphologicalWidth() throws IOException {
        if (Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("skip parsing morphological width.");
            return;
        }

        log.debug("Parse morphological width");

        File morphDir = new File(getMinfoDir(), MORPHOLOGICAL_WIDTH_DIR);
        File[] files = morphDir.listFiles();

        if (files == null) {
            log.warn("Cannot read directory '" + morphDir + "'");
            return;
        }

        MorphologicalWidthParser parser = new MorphologicalWidthParser();
        for (File file: files) {
            parser.parse(file);
        }

        morphologicalWidths = parser.getMorphologicalWidths();
        log.info("Parsed " + morphologicalWidths.size() + " morph. widths files.");
    }


    /**
     * Parses the flow velocity data below MINFO's
     * "Geschwindigkeit_Schubspannung" directory: model results
     * ("Modellrechnungen") and measurements ("v-Messungen").
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseFlowVelocity() throws IOException {
        if (Config.INSTANCE.skipFlowVelocity()) {
            log.info("skip parsing flow velocity");
            return;
        }

        log.debug("Parse flow velocity");

        File flowDir    = new File(getMinfoDir(), FLOW_VELOCITY_DIR);
        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);

        File[] modelFiles = modelDir.listFiles();
        if (modelFiles != null) {
            FlowVelocityModelParser parser = new FlowVelocityModelParser();
            for (File model: modelFiles) {
                log.debug("Parse file '" + model + "'");
                parser.parse(model);
            }
            flowVelocityModels = parser.getModels();
        }
        else {
            log.warn("Cannot read directory '" + modelDir + "'");
        }

        File[] measureFiles = measureDir.listFiles();
        if (measureFiles != null) {
            FlowVelocityMeasurementParser parser =
                new FlowVelocityMeasurementParser();
            for (File measurement: measureFiles) {
                log.debug("Parse file '" + measurement + "'");
                parser.parse(measurement);
            }
            flowVelocityMeasurements = parser.getMeasurements();
        }
        else {
            log.warn("Cannot read directory '" + measureDir + "'");
        }
    }


    /**
     * Parses the sediment yield data below MINFO's "Fracht" directory, both
     * single years ("Einzeljahre") and epochs ("Epochen"). Files nested one
     * directory level deep are parsed as well.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseSedimentYield() throws IOException {
        if (Config.INSTANCE.skipSedimentYield()) {
            log.info("skip parsing sediment yield data");
            return;
        }

        log.debug("Parse sediment yield data");

        File sedimentYieldDir = new File(getMinfoDir(), SEDIMENT_YIELD_DIR);

        File singleDir = new File(sedimentYieldDir, SEDIMENT_YIELD_SINGLE_DIR);
        File epochDir  = new File(sedimentYieldDir, SEDIMENT_YIELD_EPOCH_DIR);

        SedimentYieldParser parser = new SedimentYieldParser();

        // Single years and epochs share the same directory layout.
        parseSedimentYieldDir(singleDir, parser);
        parseSedimentYieldDir(epochDir, parser);

        sedimentYields = parser.getSedimentYields();
    }

    /**
     * Feeds all sediment yield files in <i>dir</i> (descending one directory
     * level if necessary) into <i>parser</i>.
     */
    private void parseSedimentYieldDir(File dir, SedimentYieldParser parser)
    throws IOException {
        File[] files = dir.listFiles();

        if (files == null || files.length == 0) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (file.isDirectory()) {
                File[] children = file.listFiles();
                if (children == null) {
                    // Previously dereferenced without a check, which threw
                    // a NullPointerException on unreadable sub directories.
                    log.warn("Cannot read directory '" + file + "'");
                    continue;
                }
                for (File child: children) {
                    parser.parse(child);
                }
            }
            else {
                parser.parse(file);
            }
        }
    }


    /**
     * Parses all waterlevel files below MINFO's fixation analysis directory
     * ("Fixierungsanalyse/Wasserspiegellagen") into {@link #waterlevels}.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseWaterlevels() throws IOException {
        if (Config.INSTANCE.skipWaterlevels()) {
            log.info("skip parsing waterlevels");
            return;
        }

        log.info("Parse waterlevels");

        File fixDir = new File(getMinfoDir(), MINFO_FIXATIONS_DIR);
        File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);

        File[] files = wspDir.listFiles();
        if (files == null) {
            log.warn("Cannot read directory '" + wspDir + "'");
            return;
        }

        WaterlevelParser parser = new WaterlevelParser();
        for (File file: files) {
            parser.parse(file);
        }

        waterlevels = parser.getWaterlevels();
    }


    /**
     * Parses all waterlevel difference files below MINFO's fixation analysis
     * directory ("Fixierungsanalyse/Wasserspiegeldifferenzen") into
     * {@link #waterlevelDiffs}.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseWaterlevelDifferences() throws IOException {
        if (Config.INSTANCE.skipWaterlevelDifferences()) {
            log.info("skip parsing waterlevel differences");
            return;
        }

        log.info("Parse waterlevel differences");

        File fixDir  = new File(getMinfoDir(), MINFO_FIXATIONS_DIR);
        File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);

        File[] files = diffDir.listFiles();
        if (files == null) {
            log.warn("Cannot read directory '" + diffDir + "'");
            return;
        }

        WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
        for (File file: files) {
            parser.parse(file);
        }

        waterlevelDiffs = parser.getDifferences();
    }


    /**
     * Parses all SQ relation files from MINFO's
     * "Feststofftransport-Abfluss-Beziehung" directory into
     * {@link #sqRelations}.
     *
     * @throws IOException if a file could not be read.
     */
    protected void parseSQRelation() throws IOException {
        if (Config.INSTANCE.skipSQRelation()) {
            log.info("skip parsing sq relation");
            return;
        }

        log.info("Parse sq relations");

        File sqDir = new File(getMinfoDir(), MINFO_SQ_DIR);

        File[] files = sqDir.listFiles();
        if (files == null) {
            log.warn("Cannot read directory '" + sqDir + "'");
            return;
        }

        SQRelationParser parser = new SQRelationParser();
        for (File file: files) {
            parser.parse(file);
        }

        sqRelations = parser.getSQRelations();
        log.debug("Parsed " + sqRelations.size() + " SQ relations.");
    }


    /**
     * Parses all single year bed height files in <i>dir</i> into
     * {@link #bedHeightSingles}.
     *
     * @param dir the "Einzeljahre" directory.
     * @throws IOException if a file could not be read.
     */
    protected void parseBedHeightSingles(File dir) throws IOException {
        log.debug("Parse bed height singles");

        File[] files = dir.listFiles();
        if (files == null) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        BedHeightSingleParser parser = new BedHeightSingleParser();
        for (File file: files) {
            parser.parse(file);
        }

        bedHeightSingles = parser.getBedHeights();
    }


    /**
     * Parses all epoch bed height files in <i>dir</i> into
     * {@link #bedHeightEpochs}.
     *
     * @param dir the "Epochen" directory.
     * @throws IOException if a file could not be read.
     */
    protected void parseBedHeightEpochs(File dir) throws IOException {
        log.debug("Parse bed height epochs");

        File[] files = dir.listFiles();
        if (files == null) {
            log.warn("Cannot read directory '" + dir + "'");
            return;
        }

        BedHeightEpochParser parser = new BedHeightEpochParser();
        for (File file: files) {
            parser.parse(file);
        }

        bedHeightEpochs = parser.getBedHeights();
    }


    /**
     * Parses the flood water ("HW-Marken") WST/ZUS files of the river and
     * collects them in {@link #floodWater} with kind 4.
     *
     * @throws IOException if a WST file could not be read.
     */
    public void parseFloodWater() throws IOException {
        if (Config.INSTANCE.skipFloodWater()) {
            // Fixed log message typo: was "flod water".
            log.info("skip parsing flood water");
            return;
        }

        log.info("Parse flood water wst file");

        File riverDir = wstFile.getParentFile().getParentFile();

        File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));

        if (!dir.isDirectory() || !dir.canRead()) {
            log.info("no directory '" + dir + "' found");
            return;
        }

        File [] files = dir.listFiles();

        if (files == null) {
            log.warn("cannot read '" + dir + "'");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String name = file.getName().toLowerCase();
            // Only *.zus and *.wst files carry flood water data.
            if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                continue;
            }
            log.info("found file '" + file.getName() + "'");
            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst iw = wstParser.getWst();
            iw.setKind(4);
            iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
            floodWater.add(iw);
        }
    }

    /**
     * Looks for "Amtl_Linien.wst" inside the official lines folders
     * ("Basisdaten", "Fixierungen") and collects the parsed WSTs in
     * {@link #officialLines} with kind 3.
     *
     * @throws IOException if a WST file could not be read.
     */
    public void parseOfficialLines() throws IOException {
        if (Config.INSTANCE.skipOfficialLines()) {
            log.info("skip parsing official lines");
            return;
        }

        log.info("Parse official wst files");

        File riverDir = wstFile.getParentFile().getParentFile();

        for (String folder: OFFICIAL_LINES_FOLDERS) {
            File dir = FileTools.repair(new File(riverDir, folder));
            if (!dir.isDirectory() || !dir.canRead()) {
                log.info("no directory '" + folder + "' found");
                continue;
            }

            File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
            if (!file.isFile() || !file.canRead()) {
                log.warn("no official lines wst file found");
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(3);
            importWst.setDescription(folder + "/" + importWst.getDescription());
            officialLines.add(importWst);
        }
    }

    /**
     * Parses the fixation ("Fixierungen") WST files of the river and
     * collects them in {@link #fixations} with kind 2.
     *
     * @throws IOException if a WST file could not be read.
     */
    public void parseFixations() throws IOException {
        if (Config.INSTANCE.skipFixations()) {
            log.info("skip parsing fixations");
            return;
        }

        log.info("Parse fixation wst files");

        File riverDir = wstFile.getParentFile().getParentFile();
        File fixDir = FileTools.repair(new File(riverDir, FIXATIONS));

        if (!fixDir.isDirectory() || !fixDir.canRead()) {
            log.info("no fixation wst file directory found");
            return;
        }

        File [] files = fixDir.listFiles();
        if (files == null) {
            log.warn("cannot read fixations wst file directory");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String fileName = file.getName().toLowerCase();
            // Fixations are plain *.wst files only.
            if (!fileName.endsWith(".wst")) {
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(2);
            importWst.setDescription(FIXATIONS + "/" + importWst.getDescription());
            fixations.add(importWst);
        }
    }

    /**
     * Parses the extra longitudinal section ("Zus.L&auml;ngsschnitte")
     * WST/ZUS files of the river and collects them in {@link #extraWsts}
     * with kind 1.
     *
     * @throws IOException if a WST file could not be read.
     */
    public void parseExtraWsts() throws IOException {
        if (Config.INSTANCE.skipExtraWsts()) {
            log.info("skip parsing extra WST files");
            return;
        }

        log.info("Parse extra longitudinal wst files");

        File riverDir = wstFile.getParentFile().getParentFile();
        File extraDir = FileTools.repair(new File(riverDir, EXTRA_LONGITUDINALS));

        if (!extraDir.isDirectory() || !extraDir.canRead()) {
            log.info("no extra longitudinal wst file directory found");
            return;
        }

        File [] files = extraDir.listFiles();
        if (files == null) {
            log.warn("cannot read extra longitudinal wst file directory");
            return;
        }

        for (File file: files) {
            if (!file.isFile() || !file.canRead()) {
                continue;
            }
            String fileName = file.getName().toLowerCase();
            // Only *.zus and *.wst files carry extra longitudinal sections.
            if (!(fileName.endsWith(".zus") || fileName.endsWith(".wst"))) {
                continue;
            }
            log.debug("Found WST file: " + file);

            WstParser wstParser = new WstParser();
            wstParser.parse(file);
            ImportWst importWst = wstParser.getWst();
            importWst.setKind(1);
            importWst.setDescription(
                EXTRA_LONGITUDINALS + "/" + importWst.getDescription());
            extraWsts.add(importWst);
        }
    }

    /**
     * Parses the river's main WST file into {@link #wst}, unless skipped by
     * configuration.
     *
     * @throws IOException if the WST file could not be read.
     */
    public void parseWst() throws IOException {
        if (Config.INSTANCE.skipWst()) {
            log.info("skip parsing WST file");
            return;
        }

        WstParser parser = new WstParser();
        parser.parse(wstFile);
        wst = parser.getWst();
    }

    /**
     * Parses the river's gauges from its PEGEL.GLT file, including each
     * gauge's dependent files.
     *
     * @throws IOException if the gauge data could not be read.
     */
    public void parseGauges() throws IOException {
        if (Config.INSTANCE.skipGauges()) {
            log.info("skip parsing gauges");
            return;
        }

        File gltFile = FileTools.repair(
            new File(wstFile.getParentFile(), PEGEL_GLT));

        if (!gltFile.isFile() || !gltFile.canRead()) {
            log.warn("cannot read gauges from '" + gltFile + "'");
            return;
        }

        PegelGltParser parser = new PegelGltParser();
        parser.parse(gltFile);

        gauges = parser.getGauges();

        for (ImportGauge gauge: gauges) {
            gauge.parseDependencies();
        }
    }

    /**
     * Parses all annotations found below the river directory using the
     * configured {@link AnnotationClassifier}.
     *
     * @throws IOException if an annotation file could not be read.
     */
    public void parseAnnotations() throws IOException {
        if (Config.INSTANCE.skipAnnotations()) {
            log.info("skip parsing annotations");
            return;
        }

        AnnotationsParser parser =
            new AnnotationsParser(annotationClassifier);
        parser.parse(wstFile.getParentFile().getParentFile());

        annotations = parser.getAnnotations();
    }

    /**
     * Scans the river directory tree for HYK files and parses them into
     * {@link #hyks}. Files that compare equal as {@code HashedFile}s
     * (presumably identical content - see FileTools) are parsed only once.
     */
    public void parseHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            log.info("skip parsing HYK files");
            return;
        }

        log.info("looking for HYK files");
        HYKParser parser = new HYKParser();
        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        parser.parseHYKs(riverDir, new HYKParser.Callback() {

            // Files accepted so far; used to reject duplicates.
            Set<HashedFile> hfs = new HashSet<HashedFile>();

            @Override
            public boolean hykAccept(File file) {
                HashedFile hf = new HashedFile(file);
                boolean success = hfs.add(hf);
                if (!success) {
                    log.warn("HYK file '" + file + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void hykParsed(HYKParser parser) {
                log.debug("callback from HYK parser");
                ImportHYK hyk = parser.getHYK();
                // Attach the parsed HYK to this river before collecting it.
                hyk.setRiver(ImportRiver.this);
                hyks.add(hyk);
            }
        });
    }

    /**
     * Scans the river directory tree for PRF cross section files and turns
     * each parsed PRF into an {@link ImportCrossSection} collected in
     * {@link #crossSections}. Files that compare equal as
     * {@code HashedFile}s (presumably identical content - see FileTools)
     * are parsed only once.
     */
    public void parsePRFs() {
        if (Config.INSTANCE.skipPRFs()) {
            log.info("skip parsing PRFs");
            return;
        }

        log.info("looking for PRF files");
        PRFParser parser = new PRFParser();
        File riverDir = wstFile
            .getParentFile()  // Basisdaten
            .getParentFile()  // Hydrologie
            .getParentFile(); // <river>

        parser.parsePRFs(riverDir, new PRFParser.Callback() {

            // Files accepted so far; used to reject duplicates.
            Set<HashedFile> prfs = new HashSet<HashedFile>();

            @Override
            public boolean prfAccept(File file) {
                HashedFile hf = new HashedFile(file);
                boolean success = prfs.add(hf);
                if (!success) {
                    log.warn("PRF file '" + file + "' seems to be a duplicate.");
                }
                return success;
            }

            @Override
            public void prfParsed(PRFParser parser) {
                log.debug("callback from PRF parser");

                String  description = parser.getDescription();
                Integer year        = parser.getYear();
                // A PRF without a year gets no time interval at all.
                ImportTimeInterval ti = year != null
                    ? new ImportTimeInterval(yearToDate(year))
                    : null;

                List<ImportCrossSectionLine> lines =
                    new ArrayList<ImportCrossSectionLine>();

                // One cross section line per km -> point list entry.
                for (Map.Entry<Double, List<XY>> entry: parser.getData().entrySet()) {
                    Double km         = entry.getKey();
                    List<XY>   points = entry.getValue();
                    lines.add(new ImportCrossSectionLine(km, points));
                }

                crossSections.add(new ImportCrossSection(
                    ImportRiver.this, description, ti, lines));
            }
        });
    }

    /**
     * Maps a year to a representative date in its middle: June 15th,
     * 12:00:00 local time, with the milliseconds cut off.
     *
     * @param year the year number.
     * @return the representative date for that year.
     */
    public static Date yearToDate(int year) {
        Calendar cal = Calendar.getInstance();
        // Calendar months are zero based: 5 == June.
        cal.set(year, 5, 15, 12, 0, 0);
        cal.set(Calendar.MILLISECOND, 0);
        return cal.getTime();
    }

    /**
     * Stores all previously parsed data in the database. Each step honors
     * its own skip flag from {@code Config}. The order matters: the WST
     * unit and the river peer are established before dependent data.
     */
    public void storeDependencies() {
        storeWstUnit();
        storeAnnotations();
        storeHYKs();
        storeCrossSections();
        storeGauges();
        storeWst();
        storeExtraWsts();
        storeFixations();
        storeOfficialLines();
        storeFloodWater();
        storeFloodProtection();
        storeBedHeight();
        storeSedimentDensity();
        storeMorphologicalWidth();
        storeFlowVelocity();
        storeSedimentYield();
        storeWaterlevels();
        storeWaterlevelDifferences();
        storeSQRelations();
    }

    /**
     * Determines the unit of the river's waterlevels from the main WST,
     * falling back to "NN + m" if no main WST was parsed.
     */
    public void storeWstUnit() {
        wstUnit = wst != null
            ? wst.getUnit()
            : new ImportUnit("NN + m");
    }

    /** Stores the parsed HYK data unless skipped by configuration. */
    public void storeHYKs() {
        if (Config.INSTANCE.skipHYKs()) {
            return;
        }
        log.info("store HYKs");
        getPeer();
        for (ImportHYK hyk: hyks) {
            hyk.storeDependencies();
        }
    }

    /** Stores the parsed cross sections unless PRF parsing is skipped. */
    public void storeCrossSections() {
        if (Config.INSTANCE.skipPRFs()) {
            return;
        }
        log.info("store cross sections");
        getPeer();
        for (ImportCrossSection crossSection: crossSections) {
            crossSection.storeDependencies();
        }
    }

    /** Stores the river's main WST unless skipped by configuration. */
    public void storeWst() {
        if (Config.INSTANCE.skipWst()) {
            return;
        }
        wst.storeDependencies(getPeer());
    }

    /** Stores the fixation WSTs unless skipped by configuration. */
    public void storeFixations() {
        if (Config.INSTANCE.skipFixations()) {
            return;
        }
        log.info("store fixation wsts");
        River river = getPeer();
        for (ImportWst fixation: fixations) {
            log.debug("name: " + fixation.getDescription());
            fixation.storeDependencies(river);
        }
    }

    /** Stores the extra longitudinal WSTs unless skipped by configuration. */
    public void storeExtraWsts() {
        if (Config.INSTANCE.skipExtraWsts()) {
            return;
        }
        log.info("store extra wsts");
        River river = getPeer();
        for (ImportWst extra: extraWsts) {
            log.debug("name: " + extra.getDescription());
            extra.storeDependencies(river);
        }
    }

    /** Stores the official lines WSTs unless skipped by configuration. */
    public void storeOfficialLines() {
        if (Config.INSTANCE.skipOfficialLines()) {
            return;
        }
        log.info("store official lines wsts");
        River river = getPeer();
        for (ImportWst official: officialLines) {
            log.debug("name: " + official.getDescription());
            official.storeDependencies(river);
        }
    }

    /** Stores the flood water WSTs unless skipped by configuration. */
    public void storeFloodWater() {
        if (Config.INSTANCE.skipFloodWater()) {
            return;
        }
        log.info("store flood water wsts");
        River river = getPeer();
        for (ImportWst flood: floodWater) {
            log.debug("name: " + flood.getDescription());
            flood.storeDependencies(river);
        }
    }

    /** Stores the flood protection WSTs unless skipped by configuration. */
    public void storeFloodProtection() {
        if (Config.INSTANCE.skipFloodProtection()) {
            return;
        }
        log.info("store flood protection wsts");
        River river = getPeer();
        for (ImportWst protection: floodProtection) {
            log.debug("name: " + protection.getDescription());
            protection.storeDependencies(river);
        }
    }


    /**
     * Stores the single year and epoch bed heights, each unless its skip
     * flag is set.
     */
    public void storeBedHeight() {
        if (!Config.INSTANCE.skipBedHeightSingle()) {
            log.info("store bed heights single");
            storeBedHeightSingle();
        }

        if (!Config.INSTANCE.skipBedHeightEpoch()) {
            log.info("store bed height epoch.");
            storeBedHeightEpoch();
        }
    }


    /**
     * Stores all parsed single year bed heights. Broken files are logged
     * (with the causing exception) and skipped, so the remaining ones are
     * still imported.
     */
    private void storeBedHeightSingle() {
        River river = getPeer();

        if (bedHeightSingles == null) {
            log.info("No single bed heights to store.");
            return;
        }

        for (ImportBedHeight tmp: bedHeightSingles) {
            ImportBedHeightSingle single = (ImportBedHeightSingle) tmp;

            String desc = single.getDescription();
            log.debug("name: " + desc);

            try {
                single.storeDependencies(river);
            }
            catch (SQLException sqle) {
                // Include the cause - the bare message made failures
                // hard to diagnose.
                log.error("File '" + desc + "' is broken!", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("File '" + desc + "' is broken!", cve);
            }
        }
    }


    /**
     * Stores all epoch bed heights of this river. A file whose data
     * violates database constraints is logged and skipped so the
     * remaining files are still imported.
     */
    private void storeBedHeightEpoch() {
        River river = getPeer();

        if (bedHeightEpochs == null) {
            log.info("No epoch bed heights to store.");
            return;
        }

        for (ImportBedHeight tmp: bedHeightEpochs) {
            ImportBedHeightEpoch epoch = (ImportBedHeightEpoch) tmp;

            String desc = epoch.getDescription();

            log.debug("name: " + desc);

            try {
                epoch.storeDependencies(river);
            }
            catch (SQLException sqle) {
                // Pass the exception to the logger; without it the
                // actual cause of the broken file was lost.
                log.error("File '" + desc + "' is broken!", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("File '" + desc + "' is broken!", cve);
            }
        }
    }

    /**
     * Stores the sediment densities of this river unless skipped by the
     * importer configuration. Broken files are logged (including the
     * causing exception) and skipped.
     */
    public void storeSedimentDensity() {
        if (!Config.INSTANCE.skipSedimentDensity()) {
            log.info("store sediment density");

            River river = getPeer();

            for (ImportSedimentDensity density: sedimentDensities) {
                String desc = density.getDescription();

                log.debug("name: " + desc);

                try {
                    density.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    // Pass the exception to the logger; without it the
                    // actual cause of the broken file was lost.
                    log.error("File '" + desc + "' is broken!", sqle);
                }
                catch (ConstraintViolationException cve) {
                    log.error("File '" + desc + "' is broken!", cve);
                }
            }
        }
    }

    /**
     * Stores the morphological widths of this river unless skipped by
     * the importer configuration.
     */
    public void storeMorphologicalWidth() {
        if (!Config.INSTANCE.skipMorphologicalWidth()) {
            log.info("store morphological width");

            River river = getPeer();

            for (ImportMorphWidth width: morphologicalWidths) {
                try {
                    width.storeDependencies(river);
                }
                catch (SQLException sqle) {
                    // Message fixed: this is a store operation, not
                    // parsing — the old text was misleading.
                    log.error("Error while storing morph. width.", sqle);
                }
                catch (ConstraintViolationException cve) {
                    log.error("Error while storing morph. width.", cve);
                }
            }
        }
    }

    /**
     * Stores flow velocity data of this river unless skipped by the
     * importer configuration: first the models, then the measurements.
     * Failures are logged per item and do not abort the import.
     */
    public void storeFlowVelocity() {
        if (Config.INSTANCE.skipFlowVelocity()) {
            return;
        }

        log.info("store flow velocity");

        River river = getPeer();

        for (ImportFlowVelocityModel model: flowVelocityModels) {
            try {
                model.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing flow velocity model.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing flow velocity model.", cve);
            }
        }

        for (ImportFlowVelocityMeasurement measurement: flowVelocityMeasurements) {
            try {
                measurement.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing flow velocity measurement.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing flow velocity measurement.", cve);
            }
        }
    }


    /**
     * Stores the sediment yields of this river unless skipped by the
     * importer configuration. Failures are logged per item and do not
     * abort the import.
     */
    public void storeSedimentYield() {
        if (Config.INSTANCE.skipSedimentYield()) {
            return;
        }

        log.info("store sediment yield data");

        River river = getPeer();

        for (ImportSedimentYield yield: sedimentYields) {
            try {
                yield.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing sediment yield.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing sediment yield.", cve);
            }
        }
    }


    /**
     * Stores the waterlevels of this river unless skipped by the
     * importer configuration.
     */
    public void storeWaterlevels() {
        if (Config.INSTANCE.skipWaterlevels()) {
            return;
        }

        log.info("store waterlevels");

        River river = getPeer();
        for (ImportWaterlevel level: waterlevels) {
            level.storeDependencies(river);
        }
    }


    /**
     * Stores the waterlevel differences of this river unless skipped by
     * the importer configuration. Failures are logged per item and do
     * not abort the import.
     */
    public void storeWaterlevelDifferences() {
        if (Config.INSTANCE.skipWaterlevelDifferences()) {
            return;
        }

        log.info("store waterlevel differences");

        River river = getPeer();

        for (ImportWaterlevelDifference difference: waterlevelDiffs) {
            try {
                difference.storeDependencies(river);
            }
            catch (SQLException sqle) {
                log.error("Error while storing waterlevel diff.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing waterlevel diff.", cve);
            }
        }
    }


    /**
     * Stores the SQ relations of this river unless skipped by the
     * importer configuration. Logs how many relations were stored
     * successfully; failed items are logged and skipped.
     */
    public void storeSQRelations() {
        if (Config.INSTANCE.skipSQRelation()) {
            return;
        }

        log.info("store sq relations");

        River river = getPeer();

        int stored = 0;

        for (ImportSQRelation relation: sqRelations) {
            try {
                relation.storeDependencies(river);
                stored++;
            }
            catch (SQLException sqle) {
                log.error("Error while storing sq relation.", sqle);
            }
            catch (ConstraintViolationException cve) {
                log.error("Error while storing sq relation.", cve);
            }
        }

        log.info("stored " + stored + " sq relations.");
    }


    /**
     * Stores the annotations of this river unless skipped by the
     * importer configuration. Persisting happens via getPeer() on each
     * annotation.
     */
    public void storeAnnotations() {
        if (Config.INSTANCE.skipAnnotations()) {
            return;
        }

        River river = getPeer();
        for (ImportAnnotation annotation: annotations) {
            annotation.getPeer(river);
        }
    }

    /**
     * Stores the gauges of this river unless skipped by the importer
     * configuration. The session is flushed after each gauge to push
     * its data to the database before the next one is processed.
     */
    public void storeGauges() {
        if (!Config.INSTANCE.skipGauges()) {
            log.info("store gauges:");
            River river = getPeer();
            Session session = ImporterSession.getInstance()
                .getDatabaseSession();
            for (ImportGauge gauge: gauges) {
                log.info("\tgauge: " + gauge.getName());
                gauge.storeDependencies(river);
                // Removed a dead statement that fetched the database
                // session again and discarded the result.
                session.flush();
            }
        }
    }

    /**
     * Returns the River database entity for this import, creating and
     * saving it when no river of this name exists yet. The entity is
     * cached, so the database is queried at most once.
     */
    public River getPeer() {
        if (peer != null) {
            return peer;
        }

        Session session = ImporterSession.getInstance().getDatabaseSession();
        Query query = session.createQuery("from River where name=:name");

        // Resolve the WST unit first; a new river needs it.
        Unit unit = wstUnit.getPeer();

        query.setString("name", name);
        List<River> found = query.list();

        if (found.isEmpty()) {
            log.info("Store new river '" + name + "'");
            peer = new River(name, unit);
            session.save(peer);
        }
        else {
            peer = found.get(0);
        }

        return peer;
    }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :

http://dive4elements.wald.intevation.org