artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadDataFactory.java @ 8210:3bb00338228c

(issue1448) Use a left join so that loads with empty sq_time intervals can still be handled.
author Andre Heinecke <andre.heinecke@intevation.de>
date Fri, 05 Sep 2014 15:09:44 +0200
parents 42ac86ec19c7
children 204905c16ade
/* Copyright (C) 2014 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */
package org.dive4elements.river.artifacts.model.minfo;

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;

import net.sf.ehcache.Cache;
import net.sf.ehcache.Element;

import org.apache.log4j.Logger;
import org.dive4elements.river.artifacts.model.RiverFactory;
import org.dive4elements.river.artifacts.cache.CacheFactory;
import org.dive4elements.river.backend.SessionHolder;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.type.StandardBasicTypes;

public class SedimentLoadDataFactory
{
    private static Logger log = Logger.getLogger(SedimentLoadDataFactory.class);

    public static final String CACHE_NAME = "sediment-load-data";

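    /** Value of measurement_station.measurement_type that marks a
     *  suspended sediment ("Schwebstoff") station; any other type is
     *  treated as bed load. */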
    public static final String SUSPENDED_STRING = "Schwebstoff";

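    /** Query fetching all sediment load values of a river together with
     *  their load, time interval, grain fraction and measurement station.
     *  The sq time interval is attached via LEFT JOIN so that loads
     *  without an sq_time_interval_id are still returned (issue1448). */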
    public static final String SQL_LOAD_RIVER_SEDIMENT_LOADS =
        "SELECT " +
          "sl.id AS sl_id, " +
          "sl.kind AS sl_kind, " +
          "sl.description AS sl_description, " +
          "ti.start_time AS ti_start_time, " +
          "ti.stop_time AS ti_stop_time, " +
          "sqti.start_time AS sq_start_time, " +
          "sqti.stop_time AS sq_stop_time, " +
          "sqti.id AS sq_ti_id, " +
          "slv.value AS slv_value, " +
          "gf.name AS gf_name, " +
          "ms.id AS ms_id, " +
          "ms.station AS ms_station, " +
          "ms.measurement_type AS ms_type " +
        "FROM sediment_load_values slv " +
          "JOIN sediment_load sl ON slv.sediment_load_id = sl.id " +
          "JOIN time_intervals ti ON sl.time_interval_id = ti.id " +
          "LEFT JOIN time_intervals sqti ON sl.sq_time_interval_id = sqti.id " +
          "JOIN grain_fraction gf ON sl.grain_fraction_id = gf.id " +
          "JOIN measurement_station ms ON slv.measurement_station_id = ms.id " +
          "JOIN rivers r ON ms.river_id = r.id " +
        "WHERE r.name = :river " +
        "ORDER BY sl.id";

    public static final SedimentLoadDataFactory INSTANCE =
        new SedimentLoadDataFactory();

    private SedimentLoadDataFactory() {
    }

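    /** Returns the sediment load data for the given river, serving it
     *  from the "sediment-load-data" cache when one is configured and
     *  falling back to a direct database query otherwise. */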
    public synchronized SedimentLoadData getSedimentLoadData(String river) {
        boolean debug = log.isDebugEnabled();

        if (debug) {
            log.debug(
                "Looking for sediment load data for river '" + river + "'");
        }

        Cache cache = CacheFactory.getCache(CACHE_NAME);

        if (cache == null) {
            if (debug) {
                log.debug("Cache not configured.");
            }
            return getUncached(river);
        }

        String key = "sediment-load-" + river;

        Element element = cache.get(key);

        if (element != null) {
            if (debug) {
                log.debug("Sediment load data found in cache");
            }
            return (SedimentLoadData)element.getValue();
        }

        SedimentLoadData sedimentLoad = getUncached(river);

        if (sedimentLoad != null) {
            if (debug) {
                log.debug("Store sediment load data in cache.");
            }
            cache.put(new Element(key, sedimentLoad));
        }

        return sedimentLoad;
    }

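    /** Loads the sediment load data for the given river directly from
     *  the database, grouping the values by measurement station and
     *  grain fraction. */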
    public SedimentLoadData getUncached(String river) {

        Session session = SessionHolder.HOLDER.get();

        SQLQuery sqlQuery = session.createSQLQuery(SQL_LOAD_RIVER_SEDIMENT_LOADS)
            .addScalar("sl_id",          StandardBasicTypes.INTEGER)
            .addScalar("sl_kind",        StandardBasicTypes.INTEGER)
            .addScalar("sl_description", StandardBasicTypes.STRING)
            .addScalar("ti_start_time",  StandardBasicTypes.TIMESTAMP)
            .addScalar("ti_stop_time",   StandardBasicTypes.TIMESTAMP)
            .addScalar("sq_start_time",  StandardBasicTypes.TIMESTAMP)
            .addScalar("sq_stop_time",   StandardBasicTypes.TIMESTAMP)
            .addScalar("sq_ti_id",       StandardBasicTypes.INTEGER)
            .addScalar("slv_value",      StandardBasicTypes.DOUBLE)
            .addScalar("gf_name",        StandardBasicTypes.STRING)
            .addScalar("ms_id",          StandardBasicTypes.INTEGER)
            .addScalar("ms_station",     StandardBasicTypes.DOUBLE)
            .addScalar("ms_type",        StandardBasicTypes.STRING);

        sqlQuery.setString("river", river);

        SedimentLoadData.Load load = null;
        int grainFractionIndex = SedimentLoadData.GF_UNKNOWN;

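        // Collect stations by measurement_station id so that all rows
        // belonging to the same station accumulate on one object.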
        HashMap<Integer, SedimentLoadData.Station> id2station
            = new HashMap<Integer, SedimentLoadData.Station>();

        List<Object[]> list = sqlQuery.list();

        for (Object [] row: list) {

            Integer   sl_id            = (Integer)row[0];
            Integer   sl_kind          = (Integer)row[1];
            String    sl_description   = (String)row[2];
            Timestamp ti_start_time    = (Timestamp)row[3];
            Timestamp ti_stop_time     = (Timestamp)row[4];
            Timestamp sq_start_time    = (Timestamp)row[5];
            Timestamp sq_stop_time     = (Timestamp)row[6];
            Integer   sq_id            = (Integer)row[7];
            Double    slv_value        = (Double)row[8];
            String    gf_name          = (String)row[9];
            Integer   ms_id            = (Integer)row[10];
            Double    ms_station       = (Double)row[11];
            String    ms_type          = (String)row[12];

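            // Rows are ordered by sl.id, so a changed id marks the
            // beginning of a new sediment load.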
            if (load == null || load.getId() != sl_id) {
                if (sq_id == null) {
                    /* Avoid NPE when converting the objects. */
                    load = new SedimentLoadData.Load(
                        sl_id, sl_kind, sl_description,
                        ti_start_time, ti_stop_time);
                } else {
                    load = new SedimentLoadData.Load(
                        sl_id, sl_kind, sl_description,
                        ti_start_time, ti_stop_time, sq_id,
                        sq_start_time, sq_stop_time);
                }

                // Grain fractions only change when a new sediment load starts.
                grainFractionIndex =
                    SedimentLoadData.grainFractionIndex(gf_name);

                if (grainFractionIndex == SedimentLoadData.GF_UNKNOWN) {
                    log.error("Unknown grain fraction type: " + gf_name);
                    break;
                }
            }

            SedimentLoadData.Station station = id2station.get(ms_id);
            if (station == null) {
                int type = ms_type.equalsIgnoreCase(SUSPENDED_STRING)
                    ? SedimentLoadData.Station.SUSPENDED
                    : SedimentLoadData.Station.BED_LOAD;

                station = new SedimentLoadData.Station(type, ms_station);
                id2station.put(ms_id, station);
            }

            station.addValue(
                grainFractionIndex,
                new SedimentLoadData.Value(load, slv_value));
        }

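        // Build the result from the collected stations together with
        // the river's km-up flag.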
        SedimentLoadData sld = new SedimentLoadData(id2station.values(),
            RiverFactory.getRiver(river).getKmUp());

        return sld;
    }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
