Mercurial > dive4elements > river
view flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/minfo/SedimentLoadFactory.java @ 4354:e0add97c432b
Circumvent NPE in Curve. issue flys/1019
| field | value |
|---|---|
| author | Sascha L. Teichmann <teichmann@intevation.de> |
| date | Fri, 02 Nov 2012 11:02:15 +0100 |
| parents | 3051bc28ac43 |
| children | 6a65e7ef43c0 |
line wrap: on
line source
package de.intevation.flys.artifacts.model.minfo; import gnu.trove.TDoubleArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; import net.sf.ehcache.Cache; import net.sf.ehcache.Element; import org.apache.log4j.Logger; import org.hibernate.SQLQuery; import org.hibernate.Session; import org.hibernate.type.StandardBasicTypes; import de.intevation.flys.artifacts.cache.CacheFactory; import de.intevation.flys.artifacts.model.StaticSedimentLoadCacheKey; import de.intevation.flys.backend.SessionHolder; public class SedimentLoadFactory { /** Private logger to use here. */ private static Logger log = Logger.getLogger(SedimentLoadFactory.class); public static final String LOADS_CACHE_NAME = "sedimentloads"; public static final String LOAD_DATA_CACHE_NAME = "sedimentload-data"; /** Query to get km and ws for wst_id and column_pos. */ public static final String SQL_SELECT_SINGLES = "SELECT DISTINCT " + " sy.description AS description, " + " ti.start_time AS year " + " FROM sediment_yield sy " + " JOIN rivers r ON sy.river_id = r.id " + " JOIN sediment_yield_values syv ON sy.id = syv.sediment_yield_id " + " JOIN time_intervals ti ON sy.time_interval_id = ti.id " + " WHERE r.name = :name " + " AND ti.stop_time IS NULL " + " AND syv.station BETWEEN :startKm AND :endKm"; /** Query to get name for wst_id and column_pos. 
*/ public static final String SQL_SELECT_EPOCHS = "SELECT DISTINCT " + " sy.description AS description, " + " ti.start_time AS start, " + " ti.stop_time AS end " + " FROM sediment_yield sy " + " JOIN rivers r ON sy.river_id = r.id " + " JOIN sediment_yield_values syv ON sy.id = syv.sediment_yield_id " + " JOIN time_intervals ti ON sy.time_interval_id = ti.id " + " WHERE r.name = :name " + " AND ti.stop_time IS NOT NULL " + " AND syv.station BETWEEN :startKm AND :endKm"; public static final String SQL_SELECT_SINGLES_DATA = "SELECT" + " sy.description AS description, " + " ti.start_time AS year, " + " syv.value AS load " + " FROM sediment_yield sy " + " JOIN rivers r ON sy.river_id = r.id " + " JOIN time_intervals ti ON sy.time_interval_id = ti.id " + " JOIN sediment_yield_vales syv ON sy.id = syv.sediment_yield_id " + " JOIN grain_fraction gf ON sy.grain_fraction_id = gf.id " + " WHERE r.name = :name " + " AND ti.start_time BETWEEN :begin AND :end " + " AND ti_stop_time IS NULL " + " AND gf.name = :grain " + " AND syv.station BETWEEN :startKm AND :endKm"; public static final String SQL_SELECT_EPOCHS_DATA = "SELECT" + " sy.description AS description," + " ti.start_time AS year," + " syv.value AS load" + " FROM sediment_yield sy" + " JOIN rivers r ON sy.river_id = r.id " + " JOIN time_intervals ti ON sy.time_interval_id = ti.id" + " JOIN sediment_yield_vales syv ON sy.id = syv.sediment_yield_id" + " JOIN grain_fraction gf ON sy.grain_fraction_id = gf.id" + " WHERE r.name = :name" + " AND ti.start_time BETWEEN :sbegin AND :send" + " AND ti_stop_time IS NOT NULL" + " AND ti_stop_time BETWEEN :ebegin AND :eend" + " AND gf.name = :grain " + " AND syv.station BETWEEN :startKm AND :endKm"; private SedimentLoadFactory() { } /** * */ public static SedimentLoad[] getLoads( String river, String type, double startKm, double endKm ) { log.debug("SedimentLoadFactory.getLoads"); Cache cache = CacheFactory.getCache(LOADS_CACHE_NAME); if (cache == null) { log.debug("Cache not 
configured."); return getSedimentLoadsUncached(river, type, startKm, endKm); } StaticSedimentLoadCacheKey key = new StaticSedimentLoadCacheKey(river, startKm, endKm, null); Element element = cache.get(key); if (element != null) { log.debug("SedimentLoad found in cache"); return (SedimentLoad[])element.getValue(); } SedimentLoad[] values = getSedimentLoadsUncached(river, type, startKm, endKm); if (values != null && key != null) { log.debug("Store static sediment loads values in cache."); element = new Element(key, values); cache.put(element); } return values; } public static SedimentLoad getLoadwithData( String river, String type, double startKm, double endKm, Date startDate, Date endDate ) { log.debug("SedimentLoadFactory.getLoadWithData"); Cache cache = CacheFactory.getCache(LOAD_DATA_CACHE_NAME); if (cache == null) { log.debug("Cache not configured."); return getSedimentLoadWithDataUncached( river, type, startKm, endKm, startDate, endDate); } StaticSedimentLoadCacheKey key = new StaticSedimentLoadCacheKey(river, startKm, endKm, startDate); Element element = cache.get(key); if (element != null) { log.debug("SedimentLoad found in cache"); return (SedimentLoad)element.getValue(); } SedimentLoad values = getSedimentLoadWithDataUncached(river, type, startKm, endKm, startDate, endDate); if (values != null && key != null) { log.debug("Store static bed height values in cache."); element = new Element(key, values); cache.put(element); } return values; } /** * Get sediment loads from db. * @param river the river * @param type the sediment load type (year or epoch) * @return according sediment loads. 
*/ public static SedimentLoad[] getSedimentLoadsUncached( String river, String type, double startKm, double endKm ) { log.debug("SedimentLoadFactory.getSedimentLoadsUncached"); Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = null; if (type.equals("single")) { sqlQuery = session.createSQLQuery(SQL_SELECT_SINGLES) .addScalar("description", StandardBasicTypes.STRING) .addScalar("year", StandardBasicTypes.DATE); sqlQuery.setString("name", river); sqlQuery.setDouble("startKm", startKm); sqlQuery.setDouble("endKm", endKm); List<Object []> results = sqlQuery.list(); SedimentLoad[] loads = new SedimentLoad[results.size()]; for (int i = 0; i < results.size(); i++) { Object[] row = results.get(i); loads[i] = new SedimentLoad( (String) row[0], (Date) row[1], null, false); } return loads; } else if (type.equals("epoch")) { sqlQuery = session.createSQLQuery(SQL_SELECT_EPOCHS) .addScalar("description", StandardBasicTypes.STRING) .addScalar("start", StandardBasicTypes.DATE) .addScalar("end", StandardBasicTypes.DATE); sqlQuery.setString("name", river); sqlQuery.setDouble("startKm", startKm); sqlQuery.setDouble("endKm", endKm); List<Object []> results = sqlQuery.list(); SedimentLoad[] loads = new SedimentLoad[results.size()]; for (int i = 0; i < results.size(); i++) { Object[] row = results.get(i); loads[i] = new SedimentLoad( (String) row[0], (Date) row[1], (Date) row[2], true); } return loads; } return new SedimentLoad[0]; } /** * Get sediment loads from db. * @param river the river * @param type the sediment load type (year or epoch) * @return according sediment loads. 
*/ public static SedimentLoad getSedimentLoadWithDataUncached( String river, String type, double startKm, double endKm, Date sdate, Date edate ) { log.debug("SedimentLoadFactory.getBedHeightUncached"); Session session = SessionHolder.HOLDER.get(); SQLQuery sqlQuery = null; Calendar cal = Calendar.getInstance(); cal.setTime(sdate); int year = cal.get(Calendar.YEAR); cal.set(year, 1, 1); Calendar end = Calendar.getInstance(); end.set(year, 12, 31); if (type.equals("single")) { sqlQuery = session.createSQLQuery(SQL_SELECT_SINGLES_DATA) .addScalar("description", StandardBasicTypes.STRING) .addScalar("year", StandardBasicTypes.DATE) .addScalar("load", StandardBasicTypes.DOUBLE); sqlQuery.setString("name", river); sqlQuery.setDouble("startKm", startKm); sqlQuery.setDouble("endKm", endKm); sqlQuery.setDate("begin", cal.getTime()); sqlQuery.setDate("end", end.getTime()); sqlQuery.setString("grain", "total"); List<Object []> results = sqlQuery.list(); SedimentLoad load = new SedimentLoad(); if (results.size() != 1) { // should not happen. throw some exception. 
return new SedimentLoad(); } Object[] row = results.get(0); load = new SedimentLoad( (String) row[0], (Date) row[1], null, false); load.addCoarseValues(getValues("coarse", sqlQuery)); load.addFineMiddleValues(getValues("fine_middle", sqlQuery)); load.addSandValues(getValues("sand", sqlQuery)); load.addSuspSandBedValues(getValues("suspended_sediment", sqlQuery)); load.addSuspSandBedValues(getValues("susp_sand_bed", sqlQuery)); return load; } else if (type.equals("epoch")) { Calendar send = Calendar.getInstance(); send.setTime(edate); int eyear = send.get(Calendar.YEAR); send.set(year, 1, 1); Calendar eend = Calendar.getInstance(); eend.set(eyear, 12, 31); sqlQuery = session.createSQLQuery(SQL_SELECT_EPOCHS) .addScalar("description", StandardBasicTypes.STRING) .addScalar("start_time", StandardBasicTypes.DATE) .addScalar("stop_time", StandardBasicTypes.DATE) .addScalar("load", StandardBasicTypes.DOUBLE); sqlQuery.setString("name", river); sqlQuery.setDouble("startKm", startKm); sqlQuery.setDouble("endKm", endKm); sqlQuery.setDate("sbegin", cal.getTime()); sqlQuery.setDate("sbegin", end.getTime()); sqlQuery.setDate("ebegin",send.getTime()); sqlQuery.setDate("eend", eend.getTime()); sqlQuery.setString("grain", "total"); List<Object []> results = sqlQuery.list(); SedimentLoad load = new SedimentLoad(); if (results.size() != 1) { // should not happen. throw some exception. 
return new SedimentLoad(); } Object[] row = results.get(0); load = new SedimentLoad( (String) row[0], (Date) row[1], null, false); load.addCoarseValues(getValues("coarse", sqlQuery)); load.addFineMiddleValues(getValues("fine_middle", sqlQuery)); load.addSandValues(getValues("sand", sqlQuery)); load.addSuspSandBedValues(getValues("suspended_sediment", sqlQuery)); load.addSuspSandBedValues(getValues("susp_sand_bed", sqlQuery)); return load; } return new SedimentLoad(); } /** * */ protected static TDoubleArrayList getValues ( String fraction, SQLQuery query ) { query.setString("grain", fraction); List<Object[]> results = query.list(); TDoubleArrayList values = new TDoubleArrayList(); for (int i = 0; i < results.size(); i++) { Object[] row = results.get(i); values.add(((Double)row[2]).doubleValue()); } return values; } }