changeset:   6366:cc21c197d204
branch:      double-precision
repository:  dive4elements/river
user:        Tom Gottfried <tom.gottfried@intevation.de>
date:        Tue, 18 Jun 2013 16:05:10 +0200
parents:     d50348a1506b, d3ba73a88533
children:    7fb0e755e4fa
summary:     merged changes from default into double-precision branch
files:       backend/src/main/java/org/dive4elements/river/importer/parsers/BedHeightParser.java
             gwt-client/src/main/webapp/images/FLYS_Saale_1_inactive.png
             gwt-client/src/main/webapp/images/FLYS_Saale_Thüringen.png
             gwt-client/src/main/webapp/images/FLYS_Saale_Thüringen_inactive.png
diffstat:    76 files changed, 1314 insertions(+), 852 deletions(-)
--- a/.hgtags	Mon Jun 17 17:16:25 2013 +0200
+++ b/.hgtags	Tue Jun 18 16:05:10 2013 +0200
@@ -52,3 +52,4 @@
 1a169e10c0f3e1f33990a91dd294512ac97d1a70 3.0.5
 b689d2b9d1675739778083b2bcba336abb33f70c 3.0.6
 5733d7f27196c5a8cf18231fbf187738f8fea560 3.0.7
+eec895f6ec801a7faaed96e9f01721e1143e7bb8 3.0.8
--- a/artifacts/doc/conf/mapserver/barrier_polygons_class.vm	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/doc/conf/mapserver/barrier_polygons_class.vm	Tue Jun 18 16:05:10 2013 +0200
@@ -1,5 +1,6 @@
 CLASS
-    NAME "POLYGON_BARRIERS"
+    NAME "Ringdeich"
+    EXPRESSION ("[TYP]"="Ringdeich")
     STYLE
         SIZE 5
         OUTLINECOLOR "#FF8000"
--- a/artifacts/doc/conf/meta-data.xml Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/doc/conf/meta-data.xml Tue Jun 18 16:05:10 2013 +0200 @@ -105,6 +105,9 @@ <dc:call-macro name="bed-heights-epoch"/> </bedheights> </dc:when> + <dc:when test="$out = 'sedimentload_ls'"> + <dc:call-macro name="annotations"/> + </dc:when> </dc:choose> </dc:iterate> </dc:when> @@ -672,14 +675,22 @@ <dc:comment>TODO doesnt work nicely for fix/wq-diags. Aheinecke (27.5.2013): Why?</dc:comment> <dc:macro name="waterlevels-fix"> - <dc:filter expr="$out_name = 'longitudinal_section' and $facet_name = 'longitudinal_section.w'"> + <dc:filter expr="($out_name = 'longitudinal_section' and $facet_name = 'longitudinal_section.w') and + (not ($current-state-id = 'state.winfo.uesk.wsp' and $ld_m = 'location'))"> <dc:if test="dc:has-result()"> <waterlevels> <dc:group expr="concat($river, ' ', dc:date-format('dd.MM.yyyy - H:m:s', $a_creation), ' ', $collection_name)"> <waterlevels description="{dc:group-key()}"> <dc:for-each> - <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ', - $deffrom, ' bis KM ', $defto)"/> + <dc:choose> + <dc:when test="$ld_m = 'location'"> + <dc:variable name="combined_desc" expr="concat($facet_description, ' an KM ', $deffrom)"/> + </dc:when> + <dc:otherwise> + <dc:variable name="combined_desc" expr="concat($facet_description, ' von KM ', + $deffrom, ' bis KM ', $defto)"/> + </dc:otherwise> + </dc:choose> <dc:element name="${facet_name}"> <dc:attribute name="description" value="${combined_desc}"/> <dc:attribute name="ids" value="${facet_num}"/> @@ -2307,7 +2318,8 @@ </dc:macro> <dc:macro name="officiallines_user"> - <dc:comment comment=".wst -------------------------------"/> + <dc:comment comment=".wst -------------------------------"> + <!-- =============== THIS IS BROKEN! ============== --> <officiallines> <dc:for-each> <dc:context> @@ -2359,6 +2371,7 @@ </dc:context> </dc:for-each> </officiallines> + </dc:comment> </dc:macro> <!-- Common stuff -->
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/GaugeDischargeArtifact.java	Tue Jun 18 16:05:10 2013 +0200
@@ -168,7 +168,7 @@
         DischargeTables dt = new DischargeTables(river.getName(),
             getDataAsString("ids"));
 
-        Map<String, double [][]> map = dt.getValues(100);
+        Map<String, double [][]> map = dt.getValues();
 
         ArrayList<WQKms> res = new ArrayList<WQKms>();
 
@@ -185,7 +185,7 @@
         }
         double [] kms = new double[values[0].length];
         Arrays.fill(kms, gauge.getStation().doubleValue());
-        res.add(new WQKms(kms, values[0], values[1], name));
+        res.add(new WQKms(kms, values[0], values[1], name, WQKms.CENTIMETER_AT_GAUGE));
 
         return new CalculationResult(
             res.toArray(new WQKms[res.size()]),
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/WINFOArtifact.java Tue Jun 18 16:05:10 2013 +0200 @@ -745,7 +745,7 @@ return null; } - double [][] values = DischargeTables.loadDischargeTableValues(dt, 1); + double [][] values = DischargeTables.loadDischargeTableValues(dt); TDoubleArrayList wsOut = new TDoubleArrayList(ws.length); TDoubleArrayList qsOut = new TDoubleArrayList(ws.length); @@ -757,8 +757,7 @@ logger.warn("W is NaN: ignored"); continue; } - double w = ws[i] / 100d; - double [] qs = DischargeTables.getQsForW(values, w); + double [] qs = DischargeTables.getQsForW(values, ws[i]); if (qs.length == 0) { logger.warn("No Qs found for W = " + ws[i]); @@ -766,7 +765,7 @@ else { for (double q: qs) { wsOut.add(ws[i]); - qsOut.add(q * 100d); + qsOut.add(q); } } generatedWs |= qs.length != 1; @@ -1154,13 +1153,5 @@ return values.toNativeArray(); } - - /** - * Returns the WstValueTable of current river. - */ - public WstValueTable getWstValueTable() { - River r = RiverUtils.getRiver(this); - return WstValueTableFactory.getTable(r); - } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/templating/FunctionResolver.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/datacage/templating/FunctionResolver.java	Tue Jun 18 16:05:10 2013 +0200
@@ -234,7 +234,7 @@
         Object locations = args.get(1);
         Object from = args.get(2);
 
-        if (mode instanceof String && mode.equals("locations")) {
+        if (mode instanceof String && mode.equals("location")) {
             if (!(locations instanceof String)) {
                 return -FAR_AWAY;
             }
@@ -279,7 +279,7 @@
         Object locations = args.get(1);
         Object to = args.get(2);
 
-        if (mode instanceof String && mode.equals("locations")) {
+        if (mode instanceof String && mode.equals("location")) {
             if (!(locations instanceof String)) {
                 return FAR_AWAY;
             }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Calculation6.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Calculation6.java	Tue Jun 18 16:05:10 2013 +0200
@@ -39,8 +39,6 @@
 
     private Long officialGaugeNumber;
 
-    public static final double SCALE = 1d;
-
     public Calculation6(HistoricalDischargeAccess access) {
         EvaluationMode mode = access.getEvaluationMode();
         Timerange tr = access.getEvaluationTimerange();
@@ -329,8 +327,7 @@
             Integer id = dt.getId();
             double [][] vs = cache.get(id);
             if (vs == null) {
-                vs = DischargeTables.loadDischargeTableValues(
-                    dt, DischargeTables.HISTORICAL_SCALE);
+                vs = DischargeTables.loadDischargeTableValues(dt);
                 cache.put(id, vs);
             }
             return vs;
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/DischargeTables.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/DischargeTables.java Tue Jun 18 16:05:10 2013 +0200 @@ -34,20 +34,12 @@ /** Private logger. */ private static Logger log = Logger.getLogger(DischargeTables.class); - /** Scale to convert discharge table values of master table into [cm]. */ - public static final double MASTER_SCALE = 100d; - - /** Scale to convert discharge table values of historical tables into [cm]. */ - public static final double HISTORICAL_SCALE = 1d; - public static final int MASTER = 0; protected List<String> gaugeNames; protected String riverName; - protected double scale; - protected int kind; protected Map<String, double [][]> values; @@ -76,18 +68,13 @@ List<String> gaugeNames, int kind ) { - scale = Double.NaN; this.kind = kind; this.riverName = riverName; this.gaugeNames = gaugeNames; } public double [][] getFirstTable() { - return getFirstTable(MASTER_SCALE); - } - - public double [][] getFirstTable(double scale) { - Map<String, double [][]> values = getValues(scale); + Map<String, double [][]> values = getValues(); for (double [][] table: values.values()) { return table; } @@ -95,13 +82,8 @@ } public Map<String, double [][]> getValues() { - return getValues(MASTER_SCALE); - } - - public Map<String, double [][]> getValues(double scale) { - if (values == null || scale != this.scale) { - values = loadValues(scale); - this.scale = scale; + if (values == null) { + values = loadValues(); } return values; } @@ -109,7 +91,7 @@ /** * Returns mapping of gauge name to values. */ - protected Map<String, double [][]> loadValues(double scale) { + protected Map<String, double [][]> loadValues() { Map<String, double [][]> values = new HashMap<String, double [][]>(); Session session = SessionHolder.HOLDER.get(); @@ -147,7 +129,7 @@ if (table == null) { table = tables.get(0); } - double [][] vs = loadDischargeTableValues(table, scale); + double [][] vs = loadDischargeTableValues(table); values.put(gaugeName, vs); } @@ -158,14 +140,10 @@ /** * @param table The discharge table - * @param scale The scale factor to adjust W and Q values. * * @return the values of a discharge table. */ - public static double[][] loadDischargeTableValues( - DischargeTable table, - double scale - ) { + public static double[][] loadDischargeTableValues(DischargeTable table) { List<DischargeTableValue> dtvs = table.getDischargeTableValues(); final double [][] vs = new double[2][dtvs.size()]; @@ -173,8 +151,8 @@ int idx = 0; for (DischargeTableValue dtv: dtvs) { double q = dtv.getQ().doubleValue(); - vs[0][idx] = q * scale; - vs[1][idx] = dtv.getW().doubleValue() * scale; + vs[0][idx] = q; + vs[1][idx] = dtv.getW().doubleValue(); ++idx; }
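The hunks above drop the MASTER_SCALE/HISTORICAL_SCALE factors entirely, so callers now get discharge table values exactly as stored, which after the import change means Q in m³/s and W in centimeters at the gauge. A minimal sketch of the simplified API, using the two-argument convenience constructor seen in GaugeDischargeCurveFacet below; the river and gauge names are placeholders and a configured backend session is assumed:

    import java.util.Map;

    import org.dive4elements.river.artifacts.model.DischargeTables;

    public class DischargeTableLookup {

        public static void main(String[] args) {
            // "Mosel" and "Cochem" are example names, not part of the changeset.
            DischargeTables dt = new DischargeTables("Mosel", "Cochem");

            // No scale argument any more: values arrive unconverted,
            // Q in row 0, W (cm at the gauge) in row 1.
            Map<String, double[][]> map = dt.getValues();

            double[][] table = map.get("Cochem");
            if (table != null) {
                System.out.println("First Q/W pair: "
                    + table[0][0] + " / " + table[1][0]);
            }
        }
    }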
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/GaugeDischargeCurveFacet.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/GaugeDischargeCurveFacet.java	Tue Jun 18 16:05:10 2013 +0200
@@ -67,8 +67,7 @@
 
         DischargeTables dt = new DischargeTables(river, name);
 
-        Map<String, double [][]> map = dt.getValues(
-            DischargeTables.MASTER_SCALE);
+        Map<String, double [][]> map = dt.getValues();
 
         double [][] values = map.get(name);
         if (values == null) {
@@ -76,7 +75,7 @@
         }
         double [] kms = new double[values[0].length];
         Arrays.fill(kms, gauge.getStation().doubleValue());
-        return new WQKms(kms, values[0], values[1], name);
+        return new WQKms(kms, values[0], values[1], name, WQKms.CENTIMETER_AT_GAUGE);
     }
 
     @Override
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MiddleBedHeightCalculation.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/MiddleBedHeightCalculation.java	Tue Jun 18 16:05:10 2013 +0200
@@ -157,11 +157,17 @@
 
         for (BedHeightSingleValue value: values) {
             if (value.getHeight() != null) {
+                double uncert = value.getUncertainty() != null ?
+                    value.getUncertainty().doubleValue() : Double.NaN;
+                double sounding = value.getSoundingWidth() != null ?
+                    value.getSoundingWidth().doubleValue() : Double.NaN;
+                double gap = value.getDataGap() != null ?
+                    value.getDataGap().doubleValue() : Double.NaN;
                 data.addAll(value.getStation().doubleValue(),
                     value.getHeight().doubleValue(),
-                    value.getUncertainty().doubleValue(),
-                    value.getSoundingWidth().doubleValue(),
-                    value.getDataGap().doubleValue(),
+                    uncert,
+                    sounding,
+                    gap,
                     value.getWidth().doubleValue(),
                     false);
             }
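The same defensive idiom, mapping a missing database value to NaN instead of risking a NullPointerException, is applied here, in BedHeightFactory and in MiddleBedHeightExporter further down. A self-contained illustration of the pattern (not taken from the changeset):

    import java.math.BigDecimal;

    public class NullToNaN {

        // Missing measurements become NaN so they can later be filtered
        // or rendered as empty cells instead of aborting the calculation.
        static double toDouble(BigDecimal value) {
            return value != null ? value.doubleValue() : Double.NaN;
        }

        public static void main(String[] args) {
            System.out.println(toDouble(new BigDecimal("1.25"))); // 1.25
            System.out.println(toDouble(null));                   // NaN
        }
    }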
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Segment.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/Segment.java	Tue Jun 18 16:05:10 2013 +0200
@@ -187,17 +187,14 @@
             DischargeTable dt = gauge.fetchMasterDischargeTable();
 
-            //TODO: Change scale from 100 to 1 immediately after
-            //      discharge table import changed to cm!
             double [][] table =
-                DischargeTables.loadDischargeTableValues(dt, 100);
+                DischargeTables.loadDischargeTableValues(dt);
 
             // need the original values for naming
             segment.backup();
 
             for (int i = 0; i < values.length; ++i) {
-                //TODO: s.o.
-                double w = values[i]; /* / 100.0; */
+                double w = values[i] * 100.0;
                 double [] qs = DischargeTables.getQsForW(table, w);
 
                 if (qs.length == 0) {
                     log.warn("No Qs found for W = " + values[i]);
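Since the discharge tables are now kept in centimeters while the segment's W values still come in meters, the lookup converts explicitly with * 100. A small sketch of that lookup against a hand-made table; the numbers are invented for illustration:

    import org.dive4elements.river.artifacts.model.DischargeTables;

    public class WToQLookup {

        public static void main(String[] args) {
            // Illustrative table: row 0 = Q [m^3/s], row 1 = W [cm at the gauge].
            double[][] table = {
                { 10d,  50d, 120d, 300d },
                { 150d, 210d, 260d, 320d }
            };

            double wMeter = 2.35;   // W from a wst column, in m
            // The table is in cm, so convert before the lookup.
            double[] qs = DischargeTables.getQsForW(table, wMeter * 100.0);

            for (double q: qs) {
                System.out.println("Q = " + q);
            }
        }
    }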
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/W.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/W.java	Tue Jun 18 16:05:10 2013 +0200
@@ -22,6 +22,12 @@
 
     protected TDoubleArrayList ws;
 
+    public static final int METER_OVER_REFPOINT = 0;
+
+    public static final int CENTIMETER_AT_GAUGE = 1;
+
+    protected int referenceSystem;
+
     public W() {
         ws = new TDoubleArrayList();
     }
@@ -36,10 +42,35 @@
     }
 
     public W(int capacity, String name) {
+        this(capacity, "", METER_OVER_REFPOINT);
+    }
+
+    public W(int capacity, String name, int referenceSystem) {
         super(name);
+        this.referenceSystem = referenceSystem;
         ws = new TDoubleArrayList(capacity);
     }
 
+    public void setReferenceSystem(int val) {
+        referenceSystem = val;
+    }
+
+    /** Return the used reference system for W.
+     * If the W's refer to values in meters over a reference
+     * point (e.g. NN+m) they are in meter. If they are
+     * relative to the PNP of a gauge they are in centimeter*/
+    public int getReferenceSystem() {
+        return referenceSystem;
+    }
+
+    /** Convenience function to get the correct unit for W values. */
+    public String getWUnit() {
+        if (getReferenceSystem() == CENTIMETER_AT_GAUGE) {
+            return "cm";
+        }
+        return "m";
+    }
+
     public void add(double value) {
         ws.add(value);
     }
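The new flag travels with every W (and thus WQ/WQKms) object so that exporters can tell calculated water levels, in meters above the river's reference datum, from discharge table values, in centimeters above the gauge datum. A short sketch using the WQKms constructor added later in this changeset; the arrays are dummy data:

    import org.dive4elements.river.artifacts.model.W;
    import org.dive4elements.river.artifacts.model.WQKms;

    public class ReferenceSystemDemo {

        public static void main(String[] args) {
            double[] kms = { 100.0 };
            double[] qs  = { 120.0 };
            double[] ws  = { 250.0 };   // cm above the gauge datum (PNP)

            WQKms atGauge = new WQKms(kms, qs, ws, "example",
                WQKms.CENTIMETER_AT_GAUGE);

            System.out.println(atGauge.getWUnit());   // "cm"
            System.out.println(
                atGauge.getReferenceSystem() == W.CENTIMETER_AT_GAUGE); // true
        }
    }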
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WKmsJRDataSource.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WKmsJRDataSource.java	Tue Jun 18 16:05:10 2013 +0200
@@ -90,12 +90,21 @@
         else if ("gauge".equals(fieldName)) {
             value = metaData.get("gauge");
         }
+        else if ("datum".equals(fieldName)) {
+            value = metaData.get("datum");
+        }
         else if ("calculation".equals(fieldName)) {
             value = metaData.get("calculation");
         }
         else if ("differences".equals(fieldName)) {
             value = metaData.get("differences");
         }
+        else if ("wUnit".equals(fieldName)) {
+            value = metaData.get("wUnit");
+        }
+        else if ("valid_since".equals(fieldName)) {
+            value = metaData.get("valid_since");
+        }
         else if ("km".equals(fieldName)) {
             value = data.get(index)[0];
         }
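The new field names are served to the Jasper report and are filled by ComputedDischargeCurveExporter further down. A hedged fragment showing the producing side; the method name and the concrete values are examples only, and source is the data source being filled by the exporter:

    // Fragment, not from the changeset: feeds the metadata the new fields expect.
    protected void addGaugeMetaData(WKmsJRDataSource source) {
        source.addMetaData("gauge", "Cochem");           // gauge name
        source.addMetaData("datum", "76.81 NN+m");       // formatted gauge datum
        source.addMetaData("wUnit", "cm");               // "cm" at a gauge, river unit otherwise
        source.addMetaData("valid_since", "01.11.1996"); // start of the master discharge table
    }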
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQ.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQ.java	Tue Jun 18 16:05:10 2013 +0200
@@ -12,6 +12,8 @@
 
 import gnu.trove.TDoubleArrayList;
 
+import java.math.BigDecimal;
+
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -108,5 +110,27 @@
     public void removeNaNs() {
         DoubleUtil.removeNaNs(new TDoubleArrayList [] { ws, qs });
     }
+
+    /** Returns either a modified copy or the same Object with fixed W values.
+     * If a conversion takes place converted is set to true
+     */
+    public static WQ getFixedWQforExportAtGauge(WQ wq, BigDecimal datum) {
+        if (wq.getReferenceSystem() == wq.CENTIMETER_AT_GAUGE) {
+            // Do nothing
+            return wq;
+        }
+        // If we convert we work on a copy to avoid side effects.
+        WQ ret = new WQ(wq.size(), wq.getName());
+        ret.setReferenceSystem(wq.CENTIMETER_AT_GAUGE);
+
+        // When we convert and have a datum we have a calculated
+        // result at a gauge so we must subtract the datum.
+        double subtractDatum = datum == null ? 0 : datum.doubleValue();
+        for (int i=0; i < wq.size(); i++) {
+            ret.add((wq.get(i)[0] - subtractDatum)* 100, wq.get(i)[1]);
+        }
+        log.debug("Converted W values to centimeter and substracted: " + subtractDatum);
+        return ret;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
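A worked example of the conversion: with a gauge datum of 76.81 m, a calculated water level of 78.31 m above the river's reference datum is exported as (78.31 - 76.81) * 100 = 150, i.e. 150 cm above the gauge zero; a WQ that already carries the CENTIMETER_AT_GAUGE flag is returned unchanged.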
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/WQKms.java	Tue Jun 18 16:05:10 2013 +0200
@@ -58,8 +58,17 @@
 
     public WQKms(double [] kms, double [] qs, double [] ws, String name) {
+        this(kms, qs, ws, name, 0);
+    }
+
+    public WQKms(double []kms, WQ wq) {
+        this(kms, wq.getQs(), wq.getWs(), wq.getName(), wq.getReferenceSystem());
+    }
+
+    public WQKms(double [] kms, double [] qs, double [] ws, String name, int wReferenceSystem) {
         super(qs, ws, name);
         this.kms = new TDoubleArrayList(kms);
+        setReferenceSystem(wReferenceSystem);
     }
 
     @Override
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedHeightFactory.java	Tue Jun 18 16:05:10 2013 +0200
@@ -154,12 +154,11 @@
         for (int i = 0; i < results.size(); i++) {
             Object[] row = results.get(i);
             log.debug("got station: " + (Double)row[1]);
-            height.add(
-                (Double) row[0],
-                (Double) row[1],
-                (Double) row[2],
-                (Double) row[3],
-                (Integer) row[4]);
+            Double row0 = row[0] != null ? (Double)row[0] : Double.NaN;
+            Double row1 = row[1] != null ? (Double)row[1] : Double.NaN;
+            Double row2 = row[2] != null ? (Double)row[2] : Double.NaN;
+            Double row3 = row[3] != null ? (Double)row[3] : Double.NaN;
+            height.add(row0, row1, row2, row3, (Integer) row[4]);
         }
         return height;
     }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedloadDiameterDataFacet.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/BedloadDiameterDataFacet.java	Tue Jun 18 16:05:10 2013 +0200
@@ -35,7 +35,7 @@
         D4EArtifact d4e = (D4EArtifact) artifact;
         BedQualityAccess access = new BedQualityAccess(d4e, context);
         int ndx = getIndex() & 7;
-        int diam = (getIndex() >> 3);
+        int diam = (getIndex() >> 4);
         String diameter = "";
         switch (diam) {
         case 1: diameter = "d10"; break;
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurementFactory.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/QualityMeasurementFactory.java	Tue Jun 18 16:05:10 2013 +0200
@@ -55,7 +55,8 @@
             " sp.tiefevon IS NOT NULL AND " +
             " sp.tiefebis IS NOT NULL AND " + // TODO: Test if char diameter ist null.
             " st.km BETWEEN :from - 0.001 AND :to + 0.001 AND " +
-            " st.datum BETWEEN :start AND :end";
+            " st.datum BETWEEN :start AND :end " +
+            "ORDER BY st.km";
 
     private static final String SQL_BEDLOAD_MEASUREMENT =
         "SELECT m.km as km," +
@@ -82,7 +83,8 @@
             " m.km IS NOT NULL AND " +
             " m.d10 IS NOT NULL AND" + //TODO: Add all other char. diameter.
             " m.km BETWEEN :from - 0.001 AND :to + 0.001 AND" +
-            " m.datum BETWEEN :start AND :end";
+            " m.datum BETWEEN :start AND :end " +
+            "ORDER BY m.km";
 
     public static final class QualityMeasurementResultTransformer
         extends BasicTransformerAdapter {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Tue Jun 18 16:05:10 2013 +0200 @@ -94,43 +94,57 @@ double[][] sd = getLoadData(result); // Sort by km. - TreeMap<Double, Double> sortData = new TreeMap<Double,Double>(); + TreeMap<Double, Double> sortedKmLoad = new TreeMap<Double,Double>(); double[] km = sd[0]; double[] load = sd[1]; + // Build map of km->load, but directly exclude the ones which do + // not match against a measurements station ranges start. for (int i = 0 ; i < km.length; i++) { - sortData.put(km[i], load[i]); + for (MeasurementStation station: stations) { + if (Math.abs(station.getStation() - km[i]) <= EPSILON) { + sortedKmLoad.put(km[i], load[i]); + continue; + } + } } + // [0] -> x, [1] -> y double[][] values = new double[2][]; - values[0] = new double[km.length*3]; - values[1] = new double[km.length*3]; - - List<double[]> kmWithoutStation = new ArrayList<double[]>(); + values[0] = new double[sortedKmLoad.size()*3]; + values[1] = new double[sortedKmLoad.size()*3]; // Find station via its station (km). // TODO use a binarySearch instead of linear absdiff approach int i = 0; - for (Map.Entry<Double, Double> entry: sortData.entrySet()) { + for (Map.Entry<Double, Double> kmLoad: sortedKmLoad.entrySet()) { boolean matchFound = false; - // For now, ignore overlaps like (B> next A) - for (MeasurementStation station: stations) { - if (Math.abs(station.getStation() - entry.getKey()) < EPSILON || - station.getRange().containsTolerant(entry.getKey())) { - // TODO: In rare cases, two matches can be found. - values[0][i*3] = station.getRange().getA().doubleValue() + EPSILON; - values[1][i*3] = entry.getValue(); - values[0][i*3+1] = station.getRange().getB().doubleValue() - EPSILON; - values[1][i*3+1] = entry.getValue(); - values[0][i*3+2] = station.getRange().getB().doubleValue(); - values[1][i*3+2] = entry.getValue(); + for (int k = 0; k < stations.size(); k++) { + MeasurementStation station = stations.get(k); + if (Math.abs(station.getStation() - kmLoad.getKey()) < EPSILON) { + // Value has been taken at measurement station. + values[0][i*3] = station.getRange().getA().doubleValue() + EPSILON; + values[1][i*3] = kmLoad.getValue(); + double endValue = 0d; + // Valid until next measurements stations begin of range, + // or end of current range if last value. + if (k+2 <= stations.size()) { + endValue = stations.get(k+1).getRange().getA().doubleValue(); + } + else { + endValue = station.getRange().getB().doubleValue(); + } + values[0][i*3+1] = endValue; + values[1][i*3+1] = kmLoad.getValue(); + values[0][i*3+2] = endValue; + values[1][i*3+2] = kmLoad.getValue(); matchFound = true; } } // Store points without match for later assessment. if (!matchFound) { - logger.warn("measurement without station ("+entry.getKey()+")!"); + logger.warn("measurement without station ("+kmLoad.getKey()+")!"); } i++; }
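Roughly, the new matching works like this: with two measurement stations whose ranges start at km 10.0 and km 25.0, a load value taken at the first station is drawn as a constant segment from km 10.0 up to km 25.0, the last station's value extends to the end of its own range, and measurements whose km does not coincide with any station (within EPSILON) are dropped with a warning.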
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Measurements.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/sq/Measurements.java	Tue Jun 18 16:05:10 2013 +0200
@@ -59,7 +59,7 @@
     public static final SExtractor S_BL_EXTRACTOR = new SExtractor() {
         @Override
         public double getS(Measurement measument) {
-            return measument.BL_S();
+            return measument.S_BL_1();
         }
     };
 
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/DischargeTablesOverview.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/DischargeTablesOverview.java	Tue Jun 18 16:05:10 2013 +0200
@@ -147,14 +147,7 @@
 
         double[][] xy = null;
 
-        if (dt.getKind() == DischargeTables.MASTER) {
-            xy = DischargeTables.loadDischargeTableValues(dt,
-                DischargeTables.MASTER_SCALE);
-        }
-        else {
-            xy = DischargeTables.loadDischargeTableValues(dt,
-                DischargeTables.HISTORICAL_SCALE);
-        }
+        xy = DischargeTables.loadDischargeTableValues(dt);
 
         XYSeries series = new XYSeries(createSeriesTitle(callMeta, dt), false);
         for (int i = 0, n = xy[0].length; i < n; i++) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/services/MainValuesService.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/services/MainValuesService.java Tue Jun 18 16:05:10 2013 +0200 @@ -26,6 +26,7 @@ import org.dive4elements.river.model.MainValue; import org.dive4elements.river.model.MainValueType; import org.dive4elements.river.model.NamedMainValue; +import org.dive4elements.river.model.OfficialLine; import org.dive4elements.river.model.Range; import org.dive4elements.river.model.River; @@ -205,7 +206,7 @@ doc.appendChild(rootEl); appendMetaInformation(doc, rootEl, river, gauge, context); - appendMainValues(doc, rootEl, mainValues, context); + appendMainValues(doc, rootEl, mainValues, river.getName(), context); return doc; } @@ -250,10 +251,22 @@ } + /** Checks i a main value has an official associated, */ + protected static boolean hasOfficialLine(NamedMainValue nmv, String river) { + for (OfficialLine ol: nmv.getOfficialLines()) { + if (river.equals(ol.getWstColumn().getWst().getRiver().getName())) { + return true; + } + } + return false; + } + + protected void appendMainValues( Document doc, Element root, List<MainValue> mainValues, + String river, Object context) { logger.debug("MainValuesService.appendMainValues"); @@ -266,7 +279,7 @@ Element list = cr.create("mainvalues"); for (MainValue mainValue: mainValues) { - Element newEl = buildMainValueElement(doc, mainValue, context); + Element newEl = buildMainValueElement(doc, mainValue, river, context); if (newEl != null) { list.appendChild(newEl); @@ -290,6 +303,7 @@ protected Element buildMainValueElement( Document doc, MainValue mainValue, + String river, Object context) { ElementCreator cr = new ElementCreator( @@ -306,6 +320,11 @@ cr.addAttr(el, "name", namedMainValue.getName()); cr.addAttr(el, "type", mainValueType.getName()); + if (hasOfficialLine(namedMainValue, river)) { + cr.addAttr(el, "official", "true"); + } + + return el; } }
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/FloodMapState.java Tue Jun 18 16:05:10 2013 +0200 @@ -458,6 +458,7 @@ info.setData(hwsShapefile); info.setSrid(srid); info.setGroupTitle(group); + info.setGroup(group + artifact.identifier()); MapfileGenerator generator = new ArtifactMapfileGenerator(); Template tpl = generator.getTemplateByName(MapfileGenerator.SHP_LAYER_TEMPLATE); try { @@ -600,7 +601,7 @@ WSPLGEN_BARRIERS_LINES, "LINE", srid, - "barriers"); + MapfileGenerator.MS_BARRIERS_PREFIX); if (scenario.equals(WSPLGENJob.GEL_NOSPERRE)) { logger.debug("WSPLGEN will not use barrier features."); @@ -624,10 +625,10 @@ artifact, dir, MapfileGenerator.MS_LAYER_PREFIX + "barriers-poly", - shapePolys.getAbsolutePath(), + WSPLGEN_BARRIERS_POLY, "POLYGON", srid, - "barriers"); + MapfileGenerator.MS_BARRIERS_PREFIX); if (scenario.equals(WSPLGENJob.GEL_NOSPERRE)) { logger.debug("WSPLGEN will not use barrier features.");
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WQSelect.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/WQSelect.java Tue Jun 18 16:05:10 2013 +0200 @@ -34,6 +34,7 @@ import org.dive4elements.river.artifacts.model.WstFactory; import org.dive4elements.river.artifacts.model.WstValueTable; +import org.dive4elements.river.artifacts.model.WstValueTableFactory; import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.utils.RiverUtils; @@ -423,7 +424,8 @@ logger.debug("WQSelect.determineMinMaxWFree"); WINFOArtifact winfo = (WINFOArtifact) artifact; - WstValueTable valueTable = winfo.getWstValueTable(); + WstValueTable valueTable = WstValueTableFactory.getTable( + RiverUtils.getRiver(winfo)); double[] minmaxW = null; if(valueTable != null) { @@ -485,7 +487,8 @@ logger.debug("WQSelect.determineMinMaxQ"); WINFOArtifact winfo = (WINFOArtifact) artifact; - WstValueTable valueTable = winfo.getWstValueTable(); + WstValueTable valueTable = WstValueTableFactory.getTable( + RiverUtils.getRiver(winfo)); double[] minmaxQ = null; if(valueTable != null) {
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/states/minfo/SedimentLoadCalculate.java Tue Jun 18 16:05:10 2013 +0200 @@ -14,6 +14,8 @@ import org.apache.log4j.Logger; import org.dive4elements.artifactdatabase.state.Facet; +import org.dive4elements.artifactdatabase.state.FacetActivity; +import org.dive4elements.artifacts.Artifact; import org.dive4elements.artifacts.CallContext; import org.dive4elements.artifacts.CallMeta; import org.dive4elements.river.artifacts.D4EArtifact; @@ -48,6 +50,33 @@ public static final String I18N_FACET_SEDIMENTLOAD_TOTAL_LOAD = "facet.sedimentload.total_load"; public static final String I18N_FACET_SEDIMENTLOAD_TOTAL = "facet.sedimentload.total"; + static { + // Active/deactivate facets. + FacetActivity.Registry.getInstance().register( + "minfo", + new FacetActivity() { + @Override + public Boolean isInitialActive( + Artifact artifact, + Facet facet, + String output + ) { + String name = facet.getName(); + if (name.equals(SEDIMENT_LOAD_COARSE) || + name.equals(SEDIMENT_LOAD_FINEMIDDLE) || + name.equals(SEDIMENT_LOAD_SAND) || + name.equals(SEDIMENT_LOAD_SUSP_SAND) || + name.equals(SEDIMENT_LOAD_SUSP_SEDIMENT) || + name.equals(SEDIMENT_LOAD_SUSP_SAND_BED)){ + return Boolean.FALSE; + } + else { + return null; + } + } + }); + } + @Override public Object computeAdvance(D4EArtifact artifact, String hash, CallContext context, List<Facet> facets, Object old) {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ATExporter.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ATExporter.java Tue Jun 18 16:05:10 2013 +0200 @@ -85,15 +85,6 @@ return; } - ATWriter at; - try { - at = new ATWriter(data); - } - catch (IllegalArgumentException iae) { - logger.error("creating ATWriter failed", iae); - throw new IOException(iae); - } - River river = RiverUtils.getRiver(master); RangeAccess rangeAccess = new RangeAccess(master); double[] kms = rangeAccess.getLocations(); @@ -103,7 +94,8 @@ // at gauge. TimeInterval interval = gauge.fetchMasterDischargeTable().getTimeInterval(); - at.write( + ATWriter.write( + data, new OutputStreamWriter(out, DEFAULT_ENCODING), context.getMeta(), river.getName(), @@ -111,11 +103,12 @@ gauge.getName(), gauge.getDatum(), interval.getStartTime(), - false); + river.getWstUnit().getName()); } else { // at km - at.write( + ATWriter.write( + data, new OutputStreamWriter(out), context.getMeta(), river.getName(), @@ -123,7 +116,7 @@ null, null, null, - true); + river.getWstUnit().getName()); } }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ATWriter.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ATWriter.java Tue Jun 18 16:05:10 2013 +0200 @@ -46,30 +46,124 @@ public static final String I18N_AT_GAUGE_HEADER = "export.discharge.curve.at.gauge.header"; + public static final String I18N_AT_CALC_GAUGE_HEADER = + "export.discharge.curve.at.gauge.calc.header"; + public static final String EMPTY = " "; - protected double minW; - protected double maxW; - protected double minQ; - protected double maxQ; - - protected UnivariateRealFunction qFunc; - - public ATWriter() { + public static double getQ(int w, UnivariateRealFunction qFunc) { + try { + double val = qFunc.value(w); + return val; + } + catch (FunctionEvaluationException aode) { + // should not happen + logger.error("spline interpolation failed", aode); + return Double.NaN; + } } - public ATWriter(WQ wq) throws IllegalArgumentException { + public static void printQ(PrintWriter out, double q) { + String format; + if (q < 1d) format = " % 8.3f"; + else if (q < 10d) format = " % 8.2f"; + else if (q < 100d) format = " % 8.1f"; + else { + format = " % 8.0f"; + if (q > 1000d) q = Math.rint(q/10d)*10d; + } + out.printf(Locale.US, format, q); + } + + protected static void printCalculatedGaugeHeader( + PrintWriter out, + CallMeta callMeta, + String river, + double km, + String gName, + BigDecimal datum, + Date date, + String unit + ) { + out.print("*" + Resources.getMsg( + callMeta, + I18N_AT_CALC_GAUGE_HEADER, + I18N_AT_CALC_GAUGE_HEADER, + new Object[] { river, gName, datum, unit } )); + out.print("\r\n"); + } + + protected static void printGaugeHeader( + PrintWriter out, + CallMeta callMeta, + String river, + double km, + String gName, + BigDecimal datum, + Date date, + String unit + ) { + DateFormat f = DateFormat.getDateInstance(DateFormat.MEDIUM, + Resources.getLocale(callMeta)); + out.print("*" + Resources.getMsg( + callMeta, + I18N_AT_GAUGE_HEADER, + I18N_AT_GAUGE_HEADER, + new Object[] { river, gName, f.format(date), datum, unit} )); + out.print("\r\n"); + } + + protected static void printHeader( + PrintWriter out, + CallMeta callMeta, + String river, + double km + ) { + out.print("*" + Resources.getMsg( + callMeta, + I18N_AT_HEADER, + I18N_AT_HEADER, + new Object[] { river, km } )); + out.print("\r\n"); + } + + public static void write( + WQ values, + Writer writer, + CallMeta meta, + String river, + double km, + String gName, + BigDecimal datum, + Date date, + String unit) + throws IOException + { + int minW; + int maxW; + double minQ; + double maxQ; + + UnivariateRealFunction qFunc; + + WQ wq = WQ.getFixedWQforExportAtGauge(values, datum); + + // If we converted to centimeter we know that the WQ table is + // calculated because of the assumption that all calculations + // are in Meter and only the discharge tables data is in meter. + boolean isCalculation = wq.getReferenceSystem() != values.getReferenceSystem(); int [] bounds = wq.longestIncreasingWRangeIndices(); if (logger.isDebugEnabled()) { - logger.debug("exporting w between indices " + - bounds[0] + " and " + bounds[1] + " (" + - wq.getW(bounds[0]) + ", " + wq.getW(bounds[1])); + logger.debug("exporting " + (isCalculation ? "calculated " : "") + + "w between indices " + bounds[0] + " and " + bounds[1] + " (" + + (int)Math.ceil(wq.getW(bounds[0])) + ", " + + (int)Math.floor(wq.getW(bounds[1]))+ ")"); } if (bounds[1]-bounds[0] < 1) { // Only first w can be written out. 
- minW = maxW = wq.getW(bounds[0]); + minW = maxW = (int)Math.round(wq.getW(bounds[0])); minQ = maxQ = wq.getQ(bounds[0]); // constant function qFunc = new PolynomialFunction(new double [] { minQ }); @@ -89,117 +183,51 @@ ? new LinearInterpolator().interpolate(ws, qs) : new SplineInterpolator().interpolate(ws, qs); - minW = wq.getW(bounds[0]); - maxW = wq.getW(bounds[1]); + minW = (int)Math.ceil(wq.getW(bounds[0])); + maxW = (int)Math.floor(wq.getW(bounds[1])); minQ = wq.getQ(bounds[0]); maxQ = wq.getQ(bounds[1]); - } - - public double getQ(double w) { - - try { - return qFunc.value(w); - } - catch (FunctionEvaluationException aode) { - // should not happen - logger.warn("spline interpolation failed", aode); - return w <= minW ? minQ : maxQ; - } - } - - public static void printQ(PrintWriter out, double q) { - String format; - if (q < 1d) format = " % 8.3f"; - else if (q < 10d) format = " % 8.2f"; - else if (q < 100d) format = " % 8.1f"; - else { - format = " % 8.0f"; - if (q > 1000d) q = Math.rint(q/10d)*10d; - } - out.printf(Locale.US, format, q); - } - - - protected static void printGaugeHeader( - PrintWriter out, - CallMeta callMeta, - String river, - double km, - String gName, - BigDecimal datum, - Date date - ) { - DateFormat f = DateFormat.getDateInstance(); - out.print("*" + Resources.getMsg( - callMeta, - I18N_AT_GAUGE_HEADER, - I18N_AT_GAUGE_HEADER, - new Object[] { river, gName, f.format(date), datum } )); - out.print("\r\n"); - } - - protected static void printHeader( - PrintWriter out, - CallMeta callMeta, - String river, - double km - ) { - out.print("*" + Resources.getMsg( - callMeta, - I18N_AT_HEADER, - I18N_AT_HEADER, - new Object[] { river, km } )); - out.print("\r\n"); - } - - public void write( - Writer writer, - CallMeta meta, - String river, - double km, - String gName, - BigDecimal datum, - Date date, - boolean wOutUnitIsMeter) - throws IOException - { PrintWriter out = new PrintWriter(writer); // A header is required, because the desktop version of FLYS will skip // the first row. if (gName != null) { - printGaugeHeader(out, meta, river, km, gName, datum, date); + if (isCalculation) { + printCalculatedGaugeHeader(out, meta, river, km, gName, datum, date, unit); + } else { + printGaugeHeader(out, meta, river, km, gName, datum, date, unit); + } } else { printHeader(out, meta, river, km); } - double rest = (minW * 100.0) % 10.0; + int rest = minW % 10; - double startW = Math.rint((minW - rest*0.01)*10.0)*0.1; + int startW = minW - rest; if (logger.isDebugEnabled()) { logger.debug("startW: " + startW); logger.debug("rest: " + rest); + logger.debug("maxW: " + maxW); } int col = 0; - for (double w = startW; w <= maxW; w += 0.01) { + for (int w = startW; w < maxW; w++) { if (col == 0) { - if (wOutUnitIsMeter) { - // For some crazy reason W's at a gauge should be in the - // unit but W's exported elsewhere should be in Meter - out.printf(Locale.US, "%5.2f", w); - } else { - out.printf(Locale.US, "%8d", (int)Math.round(w * 100d)); - } + out.printf(Locale.US, "%8d", w); } if (w < minW) { out.print(EMPTY); - } - else { - printQ(out, getQ(w)); + } else { + double actQ = getQ(w, qFunc); + if (Double.isNaN(actQ)) { + // Can't happen™ + break; + } else { + printQ(out, actQ); + } } if (++col >= COLUMNS) {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/ComputedDischargeCurveExporter.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/ComputedDischargeCurveExporter.java Tue Jun 18 16:05:10 2013 +0200 @@ -39,12 +39,14 @@ import org.dive4elements.river.artifacts.access.RangeAccess; import org.dive4elements.river.artifacts.model.CalculationResult; -import org.dive4elements.river.artifacts.model.GaugesFactory; +import org.dive4elements.river.artifacts.model.WQ; import org.dive4elements.river.artifacts.model.WQKms; import org.dive4elements.river.artifacts.model.WKmsJRDataSource; import org.dive4elements.river.artifacts.resources.Resources; import org.dive4elements.river.model.Gauge; +import org.dive4elements.river.model.River; + import org.dive4elements.river.utils.RiverUtils; import org.dive4elements.river.utils.Formatter; @@ -68,10 +70,17 @@ public static final String DEFAULT_CSV_Q_HEADER = "Q [m\u00b3/s]"; public static final String PDF_HEADER_MODE = "export.computed.discharge.pdf.mode"; + public static final String PDF_HEADER_CALC_MODE = "export.computed.discharge.pdf.calc.mode"; public static final String JASPER_FILE = "export.computed.discharge.pdf.file"; protected List<WQKms> data; + protected String wUnit; + protected String riverUnit; + protected String gaugeName; + protected double gaugeDatum; + protected boolean isCalculated; + protected Date validSince; public void init(Document request, OutputStream out, CallContext context) { logger.debug("ComputedDischargeCurveExporter.init"); @@ -87,19 +96,58 @@ if (d instanceof CalculationResult) { d = ((CalculationResult)d).getData(); } + WQKms referenceWQ = null; // used for gauge / unit observations if (d instanceof WQKms[]){ data.addAll(Arrays.asList((WQKms [])d)); - return; + // If there is a unit mix in this list + // we are screwed anyway. + referenceWQ = ((WQKms[])d)[0]; } else if (d instanceof WQKms) { data.add((WQKms)d); + referenceWQ = (WQKms)d; + } else { + logger.warn("Can't add data for export. Unkown data type " + + d.getClass().getName()); return; } - logger.warn("Can't add data for csv export. Unkown data type " + - d.getClass().getName()); + if (referenceWQ != null) { + wUnit = referenceWQ.getWUnit(); + D4EArtifact arti = (D4EArtifact)master; + River river = RiverUtils.getRiver(arti); + riverUnit = river.getWstUnit().getName(); + RangeAccess rangeAccess = new RangeAccess(arti); + + double[] kms = rangeAccess.getKmRange(); + + Gauge gauge = river.determineGaugeByPosition(kms[0]); + logger.debug("Kms 0: " + kms[0]); + if (Math.abs(kms[0] - gauge.getStation().doubleValue()) < 1e-4) { + gaugeName = gauge.getName(); + gaugeDatum = gauge.getDatum().doubleValue(); + + // Assumption is that values at a gauge that are in the meter + // reference system are calculated. 
+ isCalculated = wUnit.equals("m"); + + // Now convert the data to cm because we are at gauge + List<WQKms> newData = new ArrayList<WQKms>(); + for (WQKms d2: data) { + newData.add(new WQKms(d2.getKms(), + WQ.getFixedWQforExportAtGauge((WQ)d2, gauge.getDatum()))); + } + data = newData; // All hail the garbage collector + wUnit = "cm"; + + validSince = gauge.fetchMasterDischargeTable().getTimeInterval().getStartTime(); + } else { + gaugeName = ""; + validSince = null; + gaugeDatum = Double.NaN; + } + } } - protected void writeCSVData(CSVWriter writer) { logger.info("ComputedDischargeCurveExporter.writeData"); @@ -128,21 +176,13 @@ protected void writeCSVHeader(CSVWriter writer) { logger.debug("ComputedDischargeCurveExporter.writeCSVHeader"); - String unit = RiverUtils.getRiver((D4EArtifact) master).getWstUnit().getName(); RangeAccess access = new RangeAccess((D4EArtifact)master); double[] km = access.getLocations(); + // If we are not at gauge (cm) use the river unit + String realUnit = wUnit.equals("cm") ? "cm" : riverUnit; String header = - msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }); + msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] {realUnit}); - if (km != null) { - List<Gauge>gauges = GaugesFactory.getGauges(access.getRiver()); - for (Gauge g: gauges) { - if (Math.abs(g.getStation().doubleValue() - km[0]) < 0.0001d) { - header = "W [cm]"; - break; - } - } - } writer.writeNext(new String[] { header, msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER) @@ -156,6 +196,9 @@ * @return the number formatter for W values. */ protected NumberFormat getWFormatter() { + if (wUnit.equals("cm")) { + return Formatter.getFormatter(context, 0, 0); + } return Formatter.getComputedDischargeW(context); } @@ -210,6 +253,13 @@ CallMeta meta = context.getMeta(); D4EArtifact flys = (D4EArtifact) master; + source.addMetaData("gauge", gaugeName); + if (!Double.isNaN(gaugeDatum)) { + NumberFormat mf = Formatter.getMeterFormat(context); + source.addMetaData("datum", mf.format(gaugeDatum) + " " + riverUnit); + } else { + source.addMetaData("datum", ""); + } source.addMetaData ("river", RiverUtils.getRivername(flys)); @@ -218,14 +268,28 @@ source.addMetaData("date", df.format(new Date())); + source.addMetaData("wUnit", wUnit.equals("m") ? riverUnit : "cm"); + RangeAccess rangeAccess = new RangeAccess(flys); double[] kms = rangeAccess.getKmRange(); - source.addMetaData("range", String.valueOf(kms[0])); + source.addMetaData("range", + Formatter.getCalculationKm(context.getMeta()).format(kms[0])); - source.addMetaData("calculation", Resources.getMsg( - locale, - PDF_HEADER_MODE, - "Computed Discharge")); + // When w is in meter we are not at a gauge so our values + // must be calculated + if (isCalculated || wUnit.equals("m")) { + source.addMetaData("valid_since", ""); + source.addMetaData("calculation", Resources.getMsg( + locale, + PDF_HEADER_CALC_MODE, + "Computed Discharge")); + } else { + source.addMetaData("valid_since", validSince == null ? "" : df.format(validSince)); + source.addMetaData("calculation", Resources.getMsg( + locale, + PDF_HEADER_MODE, + "Discharge")); + } } protected void addWQData(WKmsJRDataSource source) {
--- a/artifacts/src/main/java/org/dive4elements/river/exports/MapGenerator.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/MapGenerator.java Tue Jun 18 16:05:10 2013 +0200 @@ -120,9 +120,6 @@ setInitialExtent(extent); createWSPLGENLayer(flys, wms, attr); } - else if (FLOODMAP_BARRIERS.equals(name)) { - createBarriersLayer(flys, wms); - } else if (FLOODMAP_USERSHAPE.equals(name)) { createUserShapeLayer(flys, wms); } @@ -175,21 +172,6 @@ } - protected void createBarriersLayer(D4EArtifact flys, WMSLayerFacet wms) { - ArtifactMapfileGenerator mfg = new ArtifactMapfileGenerator(); - - try { - mfg.createBarriersLayer(flys, wms); - } - catch (FileNotFoundException fnfe) { - logger.error(fnfe, fnfe); - } - catch (IOException ioe) { - logger.error(ioe, ioe); - } - } - - protected void createUserShapeLayer(D4EArtifact flys, WMSLayerFacet wms) { ArtifactMapfileGenerator mfg = new ArtifactMapfileGenerator();
--- a/artifacts/src/main/java/org/dive4elements/river/exports/MiddleBedHeightExporter.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/MiddleBedHeightExporter.java	Tue Jun 18 16:05:10 2013 +0200
@@ -131,13 +131,19 @@
             int end = data.getEndYear();
 
             if (start == end) {
+                String uncert = !Double.isNaN(data.getUncertainty(i)) ?
+                    uncertF.format(data.getUncertainty(i)) : "";
+                String gap = !Double.isNaN(data.getDataGap(i)) ?
+                    gapF.format(data.getDataGap(i)) + "%" : "";
+                String sound = !Double.isNaN(data.getSoundingWidth(i)) ?
+                    soundF.format(data.getSoundingWidth(i)) : "";
                 writer.writeNext(new String[] {
                     kmF.format(data.getKM(i)),
                     data.getDescription(),
                     heightF.format(data.getMiddleHeight(i)),
-                    uncertF.format(data.getUncertainty(i)),
-                    gapF.format(data.getDataGap(i)) + "%",
-                    soundF.format(data.getSoundingWidth(i)),
+                    uncert,
+                    gap,
+                    sound,
                     widthF.format(data.getWidth(i)),
                     RiverUtils.getLocationDescription(flys, data.getKM(i)),
                 });
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATExport.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATExport.java Tue Jun 18 16:05:10 2013 +0200 @@ -16,6 +16,8 @@ import org.dive4elements.river.artifacts.access.FixAccess; +import org.dive4elements.river.utils.RiverUtils; + import org.dive4elements.river.artifacts.math.fitting.Function; import org.dive4elements.river.artifacts.math.fitting.FunctionFactory; @@ -26,6 +28,8 @@ import org.dive4elements.river.exports.AbstractExporter; +import org.dive4elements.river.model.River; + import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -80,12 +84,11 @@ Writer writer = new OutputStreamWriter(out, DEFAULT_CSV_CHARSET); - FixAccess access = new FixAccess((D4EArtifact)this.master); FixATWriter atWriter = new FixATWriter(this.function, this.parameters); NodeList nodes = request.getElementsByTagName("km"); String km = nodes.item(0).getTextContent(); double dkm = Double.parseDouble(km); - String river = access.getRiver(); + River river = RiverUtils.getRiver((D4EArtifact)master); atWriter.write(writer, context.getMeta(), river, dkm); writer.close(); }
--- a/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/fixings/FixATWriter.java Tue Jun 18 16:05:10 2013 +0200 @@ -18,6 +18,9 @@ import org.dive4elements.river.exports.ATWriter; +import org.dive4elements.river.model.Gauge; +import org.dive4elements.river.model.River; + import java.io.IOException; import java.io.PrintWriter; import java.io.Writer; @@ -35,9 +38,15 @@ public static final String I18N_HEADER_KEY = "fix.export.at.header"; + public static final String I18N_GAUGE_HEADER_KEY = + "fix.export.at.gauge.header"; + public static final String I18N_HEADER_DEFAULT = "Exported fixings discharge curve for {0} {0}-km: {1}"; + public static final String I18N_GAUGE_HEADER_DEFAULT = + "Exported fixings discharge curve for {0}, gauge: {1} datum[{3}] = {2}"; + public static final String [] Q_MAX_COLUMN = new String [] { "max_q" }; private static final int MAX_ITERATIONS = 10000; @@ -58,12 +67,21 @@ public void write( Writer writer, CallMeta meta, - String river, + River river, double km ) throws IOException { PrintWriter out = new PrintWriter(writer); - printHeader(out, meta, river, km); + + int subtractPNP = 0; + // Special case handling for at's at gauges + Gauge gauge = river.determineGaugeByPosition(km); + if (Math.abs(km - gauge.getStation().doubleValue()) < 1e-4) { + printGaugeHeader(out, meta, river, gauge); + subtractPNP = (int)Math.round(gauge.getDatum().doubleValue() * 100); + } else { + printHeader(out, meta, river.getName(), km); + } double [] coeffs = parameters.interpolate( "km", km, function.getParameterNames()); @@ -116,7 +134,7 @@ log.debug("wcm: " + wcm); } - out.printf(Locale.US, "%8d", wRow); + out.printf(Locale.US, "%8d", wRow - subtractPNP); for (int i = 0; i < wcm; i++) { out.print(ATWriter.EMPTY); @@ -140,7 +158,7 @@ if (w > wMax) { break; } - out.printf(Locale.US, "%8d", wRow += 10); + out.printf(Locale.US, "%8d", (wRow += 10) - subtractPNP); wcm = 0; } @@ -160,6 +178,20 @@ river, km)); } + protected void printGaugeHeader( + PrintWriter out, + CallMeta meta, + River river, + Gauge gauge + ) { + out.println("*" + Resources.format( + meta, + I18N_GAUGE_HEADER_KEY, + I18N_GAUGE_HEADER_DEFAULT, + new Object[] { river.getName(), gauge.getName(), + gauge.getDatum(), river.getWstUnit().getName() })); + } + private static double minW( org.dive4elements.river.artifacts.math.Function function, double maxW,
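Assuming the fitted W rows are written in centimeters above the river's reference datum, the gauge case now rebases them onto the gauge zero: with a gauge datum of 76.81 m, subtractPNP becomes 7681, so a row value of 7831 is printed as 150, i.e. 150 cm above the PNP.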
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedQualityExporter.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedQualityExporter.java	Tue Jun 18 16:05:10 2013 +0200
@@ -122,6 +122,9 @@
                 row[ndx] = beds[k].getDiameterCap(km);
                 row[ndx + 1] = beds[k].getDiameterSub(km);
             }
+            if (beds.length == 0) {
+                continue;
+            }
             BedParametersResult[] params = results[j].getParameters();
             for(int k = 0; k < params.length; k++) {
                 // loads.length + (beds.lenght * 2) * (j + 1): shift bed and bedload columns.
@@ -203,6 +206,9 @@
                         beds[j].getType().toString()) +
                     " - " + d1 + "-" + d2);
             }
+            if (beds.length == 0) {
+                continue;
+            }
             if (params.length > 0) {
                 header.add(
                     msg(CSV_HEADER_DENSITY_CAP, CSV_HEADER_DENSITY_CAP) +
--- a/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/exports/minfo/SedimentLoadLSGenerator.java Tue Jun 18 16:05:10 2013 +0200 @@ -70,7 +70,7 @@ public static final String I18N_YAXIS_D_LABEL_DEFAULT = "delta S [m]"; public static final String I18N_YAXIS_V_LABEL_DEFAULT = "Geschwindigkeit v [m/s]"; - private D4EArtifact artifact; + private String yLabel = ""; @Override protected YAxisWalker getYAxisWalker() { @@ -101,8 +101,16 @@ } Facet facet = bundle.getFacet(); - artifact = (D4EArtifact)bundle.getArtifact(); + D4EArtifact artifact = (D4EArtifact)bundle.getArtifact(); + SedimentLoadAccess slaccess = new SedimentLoadAccess(artifact); + String unit = slaccess.getUnit(); + if (unit != null && unit.equals("m3_per_a")) { + yLabel = msg(I18N_YAXIS_LABEL_2, I18N_YAXIS_LABEL_DEFAULT_2); + } + else { + yLabel = msg(I18N_YAXIS_LABEL_1, I18N_YAXIS_LABEL_DEFAULT_1); + } if (facet == null) { return; } @@ -118,13 +126,11 @@ context.putContextValue("endkm", getXBounds(0).getUpper()); } else if (getXBounds(0) == null && getDomainAxisRange() == null) { - D4EArtifact artifact = (D4EArtifact)bundle.getArtifact(); RangeAccess access = new RangeAccess(artifact); context.putContextValue("startkm", access.getFrom()); context.putContextValue("endkm", access.getTo()); } else if (getXBounds(0) == null && getDomainAxisRange() != null){ - D4EArtifact artifact = (D4EArtifact)bundle.getArtifact(); RangeAccess access = new RangeAccess(artifact); Bounds b = new DoubleBounds(access.getFrom(), access.getTo()); Bounds bounds = @@ -226,13 +232,7 @@ protected String getDefaultYAxisLabel(int pos) { String label = "default"; if (pos == YAXIS.L.idx) { - SedimentLoadAccess access = new SedimentLoadAccess(artifact); - if (access.getUnit().equals("m3_per_a")) { - label = msg(I18N_YAXIS_LABEL_2, I18N_YAXIS_LABEL_DEFAULT_2); - } - else { - label = msg(I18N_YAXIS_LABEL_1, I18N_YAXIS_LABEL_DEFAULT_1); - } + label = yLabel; } else if (pos == YAXIS.V.idx) { label = msg(I18N_YAXIS_V_LABEL, I18N_YAXIS_V_LABEL_DEFAULT);
--- a/artifacts/src/main/java/org/dive4elements/river/exports/sq/SQRelationExporter.java	Mon Jun 17 17:16:25 2013 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/sq/SQRelationExporter.java	Tue Jun 18 16:05:10 2013 +0200
@@ -156,7 +156,8 @@
     }
 
     protected List<String[]> data2StringArrays(SQResult result) {
-        String km = String.valueOf(result.getKm());
+        String km = Formatter.getSQRelationKM(context
+            ).format(result.getKm());
         List<String[]> retval = new ArrayList<String[]>();
 
         for (int i = 0; i < SQResult.NUMBER_FRACTIONS; ++i) {
--- a/artifacts/src/main/java/org/dive4elements/river/utils/ArtifactMapfileGenerator.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/utils/ArtifactMapfileGenerator.java Tue Jun 18 16:05:10 2013 +0200 @@ -111,53 +111,52 @@ /** * Creates a layer file used for Mapserver's mapfile which represents the - * user defined barriers. + * shape files uploaded by the user. * * @param flys The D4EArtifact that owns <i>wms</i>. * @param wms The WMSLayerFacet that contains information for the layer. */ - public void createBarriersLayer(D4EArtifact flys, WMSLayerFacet wms) - throws FileNotFoundException, IOException + public void createUserShapeLayer(D4EArtifact flys, WMSLayerFacet wms) + throws FileNotFoundException, IOException { - logger.debug("createBarriersLayer"); - - //String uuid = flys.identifier(); - //File dir = new File(getShapefileBaseDir(), uuid); - - createBarriersLineLayer(flys, wms); - createBarriersPolygonLayer(flys, wms); - } + logger.debug("createUserShapeLayer"); - - protected void createBarriersLineLayer( - D4EArtifact flys, - WMSLayerFacet wms - ) - throws FileNotFoundException, IOException - { - String uuid = flys.identifier(); - String group = MS_BARRIERS_PREFIX + uuid; - String groupTitle = "I18N_BARRIERS_TITLE"; - - File dir = new File(getShapefileBaseDir(), uuid); - File test = new File(dir, WSPLGEN_LINES_SHAPE); + String uuid = flys.identifier(); + File dir = new File(getShapefileBaseDir(), uuid); + File test = new File(dir, WSPLGEN_USER_SHAPE); if (!test.exists() || !test.canRead()) { - logger.debug("No barrier line layer existing."); + logger.debug("No user layer existing."); return; } - LayerInfo lineInfo = new LayerInfo(); - lineInfo.setName(MS_LINE_PREFIX + uuid); - lineInfo.setType("LINE"); - lineInfo.setDirectory(uuid); - lineInfo.setData(WSPLGEN_LINES_SHAPE); - lineInfo.setTitle("I18N_LINE_SHAPE"); - lineInfo.setGroup(group); - lineInfo.setGroupTitle(groupTitle); - lineInfo.setSrid(wms.getSrid()); + File userShape = new File(dir, WSPLGEN_USER_SHAPE); + ShpFiles sf = new ShpFiles(userShape); + ShapefileReader sfr = new ShapefileReader(sf, true, false, null); + ShapefileHeader sfh = sfr.getHeader(); - String nameLines = MS_LAYER_PREFIX + wms.getName() + "-lines"; + String group = uuid + MS_USERSHAPE_PREFIX; + String groupTitle = "I18N_USER_SHAPE_TITLE"; + + LayerInfo info = new LayerInfo(); + info.setName(MS_USERSHAPE_PREFIX + uuid); + if (sfh.getShapeType().isLineType()) { + info.setType("LINE"); + } + else if (sfh.getShapeType().isPolygonType()) { + info.setType("POLYGON"); + } + else { + return; + } + info.setDirectory(uuid); + info.setData(WSPLGEN_USER_SHAPE); + info.setTitle("I18N_USER_SHAPE"); + info.setGroup(group); + info.setGroupTitle(groupTitle); + info.setSrid(wms.getSrid()); + + String nameUser = MS_LAYER_PREFIX + wms.getName(); Template tpl = getTemplateByName(SHP_LAYER_TEMPLATE); if (tpl == null) { @@ -166,181 +165,71 @@ } try { - writeLayer(lineInfo, new File(dir, nameLines), tpl); + writeLayer(info, new File(dir, nameUser), tpl); } catch (FileNotFoundException fnfe) { logger.error(fnfe, fnfe); - logger.warn("Unable to write layer: " + nameLines); + logger.warn("Unable to write layer: " + nameUser); + } + + } + + + /** + * Creates a layer file used for Mapserver's mapfile which represents + * geometries from database. + * + * @param flys The D4EArtifact that owns <i>wms</i>. + * @param wms The WMSLayerFacet that contains information for the layer. 
+ */ + public void createDatabaseLayer( + D4EArtifact flys, + WMSDBLayerFacet wms, + String style + ) + throws FileNotFoundException, IOException + { + logger.debug("createDatabaseLayer"); + + LayerInfo layerinfo = new LayerInfo(); + layerinfo.setName(wms.getName() + "-" + flys.identifier()); + layerinfo.setType(wms.getGeometryType()); + layerinfo.setFilter(wms.getFilter()); + layerinfo.setData(wms.getData()); + layerinfo.setTitle(wms.getDescription()); + layerinfo.setStyle(style); + if(wms.getExtent() != null) { + layerinfo.setExtent(GeometryUtils.jtsBoundsToOLBounds(wms.getExtent())); + } + layerinfo.setConnection(wms.getConnection()); + layerinfo.setConnectionType(wms.getConnectionType()); + layerinfo.setLabelItem(wms.getLabelItem()); + layerinfo.setSrid(wms.getSrid()); + + String name = MS_LAYER_PREFIX + wms.getName(); + + Template template = getTemplateByName(DB_LAYER_TEMPLATE); + if (template == null) { + logger.warn("Template '" + DB_LAYER_TEMPLATE + "' found."); + return; + } + + try { + File dir = new File(getShapefileBaseDir(), flys.identifier()); + writeLayer(layerinfo, new File(dir, name), template); + } + catch (FileNotFoundException fnfe) { + logger.error(fnfe, fnfe); + logger.warn("Unable to write layer: " + name); } } - protected void createBarriersPolygonLayer( - D4EArtifact flys, - WMSLayerFacet wms - ) - throws FileNotFoundException, IOException - { - String uuid = flys.identifier(); - String group = uuid + MS_BARRIERS_PREFIX; - String groupTitle = "I18N_BARRIERS_TITLE"; - - File dir = new File(getShapefileBaseDir(), uuid); - File test = new File(dir, WSPLGEN_POLYGONS_SHAPE); - - if (!test.exists() || !test.canRead()) { - logger.debug("No barrier line layer existing."); - return; - } - - LayerInfo polygonInfo = new LayerInfo(); - polygonInfo.setName(MS_POLYGONS_PREFIX + uuid); - polygonInfo.setType("POLYGON"); - polygonInfo.setDirectory(uuid); - polygonInfo.setData(WSPLGEN_POLYGONS_SHAPE); - polygonInfo.setTitle("I18N_POLYGON_SHAPE"); - polygonInfo.setGroup(group); - polygonInfo.setGroupTitle(groupTitle); - polygonInfo.setSrid(wms.getSrid()); - - String namePolygons = MS_LAYER_PREFIX + wms.getName() + "-polygons"; - - Template tpl = getTemplateByName(SHP_LAYER_TEMPLATE); - if (tpl == null) { - logger.warn("Template '" + SHP_LAYER_TEMPLATE + "' found."); - return; - } - - try { - writeLayer(polygonInfo, new File(dir, namePolygons), tpl); - } - catch (FileNotFoundException fnfe) { - logger.error(fnfe, fnfe); - logger.warn("Unable to write layer: " + namePolygons); - } - } - - - /** - * Creates a layer file used for Mapserver's mapfile which represents the - * shape files uploaded by the user. - * - * @param flys The D4EArtifact that owns <i>wms</i>. - * @param wms The WMSLayerFacet that contains information for the layer. 
- */ - public void createUserShapeLayer(D4EArtifact flys, WMSLayerFacet wms) - throws FileNotFoundException, IOException - { - logger.debug("createUserShapeLayer"); - - String uuid = flys.identifier(); - File dir = new File(getShapefileBaseDir(), uuid); - File test = new File(dir, WSPLGEN_USER_SHAPE); - - if (!test.exists() || !test.canRead()) { - logger.debug("No user layer existing."); - return; - } - - File userShape = new File(dir, WSPLGEN_USER_SHAPE); - ShpFiles sf = new ShpFiles(userShape); - ShapefileReader sfr = new ShapefileReader(sf, true, false, null); - ShapefileHeader sfh = sfr.getHeader(); - - String group = uuid + MS_USERSHAPE_PREFIX; - String groupTitle = "I18N_USER_SHAPE_TITLE"; - - LayerInfo info = new LayerInfo(); - info.setName(MS_USERSHAPE_PREFIX + uuid); - if (sfh.getShapeType().isLineType()) { - info.setType("LINE"); - } - else if (sfh.getShapeType().isPolygonType()) { - info.setType("POLYGON"); - } - else { - return; - } - info.setDirectory(uuid); - info.setData(WSPLGEN_USER_SHAPE); - info.setTitle("I18N_USER_SHAPE"); - info.setGroup(group); - info.setGroupTitle(groupTitle); - info.setSrid(wms.getSrid()); - - String nameUser = MS_LAYER_PREFIX + wms.getName(); - - Template tpl = getTemplateByName(SHP_LAYER_TEMPLATE); - if (tpl == null) { - logger.warn("Template '" + SHP_LAYER_TEMPLATE + "' found."); - return; - } - - try { - writeLayer(info, new File(dir, nameUser), tpl); - } - catch (FileNotFoundException fnfe) { - logger.error(fnfe, fnfe); - logger.warn("Unable to write layer: " + nameUser); - } - - } - - - /** - * Creates a layer file used for Mapserver's mapfile which represents - * geometries from database. - * - * @param flys The D4EArtifact that owns <i>wms</i>. - * @param wms The WMSLayerFacet that contains information for the layer. - */ - public void createDatabaseLayer( - D4EArtifact flys, - WMSDBLayerFacet wms, - String style - ) - throws FileNotFoundException, IOException - { - logger.debug("createDatabaseLayer"); - - LayerInfo layerinfo = new LayerInfo(); - layerinfo.setName(wms.getName() + "-" + flys.identifier()); - layerinfo.setType(wms.getGeometryType()); - layerinfo.setFilter(wms.getFilter()); - layerinfo.setData(wms.getData()); - layerinfo.setTitle(wms.getDescription()); - layerinfo.setStyle(style); - if(wms.getExtent() != null) { - layerinfo.setExtent(GeometryUtils.jtsBoundsToOLBounds(wms.getExtent())); - } - layerinfo.setConnection(wms.getConnection()); - layerinfo.setConnectionType(wms.getConnectionType()); - layerinfo.setLabelItem(wms.getLabelItem()); - layerinfo.setSrid(wms.getSrid()); - - String name = MS_LAYER_PREFIX + wms.getName(); - - Template template = getTemplateByName(DB_LAYER_TEMPLATE); - if (template == null) { - logger.warn("Template '" + DB_LAYER_TEMPLATE + "' found."); - return; - } - - try { - File dir = new File(getShapefileBaseDir(), flys.identifier()); - writeLayer(layerinfo, new File(dir, name), template); - } - catch (FileNotFoundException fnfe) { - logger.error(fnfe, fnfe); - logger.warn("Unable to write layer: " + name); - } - } - - @Override + @Override protected String getMapfilePath() { return RiverUtils.getXPathString(RiverUtils.XPATH_FLOODMAP_MAPFILE_PATH); } - @Override + @Override protected String getMapfileTemplate() { return RiverUtils.getXPathString(RiverUtils.XPATH_FLOODMAP_MAPFILE_TEMPLATE); }
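The new createUserShapeLayer() no longer hard-codes separate line and polygon layers; it derives the Mapserver layer type from the header of the uploaded shapefile. A minimal, standalone sketch of that header check, assuming the GeoTools shapefile classes used above (the exact package paths depend on the GeoTools version bundled with D4E River, and the shapefile path is a placeholder):

    import java.io.File;

    import org.geotools.data.shapefile.files.ShpFiles;
    import org.geotools.data.shapefile.shp.ShapefileHeader;
    import org.geotools.data.shapefile.shp.ShapefileReader;

    public class ShapeTypeProbe {
        public static void main(String[] args) throws Exception {
            File userShape = new File(args[0]); // path to the uploaded user shapefile
            ShpFiles sf = new ShpFiles(userShape);
            // strict = true, not memory mapped, geometry factory as in the code above (null)
            ShapefileReader sfr = new ShapefileReader(sf, true, false, null);
            try {
                ShapefileHeader sfh = sfr.getHeader();
                if (sfh.getShapeType().isLineType()) {
                    System.out.println("LINE");
                }
                else if (sfh.getShapeType().isPolygonType()) {
                    System.out.println("POLYGON");
                }
                else {
                    System.out.println("unsupported shape type, no layer written");
                }
            }
            finally {
                sfr.close();
            }
        }
    }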
--- a/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/utils/Formatter.java Tue Jun 18 16:05:10 2013 +0200 @@ -92,10 +92,12 @@ public static final int VARIANCE_MAX_DIGITS = 3; // SQ Relation - public static final int SQ_RELATION_A_MAX_DIGITS = 7; - public static final int SQ_RELATION_A_MIN_DIGITS = 7; - public static final int SQ_RELATION_B_MAX_DIGITS = 3; - public static final int SQ_RELATION_B_MIN_DIGITS = 3; + public static final int SQ_RELATION_KM_MIN_DIGITS = 2; + public static final int SQ_RELATION_KM_MAX_DIGITS = 2; + public static final int SQ_RELATION_A_MAX_DIGITS = 7; + public static final int SQ_RELATION_A_MIN_DIGITS = 7; + public static final int SQ_RELATION_B_MAX_DIGITS = 3; + public static final int SQ_RELATION_B_MIN_DIGITS = 3; /** * Creates a localized NumberFormatter with given range of decimal digits. @@ -453,6 +455,13 @@ SQ_RELATION_B_MAX_DIGITS); } + public static NumberFormat getSQRelationKM(CallContext context) { + return getFormatter( + context, + SQ_RELATION_KM_MIN_DIGITS, + SQ_RELATION_KM_MAX_DIGITS); + } + public static NumberFormat getMeterFormat(CallContext context) { return getFormatter( context,
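The new SQ_RELATION_KM_MIN/MAX_DIGITS constants pin station (km) values in SQ relation exports to two decimal places. The effect of those settings on a plain java.text.NumberFormat, shown here without the Formatter/CallContext plumbing (locale and sample values are only for illustration):

    import java.text.NumberFormat;
    import java.util.Locale;

    public class KmFormatDemo {
        public static void main(String[] args) {
            NumberFormat nf = NumberFormat.getInstance(Locale.GERMANY);
            nf.setMinimumFractionDigits(2); // SQ_RELATION_KM_MIN_DIGITS
            nf.setMaximumFractionDigits(2); // SQ_RELATION_KM_MAX_DIGITS
            System.out.println(nf.format(12.3456)); // 12,35
            System.out.println(nf.format(7.5));     // 7,50
        }
    }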
--- a/artifacts/src/main/java/org/dive4elements/river/wsplgen/FacetCreator.java Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/java/org/dive4elements/river/wsplgen/FacetCreator.java Tue Jun 18 16:05:10 2013 +0200 @@ -168,10 +168,7 @@ hash, getUrl()); - barriers.addLayer(MapfileGenerator.MS_LAYER_PREFIX + - MapfileGenerator.MS_BARRIERS_PREFIX + "lines" + artifact.identifier()); - barriers.addLayer( MapfileGenerator.MS_LAYER_PREFIX + - MapfileGenerator.MS_BARRIERS_PREFIX + "poly" + artifact.identifier()); + barriers.addLayer(MapfileGenerator.MS_BARRIERS_PREFIX + artifact.identifier()); barriers.setSrid(getSrid()); barriers.setExtent(getBounds());
--- a/artifacts/src/main/resources/messages.properties Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/resources/messages.properties Tue Jun 18 16:05:10 2013 +0200 @@ -339,7 +339,8 @@ export.discharge.longitudinal.section.csv.header.cw = W corr. [NN +m] export.discharge.longitudinal.section.csv.header.q = Q [m\u00b3/s] export.discharge.curve.at.header = Computed Discharge Curve for {0} {0}-km: {1} -export.discharge.curve.at.gauge.header = Discharge Table for {1}/{0} since {2} PNP[NN+m] = {3} +export.discharge.curve.at.gauge.header = Discharge Table for {1}/{0} since {2} PNP[{4}] = {3} +export.discharge.curve.at.gauge.calc.header = Computed Discharge Curve for {0}, Gauge: {1} PNP[{3}] = {2} export.historical.discharge.csv.header.timerange = Timerange export.historical.discharge.csv.header.waterlevel = Waterlevel [cm] export.historical.discharge.csv.header.discharge = Discharge [m\u00b3/s] @@ -351,7 +352,8 @@ export.reference_curve.csv.header.w.q = equiv. Q (m\u00b3/s) export.waterlevel.pdf.mode = Waterlevel -export.computed.discharge.pdf.mode = Computed Dischargecurve +export.computed.discharge.pdf.mode = Dischargecurve +export.computed.discharge.pdf.calc.mode = Computed Dischargecurve export.duration.pdf.mode = Durationcurve export.wdifferences.pdf.mode = W Differences export.historical.discharge.pdf.mode = Historical Discharge @@ -604,6 +606,7 @@ fix.km.chart.interpolated=interpolated fix.export.at.header = Exported fixings discharge curve for {0} {0}-km: {1} +fix.export.at.gauge.header = Exported fixings discharge curve for {0}, gauge: {1} datum[{3}] = {2} sq.km.chart.label = Measuring Points sq.km.chart.title = Measuring Points sq.km.chart.km.axis = km
--- a/artifacts/src/main/resources/messages_de.properties Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/resources/messages_de.properties Tue Jun 18 16:05:10 2013 +0200 @@ -339,7 +339,8 @@ export.discharge.longitudinal.section.csv.header.cw = W korr. [NN + m] export.discharge.longitudinal.section.csv.header.q = Q [m\u00b3/s] export.discharge.curve.at.header = Berechnete Abflusstafel f\u00fcr {0}, km {1} -export.discharge.curve.at.gauge.header = Abflusstafel f\u00fcr {1}/{0} ab {2} PNP[NN+m] = {3} +export.discharge.curve.at.gauge.header = Abflusstafel f\u00fcr {1}/{0} ab {2} PNP[{4}] = {3} +export.discharge.curve.at.gauge.calc.header = Berechnete Abflusstafel f\u00fcr {0}, Pegel: {1} PNP[{3}] = {2} export.historical.discharge.csv.header.timerange = Zeitraum export.historical.discharge.csv.header.waterlevel = Wasserstand [cm] export.historical.discharge.csv.header.discharge = Abfluss [m\u00b3/s] @@ -353,6 +354,7 @@ export.waterlevel.pdf.mode = Wasserstand export.computed.discharge.pdf.mode = Abflusskurve +export.computed.discharge.pdf.calc.mode = Berechnete Abflusskurve export.duration.pdf.mode = Dauerlinie export.wdifferences.pdf.mode = W Differenzen export.historical.discharge.pdf.mode = Historischer Abfluss @@ -606,7 +608,8 @@ fix.km.chart.measured=gemessen fix.km.chart.interpolated=interpoliert -fix.export.at.header = Abflusskurve aus der Fixierungsanalyse f\u00fcr {0} {0}-km: {1} +fix.export.at.header = Abflusskurve aus der Fixierungsanalyse f\u00fcr {0} {0}-km: {1} +fix.export.at.gauge.header = Abflusstafel aus der Fixierungsanalyse f\u00fcr {0}, Pegel: {1} PNP[{3}] = {2} sq.km.chart.label = Feststoffdaten sq.km.chart.title = Feststoffdatenbestand SedDB sq.km.chart.km.axis = km
--- a/artifacts/src/main/resources/messages_de_DE.properties Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/resources/messages_de_DE.properties Tue Jun 18 16:05:10 2013 +0200 @@ -335,7 +335,8 @@ export.discharge.longitudinal.section.csv.header.w = W [NN + m] export.discharge.longitudinal.section.csv.header.cw = W korr. [NN + m] export.discharge.longitudinal.section.csv.header.q = Q [m\u00b3/s] -export.discharge.curve.at.gauge.header = Abflusstafel f\u00fcr {1}/{0} ab {2} PNP[NN+m] = {3} +export.discharge.curve.at.gauge.header = Abflusstafel f\u00fcr {1}/{0} ab {2} PNP[{4}] = {3} +export.discharge.curve.at.gauge.calc.header = Berechnete Abflusstafel f\u00fcr {0}, Pegel: {1} PNP[{3}] = {2} export.discharge.curve.at.header = Berechnete Abflusstafel f\u00fcr {0}, km {1} export.historical.discharge.csv.header.timerange = Zeitraum export.historical.discharge.csv.header.waterlevel = Wasserstand [cm] @@ -349,6 +350,7 @@ export.waterlevel.pdf.mode = Wasserstand export.computed.discharge.pdf.mode = Abflusskurve +export.computed.discharge.pdf.calc.mode = Berechnete Abflusskurve export.duration.pdf.mode = Dauerline export.wdifferences.pdf.mode = W Differenzen export.historical.discharge.pdf.mode = Historischer Abfluss @@ -604,7 +606,8 @@ fix.km.chart.measured=gemessen fix.km.chart.interpolated=interpoliert -fix.export.at.header = Abflusskurve aus der Fixierungsanalyse f\u00fcr {0} {0}-km: {1} +fix.export.at.header = Abflusstafel aus der Fixierungsanalyse f\u00fcr {0} {0}-km: {1} +fix.export.at.gauge.header = Abflusstafel aus der Fixierungsanalyse f\u00fcr {0}, Pegel: {1} PNP[{3}] = {2} sq.km.chart.label = Feststoffdaten sq.km.chart.title = Feststoffdatenbestand SedDB sq.km.chart.km.axis = km
--- a/artifacts/src/main/resources/messages_en.properties Mon Jun 17 17:16:25 2013 +0200 +++ b/artifacts/src/main/resources/messages_en.properties Tue Jun 18 16:05:10 2013 +0200 @@ -340,8 +340,9 @@ export.discharge.longitudinal.section.csv.header.w = W [NN + m] export.discharge.longitudinal.section.csv.header.cw = W corr. [NN + m] export.discharge.longitudinal.section.csv.header.q = Q [m\u00b3/s] -export.discharge.curve.at.header = Computed Discharge Curve for {0} {0}-km: {1} -export.discharge.curve.at.gauge.header = Dischargetable for {1}/{0} since {2} Datum[NN+m] = {3} +export.discharge.curve.at.header = Computed discharge curve for {0} {0}-km: {1} +export.discharge.curve.at.gauge.header = Dischargetable for {1}/{0} since {2} datum[{4}] = {3} +export.discharge.curve.at.gauge.calc.header = Computed discharge curve for {0}, gauge: {1} datum[{3}] = {2} export.historical.discharge.csv.header.timerange = Timerange export.historical.discharge.csv.header.waterlevel = Waterlevel [cm] export.historical.discharge.csv.header.discharge = Discharge [m\u00b3/s] @@ -353,7 +354,8 @@ export.reference_curve.csv.header.w.q = equiv. Q (m\u00b3/s) export.waterlevel.pdf.mode = Waterlevel -export.computed.discharge.pdf.mode = Computed Dischargecurve +export.computed.discharge.pdf.mode = Dischargecurve +export.computed.discharge.pdf.calc.mode = Computed Dischargecurve export.duration.pdf.mode = Durationcurve export.wdifferences.pdf.mode = W Differences export.historical.discharge.pdf.mode = Historical Discharge @@ -605,6 +607,7 @@ fix.km.chart.interpolated=interpolated fix.export.at.header = Exported fixings discharge curve for {0} {0}-km: {1} +fix.export.at.gauge.header = Exported fixings discharge curve for {0}, gauge: {1} datum[{3}] = {2} sq.km.chart.label = Measuring Points sq.km.chart.title = Measuring Points sq.km.chart.km.axis = km
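All four message bundles now feed the gauge datum unit into the discharge-table headers as a fifth argument ({4}) instead of hard-coding NN+m, and the new *.calc.* keys distinguish computed discharge curves from plain discharge tables. How such a pattern expands with java.text.MessageFormat; the river, gauge and datum values below are invented for illustration:

    import java.text.MessageFormat;

    public class HeaderDemo {
        public static void main(String[] args) {
            String pattern =
                "Dischargetable for {1}/{0} since {2} datum[{4}] = {3}";
            String header = MessageFormat.format(
                pattern,
                "Rhein",       // {0} river
                "KAUB",        // {1} gauge
                "01.11.1995",  // {2} valid since
                "67.31",       // {3} gauge datum value
                "NN + m");     // {4} gauge datum unit
            System.out.println(header);
            // Dischargetable for KAUB/Rhein since 01.11.1995 datum[NN + m] = 67.31
        }
    }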
--- a/backend/doc/schema/oracle-drop.sql Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/doc/schema/oracle-drop.sql Tue Jun 18 16:05:10 2013 +0200 @@ -57,6 +57,7 @@ DROP TABLE wst_column_values; DROP TABLE wst_columns; DROP TABLE wst_q_ranges; +DROP TABLE official_lines; DROP TABLE wsts; DROP TABLE wst_kinds; DROP SEQUENCE ANNOTATION_TYPES_ID_SEQ; @@ -86,11 +87,9 @@ DROP SEQUENCE WST_COLUMN_VALUES_ID_SEQ; DROP SEQUENCE WST_COLUMNS_ID_SEQ; DROP SEQUENCE WST_Q_RANGES_ID_SEQ; +DROP SEQUENCE OFFICIAL_LINES_ID_SEQ; DROP SEQUENCE WSTS_ID_SEQ; DROP VIEW wst_value_table; DROP VIEW wst_w_values ; DROP VIEW wst_q_values; -DROP VIEW official_lines; -DROP VIEW q_main_values; -DROP VIEW official_q_values; DROP VIEW wst_ranges;
--- a/backend/doc/schema/oracle.sql Mon Jun 17 17:16:25 2013 +0200
+++ b/backend/doc/schema/oracle.sql Tue Jun 18 16:05:10 2013 +0200
@@ -344,6 +344,17 @@
     PRIMARY KEY (id)
 );

+-- OFFICIAL_LINES
+CREATE SEQUENCE OFFICIAL_LINES_ID_SEQ;
+
+CREATE TABLE official_lines (
+    id                  NUMBER(38,0) NOT NULL,
+    wst_column_id       NUMBER(38,0) NOT NULL,
+    named_main_value_id NUMBER(38,0) NOT NULL,
+
+    PRIMARY KEY (id),
+    UNIQUE (wst_column_id, named_main_value_id)
+);

 -- WSTS
 --lookup table for wst kinds
@@ -406,10 +417,13 @@
 ALTER TABLE wst_column_q_ranges ADD CONSTRAINT cWstColumnQRangesWstQRanges FOREIGN KEY (wst_q_range_id) REFERENCES wst_q_ranges ON DELETE CASCADE;
 ALTER TABLE wst_columns ADD CONSTRAINT cWstColumnsWsts FOREIGN KEY (wst_id) REFERENCES wsts ON DELETE CASCADE;
 ALTER TABLE wst_column_values ADD CONSTRAINT cWstColumnValuesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE;
-ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES ON LETE CASCADE;
+ALTER TABLE wst_q_ranges ADD CONSTRAINT cWstQRangesRanges FOREIGN KEY (range_id) REFERENCES RANGES ON DELETE CASCADE;
 ALTER TABLE wsts ADD CONSTRAINT cWstsRivers FOREIGN KEY (river_id) REFERENCES rivers ON DELETE CASCADE;
 ALTER TABLE wsts ADD CONSTRAINT cWstsWstKinds FOREIGN KEY (kind) REFERENCES wst_kinds;

+ALTER TABLE official_lines ADD CONSTRAINT cOffLinesWstColumns FOREIGN KEY (wst_column_id) REFERENCES wst_columns ON DELETE CASCADE;
+ALTER TABLE official_lines ADD CONSTRAINT cOffLinesNamedMainValues FOREIGN KEY (named_main_value_id) REFERENCES named_main_values ON DELETE CASCADE;
+
 -- VIEWS

 CREATE VIEW wst_value_table AS
@@ -457,61 +471,6 @@
     JOIN wst_columns wc ON wcqr.wst_column_id = wc.id
     ORDER BY wc.position, wcqr.wst_column_id, r.a;

--- Views to make the 'Amtlichen Linien' easier to access.
-
-CREATE VIEW official_lines
-AS
-    SELECT w.river_id  AS river_id,
-           w.id        AS wst_id,
-           wc.id       AS wst_column_id,
-           wc.name     AS name,
-           wc.position AS wst_column_pos
-    FROM wsts w
-        JOIN wst_columns wc
-        ON wc.wst_id = w.id
-    WHERE w.kind = 3;
-
-CREATE VIEW q_main_values
-AS
-    SELECT riv.id AS river_id,
-           g.id   AS gauge_id,
-           g.name AS gauge_name,
-           r.a    AS a,
-           r.b    AS b,
-           REGEXP_REPLACE(
-               nmv.name, '[:space:]*\(.*\)[:space:]*', '') AS name,
-           CAST(mv.value AS NUMERIC(38, 5)) AS value
-    FROM main_values mv
-        JOIN named_main_values nmv
-        ON mv.named_value_id = nmv.id
-        JOIN main_value_types mvt
-        ON nmv.type_id = mvt.id
-        JOIN gauges g
-        ON mv.gauge_id = g.id
-        JOIN ranges r
-        ON g.range_id = r.id
-        JOIN rivers riv
-        ON g.river_id = riv.id
-    WHERE mvt.name = 'Q'
-    ORDER BY g.id, CAST(mv.value AS NUMERIC(38,5));
-
-CREATE VIEW official_q_values
-AS
-    SELECT ol.river_id AS river_id,
-           wst_id,
-           wst_column_id,
-           gauge_id,
-           gauge_name,
-           a,
-           b,
-           ol.name,
-           value,
-           wst_column_pos
-    FROM official_lines ol
-        JOIN q_main_values qmv
-        ON ol.river_id = qmv.river_id
-        AND ol.name = qmv.name;
-
 CREATE VIEW wst_ranges
 AS
     SELECT wc.id AS wst_column_id,
--- a/backend/doc/schema/postgresql.sql Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/doc/schema/postgresql.sql Tue Jun 18 16:05:10 2013 +0200 @@ -242,6 +242,16 @@ UNIQUE (wst_column_id, wst_q_range_id) ); +CREATE SEQUENCE OFFICIAL_LINES_ID_SEQ; + +CREATE TABLE official_lines ( + id int PRIMARY KEY NOT NULL, + wst_column_id int NOT NULL REFERENCES wst_columns(id) ON DELETE CASCADE, + named_main_value_id int NOT NULL REFERENCES named_main_values(id) ON DELETE CASCADE, + + UNIQUE (wst_column_id, named_main_value_id) +); + CREATE VIEW wst_value_table AS SELECT wcv.position AS position, @@ -376,59 +386,6 @@ CHECK (a <= b) ); -CREATE VIEW official_lines -AS - SELECT w.river_id AS river_id, - w.id AS wst_id, - wc.id AS wst_column_id, - wc.name AS name, - wc.position AS wst_column_pos - FROM wsts w - JOIN wst_columns wc - ON wc.wst_id = w.id - WHERE w.kind = 3; - -CREATE VIEW q_main_values -AS - SELECT riv.id AS river_id, - g.id AS gauge_id, - g.name AS gauge_name, - r.a AS a, - r.b AS b, - REGEXP_REPLACE( - nmv.name, E'[:space:]*\\(.*\\)[:space:]*', '') AS name, - CAST(mv.value AS NUMERIC(38, 2)) AS value - FROM main_values mv - JOIN named_main_values nmv - ON mv.named_value_id = nmv.id - JOIN main_value_types mvt - ON nmv.type_id = mvt.id - JOIN gauges g - ON mv.gauge_id = g.id - JOIN ranges r - ON g.range_id = r.id - JOIN rivers riv - ON g.river_id = riv.id - WHERE mvt.name = 'Q' - ORDER BY g.id, CAST(mv.value AS NUMERIC(38,2)); - -CREATE VIEW official_q_values -AS - SELECT ol.river_id AS river_id, - wst_id, - wst_column_id, - gauge_id, - gauge_name, - a, - b, - ol.name, - value, - wst_column_pos - FROM official_lines ol - JOIN q_main_values qmv - ON ol.river_id = qmv.river_id - AND ol.name = qmv.name; - CREATE VIEW wst_ranges AS SELECT wc.id AS wst_column_id,
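Both schemas replace the official_lines/q_main_values/official_q_values views with a plain official_lines table that links a WST column to a named main value. A hedged JDBC sketch of an equivalent lookup against the new table (connection URL, credentials and the river id are placeholders):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public class OfficialLinesQuery {
        public static void main(String[] args) throws Exception {
            Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost/flys", "flys", "flys");
            PreparedStatement stmt = conn.prepareStatement(
                "SELECT wc.name AS column_name, nmv.name AS main_value"
                + " FROM official_lines ol"
                + " JOIN wst_columns wc ON ol.wst_column_id = wc.id"
                + " JOIN named_main_values nmv ON ol.named_main_value_id = nmv.id"
                + " JOIN wsts w ON wc.wst_id = w.id"
                + " WHERE w.river_id = ?");
            stmt.setInt(1, 1); // placeholder river id
            ResultSet rs = stmt.executeQuery();
            while (rs.next()) {
                System.out.println(
                    rs.getString("column_name") + " -> " + rs.getString("main_value"));
            }
            rs.close();
            stmt.close();
            conn.close();
        }
    }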
--- a/backend/src/main/java/org/dive4elements/river/backend/FLYSCredentials.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/backend/FLYSCredentials.java Tue Jun 18 16:05:10 2013 +0200 @@ -84,6 +84,7 @@ import org.dive4elements.river.model.WstColumnQRange; import org.dive4elements.river.model.WstColumnValue; import org.dive4elements.river.model.WstQRange; +import org.dive4elements.river.model.OfficialLine; public class FLYSCredentials extends Credentials @@ -205,7 +206,8 @@ WstColumnQRange.class, WstColumnValue.class, Wst.class, - WstQRange.class + WstQRange.class, + OfficialLine.class }; public FLYSCredentials() {
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportOfficialLine.java Tue Jun 18 16:05:10 2013 +0200 @@ -0,0 +1,73 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.importer; + +import java.util.List; + +import org.dive4elements.river.model.NamedMainValue; +import org.dive4elements.river.model.OfficialLine; +import org.dive4elements.river.model.River; +import org.dive4elements.river.model.WstColumn; +import org.hibernate.Query; +import org.hibernate.Session; + +public class ImportOfficialLine +{ + protected String name; + protected ImportWstColumn wstColumn; + + protected OfficialLine peer; + + public ImportOfficialLine() { + } + + public ImportOfficialLine(String name, ImportWstColumn wstColumn) { + this.name = name; + this.wstColumn = wstColumn; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public OfficialLine getPeer(River river) { + if (peer == null) { + // XXX: This is a bit odd. We do not have not enough infos here + // to create a new NamedMainValue. So we just look for existing ones. + Session session = ImporterSession.getInstance().getDatabaseSession(); + NamedMainValue nmv = NamedMainValue.fetchByName(name, session); + if (nmv == null) { + // failed -> failed to create OfficialLine + return null; + } + WstColumn wc = wstColumn.getPeer(river); + Query query = session.createQuery( + "from OfficialLine " + + "where namedMainValue = :nmv and wstColumn = :wc"); + query.setParameter("nmv", nmv); + query.setParameter("wc", wc); + List<OfficialLine> lines = query.list(); + if (lines.isEmpty()) { + peer = new OfficialLine(wc, nmv); + session.save(peer); + } + else { + peer = lines.get(0); + } + + } + return peer; + } +} +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 : +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportOfficialWstColumn.java Tue Jun 18 16:05:10 2013 +0200 @@ -0,0 +1,45 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.importer; + +public class ImportOfficialWstColumn +extends ImportWstColumn +{ + public static final ImportWst.ImportWstColumnFactory COLUMN_FACTORY = + new ImportWst.ImportWstColumnFactory() { + @Override + public ImportWstColumn create(ImportWst importWst, int position) { + return new ImportOfficialWstColumn(importWst, null, null, position); + } + }; + + protected ImportOfficialLine officialLine; + + public ImportOfficialWstColumn() { + super(); + } + + public ImportOfficialWstColumn( + ImportWst wst, + String name, + String description, + Integer position + ) { + super(wst, name, description, position); + } + + public ImportOfficialLine getOfficialLine() { + return officialLine; + } + + public void setOfficialLine(ImportOfficialLine officialLine) { + this.officialLine = officialLine; + } +} +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Tue Jun 18 16:05:10 2013 +0200 @@ -24,6 +24,7 @@ import org.dive4elements.river.importer.parsers.HYKParser; import org.dive4elements.river.importer.parsers.MeasurementStationsParser; import org.dive4elements.river.importer.parsers.MorphologicalWidthParser; +import org.dive4elements.river.importer.parsers.OfficialLinesConfigParser; import org.dive4elements.river.importer.parsers.PRFParser; import org.dive4elements.river.importer.parsers.PegelGltParser; import org.dive4elements.river.importer.parsers.SQRelationParser; @@ -45,6 +46,7 @@ import java.util.Calendar; import java.util.Date; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -75,6 +77,9 @@ public static final String OFFICIAL_LINES = "Amtl_Linien.wst"; + public static final String OFFICIAL_LINES_CONFIG = + "Amtl_Linien.config"; + public static final String FLOOD_WATER = "HW-Marken"; public static final String FLOOD_PROTECTION = @@ -788,11 +793,42 @@ } log.debug("Found WST file: " + file); - WstParser wstParser = new WstParser(); + ImportWst iw = new ImportWst(ImportOfficialWstColumn.COLUMN_FACTORY); + + WstParser wstParser = new WstParser(iw); wstParser.parse(file); - ImportWst iw = wstParser.getWst(); iw.setKind(3); iw.setDescription(folder + "/" + iw.getDescription()); + + File configFile = FileTools.repair(new File(dir, OFFICIAL_LINES_CONFIG)); + if (!configFile.isFile() || !configFile.canRead()) { + log.warn("no config file for official lines found"); + } + else { + OfficialLinesConfigParser olcp = new OfficialLinesConfigParser(); + try { + olcp.parse(configFile); + } + catch (IOException ioe) { + log.warn("Error reading offical lines config", ioe); + } + List<String> mainValueNames = olcp.getMainValueNames(); + if (mainValueNames.isEmpty()) { + log.warn("config file for offical lines contains no entries"); + } + else { + // Join as much as possible. + Iterator<ImportWstColumn> wi = iw.getColumns().iterator(); + Iterator<String> si = olcp.getMainValueNames().iterator(); + while (wi.hasNext() && si.hasNext()) { + ImportOfficialWstColumn wc = (ImportOfficialWstColumn)wi.next(); + String name = si.next(); + ImportOfficialLine iol = new ImportOfficialLine(name, wc); + wc.setOfficialLine(iol); + } + } + } + officialLines.add(iw); } // for all folders @@ -1221,12 +1257,25 @@ } public void storeOfficialLines() { - if (!Config.INSTANCE.skipOfficialLines()) { - log.info("store official lines wsts"); - River river = getPeer(); - for (ImportWst wst: officialLines) { - log.debug("name: " + wst.getDescription()); - wst.storeDependencies(river); + if (Config.INSTANCE.skipOfficialLines() || officialLines.isEmpty()) { + return; + } + + log.info("store official lines wsts"); + River river = getPeer(); + for (ImportWst wst: officialLines) { + log.debug("name: " + wst.getDescription()); + wst.storeDependencies(river); + + // Store the official lines after the columns are store. + for (ImportWstColumn wc: wst.getColumns()) { + ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc; + ImportOfficialLine ioc = owc.getOfficialLine(); + if (ioc != null) { + if (ioc.getPeer(river) == null) { + log.warn("Cannot store official line: " + ioc.getName()); + } + } } } }
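The importer pairs the entries of Amtl_Linien.config with the columns of the official Amtl_Linien.wst strictly by position and stops at the shorter of the two lists ("join as much as possible"); surplus columns or surplus config entries are silently left without an official line. A small illustration of that pairing rule; the column labels and main value names are invented:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class JoinByPosition {
        public static void main(String[] args) {
            List<String> wstColumns  = Arrays.asList("Spalte 1", "Spalte 2", "Spalte 3");
            List<String> configNames = Arrays.asList("HQ100", "HQ5");

            Iterator<String> wi = wstColumns.iterator();
            Iterator<String> si = configNames.iterator();
            while (wi.hasNext() && si.hasNext()) {
                System.out.println(wi.next() + " -> " + si.next());
            }
            // "Spalte 3" gets no config entry and therefore no official line.
        }
    }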
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportWst.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportWst.java Tue Jun 18 16:05:10 2013 +0200 @@ -19,10 +19,23 @@ import java.util.ArrayList; import java.util.List; +/** Not (yet) db-mapped WST object. */ public class ImportWst { private static Logger log = Logger.getLogger(ImportWst.class); + public interface ImportWstColumnFactory { + ImportWstColumn create(ImportWst iw, int position); + } + + public static final ImportWstColumnFactory COLUMN_FACTORY = + new ImportWstColumnFactory() { + @Override + public ImportWstColumn create(ImportWst importWst, int position) { + return new ImportWstColumn(importWst, null, null, position); + } + }; + protected String description; protected Integer kind; @@ -31,16 +44,27 @@ protected ImportUnit unit; + protected ImportWstColumnFactory columnFactory; + /** Wst as in db. */ protected Wst peer; public ImportWst() { + this(COLUMN_FACTORY); + } + + public ImportWst(ImportWstColumnFactory columnFactory) { + this.columnFactory = columnFactory; kind = 0; columns = new ArrayList<ImportWstColumn>(); } public ImportWst(String description) { - this(); + this(description, COLUMN_FACTORY); + } + + public ImportWst(String description, ImportWstColumnFactory columnFactory) { + this(columnFactory); this.description = description; } @@ -64,7 +88,7 @@ /** Create columns that can be accessed with getColumn. */ public void setNumberColumns(int numColumns) { for (int i = 0; i < numColumns; ++i) { - columns.add(new ImportWstColumn(this, null, null, i)); + columns.add(columnFactory.create(this, i)); } }
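setNumberColumns() now delegates column construction to an ImportWstColumnFactory, so the official-lines import can get ImportOfficialWstColumn instances while every other caller keeps the plain default. A short sketch of the two ways to build an ImportWst, using the importer classes introduced above (the description string is arbitrary):

    import org.dive4elements.river.importer.ImportOfficialWstColumn;
    import org.dive4elements.river.importer.ImportWst;

    public class WstFactoryDemo {
        public static void main(String[] args) {
            // Default factory: plain ImportWstColumn instances.
            ImportWst plain = new ImportWst("some.wst");
            plain.setNumberColumns(3);

            // Official lines: columns that can later carry an ImportOfficialLine.
            ImportWst official = new ImportWst(ImportOfficialWstColumn.COLUMN_FACTORY);
            official.setNumberColumns(3);
        }
    }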
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportWstQRange.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/ImportWstQRange.java Tue Jun 18 16:05:10 2013 +0200 @@ -37,6 +37,15 @@ this.q = q; } + public ImportWstQRange( + BigDecimal a, + BigDecimal b, + BigDecimal q + ) { + this.range = new ImportRange(a, b); + this.q = q; + } + public ImportRange getRange() { return range; }
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/BedHeightParser.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/BedHeightParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -16,7 +16,6 @@ import java.text.ParseException; import java.util.ArrayList; -import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.TreeSet;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/NameAndTimeInterval.java Tue Jun 18 16:05:10 2013 +0200 @@ -0,0 +1,196 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ +package org.dive4elements.river.importer.parsers; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.regex.Pattern; + +import org.apache.log4j.Logger; +import org.dive4elements.river.importer.ImportTimeInterval; +import org.dive4elements.river.utils.DateGuesser; + +public class NameAndTimeInterval { + + private static Logger log = Logger.getLogger(NameAndTimeInterval.class); + + // TODO: To be extented. + private static final Pattern MAIN_VALUE = Pattern.compile( + "^(HQ|HSQ|MHW|GLQ|NMQ|HQEXT)(\\d*)$"); + + private String name; + private ImportTimeInterval timeInterval; + + public NameAndTimeInterval() { + } + + public NameAndTimeInterval(String name) { + this(name, null); + } + + public NameAndTimeInterval(String name, ImportTimeInterval timeInterval) { + this.name = name; + this.timeInterval = timeInterval; + } + + public String getName() { + return name; + } + + public ImportTimeInterval getTimeInterval() { + return timeInterval; + } + + @Override + public String toString() { + return "name: " + name + " time interval: " + timeInterval; + } + + public static boolean isMainValue(String s) { + s = s.replace(" ", "").toUpperCase(); + return MAIN_VALUE.matcher(s).matches(); + } + + public static NameAndTimeInterval parseName(String name) { + List<String> result = new ArrayList<String>(); + + unbracket(name, 0, result); + + int length = result.size(); + + if (length < 1) { // Should not happen. + return new NameAndTimeInterval(name); + } + + if (length == 1) { // No date at all -> use first part. + return new NameAndTimeInterval(result.get(0).trim()); + } + + if (length == 2) { // e.g. HQ(1994) or HQ(1994 - 1999) + + String type = result.get(0).trim(); + ImportTimeInterval timeInterval = null; + + String datePart = result.get(1).trim(); + if (isMainValue(datePart)) { // e.g. W(HQ100) + type += "(" + datePart + ")"; + timeInterval = null; + } + else { + timeInterval = getTimeInterval(result.get(1).trim()); + + if (timeInterval == null) { // No date at all. + type = name; + } + } + + return new NameAndTimeInterval(type, timeInterval); + } + + if (length == 3) { // e.g W(Q(1994)) or W(Q(1994 - 1999)) + + String type = + result.get(0).trim() + "(" + + result.get(1).trim() + ")"; + + ImportTimeInterval timeInterval = getTimeInterval( + result.get(2).trim()); + + if (timeInterval == null) { // No date at all. + type = name; + } + + return new NameAndTimeInterval(type, timeInterval); + } + + // more than 3 elements return unmodified. 
+ + return new NameAndTimeInterval(name); + } + + private static ImportTimeInterval getTimeInterval(String datePart) { + + int minus = datePart.indexOf('-'); + + if (minus < 0) { // '-' not found + + Date date = null; + try { + date = DateGuesser.guessDate(datePart); + } + catch (IllegalArgumentException iae) { + log.warn("STA: Invalid date '" + datePart + "'"); + return null; + } + + return new ImportTimeInterval(date); + } + + // Found '-' so we have <from> - <to> + String startPart = datePart.substring(0, minus).trim(); + String endPart = datePart.substring(minus).trim(); + + Date startDate = null; + Date endDate = null; + + try { + startDate = DateGuesser.guessDate(startPart); + } + catch (IllegalArgumentException iae) { + log.warn("STA: Invalid start date '" + startPart + "'"); + } + + try { + endDate = DateGuesser.guessDate(endPart); + } + catch (IllegalArgumentException iae) { + log.warn("STA: Invalid end date '" + endPart + "'"); + } + + if (startDate == null) { + log.warn("STA: Need start date."); + return null; + } + + return new ImportTimeInterval(startDate, endDate); + } + + private static int unbracket(String s, int index, List<String> result) { + StringBuilder sb = new StringBuilder(); + int length = s.length(); + while (index < length) { + char c = s.charAt(index); + switch (c) { + case '(': + index = unbracket(s, index+1, result); + break; + case ')': + result.add(0, sb.toString()); + return index+1; + default: + sb.append(c); + ++index; + } + } + result.add(0, sb.toString()); + + return index; + } + + /* + public static void main(String [] args) { + for (String arg: args) { + NameAndTimeInterval nti = parseName(arg); + System.out.println(arg + " -> " + nti); + } + } + */ +} +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 : +
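NameAndTimeInterval.parseName() unbrackets nested parentheses and splits a main value label into the value name and an optional time interval; the commented-out main() above hints at the intended use. A small driver along the same lines, assuming the importer classes are on the classpath (the sample labels are typical STA/config entries, not taken from real data):

    import org.dive4elements.river.importer.parsers.NameAndTimeInterval;

    public class ParseNameDemo {
        public static void main(String[] args) {
            String[] samples = {
                "MQ",                // plain name, no date
                "HQ(1994)",          // name plus a single year
                "HQ(1994 - 1999)",   // name plus a year range
                "W(HQ100)",          // nested main value, no date
                "W(Q(1994 - 1999))"  // nested name plus a year range
            };
            for (String s: samples) {
                System.out.println(s + " -> " + NameAndTimeInterval.parseName(s));
            }
        }
    }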
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/OfficialLinesConfigParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -0,0 +1,65 @@ +/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde + * Software engineering by Intevation GmbH + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ + +package org.dive4elements.river.importer.parsers; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.LineNumberReader; +import java.util.ArrayList; +import java.util.List; + +import org.apache.log4j.Logger; + +public class OfficialLinesConfigParser { + + private static Logger log = Logger.getLogger(OfficialLinesConfigParser.class); + + public static final String ENCODING = "ISO-8859-1"; + + private List<String> mainValueNames; + + public OfficialLinesConfigParser() { + mainValueNames = new ArrayList<String>(); + } + + public void reset() { + mainValueNames.clear(); + } + + public void parse(File file) throws IOException { + + log.info("Parsing offical lines config file: " + file); + + LineNumberReader reader = + new LineNumberReader( + new InputStreamReader( + new FileInputStream(file), ENCODING)); + + try { + String line; + while ((line = reader.readLine()) != null) { + if ((line = line.trim()).length() == 0 || line.charAt(0) == '*') { + continue; + } + NameAndTimeInterval nat = NameAndTimeInterval.parseName(line); + mainValueNames.add(nat.getName()); + } + } + finally { + reader.close(); + } + } + + public List<String> getMainValueNames() { + return mainValueNames; + } +} +// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
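Amtl_Linien.config is a plain ISO-8859-1 text file with one main value name per line; blank lines and lines starting with '*' are ignored, and each remaining line is reduced to its name via NameAndTimeInterval.parseName(). A usage sketch that writes such a file and reads it back (the file content is an invented example):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.OutputStreamWriter;
    import java.io.Writer;

    import org.dive4elements.river.importer.parsers.OfficialLinesConfigParser;

    public class ConfigParserDemo {
        public static void main(String[] args) throws Exception {
            File config = File.createTempFile("Amtl_Linien", ".config");
            Writer out = new OutputStreamWriter(
                new FileOutputStream(config), "ISO-8859-1");
            out.write("* one main value name per WST column, in column order\n");
            out.write("HQ100\n");
            out.write("MQ\n");
            out.close();

            OfficialLinesConfigParser parser = new OfficialLinesConfigParser();
            parser.parse(config);
            System.out.println(parser.getMainValueNames()); // [HQ100, MQ]
        }
    }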
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/StaFileParser.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/StaFileParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -19,10 +19,8 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -import java.util.Date; import java.util.HashMap; import java.util.ArrayList; -import java.util.List; import org.apache.log4j.Logger; @@ -30,8 +28,6 @@ import org.dive4elements.river.importer.ImportMainValue; import org.dive4elements.river.importer.ImportNamedMainValue; import org.dive4elements.river.importer.ImportGauge; -import org.dive4elements.river.importer.ImportTimeInterval; -import org.dive4elements.river.utils.DateGuesser; public class StaFileParser { @@ -46,42 +42,6 @@ Pattern.compile("\\s*([^\\s]+)\\s+([^\\s]+)\\s+([" + Pattern.quote(TYPES) + "]).*"); - // TODO: To be extented. - private static final Pattern MAIN_VALUE = Pattern.compile( - "^(HQ|MHW|GLQ|NMQ|HQEXT)(\\d*)$"); - - private static boolean isMainValue(String s) { - s = s.replace(" ", "").toUpperCase(); - return MAIN_VALUE.matcher(s).matches(); - } - - - public static final class NameAndTimeInterval { - private String name; - private ImportTimeInterval timeInterval; - - public NameAndTimeInterval(String name) { - this(name, null); - } - - public NameAndTimeInterval(String name, ImportTimeInterval timeInterval) { - this.name = name; - this.timeInterval = timeInterval; - } - - public String getName() { - return name; - } - - public ImportTimeInterval getTimeInterval() { - return timeInterval; - } - - @Override - public String toString() { - return "name: " + name + " time interval: " + timeInterval; - } - } // class NameAndTimeInterval public StaFileParser() { } @@ -91,13 +51,12 @@ File file = gauge.getStaFile(); log.info("parsing STA file: " + file); - LineNumberReader in = null; + LineNumberReader in = + new LineNumberReader( + new InputStreamReader( + new FileInputStream(file), ENCODING)); + try { - in = - new LineNumberReader( - new InputStreamReader( - new FileInputStream(file), ENCODING)); - String line = in.readLine(); if (line == null) { @@ -204,7 +163,8 @@ types.put(typeString, type); } String name = m.group(1); - NameAndTimeInterval nat = parseName(name); + NameAndTimeInterval nat = + NameAndTimeInterval.parseName(name); ImportNamedMainValue namedMainValue = new ImportNamedMainValue(type, nat.getName()); namedMainValues.add(namedMainValue); @@ -227,147 +187,11 @@ gauge.setMainValues(mainValues); } finally { - if (in != null) { - in.close(); - } + in.close(); } log.info("finished parsing STA file: " + file); return true; } - protected static NameAndTimeInterval parseName(String name) { - List<String> result = new ArrayList<String>(); - - unbracket(name, 0, result); - - int length = result.size(); - - if (length < 1) { // Should not happen. - return new NameAndTimeInterval(name); - } - - if (length == 1) { // No date at all -> use first part. - return new NameAndTimeInterval(result.get(0).trim()); - } - - if (length == 2) { // e.g. HQ(1994) or HQ(1994 - 1999) - - String type = result.get(0).trim(); - ImportTimeInterval timeInterval = null; - - String datePart = result.get(1).trim(); - if (isMainValue(datePart)) { // e.g. W(HQ100) - type += "(" + datePart + ")"; - timeInterval = null; - } - else { - timeInterval = getTimeInterval(result.get(1).trim()); - - if (timeInterval == null) { // No date at all. 
- type = name; - } - } - - return new NameAndTimeInterval(type, timeInterval); - } - - if (length == 3) { // e.g W(Q(1994)) or W(Q(1994 - 1999)) - - String type = - result.get(0).trim() + "(" + - result.get(1).trim() + ")"; - - ImportTimeInterval timeInterval = getTimeInterval( - result.get(2).trim()); - - if (timeInterval == null) { // No date at all. - type = name; - } - - return new NameAndTimeInterval(type, timeInterval); - } - - // more than 3 elements return unmodified. - - return new NameAndTimeInterval(name); - } - - private static ImportTimeInterval getTimeInterval(String datePart) { - - int minus = datePart.indexOf('-'); - - if (minus < 0) { // '-' not found - - Date date = null; - try { - date = DateGuesser.guessDate(datePart); - } - catch (IllegalArgumentException iae) { - log.warn("STA: Invalid date '" + datePart + "'"); - return null; - } - - return new ImportTimeInterval(date); - } - - // Found '-' so we have <from> - <to> - String startPart = datePart.substring(0, minus).trim(); - String endPart = datePart.substring(minus).trim(); - - Date startDate = null; - Date endDate = null; - - try { - startDate = DateGuesser.guessDate(startPart); - } - catch (IllegalArgumentException iae) { - log.warn("STA: Invalid start date '" + startPart + "'"); - } - - try { - endDate = DateGuesser.guessDate(endPart); - } - catch (IllegalArgumentException iae) { - log.warn("STA: Invalid end date '" + endPart + "'"); - } - - if (startDate == null) { - log.warn("STA: Need start date."); - return null; - } - - return new ImportTimeInterval(startDate, endDate); - } - - private static int unbracket(String s, int index, List<String> result) { - StringBuilder sb = new StringBuilder(); - int length = s.length(); - while (index < length) { - char c = s.charAt(index); - switch (c) { - case '(': - index = unbracket(s, index+1, result); - break; - case ')': - result.add(0, sb.toString()); - return index+1; - default: - sb.append(c); - ++index; - } - } - result.add(0, sb.toString()); - - return index; - } - - /* - public static void main(String [] args) { - for (String arg: args) { - NameAndTimeInterval nti = parseName(arg); - System.out.println(arg + " -> " + nti); - } - } - */ } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelDifferencesParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -20,10 +20,13 @@ import org.apache.log4j.Logger; +import org.dive4elements.river.importer.ImportTimeInterval; import org.dive4elements.river.importer.ImportUnit; import org.dive4elements.river.importer.ImportWst; +import org.dive4elements.river.importer.ImportWstQRange; import org.dive4elements.river.importer.ImportWstColumn; +import org.dive4elements.river.importer.ImportWstColumnValue; /** @@ -94,6 +97,17 @@ differences.add(current); } + // For all differences columns, add a single Q-Range with + // 0. + for (ImportWstColumn column: columns) { + List<ImportWstColumnValue> cValues = column.getColumnValues(); + column.addColumnQRange( + new ImportWstQRange( + cValues.get(0).getPosition(), + cValues.get(cValues.size() - 1).getPosition(), + new BigDecimal(0d)) + ); + } current = null; columns = null; } @@ -164,15 +178,20 @@ String name = cols[i].replace("\"", ""); log.debug("Create new column '" + name + "'"); - current.getColumn(i).setName(name); - current.getColumn(i).setDescription(name); + ImportWstColumn column = current.getColumn(i); + column.setName(name); + column.setDescription(name); Matcher m = YEARS_IN_COLUMN.matcher(name); if (m.matches()) { - String startYear = m.group(1); - String endYear = m.group(2); - // TODO create and set ImportTimeInterval + int startYear = Integer.parseInt(m.group(1)); + int endYear = Integer.parseInt(m.group(2)); + ImportTimeInterval time = new ImportTimeInterval( + getStartDateFromYear(startYear), + getEndDateFromYear(endYear) + ); + column.setTimeInterval(time); } else { log.debug("No time interval in column header found: " + name); } @@ -180,6 +199,7 @@ } + /** Handle one line of data, add one value for all columns. */ private void handleDataLine(String line) { String[] cols = line.split(SEPERATOR_CHAR);
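Every waterlevel-differences column now gets exactly one Q range, spanning the column's first to last station with Q = 0, presumably so the columns can be stored and looked up like any other WST column (that reading is an assumption; the diff itself only adds the range). The new three-argument ImportWstQRange constructor from above makes this a one-liner; a hedged helper sketch:

    import java.math.BigDecimal;

    import org.dive4elements.river.importer.ImportWstColumn;
    import org.dive4elements.river.importer.ImportWstQRange;

    public class ZeroQRange {
        /** Attach a single Q = 0 range covering [from, to] km to a differences column. */
        public static void addZeroQRange(
            ImportWstColumn column,
            BigDecimal from,
            BigDecimal to
        ) {
            column.addColumnQRange(new ImportWstQRange(from, to, new BigDecimal(0d)));
        }
    }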
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelParser.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WaterlevelParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -20,6 +20,7 @@ import org.apache.log4j.Logger; +import org.dive4elements.river.importer.ImportTimeInterval; import org.dive4elements.river.importer.ImportUnit; import org.dive4elements.river.importer.ImportRange; @@ -91,6 +92,22 @@ column = current.getColumn(0); column.setName(currentDescription); column.setDescription(currentDescription); + + // Try to extract and set the TimeInterval. + Matcher m = WaterlevelDifferencesParser.YEARS_IN_COLUMN.matcher(currentDescription); + + if (m.matches()) { + int startYear = Integer.parseInt(m.group(1)); + int endYear = Integer.parseInt(m.group(2)); + ImportTimeInterval time = new ImportTimeInterval( + getStartDateFromYear(startYear), + getEndDateFromYear(endYear) + ); + column.setTimeInterval(time); + } else { + log.debug("No time interval in column header found: " + currentDescription); + } + current.setKind(7); }
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/WstParser.java Tue Jun 18 16:05:10 2013 +0200 @@ -69,6 +69,10 @@ public WstParser() { } + public WstParser(ImportWst wst) { + this.wst = wst; + } + public ImportWst getWst() { return wst; } @@ -77,7 +81,7 @@ this.wst = wst; } - public ImportTimeInterval guessDate(String string) { + public static ImportTimeInterval guessDate(String string) { try { return new ImportTimeInterval( DateGuesser.guessDate(string)); @@ -95,13 +99,11 @@ wst = new ImportWst(file.getName()); - LineNumberReader in = null; + LineNumberReader in = + new LineNumberReader( + new InputStreamReader( + new FileInputStream(file), ENCODING)); try { - in = - new LineNumberReader( - new InputStreamReader( - new FileInputStream(file), ENCODING)); - String input; boolean first = true; int columnCount = 0; @@ -109,7 +111,7 @@ String [] lsBezeichner = null; String [] langBezeichner = null; int [] colNaWidths = null; - String [] quellen = null; + // String [] quellen = null; String [] daten = null; BigDecimal [] aktAbfluesse = null; @@ -213,7 +215,7 @@ if (spezial.length() == 0) { continue; } - quellen = StringUtil.splitQuoted(spezial, '"'); + // quellen = StringUtil.splitQuoted(spezial, '"'); } else if (spezial.startsWith(COLUMN_DATUM)) { spezial = spezial.substring(COLUMN_DATUM.length()).trim(); @@ -356,9 +358,7 @@ fixRangesOrder(); } finally { - if (in != null) { - in.close(); - } + in.close(); } }
--- a/backend/src/main/java/org/dive4elements/river/model/Gauge.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/Gauge.java Tue Jun 18 16:05:10 2013 +0200 @@ -40,8 +40,6 @@ { private static final Logger log = Logger.getLogger(Gauge.class); - public static final int DEFAULT_SCALE = 100; - public static final int MASTER_DISCHARGE_TABLE = 0; private Integer id; @@ -173,21 +171,11 @@ /** - * Returns min and max W values of this gauge based with a DEFAULT_SCALE. - * - * @return min and max W value of this gauge [min,max]. - */ - public double[] determineMinMaxW() { - return determineMinMaxW(DEFAULT_SCALE); - } - - - /** * Returns min and max W values of this gauge. * * @return the min and max W value of this gauge [min,max]. */ - public double[] determineMinMaxW(int scale) { + public double[] determineMinMaxW() { Session session = SessionHolder.HOLDER.get(); List<DischargeTable> tables = getDischargeTables(); @@ -221,7 +209,7 @@ BigDecimal b = (BigDecimal)result[1]; return a != null && b != null - ? new double [] { a.doubleValue()*scale, b.doubleValue()*scale } + ? new double [] { a.doubleValue(), b.doubleValue() } : null; }
--- a/backend/src/main/java/org/dive4elements/river/model/NamedMainValue.java Mon Jun 17 17:16:25 2013 +0200 +++ b/backend/src/main/java/org/dive4elements/river/model/NamedMainValue.java Tue Jun 18 16:05:10 2013 +0200 @@ -9,9 +9,11 @@ package org.dive4elements.river.model; import java.io.Serializable; +import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; +import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.GeneratedValue; import javax.persistence.Column; @@ -20,6 +22,9 @@ import javax.persistence.OneToOne; import javax.persistence.JoinColumn; +import org.hibernate.Query; +import org.hibernate.Session; + @Entity @Table(name = "named_main_values") public class NamedMainValue @@ -29,6 +34,8 @@ private String name; private MainValueType type; + private List<OfficialLine> officialLines; + public NamedMainValue() { } @@ -72,5 +79,23 @@ public void setType(MainValueType type) { this.type = type; } + + @OneToMany + @JoinColumn(name = "named_main_value_id") + public List<OfficialLine> getOfficialLines() { + return officialLines; + } + + public void setOfficialLines(List<OfficialLine> officialLines) { + this.officialLines = officialLines; + } + + public static NamedMainValue fetchByName(String name, Session session) { + Query query = session.createQuery( + "from NamedMainValue where name=:name"); + query.setString("name", name); + List<NamedMainValue> named = query.list(); + return named.isEmpty() ? null : named.get(0); + } } // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
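fetchByName() is what ImportOfficialLine uses to resolve a config entry such as "HQ100" to an already imported named main value; it returns the first match or null, and a null result simply means no official line can be created. A hedged usage sketch; the Hibernate Session must come from the backend's own session handling (for example SessionHolder, as seen in Gauge above):

    import org.dive4elements.river.model.NamedMainValue;

    import org.hibernate.Session;

    public class FetchByNameDemo {
        /** Resolve a main value name and report whether an official line is possible. */
        public static void resolve(Session session, String name) {
            NamedMainValue nmv = NamedMainValue.fetchByName(name, session);
            System.out.println(name + " -> "
                + (nmv != null ? "found" : "not found, no official line possible"));
        }
    }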
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/OfficialLine.java Tue Jun 18 16:05:10 2013 +0200
@@ -0,0 +1,78 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "official_lines")
+public class OfficialLine
+implements Serializable
+{
+
+    private Integer id;
+    private WstColumn wstColumn;
+    private NamedMainValue namedMainValue;
+
+    public OfficialLine() {
+    }
+
+    public OfficialLine(WstColumn wstColumn, NamedMainValue namedMainValue) {
+        this.wstColumn = wstColumn;
+        this.namedMainValue = namedMainValue;
+    }
+
+    @Id
+    @SequenceGenerator(
+        name = "SEQUENCE_OFFICIAL_LINES_ID_SEQ",
+        sequenceName = "OFFICIAL_LINES_ID_SEQ",
+        allocationSize = 1)
+    @GeneratedValue(
+        strategy = GenerationType.SEQUENCE,
+        generator = "SEQUENCE_OFFICIAL_LINES_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return id;
+    }
+
+    public void setId(Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "wst_column_id" )
+    public WstColumn getWstColumn() {
+        return wstColumn;
+    }
+
+    public void setWstColumn(WstColumn wstColumn) {
+        this.wstColumn = wstColumn;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "named_main_value_id" )
+    public NamedMainValue getNamedMainValue() {
+        return namedMainValue;
+    }
+
+    public void setNamedMainValue(NamedMainValue namedMainValue) {
+        this.namedMainValue = namedMainValue;
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/LinkSelection.java Mon Jun 17 17:16:25 2013 +0200 +++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/LinkSelection.java Tue Jun 18 16:05:10 2013 +0200 @@ -8,11 +8,7 @@ package org.dive4elements.river.client.client.ui; -import java.util.HashMap; -import java.util.Map; - import com.google.gwt.core.client.GWT; - import com.smartgwt.client.types.VerticalAlignment; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.HTMLPane; @@ -29,6 +25,9 @@ import com.smartgwt.client.widgets.layout.HLayout; import com.smartgwt.client.widgets.layout.VLayout; +import java.util.HashMap; +import java.util.Map; + import org.dive4elements.river.client.client.FLYSConstants; import org.dive4elements.river.client.client.event.StepForwardEvent; import org.dive4elements.river.client.shared.model.Data; @@ -67,7 +66,7 @@ private class Trigger { - private LinkSelection ls; + private final LinkSelection ls; public Trigger(LinkSelection ls) { this.ls = ls;
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiImgLink.java Mon Jun 17 17:16:25 2013 +0200 +++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/WikiImgLink.java Tue Jun 18 16:05:10 2013 +0200 @@ -11,7 +11,6 @@ import com.smartgwt.client.types.Overflow; import org.dive4elements.river.client.client.FLYS; -import org.dive4elements.river.client.client.ui.WikiLinks; public class WikiImgLink extends ImgLink {
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/GaugeListGrid.java Mon Jun 17 17:16:25 2013 +0200 +++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/GaugeListGrid.java Tue Jun 18 16:05:10 2013 +0200 @@ -12,17 +12,14 @@ import java.util.List; import com.google.gwt.core.client.GWT; -import com.smartgwt.client.types.ListGridFieldType; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.WidgetCanvas; -import com.smartgwt.client.widgets.form.DynamicForm; import com.smartgwt.client.widgets.grid.ListGridField; import com.smartgwt.client.widgets.grid.ListGridRecord; import com.smartgwt.client.widgets.grid.events.RecordClickEvent; import com.smartgwt.client.widgets.grid.events.RecordClickHandler; import org.dive4elements.river.client.client.FLYS; -import org.dive4elements.river.client.client.ui.WikiLinks; import org.dive4elements.river.client.shared.model.Data; import org.dive4elements.river.client.shared.model.DataItem; import org.dive4elements.river.client.shared.model.DataList;
--- a/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/MeasurementStationListGrid.java Mon Jun 17 17:16:25 2013 +0200 +++ b/gwt-client/src/main/java/org/dive4elements/river/client/client/ui/stationinfo/MeasurementStationListGrid.java Tue Jun 18 16:05:10 2013 +0200 @@ -10,7 +10,6 @@ import com.google.gwt.core.client.GWT; -import com.smartgwt.client.types.ListGridFieldType; import com.smartgwt.client.widgets.Canvas; import com.smartgwt.client.widgets.WidgetCanvas; import com.smartgwt.client.widgets.grid.ListGridField; @@ -19,7 +18,6 @@ import com.smartgwt.client.widgets.grid.events.RecordClickHandler; import org.dive4elements.river.client.client.FLYS; -import org.dive4elements.river.client.client.ui.WikiLinks; import org.dive4elements.river.client.shared.model.MeasurementStation; import org.dive4elements.river.client.shared.model.RiverInfo;
--- a/gwt-client/src/main/webapp/images/FLYS_Karte_interactive.html Mon Jun 17 17:16:25 2013 +0200 +++ b/gwt-client/src/main/webapp/images/FLYS_Karte_interactive.html Tue Jun 18 16:05:10 2013 +0200 @@ -26,7 +26,7 @@ if (river) { highlighted = name; river.style.visibility="visible"; - } + } } function highlightList(name) { name = stripRiver(name); @@ -76,8 +76,8 @@ <area id="neckar_wehrarm_area" shape="poly" onmouseover="highlight('Neckar (über Wehrarme)'); highlightList('Neckar (über Wehrarme)')" onmouseout="unHighlight('Neckar (über Wehrarme)'); unHighlightList('Neckar (über Wehrarme)')" onclick="selectRiver('Neckar (über Wehrarme)')" coords="272,524,248,540,253,561,259,570,254,605,306,615,308,590,290,583,292,546,320,540,309,529" /> <area id="oder_area" shape="poly" onmouseover="highlight('Oder'); highlightList('Oder')" onmouseout="unHighlight('Oder'); unHighlightList('Oder')" onclick="selectRiver('Oder')" coords="572,143,542,155,542,178,529,203,541,238,565,252,583,273,570,307,603,327,641,320,639,286,633,275,614,269,619,246,635,236,637,195,606,160" /> <area id="rhein_area" shape="poly" onmouseover="highlight('Rhein'); highlightList('Rhein')" onmouseout="unHighlight('Rhein'); unHighlightList('Rhein')" onclick="selectRiver('Rhein')" coords="86,301,138,317,157,334,153,355,177,381,185,407,198,450,197,464,219,471,224,478,240,473,241,492,269,502,273,523,249,539,254,561,260,570,256,597,227,628,229,639,252,653,277,644,279,668,236,670,237,690,265,687,281,696,283,717,248,728,188,737,170,718,167,669,203,569,222,545,213,521,195,521,193,467,171,459,145,444,122,403,49,391,52,352,23,344,4,304" /> - <area id="saale_area" shape="poly" onmouseover="highlight('Saale'); highlightList('Saale')" onmouseout="unHighlight('Saale'); unHighlightList('Saale')" onclick="selectRiver('Saale')" coords="445,314,414,311,366,299,366,342,395,346,404,356,424,358,477,354,463,341" /> - <area id="saale_thüringen_area" shape="poly" onmouseover="highlight('Saale-Thüringen'); highlightList('Saale-Thüringen')" onmouseout="unHighlight('Saale-Thüringen'); unHighlightList('Saale-Thüringen')" onclick="selectRiver('Saale-Thüringen')" coords="424,358,476,354,490,384,430,461,375,444,387,415,402,408,420,387,435,383" /> + <area id="saale_area" shape="poly" onmouseover="highlight('Saale'); highlightList('Saale')" onmouseout="unHighlight('Saale'); unHighlightList('Saale')" onclick="selectRiver('Saale')" coords="445,314,414,311,366,299,366,342,395,346,404,356,477,371,463,341" /> + <area id="saale_thueringen_area" shape="poly" onmouseover="highlight('Saale-Thüringen'); highlightList('Saale-Thüringen')" onmouseout="unHighlight('Saale-Thüringen'); unHighlightList('Saale-Thüringen')" onclick="selectRiver('Saale-Thüringen')" coords="430,365,462,377,477,385,430,461,375,444,387,415,402,408,420,387,435,383" /> <area id="saar_area" shape="poly" onmouseover="highlight('Saar'); highlightList('Saar')" onmouseout="unHighlight('Saar'); unHighlightList('Saar')" onclick="selectRiver('Saar')" coords="150,539,129,544,139,564,155,588,181,593,187,565,201,536,195,526,195,512,154,516" /> <area id="saar_wiltingerbogen_area" shape="poly" onmouseover="highlight('Saar (Wiltinger Bogen)'); highlightList('Saar (Wiltinger Bogen)')" onmouseout="unHighlight('Saar (Wiltinger Bogen)'); unHighlightList('Saar (Wiltinger Bogen)')" onclick="selectRiver('Saar (Wiltinger Bogen)')" coords="139,518,155,516,149,540,130,542" /> <area id="werra_sommer_area" shape="poly" onmouseover="highlight('Werra (Sommer)'); highlightList('Werra (Sommer)')" onmouseout="unHighlight('Werra (Sommer)'); 
unHighlightList('Werra (Sommer)')" onclick="selectRiver('Werra (Sommer)')" coords="316,361,334,351,360,342,387,345,398,354,384,372,381,389,371,400,350,390,345,382,336,378,334,372,329,368" /> @@ -120,7 +120,7 @@ <img src="images/FLYS_Oder.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Oder" > <img src="images/FLYS_Rhein.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Rhein" > <img src="images/FLYS_Saale.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saale" > - <img src="images/FLYS_Saale_Thüringen.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saale-Thüringen" > + <img src="images/FLYS_Saale_Thueringen.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saale-Thüringen" > <img src="images/FLYS_Saar.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saar" > <img src="images/FLYS_Saar_WiltingerBogen.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saar (Wiltinger Bogen)" > <img src="images/FLYS_Werra_Sommer.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Werra (Sommer)" > @@ -157,6 +157,7 @@ <img src="images/FLYS_Neckar_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Neckar_inactive" > <img src="images/FLYS_Neckar_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Neckar (über Wehrarme)_inactive" > <img src="images/FLYS_Saale_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saale_inactive" > + <img src="images/FLYS_Saale_Thueringen_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saale-Thüringen_inactive" > <img src="images/FLYS_Saar_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Saar_inactive" > <img src="images/FLYS_Werra_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Werra (Sommer)_inactive" > <img src="images/FLYS_Werra_inactive.png" style="position: absolute; left: 8px; top: 8px; visibility: hidden; z-index: 50;" id="Werra (Winter)_inactive" >