view artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 8938:9c02733a1b3c

Work on Sinfo-tkh - exports; using same logic for wst-description as winfo
author gernotbelger
date Tue, 06 Mar 2018 17:09:39 +0100
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */

package org.dive4elements.river.artifacts.sinfo.tkhstate;

import java.text.DateFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Locale;

import org.apache.commons.lang.math.DoubleRange;
import org.apache.log4j.Logger;
import org.dive4elements.river.FLYS;
import org.dive4elements.river.artifacts.resources.Resources;
import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings;
import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
import org.dive4elements.river.artifacts.sinfo.util.WstInfo;
import org.dive4elements.river.utils.RiverUtils;

import au.com.bytecode.opencsv.CSVWriter;

/**
 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation.
 *
 * @author Gernot Belger
 */
// REMARK: must be public because it is registered in generators.xml
public class TkhExporter extends AbstractSInfoExporter<TkhResultRow, TkhCalculationResult, TkhCalculationResults> {

    private enum ExportMode {
        pdf, csv
    }

    /** The log used in this exporter. */
    private static final Logger log = Logger.getLogger(TkhExporter.class);

    private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula";

    private static final String CSV_TKH_HEADER = "sinfo.export.tkh.csv.header.tkh";

    private static final String CSV_TKHKIND_HEADER = "sinfo.export.tkh.csv.header.tkhkind";

    private static final String PREFIX_TKH_KIND = "sinfo.export.tkh.soilkind.";

    private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";

    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";

    @Override
    protected Logger getLog() {
        return log;
    }

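    /**
     * Writes the global CSV metadata block: result type, FLYS version, user, creation date, river, height unit of the
     * river, km range and the calculation formula.
     */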
    @Override
    protected void writeCSVGlobalMetadata(final CSVWriter writer, final TkhCalculationResults results) {
        log.info("TkhExporter.writeCSVMeta");

        final String calcModeLabel = results.getCalcModeLabel();
        final RiverInfo river = results.getRiver();
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_RESULT, msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel);

        // "# FLYS-Version: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_VERSION, msg(SInfoI18NStrings.CSV_META_VERSION_LABEL), FLYS.VERSION);

        // "# Bearbeiter: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_USER, msg(SInfoI18NStrings.CSV_META_USER_LABEL), results.getUser());

        // "# Datum der Erstellung: "
        final Locale locale = Resources.getLocale(this.context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_CREATION, msg(SInfoI18NStrings.CSV_META_CREATION_LABEL), df.format(new Date()));

        // "# Gewässer: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RIVER, msg(SInfoI18NStrings.CSV_META_RIVER_LABEL), river.getName());

        // "# Höhensystem des Flusses: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit());

        // "# Ort/Bereich (km): "
        final DoubleRange calcRange = results.getCalcRange();
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RANGE, msg(SInfoI18NStrings.CSV_META_RANGE_LABEL),
                getKmFormatter().format(calcRange.getMinimumDouble()), getKmFormatter().format(calcRange.getMaximumDouble()));

        // "# Berechnungsgrundlage: Gleichung nach GILL (1971)"
        writeCSVMetaEntry(writer, CSV_META_CALCULATION_FORMULA);

        writer.writeNext(new String[] { "" });
    }

    /**
     * Writes the CSV header row, with different headings depending on whether the data refers to a gauge or to a location.
     */
    @Override
    protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) {
        log.info("TkhExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(11);

        header.add(msg(SInfoI18NStrings.CSV_KM_HEADER));
        header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM));
        header.add(msg(CSV_TKHKIND_HEADER));
        header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));

        header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit()));
        header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M));

        final String descriptionHeader = results.getDescriptionHeader();
        if (descriptionHeader != null)
            header.add(msg(descriptionHeader));

        header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
        header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

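    /**
     * Writes the metadata block of a single waterlevel result: the label of the waterlevel and its reference gauge.
     */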
    @Override
    // FIXME: rename
    protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) {

        /* first some specific metadata */
        final WstInfo wst = result.getWst();

        // "##METADATEN WASSERSPIEGELLAGE"
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL);
        // "# Bezeichnung der Wasserspiegellage: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel());
        // "# Bezugspegel: "
        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge());
        // // "# Jahr/Zeitraum der Wasserspiegellage: "
        // writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear()));
    }

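    /** Formats a single result row for CSV output by delegating to {@link #formatRow}. */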
    @Override
    protected String[] formatCSVRow(final TkhCalculationResults results, final TkhResultRow row) {
        return formatRow(results, row, ExportMode.csv);
    }

    /**
     * Formats a row of a TKH result into an array of strings, used for both CSV and PDF output.
     *
     * @param results
     *            the overall calculation results, used to check whether a waterlevel description column is present
     * @param row
     *            the result row to format
     * @param mode
     *            whether the row is exported as CSV or PDF
     */
    private String[] formatRow(final TkhCalculationResults results, final TkhResultRow row, final ExportMode mode) {

        final Collection<String> lines = new ArrayList<>(11);

        // Fluss-km
        lines.add(getKmFormatter().format(row.getStation()));

        // TKH [cm]
        lines.add(getTkhFormatter().format(row.getTkh()));

        // Einteilung der Gewässersohle (starr/mobil)
        lines.add(msg(PREFIX_TKH_KIND + row.getTkhKind().name()));

        // Mittlere Sohlhöhe [NN + m]
        lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight()));

        // Wasserstand [NN + m]
        lines.add(getW2Formatter().format(row.getWaterlevel()));

        // Q [m³/s]
        final double discharge = row.getDischarge();
        final double roundedDischarge = RiverUtils.roundQ(discharge);
        lines.add(getQFormatter().format(roundedDischarge));

        // Bezeichnung
        // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates).
        if (results.getDescriptionHeader() != null || mode == ExportMode.pdf)
            lines.add(row.getWaterlevelLabel());

        // Bezugspegel
        lines.add(row.getGauge());

        // Lage
        lines.add(row.getLocation());

        return lines.toArray(new String[lines.size()]);
    }

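    /** Returns the Jasper template used for PDF output; the S-INFO flow depth template is reused here. */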
    @Override
    protected final String getJasperFile() {
        return JASPER_FILE;
    }

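    /**
     * Adds the global metadata (labels and values) and the column headings to the JasperReports data source used for
     * the PDF output.
     */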
    @Override
    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final TkhCalculationResults results) {

        final RiverInfo river = results.getRiver();
        final String wstUnitName = river.getWstUnit();

        /* general metadata */
        source.addMetaData("header", msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL));
        source.addMetaData("calcMode", results.getCalcModeLabel());

        source.addMetaData("version_label", msg(SInfoI18NStrings.CSV_META_VERSION_LABEL));
        source.addMetaData("version", FLYS.VERSION);

        source.addMetaData("user_label", msg(SInfoI18NStrings.CSV_META_USER_LABEL));
        source.addMetaData("user", results.getUser());

        final Locale locale = Resources.getLocale(this.context.getMeta());
        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
        source.addMetaData("date_label", msg(SInfoI18NStrings.CSV_META_CREATION_LABEL));
        source.addMetaData("date", df.format(new Date()));

        source.addMetaData("river_label", msg(SInfoI18NStrings.CSV_META_RIVER_LABEL));
        source.addMetaData("river", river.getName());
        source.addMetaData("river_unit", wstUnitName);

        final DoubleRange calcRange = results.getCalcRange();
        final NumberFormat kmFormatter = getKmFormatter();
        final String rangeValue = String.format("%s - %s", kmFormatter.format(calcRange.getMinimumDouble()), kmFormatter.format(calcRange.getMaximumDouble()));
        source.addMetaData("range_label", msg(SInfoI18NStrings.CSV_META_RANGE_LABEL));
        source.addMetaData("range", rangeValue);

        /* column headings */
        source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER));
        source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
        source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
        source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER));
        source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER));

        // REMARK: the column actually makes no sense if the description header is null, but for symmetry WINFO also
        // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates).
        final String descriptionHeader = results.getDescriptionHeader();
        final String waterlevelNameHeader = descriptionHeader == null ? msg(SInfoI18NStrings.CSV_LABEL_HEADER) : descriptionHeader;
        source.addMetaData("waterlevel_name_header", waterlevelNameHeader);

        source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
        source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
    }

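    /** Formats a single result row for PDF output by delegating to {@link #formatRow}. */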
    @Override
    protected String[] formatPDFRow(final TkhCalculationResults results, final TkhResultRow row) {
        return formatRow(results, row, ExportMode.pdf);
    }
}
