view artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java @ 8980:b194fa64506a

SINFO - show result themes according to spec, either raw data or floating mean values. Some improvements to error handling and handling of empty results.
author gernotbelger
date Thu, 05 Apr 2018 18:30:34 +0200
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */

package org.dive4elements.river.artifacts.sinfo.tkhstate;

import java.util.ArrayList;
import java.util.Collection;

import org.apache.log4j.Logger;
import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
import org.dive4elements.river.artifacts.sinfo.common.SInfoResultRow;
import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType;
import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
import org.dive4elements.river.artifacts.sinfo.util.WstInfo;

import au.com.bytecode.opencsv.CSVWriter;

/**
 * Generates different output formats (csv, pdf) of data that resulted from a tkh computation.
 *
 * @author Gernot Belger
 */
// REMARK: must be public because it is registered in generators.xml
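// For illustration only: a hypothetical sketch of what such a registration entry might look like; the real
// generators.xml may use different element and attribute names.
//
//   <output-generator name="sinfo_tkh_export"
//       class="org.dive4elements.river.artifacts.sinfo.tkhstate.TkhExporter"/>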
public class TkhExporter extends AbstractSInfoExporter<TkhCalculationResult, TkhCalculationResults> {

    private static enum ExportMode {
        pdf, csv
    }

    /** The log used in this exporter. */
    private static Logger log = Logger.getLogger(TkhExporter.class);

    private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula";

    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";

    @Override
    protected Logger getLog() {
        return log;
    }

    @Override
    protected void writeCSVGlobalMetadata(final CSVWriter writer, final TkhCalculationResults results) {
        log.info("TkhExporter.writeCSVMeta");

        super.writeCSVGlobalMetadataDefaults(writer, results);

        // "# Berechnungsgrundlage: Gleichung nach GILL (1971)"
        writeCSVMetaEntry(writer, CSV_META_CALCULATION_FORMULA);

        writer.writeNext(new String[] { "" });
    }

    /**
     * Write the header, with different headings depending on whether at a gauge or at a location.
     */
    @Override
    protected void writeCSVHeader(final CSVWriter writer, final TkhCalculationResults results, final RiverInfo river) {
        log.info("TkhExporter.writeCSVHeader");

        final Collection<String> header = new ArrayList<>(11);

        header.add(msg(SInfoResultType.station.getCsvHeader()));
        header.add(msgUnit(SInfoResultType.tkh.getCsvHeader(), SInfoResultType.tkh.getUnit()));
        header.add(msg(SInfoResultType.soilkind.getCsvHeader()));
        header.add(msgUnit(SInfoResultType.meanBedHeight.getCsvHeader(), river.getWstUnit()));
        header.add(msgUnit(SInfoResultType.waterlevel.getCsvHeader(), river.getWstUnit()));
        header.add(msgUnit(SInfoResultType.discharge.getCsvHeader(), SInfoResultType.discharge.getUnit()));

        final String descriptionHeader = results.getDescriptionHeader();
        if (descriptionHeader != null)
            header.add(msg(descriptionHeader));

        header.add(msg(SInfoResultType.gaugeLabel.getCsvHeader()));
        header.add(msg(SInfoResultType.location.getCsvHeader()));

        writer.writeNext(header.toArray(new String[header.size()]));
    }

    @Override
    // FIXME: rename
    protected void writeCSVResultMetadata(final CSVWriter writer, final TkhCalculationResults results, final TkhCalculationResult result) {

        final WstInfo wst = result.getWst();
        super.writeCSVWaterlevelMetadata(writer, wst);

        // FIXME:
        // "# W/Pegel [cm]: " (only when the waterlevel was entered at a gauge)
        // "# Q (m³/s): " (only when the discharge was entered)
    }

    @Override
    protected String[] formatCSVRow(final TkhCalculationResults results, final TkhCalculationResult result, final SInfoResultRow row) {
        return formatRow(results, row, ExportMode.csv);
    }

    /**
     * Formats a result row into an array of strings, used for both csv and pdf export.
     *
     * @param results
     *            the overall calculation results; determines whether the waterlevel description column is present
     * @param row
     *            the result row to format
     * @param mode
     *            the export mode (csv or pdf)
     */
    private String[] formatRow(final TkhCalculationResults results, final SInfoResultRow row, final ExportMode mode) {

        final Collection<String> lines = new ArrayList<>(11);

        lines.add(row.exportValue(this.context, SInfoResultType.station));
        lines.add(row.exportValue(this.context, SInfoResultType.tkh));
        lines.add(row.exportValue(this.context, SInfoResultType.soilkind));
        lines.add(row.exportValue(this.context, SInfoResultType.meanBedHeight));
        lines.add(row.exportValue(this.context, SInfoResultType.waterlevel));
        lines.add(row.exportValue(this.context, SInfoResultType.discharge));

        // REMARK: always export this column in pdf-mode, because WInfo also does it (no need for two jasper-templates).
        if (results.getDescriptionHeader() != null || mode == ExportMode.pdf)
            lines.add(row.exportValue(this.context, SInfoResultType.waterlevelLabel));

        lines.add(row.exportValue(this.context, SInfoResultType.gaugeLabel));
        lines.add(row.exportValue(this.context, SInfoResultType.location));

        return lines.toArray(new String[lines.size()]);
    }

    @Override
    protected final String getJasperFile() {
        return JASPER_FILE;
    }

    @Override
    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final TkhCalculationResults results) {

        /* general metadata */
        super.addJRMetaDataDefaults(source, results);

        /* column headings */
        source.addMetaData("station_header", SInfoResultType.station.getPdfHeader(this.context.getMeta()));
        source.addMetaData("tkh_header", SInfoResultType.tkh.getPdfHeader(this.context.getMeta()));
        source.addMetaData("bedheight_header", SInfoResultType.meanBedHeight.getPdfHeader(this.context.getMeta()));
        source.addMetaData("waterlevel_header", SInfoResultType.waterlevel.getPdfHeader(this.context.getMeta()));
        source.addMetaData("discharge_header", SInfoResultType.discharge.getPdfHeader(this.context.getMeta()));

        // REMARK: the column actually makes no sense if the description header is null. But (for symmetry) WINFO also
        // writes an empty column into the pdf in that case (most probably to avoid the need for two jasper templates).
        final String descriptionHeader = results.getDescriptionHeader();
        final String waterlevelNameHeader = descriptionHeader == null ? SInfoResultType.waterlevelLabel.getPdfHeader(this.context.getMeta())
                : descriptionHeader;
        source.addMetaData("waterlevel_name_header", waterlevelNameHeader);

        source.addMetaData("gauge_header", SInfoResultType.gaugeLabel.getPdfHeader(this.context.getMeta()));
        source.addMetaData("location_header", SInfoResultType.location.getPdfHeader(this.context.getMeta()));
    }

    @Override
    protected String[] formatPDFRow(final TkhCalculationResults results, final SInfoResultRow row) {
        return formatRow(results, row, ExportMode.pdf);
    }
}
