Mercurial > dive4elements > river
diff artifacts/src/main/java/org/dive4elements/river/artifacts/common/ExportContextCSV.java @ 9150:23945061daec
gigantic refactoring: exporter, result, results
to support multiple Jasper reports -> avoid collisions
author | gernotbelger |
---|---|
date | Thu, 14 Jun 2018 16:56:31 +0200 |
parents | |
children | cd24db77f044 |
line wrap: on
line diff
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/common/ExportContextCSV.java Thu Jun 14 16:56:31 2018 +0200 @@ -0,0 +1,236 @@ +/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde + * Software engineering by + * Björnsen Beratende Ingenieure GmbH + * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt + * + * This file is Free Software under the GNU AGPL (>=v3) + * and comes with ABSOLUTELY NO WARRANTY! Check out the + * documentation coming with Dive4Elements River for details. + */ +package org.dive4elements.river.artifacts.common; + +import java.text.DateFormat; +import java.text.NumberFormat; +import java.util.Date; +import java.util.Locale; + +import org.apache.commons.lang.math.DoubleRange; +import org.dive4elements.artifacts.CallContext; +import org.dive4elements.artifacts.CallMeta; +import org.dive4elements.river.FLYS; +import org.dive4elements.river.artifacts.resources.Resources; +import org.dive4elements.river.artifacts.sinfo.common.SInfoI18NStrings; +import org.dive4elements.river.artifacts.sinfo.common.SInfoResultType; +import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo; +import org.dive4elements.river.artifacts.sinfo.util.RiverInfo; +import org.dive4elements.river.artifacts.sinfo.util.WstInfo; +import org.dive4elements.river.utils.Formatter; + +import au.com.bytecode.opencsv.CSVWriter; + +/** + * @author Domenico Nardi Tironi + * + */ +public final class ExportContextCSV { + + private static final String CSV_META_HEADER_EVALUATOR = "sinfo.export.flow_depth.csv.meta.header.sounding.evaluator"; + + private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding"; + + private static final String CSV_META_HEADER_SOUNDING_YEAR = "sinfo.export.flow_depth.csv.meta.header.sounding.year"; + + private static final String CSV_META_HEADER_SOUNDING_TYPE = "sinfo.export.flow_depth.csv.meta.header.sounding.type"; + + private static final String 
CSV_META_HEADER_SOUNDING_PRJ = "sinfo.export.flow_depth.csv.meta.header.sounding.prj"; + + private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel"; + + private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original"; + + private static NumberFormat qFormat = null; + + private static NumberFormat flowDepthFormat = null; + + private NumberFormat kmFormat; + + /** The CallContext object. */ + private final CallContext context; + + private final CSVWriter writer; + + public ExportContextCSV(final CallContext context, final CSVWriter writer) { + this.context = context; + this.writer = writer; + } + + private String msg(final String key) { + return Resources.getMsg(this.context.getMeta(), key, key); + } + + public String msg(final String key, final Object... args) { + return Resources.getMsg(this.context.getMeta(), key, key, args); + } + + public final void writeCSVMetaEntry(final String message, final Object... 
messageArgs) { + + final CallMeta meta = this.context.getMeta(); + + this.writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) }); + } + + public void writeBlankLine() { + writeCSVLine(new String[] { "" }); + + } + + // *** CUSTOM STUFF that is used multiple times ***/// + + public final void writeCSVSoundingMetadata2(final BedHeightInfo sounding, final String mainLabel) { + // "##METADATEN PEILUNG" + writeCSVMetaEntry(mainLabel); + + // "# Jahr der Peilung: " + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING_YEAR, Integer.toString(sounding.getYear())); + // "# Aufnahmeart: " + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING_TYPE, sounding.getType()); + // "# Auswerter: " + writeCSVMetaEntry(CSV_META_HEADER_EVALUATOR, sounding.getEvaluationBy()); + // "# Lagesystem: " + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING_PRJ, sounding.getLocationSystem()); + // "# Höhensystem: " + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL, sounding.getCurElevationModelUnit()); + // "# ursprüngliches Höhensystem: " + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL, sounding.getOldElevationModelUnit()); + } + + public final void writeCSVSoundingMetadata(final BedHeightInfo sounding) { + // "##METADATEN PEILUNG" + writeCSVMetaEntry(CSV_META_HEADER_SOUNDING); + + } + + public void writeCSVLine(final String[] line) { + this.writer.writeNext(line); + } + + public String formatCsvHeader(final IResultType type) { + return msg(type.getCsvHeader()); + } + + public String formatRowValue(final ResultRow row, final IResultType type) { + return row.exportValue(this.context, type); + } + + public void addJRMetadata(final MetaAndTableJRDataSource source, final String key, final IResultType type) { + source.addMetaData(key, type.getPdfHeader(this.context.getMeta())); + } + + public final void writeCSVGlobalMetadataDefaults(final AbstractCalculationResults results) { + // TODO: results as member + final String calcModeLabel = 
results.getCalcModeLabel(); + final RiverInfo river = results.getRiver(); + final DoubleRange calcRange = results.getCalcRange(); + + writeCSVMetaEntry(I18NStrings.CSV_META_HEADER_RESULT, msg(I18NStrings.CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel); + + // "# FLYS-Version: " + writeCSVMetaEntry(I18NStrings.CSV_META_VERSION, msg(I18NStrings.CSV_META_VERSION_LABEL), FLYS.VERSION); + + // "# Bearbeiter: " + writeCSVMetaEntry(I18NStrings.CSV_META_USER, msg(I18NStrings.CSV_META_USER_LABEL), results.getUser()); + + // "# Datum der Erstellung: " + final Locale locale = Resources.getLocale(this.context.getMeta()); + final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); + writeCSVMetaEntry(I18NStrings.CSV_META_CREATION, msg(I18NStrings.CSV_META_CREATION_LABEL), df.format(new Date())); + + // "# Gewässer: " + writeCSVMetaEntry(I18NStrings.CSV_META_RIVER, msg(I18NStrings.CSV_META_RIVER_LABEL), river.getName()); + + // "# Höhensystem des Flusses: " + writeCSVMetaEntry(I18NStrings.CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit()); + + if (calcRange != null) { + // "# Ort/Bereich (km): " + writeCSVMetaEntry(I18NStrings.CSV_META_RANGE, msg(I18NStrings.CSV_META_RANGE_LABEL), getKmFormatter().format(calcRange.getMinimumDouble()), + getKmFormatter().format(calcRange.getMaximumDouble())); + } + } + + public final void writeCSVWaterlevelMetadata(final WstInfo wst) { + writeCSVWaterlevelMetadata(wst, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL); // default Label; used in 99% + } + + public final void writeCSVWaterlevelMetadata(final WstInfo wst, final String mainLabel) { + // "##METADATEN WASSERSPIEGELLAGE" + writeCSVMetaEntry(mainLabel); + + // "# Bezeichnung der Wasserspiegellage: " + writeCSVMetaEntry(SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel()); + + // "# Bezugspegel: " + writeCSVMetaEntry(SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge()); + + // "# Jahr/Zeitraum der Wasserspiegellage: " + final int year = 
wst.getYear(); + if (year > 0) + writeCSVMetaEntry(SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(year)); + } + + // copy from AbstractExporter TODO merge with ExportContextPDF + protected NumberFormat getKmFormatter() { + + if (this.kmFormat == null) { + this.kmFormat = Formatter.getWaterlevelKM(this.context); + } + return this.kmFormat; + } + + public void addJRMetadata(final MetaAndTableJRDataSource source, final String key, final String msg) { + source.addMetaData(key, msg); + + } + + public NumberFormat getQFormatter() { + if (this.qFormat == null) { + this.qFormat = Formatter.getWaterlevelQ(this.context); + } + return this.qFormat; + } + + public final NumberFormat getFlowDepthFormatter() { + if (this.flowDepthFormat == null) + this.flowDepthFormat = Formatter.getFlowDepth(this.context); + return this.flowDepthFormat; + } + + /** + * Formats header with unit: msg [unit] + */ + + public String msgUnitCSV(final IResultType typeWithUnit) { // TODO: use generic Type! + final String unit = msg(typeWithUnit.getUnit()); + return msgUnitCSV(typeWithUnit, unit); + } + + public String msgUnitCSV(final IResultType type, final String unit) { + final String msg = msg(type.getCsvHeader()); + return String.format("%s [%s]", msg, unit); + } + + public void writeCSVSoundingMetadata(final BedHeightInfo currentSounding, final String csvMetaHeaderSoundingCurrent) { + + } + + public final String msgUnitLabel(final IResultType typeWithUnit, final String label) { + final String msg = msg(typeWithUnit.getCsvHeader()); + final String unit = msg(typeWithUnit.getUnit()); + return String.format("%s [%s] (%s)", msg, unit, label); + } + + public String msgPdf(final SInfoResultType type) { + return type.getPdfHeader(this.context.getMeta()); + + } +} \ No newline at end of file