view artifacts/src/main/java/org/dive4elements/river/exports/minfo/BedQualityExporter.java @ 6332:f5bb53106ae8

Remove createBarriersLayer and createBarriers. The generated mapfiles did not work and were just confusing. This looks like historical cruft that was never deleted. The real barrier mapfiles are created in the Floodmap state.
author Andre Heinecke <aheinecke@intevation.de>
date Thu, 13 Jun 2013 17:24:56 +0200
parents af13ceeba52a
children e2b1057cee04
/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
 * Software engineering by Intevation GmbH
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */

package org.dive4elements.river.exports.minfo;

import gnu.trove.TDoubleArrayList;

import java.io.IOException;
import java.io.OutputStream;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import org.apache.log4j.Logger;
import org.w3c.dom.Document;

import au.com.bytecode.opencsv.CSVWriter;
import org.dive4elements.artifacts.CallContext;
import org.dive4elements.river.artifacts.model.CalculationResult;
import org.dive4elements.river.artifacts.model.minfo.BedDiameterResult;
import org.dive4elements.river.artifacts.model.minfo.BedParametersResult;
import org.dive4elements.river.artifacts.model.minfo.BedQualityResult;
import org.dive4elements.river.artifacts.model.minfo.BedloadDiameterResult;
import org.dive4elements.river.exports.AbstractExporter;
import org.dive4elements.river.utils.Formatter;


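/**
 * Exporter that writes MINFO bed quality calculation results to CSV.
 */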
public class BedQualityExporter
extends AbstractExporter
{
    /** Private logger. */
    private static Logger logger = Logger.getLogger(BedQualityExporter.class);

    private static final String CSV_HEADER_KM = "export.minfo.bedquality.km";
    private static final String CSV_HEADER_DENSITY_CAP =
        "export.minfo.bedquality.density_cap";
    private static final String CSV_HEADER_DENSITY_SUB =
        "export.minfo.bedquality.density_sub";
    private static final String CSV_HEADER_POROSITY_CAP =
        "export.minfo.bedquality.porosity_cap";
    private static final String CSV_HEADER_POROSITY_SUB =
        "export.minfo.bedquality.porosity_sub";
    private static final String CSV_HEADER_BEDLOAD =
        "export.minfo.bedquality.bedload";
    private static final String CSV_HEADER_BED_CAP =
        "export.minfo.bedquality.bed_cap";
    private static final String CSV_HEADER_BED_SUB =
        "export.minfo.bedquality.bed_sub";

    private BedQualityResult[] results;

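    /** Initializes the exporter and resets the results to an empty array. */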
    @Override
    public void init(Document request, OutputStream out, CallContext context) {
        logger.debug("BedQualityExporter.init");
        super.init(request, out, context);
        results = new BedQualityResult[0];
    }

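    /**
     * Writes one CSV row per kilometer found in the bed and bed load
     * results, filling diameter, density and porosity columns; NaN values
     * are written as empty cells.
     */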
    @Override
    protected void writeCSVData(CSVWriter writer) throws IOException {
        writeCSVHeader(writer);

        NumberFormat kmf = Formatter.getCalculationKm(context.getMeta());

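        // Collect all distinct km values and count the value columns:
        // one per bed load result, two per bed result (cap/sub) and,
        // if bed results exist, four more for density and porosity.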
        TDoubleArrayList kms = new TDoubleArrayList();
        int cols = 1;
        for (int i = 0; i < results.length; i++) {
            BedDiameterResult[] beds = results[i].getBedResults();
            for (int j = 0; j < beds.length; j++) {
                TDoubleArrayList bkms = beds[j].getKms();
                for (int k = 0; k < bkms.size(); k++) {
                    if (!kms.contains(bkms.get(k))) {
                        kms.add(bkms.get(k));
                    }
                }
            }
            BedloadDiameterResult[] loads = results[i].getBedloadResults();
            for (int j = 0; j < loads.length; j++) {
                TDoubleArrayList lkms = loads[j].getKms();
                for (int k = 0; k < lkms.size(); k++) {
                    if (!kms.contains(lkms.get(k))) {
                        kms.add(lkms.get(k));
                    }
                }
            }
            cols += beds.length * 2;
            cols += loads.length;
            if (beds.length > 0) {
                cols += 4;
            }
        }

        kms.sort();
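        // Build one row per km: row[0] is the km, the remaining columns are
        // filled from the bed load, bed and parameter results.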
        List<double[]> rows = new LinkedList<double[]>();
        for (int i = 0; i < kms.size(); i++) {
            double[] row = new double[cols];
            double km = kms.get(i);
            row[0] = km;
            for (int j = 0; j < results.length; j++) {
                BedloadDiameterResult[] loads = results[j].getBedloadResults();

                for(int k = 0; k < loads.length; k++) {
                    // k + 1: shift km column.
                    // j* loads.length: shift periods.
                    row[(k + 1) + (j * loads.length)] =
                        loads[k].getDiameter(km);
                }
                BedDiameterResult[] beds = results[j].getBedResults();
                for (int k = 0; k < beds.length; k++) {
                    // k + 1: shift km column.
                    // j * beds.length: shift periods.
                    // loads.length * results.length: shift bed load columns.
                    int ndx = (k + 1) + (j * beds.length) + (loads.length * results.length);
                    row[ndx] = beds[k].getDiameterCap(km);
                    row[ndx + 1] = beds[k].getDiameterSub(km);
                }
                BedParametersResult[] params = results[j].getParameters();
                for(int k = 0; k < params.length; k++) {
                    // 1: shift km column.
                    // loads.length + (beds.length * 2) * (j + 1): shift bed load and bed columns.
                    int ndx = 1 + (loads.length + (beds.length * 2) * (j + 1));
                    row[ndx] = params[k].getLoadDensityCap(km);
                    row[ndx + 1] = params[k].getLoadDensitySub(km);
                    row[ndx + 2] = params[k].getPorosityCap(km);
                    row[ndx + 3] = params[k].getPorositySub(km);
                }
            }
            rows.add(row);
        }
        NumberFormat nf = Formatter.getFormatter(context, 1, 3);
        for (double[] d : rows) {
            logger.debug(Arrays.toString(d));
            List<String> cells = new LinkedList<String>();
            for (int i = 0; i < d.length; i++) {
                if (!Double.isNaN(d[i])) {
                    cells.add(nf.format(d[i]));
                }
                else {
                    cells.add("");
                }
            }
            writer.writeNext(cells.toArray(new String[cells.size()]));
        }
    }

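    /** PDF export is not implemented for bed quality results. */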
    @Override
    protected void writePDF(OutputStream out) {
        // Not implemented.
    }

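    /**
     * Accepts a CalculationResult whose payload is a BedQualityResult
     * array; other data types are logged and ignored.
     */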
    @Override
    protected void addData(Object data) {
        logger.debug("addData()");
        if (!(data instanceof CalculationResult)) {
            logger.warn("Invalid data type.");
            return;
        }
        Object d = ((CalculationResult)data).getData();

        if (!(d instanceof BedQualityResult[])) {
            logger.warn("Invalid result object.");
            return;
        }
        results = (BedQualityResult[])d;
    }

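    /**
     * Writes the CSV header: a km column followed by, per result period,
     * bed load diameter columns, bed cap/sub diameter columns and, if
     * parameters are present, density and porosity columns, each labeled
     * with the period's date range.
     */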
    protected void writeCSVHeader(CSVWriter writer) {
        logger.debug("writeCSVHeader()");

        List<String> header = new LinkedList<String>();
        if (results != null)  {
            header.add(msg(CSV_HEADER_KM, "km"));
            for (int i = 0; i < results.length; i++) {
                DateFormat df = Formatter.getDateFormatter(context.getMeta(), "dd.MM.yyyy");
                String d1 = df.format(results[i].getDateRange().getFrom());
                String d2 = df.format(results[i].getDateRange().getTo());
                BedloadDiameterResult[] loads = results[i].getBedloadResults();
                BedDiameterResult[] beds = results[i].getBedResults();
                BedParametersResult[] params = results[i].getParameters();
                for (int j = 0; j < loads.length; j++) {
                    header.add(msg(CSV_HEADER_BEDLOAD, CSV_HEADER_BEDLOAD) +
                        " - " +
                        msg(loads[j].getType().toString(),
                            loads[j].getType().toString()) + " - " +
                        d1 + "-" + d2);
                }
                for (int j = 0; j < beds.length; j++) {
                    header.add(msg(CSV_HEADER_BED_CAP, CSV_HEADER_BED_CAP) + " - " +
                        msg(beds[j].getType().toString(),
                            beds[j].getType().toString()) + " - " +
                        d1 + "-" + d2);
                    header.add(msg(CSV_HEADER_BED_SUB, CSV_HEADER_BED_SUB) + " - " +
                        msg(beds[j].getType().toString(),
                            beds[j].getType().toString()) + " - " +
                        d1 + "-" + d2);
                }
                if (params.length > 0) {
                    header.add(
                        msg(CSV_HEADER_DENSITY_CAP, CSV_HEADER_DENSITY_CAP) +
                        " - " + d1 + "-" + d2);
                    header.add(
                        msg(CSV_HEADER_DENSITY_SUB, CSV_HEADER_DENSITY_SUB) +
                        " - " + d1 + "-" + d2);
                    header.add(
                        msg(CSV_HEADER_POROSITY_CAP, CSV_HEADER_POROSITY_CAP) +
                        " - " + d1 + "-" + d2);
                    header.add(
                        msg(CSV_HEADER_POROSITY_SUB, CSV_HEADER_POROSITY_SUB) +
                        " - " + d1 + "-" + d2);
                }
            }
        }
        writer.writeNext(header.toArray(new String[header.size()]));
    }
}
