comparison: backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthParser.java @ 8971:50416a0df385 (compared with 8970:da5dc7446652)

Importer for the navigation (S-INFO) and ecology (U-INFO) files

author: mschaefer
date: Tue, 03 Apr 2018 10:18:30 +0200
children: ae76f618d990
/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
 * Software engineering by
 *  Björnsen Beratende Ingenieure GmbH
 *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
 *
 * This file is Free Software under the GNU AGPL (>=v3)
 * and comes with ABSOLUTELY NO WARRANTY! Check out the
 * documentation coming with Dive4Elements River for details.
 */

package org.dive4elements.river.importer.sinfo.parsers;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.log4j.Logger;
import org.dive4elements.river.importer.Config;
import org.dive4elements.river.importer.ImportRiver;
import org.dive4elements.river.importer.common.AbstractParser;
import org.dive4elements.river.importer.common.ParsingState;
import org.dive4elements.river.importer.sinfo.importitem.FlowDepthColumnSeriesImport;
import org.dive4elements.river.importer.sinfo.importitem.FlowDepthKmLineImport;
import org.dive4elements.river.importer.sinfo.importitem.FlowDepthSeriesImport;
import org.dive4elements.river.model.sinfo.FlowDepthColumn;
import org.dive4elements.river.model.sinfo.FlowDepthValue;

/**
 * Reads and parses the header of a flow depth file and handles parsing and storing of its data columns.
 *
 * @author Matthias Schäfer
 *
 */
public class FlowDepthParser extends AbstractParser<FlowDepthColumn, FlowDepthValue, FlowDepthKmLineImport, FlowDepthColumnSeriesImport> {

    /***** FIELDS *****/

    private static final Logger log = Logger.getLogger(FlowDepthParser.class);

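    // Illustrative header lines that the patterns below are written to match (examples are
    // inferred from the regular expressions themselves, not taken from an actual import file):
    //   # Bezugsjahr: 2013
    //   # Auswerter: BfG
    //   # Peilung: Flaechenpeilung 2013
    //   # Typ: Fliesstiefe
    //   Fließtiefe (GlQ 2012) [m]    (COLUMN_TITLE; capture group 4 yields the column label "GlQ 2012")
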
    private static final Pattern META_YEAR = Pattern.compile("^#\\sBezugsjahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);

    private static final Pattern META_EVALUATOR = Pattern.compile("^#\\sAuswerter:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);

    private static final Pattern META_SOUNDING = Pattern.compile("^#\\sPeilung:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);

    private static final Pattern META_TYPE = Pattern.compile("^#\\sTyp:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);

    private static final Pattern COLUMN_TITLE = Pattern.compile("Flie((.)|(ss))tiefe\\s*\\((.+?)\\)\\s*\\[m\\].*", Pattern.CASE_INSENSITIVE);

    private final FlowDepthSeriesImport tkhGroup;

    private final List<FlowDepthColumnParser> colParsers;


    /***** CONSTRUCTORS *****/

    public FlowDepthParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
        super(importPath, rootRelativePath, river);
        this.tkhGroup = new FlowDepthSeriesImport(importPath.getName().replaceAll("\\.csv", ""));
        this.seriesHeader = new FlowDepthColumnSeriesImport(this.tkhGroup.getFilename(), this.tkhGroup, null, null);
        this.colParsers = new ArrayList<>();
    }


    /***** METHODS *****/

    @Override
    protected Logger getLog() {
        return log;
    }

    /**
     * Whether this import type shall be skipped
     */
    public static boolean shallSkip() {
        return Config.INSTANCE.skipSInfoFlowDepth();
    }

    /**
     * Creates a list of parsers for all flow depth import files in a directory
     */
    public static List<FlowDepthParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
        final List<FlowDepthParser> parsers = new ArrayList<>();
        for (final File file : listFiles(importDir, ".csv"))
            parsers.add(new FlowDepthParser(file, new File(relativeDir, file.getName()), river));
        return parsers;
    }

    @Override
    public void parse() throws IOException {
        getLog().info("Start parsing:;'" + this.rootRelativePath + "'");
        // this.seriesHeader = createSeriesImport(this.importPath.getName().replaceAll("\\.csv", ""));
        this.metaPatternsMatched.clear();
        this.kmExists.clear();
        this.colParsers.clear();
        this.headerParsingState = ParsingState.CONTINUE;
        try {
            try {
                this.in = new LineNumberReader(new InputStreamReader(new FileInputStream(this.importPath), ENCODING));
            }
            catch (final Exception e) {
                logError("Could not open (" + e.getMessage() + ")");
                this.headerParsingState = ParsingState.STOP;
            }
            this.currentLine = null;
            while (this.headerParsingState == ParsingState.CONTINUE) {
                this.currentLine = this.in.readLine();
                if (this.currentLine == null)
                    break;
                this.currentLine = this.currentLine.trim();
                if (this.currentLine.isEmpty())
                    continue;
                handleMetaLine();
            }
        }
        finally {
            if (this.in != null) {
                this.in.close();
                this.in = null;
            }
        }
        if (this.headerParsingState == ParsingState.STOP) {
            logError("Parsing of the file stopped due to a severe error");
            return;
        }
        for (final FlowDepthColumnParser colParser : this.colParsers)
            colParser.parse();
    }
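
    // Note: parse() itself consumes only the metadata header of the file; the km/value rows of
    // each flow depth column are read by the FlowDepthColumnParser instances that are created
    // in handleMetaColumnTitles() below and triggered at the end of parse().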

    @Override
    protected boolean handleMetaOther() {
        if (handleMetaYear())
            return true;
        else if (handleMetaType())
            return true;
        else if (handleMetaSounding())
            return true;
        else if (handleMetaEvaluator())
            return true;
        else
            return false;
    }

    private boolean handleMetaYear() {
        final Matcher m = META_YEAR.matcher(this.currentLine);
        if (m.matches()) {
            this.metaPatternsMatched.add(META_YEAR);
            this.tkhGroup.setYear(Integer.parseInt(m.group(1)));
            return true;
        }
        return false;
    }

    private boolean handleMetaType() {
        // the type line is recognized, but its value is not stored
        final Matcher m = META_TYPE.matcher(this.currentLine);
        return m.matches();
    }

    private boolean handleMetaSounding() {
        final Matcher m = META_SOUNDING.matcher(this.currentLine);
        if (m.matches()) {
            this.metaPatternsMatched.add(META_SOUNDING);
            this.tkhGroup.setSounding_info(parseMetaInfo(m.group(1).trim()));
            return true;
        }
        return false;
    }

    private boolean handleMetaEvaluator() {
        final Matcher m = META_EVALUATOR.matcher(this.currentLine);
        if (m.matches()) {
            this.metaPatternsMatched.add(META_EVALUATOR);
            this.tkhGroup.setEvaluation_by(parseMetaInfo(m.group(1).trim()));
            return true;
        }
        return false;
    }

    @Override
    protected boolean handleMetaColumnTitles() {
        if (!super.handleMetaColumnTitles())
            return false;
        this.tkhGroup.setKmrange_info(this.seriesHeader.getKmrange_info());
        this.tkhGroup.setComment(this.seriesHeader.getComment());
        for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
            final Matcher m = COLUMN_TITLE.matcher(this.columnTitles.get(i));
            if (m.matches())
                this.colParsers.add(new FlowDepthColumnParser(this.importPath, this.rootRelativePath, this.river, this.tkhGroup, i, m.group(4).trim()));
            else
                logWarning("No title found in column " + i + ", skipped");
        }
        return true;
    }

    @Override
    public void store() {
        if (this.headerParsingState != ParsingState.STOP) {
            this.tkhGroup.getPeer(this.river.getPeer());
            for (final FlowDepthColumnParser colParser : this.colParsers)
                colParser.store();
        }
        else
            logWarning("Severe parsing errors, not storing series '" + this.tkhGroup.getFilename() + "'");
    }

    // Not used: this parser reads only the file header; the data lines are handled by the column parsers.
    @Override
    protected FlowDepthColumnSeriesImport createSeriesImport(final String filename) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected FlowDepthKmLineImport createKmLineImport(final Double km, final String[] values) {
        throw new UnsupportedOperationException();
    }
}
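
/*
 * Usage sketch (not part of the original file): how an import run might drive this parser.
 * The ImportRiver instance "river" and the directory paths are assumptions for illustration;
 * only shallSkip(), createParsers(), parse() and store() come from the class above, and
 * parse() may throw an IOException.
 *
 *   if (!FlowDepthParser.shallSkip()) {
 *       final File importDir = new File("<river-import-dir>/Fliesstiefen");   // hypothetical path
 *       final File relativeDir = new File("Fliesstiefen");                    // hypothetical path
 *       for (final FlowDepthParser parser : FlowDepthParser.createParsers(importDir, relativeDir, river)) {
 *           parser.parse();   // reads the metadata header and triggers the column parsers
 *           parser.store();   // persists the series unless a severe parsing error occurred
 *       }
 *   }
 */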
