# HG changeset patch
# User Andre Heinecke
# Date 1362397552 -3600
# Node ID a0abb6787ab1bf0be26f2c270f0ff44e5740561b
# Parent 8937dd13023066cfbe04636019c795bc7aa86ef1
Add first version of import_river script

diff -r 8937dd130230 -r a0abb6787ab1 flys-backend/contrib/import_river.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/contrib/import_river.sh	Mon Mar 04 12:45:52 2013 +0100
@@ -0,0 +1,344 @@
#!/bin/bash
# Import script for rivers
#
# Authors:
# Andre Heinecke
#
# Copyright:
# Copyright (C) 2012 Greenbone Networks GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

set -e

# Directory this script lives in; the optional libraries and the
# geo importer are resolved relative to it.
DIR=$(dirname "$0")
DIR=$(readlink -f "$DIR")

# Default settings
DEFAULT_HOST=localhost
DEFAULT_PORT=1521
DEFAULT_USER=flys_dami
DEFAULT_PASS=flys_dami
DEFAULT_LOG=$PWD/logs
DEFAULT_BACKEND_NAME="XE"
JAR="hydr_morph/importer.jar"
IMPORTER_DRY_RUN=false
IMPORTER_MAINVALUE_TYPES=QWTD
IMPORTER_ANNOTATION_TYPES="conf/annotation-types.xml"


MIN_MEMORY="8024m"

OPTIONAL_LIBS="${DIR}"/../opt
if [ -d "$OPTIONAL_LIBS" ]; then
    export PATH="$OPTIONAL_LIBS/bin:$PATH"
    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib:$LD_LIBRARY_PATH"
    export LD_LIBRARY_PATH="$OPTIONAL_LIBS/lib64:$LD_LIBRARY_PATH"
    export PYTHONPATH="$OPTIONAL_LIBS/lib/python2.6/site-packages:$PYTHONPATH"
    export PYTHONPATH="$OPTIONAL_LIBS/lib64/python2.6/site-packages:$PYTHONPATH"
    export GDAL_DATA="$OPTIONAL_LIBS/share/gdal"
fi

usage(){
    cat << EOF

usage: $0 [options] gew_file

Import a river described by the gew_file

OPTIONS:
    -?, --help            Show this message
    -u, --username=<user> Database username. Default: $DEFAULT_USER
    -w, --password=<pass> Database password. Default: $DEFAULT_PASS
    -h, --host=<host>     Connect to database on host <host>.
                          Default: $DEFAULT_HOST
    -p, --port=<port>     Use port number <port>. Default: $DEFAULT_PORT
    -d, --db-name=<name>  Name of the database / backend. Default: $DEFAULT_BACKEND_NAME
    -l, --log-dir=<dir>   Directory in which to create the log files.
                          Default: $DEFAULT_LOG
    --postgres            Database is PostgreSQL
    --skip-hydro          Skip import of hydrological data
    --skip-morpho         Skip import of morphological data
    --skip-geo            Skip import of geographic data
    --skip-wst            Skip import of wst data
EOF
exit 0
}
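# Example invocation (illustrative only; host name, credentials and the
# gew file path are made-up values, not part of this change):
#
#   ./import_river.sh --postgres -u flys -w secret -h dbhost -p 5432 \
#       -d flys -l /tmp/import-logs saar/saar.gew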
OPTS=`getopt -o ?u:w:h:p:d:l: \
     -l help,username:,password:,host:,port:,db-name:,log-dir:,postgres,skip-hydro,skip-morpho,skip-geo,skip-wst \
     -n $0 -- "$@"`
if [ $? != 0 ] ; then usage; fi
eval set -- "$OPTS"
while true ; do
  case "$1" in
    "-?"|"--help")
      usage;;
    "--")
      shift
      break;;
    "-u"|"--username")
      USER=$2
      shift 2;;
    "-w"|"--password")
      PASS=$2
      shift 2;;
    "-h"|"--host")
      HOST=$2
      shift 2;;
    "-p"|"--port")
      PORT=$2
      shift 2;;
    "-l"|"--log-dir")
      LOG=$2
      shift 2;;
    "-d"|"--db-name")
      BACKEND_NAME=$2
      shift 2;;
    "--skip-hydro")
      SKIP_HYDRO="TRUE"
      shift;;
    "--skip-morpho")
      SKIP_MORPHO="TRUE"
      shift;;
    "--skip-wst")
      SKIP_WST="TRUE"
      shift;;
    "--skip-geo")
      SKIP_GEO="TRUE"
      shift;;
    "--postgres")
      POSTGRES="TRUE"
      shift;;
    *)
      echo "Unknown Option $1"
      usage;;
  esac
done

if [ -z "$USER" ]; then
  USER=$DEFAULT_USER
fi
if [ -z "$PASS" ]; then
  PASS=$DEFAULT_PASS
fi
if [ -z "$PORT" ]; then
  PORT=$DEFAULT_PORT
fi
if [ -z "$HOST" ]; then
  HOST=$DEFAULT_HOST
fi
if [ -z "$BACKEND_NAME" ]; then
  BACKEND_NAME=$DEFAULT_BACKEND_NAME
fi
if [ -z "$LOG" ]; then
  LOG=$DEFAULT_LOG
fi

if [ $# != 1 ]; then
  usage
fi

if [ ! -r "$1" ]; then
  echo "Could not open $1. Please ensure it exists and is readable."
  exit 1
fi

GEW_FILE="$1"
# Directory of the river data. Assumption: the gew file lies in the
# top level directory of the river.
RIVER_PATH=$(dirname "$GEW_FILE")
RIVER_NAME=$(grep "Gew.sser" "$1" | awk '{print $2}')
DATE=$(date +%Y.%m.%d_%H%M)
LOG_DIR=${LOG}/${RIVER_NAME}-$DATE
mkdir -p ${LOG_DIR}

if [ "$POSTGRES" = "TRUE" ]; then
    JAR=$(echo "$JAR" | sed 's/importer/importer_psql/')
    if [ ! -r "$JAR" ]; then
        echo "Could not find Postgres importer $JAR"
        exit 1
    fi
    OGR_CONNECTION="PG:dbname=$BACKEND_NAME host=$HOST port=$PORT \
        user=$USER password=$PASS"
    BACKEND_DB_PREFIX="jdbc:postgresql:"
    BACKEND_DB_DRIVER="org.postgresql.Driver"
    BACKEND_DB_DIALECT="org.hibernate.dialect.PostgreSQLDialect"
else
    BACKEND_DB_PREFIX="jdbc:oracle:thin:@"
    BACKEND_DB_DRIVER="oracle.jdbc.OracleDriver"
    BACKEND_DB_DIALECT="org.hibernate.dialect.OracleDialect"
fi

BACKEND_URL=$BACKEND_DB_PREFIX//$HOST:$PORT/$BACKEND_NAME
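# With the defaults above the resulting JDBC URL is, for example,
#   jdbc:oracle:thin:@//localhost:1521/XE
# and with --postgres -h dbhost -p 5432 -d flys (illustrative values)
#   jdbc:postgresql://dbhost:5432/flys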

echo "Importing $RIVER_NAME into $BACKEND_URL."

import_hydro(){
    LOG_FILE=${LOG_DIR}/hydro.log
    echo Importing Hydrological data.
    echo Logging to: $LOG_FILE
    sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties
    java -jar \
    -Xmx$MIN_MEMORY \
    -server \
    -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
    -Dflys.backend.user=$USER \
    -Dflys.backend.password=$PASS \
    -Dflys.backend.url=$BACKEND_URL \
    -Dflys.backend.driver=$BACKEND_DB_DRIVER \
    -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
    -Dflys.backend.importer.infogew.file="$GEW_FILE" \
    -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \
    -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \
    -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \
    -Dflys.backend.importer.skip.annotations=false \
    -Dflys.backend.importer.skip.bwastr=false \
    -Dflys.backend.importer.skip.da50s=false \
    -Dflys.backend.importer.skip.da66s=false \
    -Dflys.backend.importer.skip.extra.wsts=false \
    -Dflys.backend.importer.skip.fixations=false \
    -Dflys.backend.importer.skip.flood.water=false \
    -Dflys.backend.importer.skip.flood.protection=false \
    -Dflys.backend.importer.skip.gauges=false \
    -Dflys.backend.importer.skip.historical.discharge.tables=false \
    -Dflys.backend.importer.skip.hyks=false \
    -Dflys.backend.importer.skip.official.lines=false \
    -Dflys.backend.importer.skip.prfs=false \
    -Dflys.backend.importer.skip.w80s=false \
    -Dflys.backend.importer.skip.wst=true \
    -Dflys.backend.importer.skip.waterlevel.differences=true \
    -Dflys.backend.importer.skip.waterlevels=true \
    -Dflys.backend.importer.skip.sq.relation=true \
    -Dflys.backend.importer.skip.sediment.density=true \
    -Dflys.backend.importer.skip.sediment.yield=true \
    -Dflys.backend.importer.skip.morphological.width=true \
    -Dflys.backend.importer.skip.flow.velocity=true \
    -Dflys.backend.importer.skip.bed.height.single=true \
    -Dflys.backend.importer.skip.bed.height.epoch=true \
    $JAR
}

import_morpho(){
    LOG_FILE=${LOG_DIR}/morpho.log
    echo Importing Morphological data.
    echo Logging to: $LOG_FILE
    sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties
    java -jar \
    -Xmx$MIN_MEMORY \
    -server \
    -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
    -Dflys.backend.user=$USER \
    -Dflys.backend.password=$PASS \
    -Dflys.backend.url=$BACKEND_URL \
    -Dflys.backend.driver=$BACKEND_DB_DRIVER \
    -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
    -Dflys.backend.importer.infogew.file="$GEW_FILE" \
    -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \
    -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \
    -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \
    -Dflys.backend.importer.skip.annotations=true \
    -Dflys.backend.importer.skip.bwastr=true \
    -Dflys.backend.importer.skip.da50s=true \
    -Dflys.backend.importer.skip.da66s=true \
    -Dflys.backend.importer.skip.extra.wsts=true \
    -Dflys.backend.importer.skip.fixations=true \
    -Dflys.backend.importer.skip.flood.water=true \
    -Dflys.backend.importer.skip.flood.protection=true \
    -Dflys.backend.importer.skip.gauges=true \
    -Dflys.backend.importer.skip.historical.discharge.tables=true \
    -Dflys.backend.importer.skip.hyks=true \
    -Dflys.backend.importer.skip.official.lines=true \
    -Dflys.backend.importer.skip.prfs=true \
    -Dflys.backend.importer.skip.w80s=true \
    -Dflys.backend.importer.skip.wst=true \
    -Dflys.backend.importer.skip.waterlevel.differences=false \
    -Dflys.backend.importer.skip.waterlevels=false \
    -Dflys.backend.importer.skip.sq.relation=false \
    -Dflys.backend.importer.skip.sediment.density=false \
    -Dflys.backend.importer.skip.sediment.yield=false \
    -Dflys.backend.importer.skip.morphological.width=false \
    -Dflys.backend.importer.skip.flow.velocity=false \
    -Dflys.backend.importer.skip.bed.height.single=false \
    -Dflys.backend.importer.skip.bed.height.epoch=false \
    $JAR
}

import_wst(){
    LOG_FILE=${LOG_DIR}/wst.log
    echo Importing WST data.
    echo Logging to: $LOG_FILE
    sed 's!./import.log!'"$LOG_FILE"'!' conf/log4j.properties > $LOG_DIR/log4j.properties
    java -jar \
    -Xmx$MIN_MEMORY \
    -server \
    -Dlog4j.configuration=file://$LOG_DIR/log4j.properties \
    -Dflys.backend.user=$USER \
    -Dflys.backend.password=$PASS \
    -Dflys.backend.url=$BACKEND_URL \
    -Dflys.backend.driver=$BACKEND_DB_DRIVER \
    -Dflys.backend.dialect=$BACKEND_DB_DIALECT \
    -Dflys.backend.importer.infogew.file="$GEW_FILE" \
    -Dflys.backend.main.value.types=$IMPORTER_MAINVALUE_TYPES \
    -Dflys.backend.importer.annotation.types=$IMPORTER_ANNOTATION_TYPES \
    -Dflys.backend.importer.dry.run=$IMPORTER_DRY_RUN \
    -Dflys.backend.importer.skip.annotations=true \
    -Dflys.backend.importer.skip.bwastr=true \
    -Dflys.backend.importer.skip.da50s=true \
    -Dflys.backend.importer.skip.da66s=true \
    -Dflys.backend.importer.skip.extra.wsts=true \
    -Dflys.backend.importer.skip.fixations=true \
    -Dflys.backend.importer.skip.flood.water=true \
    -Dflys.backend.importer.skip.flood.protection=true \
    -Dflys.backend.importer.skip.gauges=true \
    -Dflys.backend.importer.skip.historical.discharge.tables=true \
    -Dflys.backend.importer.skip.hyks=true \
    -Dflys.backend.importer.skip.official.lines=true \
    -Dflys.backend.importer.skip.prfs=true \
    -Dflys.backend.importer.skip.w80s=true \
    -Dflys.backend.importer.skip.wst=false \
    -Dflys.backend.importer.skip.waterlevel.differences=true \
    -Dflys.backend.importer.skip.waterlevels=true \
    -Dflys.backend.importer.skip.sq.relation=true \
    -Dflys.backend.importer.skip.sediment.density=true \
    -Dflys.backend.importer.skip.sediment.yield=true \
    -Dflys.backend.importer.skip.morphological.width=true \
    -Dflys.backend.importer.skip.flow.velocity=true \
    -Dflys.backend.importer.skip.bed.height.single=true \
    -Dflys.backend.importer.skip.bed.height.epoch=true \
    $JAR
}

import_geo(){
    LOG_FILE=${LOG_DIR}/geo.log
    echo Importing Geographic data.
    echo Logging to: $LOG_FILE

    python $DIR/geodaesie/shpimporter.py \
    --directory "$RIVER_PATH" \
    --river_name "$RIVER_NAME" \
    --ogr_connection "$OGR_CONNECTION" \
    --host $HOST \
    --user $USER \
    --password $PASS \
    --verbose 2 > "$LOG_FILE" 2>&1
}


if [ "$SKIP_HYDRO" != "TRUE" ]; then
    import_hydro
fi
if [ "$SKIP_WST" != "TRUE" ]; then
    import_wst
fi
if [ "$SKIP_MORPHO" != "TRUE" ]; then
    import_morpho
fi
if [ "$SKIP_GEO" != "TRUE" ]; then
    import_geo
fi
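# Example of a selective run using the skip flags defined above (the gew
# path is an illustrative value): import only the geographic data with
#   ./import_river.sh --skip-hydro --skip-morpho --skip-wst saar/saar.gew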