db_schema/setup-db.sh @ 1173:5239306ee55e pgaudit

Improve audit trail configuration. Audit only relevant columns, i.e. not internal ones like tree_modified. Also audit INSERT, because we will need it to track initial values.
author Tom Gottfried <tom@intevation.de>
date Fri, 11 Nov 2016 16:50:00 +0100
parents 74e5b9630064
children 2e59a51d914f
#!/bin/sh -e
# SYNOPSIS
# ./setup-db.sh [-cn] [ROLE_NAME] [ROLE_PW] [DB_NAME]
#   -c         clean - drop an existing database
#   -n         no data - do not import example data
#   ROLE_NAME  name of db user (default = lada)
#   ROLE_PW    login password  (default = ROLE_NAME)
#   DB_NAME    name of the database (default = ROLE_NAME)
#
# A remote database server will be used if the environment variable DB_SRV
# (and optionally DB_PORT) is set.
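#
# EXAMPLE (hypothetical values, assuming a PostgreSQL server reachable at
# db.example.org on port 5433):
#   DB_SRV=db.example.org DB_PORT=5433 ./setup-db.sh -c -n lada secret lada_test
# drops and recreates the database lada_test owned by role lada (password
# 'secret'), skipping the import of the example data.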

DIR=$(dirname "$0")

while getopts "cn" opt; do
    case "$opt" in
        c)
            DROP_DB="true"
            ;;
        n)
            NO_DATA="true"
            ;;
    esac
done

shift $((OPTIND-1))

ROLE_NAME=${1:-lada}
echo "ROLE_NAME = $ROLE_NAME"
ROLE_PW=${2:-$ROLE_NAME}
echo "ROLE_PW = $ROLE_PW"
DB_NAME=${3:-$ROLE_NAME}
echo "DB_NAME = $DB_NAME"
AUDITOR_ROLE="auditor"

# If DB_SRV and optionally DB_PORT are set, a remote database connection is used
if [ -n "$DB_SRV" ] ; then DB_CONNECT_STRING="-h $DB_SRV" ; fi
if [ -n "$DB_SRV" ] && [ -n "$DB_PORT" ] ; then
  DB_CONNECT_STRING="$DB_CONNECT_STRING -p $DB_PORT"
fi
DB_CONNECT_STRING="$DB_CONNECT_STRING -U postgres"
echo "DB_CONNECT_STRING = $DB_CONNECT_STRING"
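# With e.g. DB_SRV=db.example.org and DB_PORT=5433 (hypothetical values) the
# string resolves to "-h db.example.org -p 5433 -U postgres"; without DB_SRV
# it is just "-U postgres", i.e. a local connection as superuser postgres.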

if [ $(psql $DB_CONNECT_STRING -t --command "SELECT count(*) FROM pg_catalog.pg_user WHERE usename = '$ROLE_NAME'") -eq 0 ] ; then
  echo create user $ROLE_NAME
  psql $DB_CONNECT_STRING --command "CREATE USER $ROLE_NAME PASSWORD '$ROLE_PW';"
fi

if [ $(psql $DB_CONNECT_STRING -t --command \
            "SELECT count(*) FROM pg_roles WHERE rolname = '$AUDITOR_ROLE'") \
         -eq 0 ]
then
  echo create role $AUDITOR_ROLE
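  # $AUDITOR_ROLE never logs in; it only acts as pgaudit's audit role
  # (see pgaudit.role below): statements touching objects on which this role
  # holds a privilege are the ones that get audited.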
  psql $DB_CONNECT_STRING --command "CREATE ROLE $AUDITOR_ROLE"
fi

if [ "$DROP_DB" = "true" ] && psql $DB_CONNECT_STRING -l | grep -q "^ $DB_NAME " ; then
  echo drop db $DB_NAME
  psql $DB_CONNECT_STRING --command "DROP DATABASE $DB_NAME"
fi

echo create db $DB_NAME
psql $DB_CONNECT_STRING --command \
     "CREATE DATABASE $DB_NAME WITH OWNER = $ROLE_NAME ENCODING = 'UTF8'"

echo create extensions
psql $DB_CONNECT_STRING -d $DB_NAME  --command  \
     "CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public;
      CREATE EXTENSION IF NOT EXISTS pgaudit;"
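# Note: CREATE EXTENSION only installs pgaudit's SQL-level objects; for the
# audit logging itself the server must also load the extension via
# shared_preload_libraries in postgresql.conf.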

echo create stammdaten schema
psql -q $DB_CONNECT_STRING -d $DB_NAME -f $DIR/stammdaten_schema.sql

echo create lada schema
psql -q $DB_CONNECT_STRING -d $DB_NAME -f $DIR/lada_schema.sql

echo set grants
psql $DB_CONNECT_STRING -d $DB_NAME --command \
     "GRANT USAGE ON SCHEMA stammdaten, land TO $ROLE_NAME;
      GRANT USAGE
            ON ALL SEQUENCES IN SCHEMA stammdaten, land TO $ROLE_NAME;
      GRANT SELECT, INSERT, UPDATE, DELETE, REFERENCES
            ON ALL TABLES IN SCHEMA stammdaten, land TO $ROLE_NAME;"

echo configure audit trail
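# Object auditing with pgaudit: pgaudit.role designates $AUDITOR_ROLE as the
# audit role, so every statement that uses a privilege granted to that role
# below ends up in the audit log. DELETE is audited for whole tables, INSERT
# for whole rows (to track initial values), and UPDATE only for the listed,
# user-facing columns, keeping internal columns like tree_modified out of the
# audit trail. pgaudit.log_parameter additionally logs statement parameters,
# i.e. the actual values written.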
psql $DB_CONNECT_STRING -d $DB_NAME --command \
     "ALTER DATABASE $DB_NAME SET pgaudit.role TO '$AUDITOR_ROLE';
      ALTER DATABASE $DB_NAME SET pgaudit.log_parameter TO on;
      GRANT DELETE ON
          land.probe,
          land.ortszuordnung,
          land.zusatz_wert,
          land.messung,
          land.messwert,
          land.status_protokoll
          TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE, DELETE ON
          land.kommentar_p,
          land.kommentar_m
          TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              id_alt,
              test,
              mst_id,
              labor_mst_id,
              hauptproben_nr,
              datenbasis_id,
              ba_id,
              probenart_id,
              media_desk,
              media,
              umw_id,
              probeentnahme_beginn,
              probeentnahme_ende,
              mittelungsdauer,
              erzeuger_id,
              probe_nehmer_id,
              mpl_id,
              mpr_id,
              solldatum_beginn,
              solldatum_ende
          ) ON land.probe TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              ort_id,
              ortszuordnung_typ,
              ortszusatztext
          ) ON land.ortszuordnung TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              pzs_id,
              messwert_pzs,
              messfehler,
              nwg_zu_messwert
          ) ON land.zusatz_wert TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              id_alt,
              nebenproben_nr,
              mmt_id,
              messdauer,
              messzeitpunkt,
              fertig,
              geplant
          ) ON land.messung TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              messgroesse_id,
              messwert_nwg,
              messwert,
              messfehler,
              nwg_zu_messwert,
              meh_id,
              grenzwertueberschreitung
          ) ON land.messwert TO $AUDITOR_ROLE;
      GRANT INSERT, UPDATE (
              mst_id,
              datum,
              text,
              messungs_id,
              status_kombi
          ) ON land.status_protokoll TO $AUDITOR_ROLE;"

if [ "$NO_DATA" != "true" ]; then
    echo import stammdaten
    psql -q $DB_CONNECT_STRING -d $DB_NAME -f $DIR/stammdaten_data.sql

    echo import lada test data
    psql -q $DB_CONNECT_STRING -d $DB_NAME -f $DIR/lada_data.sql

    echo create geo schema
    psql $DB_CONNECT_STRING -d $DB_NAME --command "CREATE SCHEMA geo AUTHORIZATION $ROLE_NAME"

    TS="0101"
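    # Load German administrative boundaries: fetch the VG250 shapefile bundle
    # from the BKG download server (geodatenzentrum.de) unless already present
    # in /tmp, extract only the municipality layer (VG250_GEM) and import it
    # as geo.gem_utm via shp2pgsql.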
    cd /tmp
    if [ ! -f vg250_${TS}.utm32s.shape.ebenen.zip ]; then
        curl -O \
            http://sg.geodatenzentrum.de/web_download/vg/vg250_${TS}/utm32s/shape/vg250_${TS}.utm32s.shape.ebenen.zip
    fi
    unzip vg250_${TS}.utm32s.shape.ebenen.zip "*VG250_GEM*"
    cd vg250_${TS}.utm32s.shape.ebenen/vg250_ebenen/
    shp2pgsql VG250_GEM geo.gem_utm | psql -q $DB_CONNECT_STRING -d $DB_NAME
    cd /tmp
    rm -rf vg250_${TS}.utm32s.shape.ebenen
fi