addback deltaScripts for 15.1.1
This commit is contained in:
parent
f3aca814e2
commit
200426ddef
9 changed files with 363 additions and 0 deletions
27
deltaScripts/15.1.1/DR3763/convertFile.sh
Executable file
27
deltaScripts/15.1.1/DR3763/convertFile.sh
Executable file
|
@ -0,0 +1,27 @@
|
||||||
|
#!/bin/bash
# This searches for archiver xml files in common_static and sends them to updateRetentionTags.sh
# to update retention hour tags.
#
# The base files should be updated when upgraded to 15.1.1 but this will handle any that are added.

COMMON=/awips2/edex/data/utility/common_static
DIR=$(dirname "$0")

echo "+++ checking base +++"
"$DIR/updateRetentionTags.sh" "${COMMON}"/base/archiver/purger/*.xml

# Walk every localization level other than base/configured looking for
# site/user-level archiver purger overrides.  Globbing with a trailing '/'
# matches directories only, which replaces the old `ls` parsing and the
# explicit -d test while staying safe for names with whitespace.
for dirPath in "${COMMON}"/*/ ; do
    dir=$(basename "$dirPath")
    if [[ "$dir" != "base" && "$dir" != "configured" ]] ; then
        echo "+++ checking $dir +++"
        for dPath in "${COMMON}/$dir"/*/ ; do
            pDir="${dPath}archiver/purger"
            if [[ -d "$pDir" ]] ; then
                for f in "$pDir"/*.xml ; do
                    # An unmatched glob stays literal; skip that case.
                    [[ -e "$f" ]] || continue
                    "$DIR/updateRetentionTags.sh" "$f"
                done
            fi
        done
    fi
done
|
22
deltaScripts/15.1.1/DR3763/updateRetentionTags.sh
Executable file
22
deltaScripts/15.1.1/DR3763/updateRetentionTags.sh
Executable file
|
@ -0,0 +1,22 @@
|
||||||
|
#!/bin/bash
# This assumes arguments are archiver xml files that need to have the tag minRetentionHours
# changed to defaultRetentionHours and extRetentionHours to selectedRetentionHours.
# If the conversion is not needed the file is left untouched otherwise the original has '.bak' appended
# to its name and it is replaced with the converted file.

# "$@" (not $*) keeps each argument intact even when a path contains
# whitespace.
for f in "$@" ; do
    tmp="${f}.$$"
    rm -f "${tmp}"
    sed -e 's/minRetentionHours>/defaultRetentionHours>/g' -e 's/extRetentionHours>/selectedRetentionHours>/g' "$f" > "${tmp}"
    # Only replace the original when the conversion actually changed it.
    if ! cmp -s "$f" "${tmp}" ; then
        rm -f "${f}.bak"
        mv "$f" "${f}.bak"
        mv "${tmp}" "$f"
        chmod 664 "$f"
        chown awips:fxalpha "$f"
        echo "converted $f"
    else
        echo "No conversion needed for $f"
        rm -f "${tmp}"
    fi
done
|
12
deltaScripts/15.1.1/DR4001/dropHazardServicesTables.sh
Normal file
12
deltaScripts/15.1.1/DR4001/dropHazardServicesTables.sh
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
#!/bin/bash
# DR #4001 - force hazard services tables to be recreated

PSQL="/awips2/psql/bin/psql"

echo "INFO: Dropping hazard services tables"

# Run each cleanup statement in turn; marking the plugins uninitialized
# causes EDEX to recreate the tables on next startup.
statements=(
    "DROP TABLE IF EXISTS productdata, producttext, practice_hazards, practice_hazards_attributes, hazards_interoperability, hazards_interoperability_gfe CASCADE;"
    "DROP TABLE IF EXISTS practice_hazards_interoperability, practice_hazards_interoperability_gfe CASCADE;"
    "UPDATE plugin_info SET initialized=false WHERE name='hazards' OR name ='com.raytheon.uf.common.hazards.productgen';"
)

for stmt in "${statements[@]}"
do
    ${PSQL} -U awips -d metadata -q -c "${stmt}"
done

echo "INFO: Hazard services tables successfully dropped."
|
104
deltaScripts/15.1.1/DR4103/file_header.txt
Normal file
104
deltaScripts/15.1.1/DR4103/file_header.txt
Normal file
|
@ -0,0 +1,104 @@
|
||||||
|
# Variables used by service backup:
|
||||||
|
#
|
||||||
|
# AWIPS_HOME: The AWIPS II installation directory.
|
||||||
|
#
|
||||||
|
# GFESUITE_HOME: The server directory containing files and programs
|
||||||
|
# used by GFE during Service Backup
|
||||||
|
#
|
||||||
|
# GFESUITE_BIN: Directory containing GFE server side utility
|
||||||
|
# programs including ifpnetCDF and iscMosaic
|
||||||
|
#
|
||||||
|
# SVCBU_HOME: Directory used by service backup as a sandbox for
|
||||||
|
# constructing files to be sent and for processing
|
||||||
|
# received files.
|
||||||
|
#
|
||||||
|
# LOCALIZATION_PATH: This is the path to the root of the localization
|
||||||
|
# directory. This path is used for properly importing
|
||||||
|
# and exporting configuration data
|
||||||
|
#
|
||||||
|
# IFPS_LOG: Directory containing logs for the service backup
|
||||||
|
# operations.
|
||||||
|
#
|
||||||
|
# IFPS_DATA: Directory containing the svcbu_export_elements file.
|
||||||
|
# This file is used to specify which weather elements are
|
||||||
|
# packaged and sent when exporting digital data for a
|
||||||
|
# site.
|
||||||
|
#
|
||||||
|
# LOCK_DIR: Directory used for lock files. Each Service Backup
|
||||||
|
# operation maintains a lock file during its execution.
|
||||||
|
# The lock remains for the duration of the operation and
|
||||||
|
# is erased upon completion to prevent simultaneous
|
||||||
|
# operations from occurring.
|
||||||
|
#
|
||||||
|
# SCRIPTS_DIR: Directory containing the scripts used by service
|
||||||
|
# backup
|
||||||
|
#
|
||||||
|
# CAVE_LAUNCH_SCRIPT: This path points to the script which starts GFE. This
|
||||||
|
# variable is read when the user hits the 'Enable' button
|
||||||
|
#                     on the service backup GUI.
|
||||||
|
#
|
||||||
|
# SVCBU_HOST: Server where the service backup scripts will be
|
||||||
|
# executed.
|
||||||
|
#
|
||||||
|
# MSG_SEND_COMMAND: The command executed to send a message via the message handling
|
||||||
|
# system. This value will usually be msg_send. But, it can be
|
||||||
|
# changed to a different command in a test environment.
|
||||||
|
#
|
||||||
|
# CDSPORT: This is the port on which the Thrift Client listens
|
||||||
|
# for script execution events.
|
||||||
|
#
|
||||||
|
# SVCBU_DB: Defines which database to use for exporting
|
||||||
|
# grids to central server for service backup.
|
||||||
|
# VALID VALUES: Fcst
|
||||||
|
# Official (default)
|
||||||
|
#
|
||||||
|
# SVCBU_TRIM_ELEMS: Indication of whether ifpnetCDF needs to trim
|
||||||
|
# off elements while exporting grids to central
|
||||||
|
# server.
|
||||||
|
# VALID VALUES: 1 - To do element trimming
|
||||||
|
# 0 - To disable element trimming
|
||||||
|
# Note: ${IFPS_DATA}/svcbu_export_elements.ccc
|
||||||
|
# file has to be present for this to work. This file
|
||||||
|
# will contain list of elements to include in the
|
||||||
|
# netcdf file that's being sent over to central srv.
|
||||||
|
#
|
||||||
|
# SVCBU_FAILED_SITE_PORT: Unused
|
||||||
|
#
|
||||||
|
# SVCBU_GRIDAREA: The name of the edit area used when exporting grids
|
||||||
|
# to the central server for service backup and
|
||||||
|
#                     imported to the Restore database after service backup.
|
||||||
|
#                     DEFAULT VALUE: ISC_Send_Area
|
||||||
|
#
|
||||||
|
# SVCBU_ADDRESSEE: The name of the msg_send addressee. Will be used to
|
||||||
|
# pass with -a flag of msg_send. (NCF use only).
|
||||||
|
#
|
||||||
|
# SVCBU_WMO_HEADER: The WMO header that will be used to pass in calls to
|
||||||
|
# msg_send with -i argument. This will be empty to
|
||||||
|
# begin with. Should not be changed. (NCF use only)
|
||||||
|
#
|
||||||
|
# EXPORT_GRID Indicate the ways of grid being exported
|
||||||
|
# VALID VALUES: 0 = do not export grids
|
||||||
|
# 1 = grids are exported by quartz timer
|
||||||
|
# at 15 after each hour, the service
|
||||||
|
# backup GUI, and from GFE via the
|
||||||
|
# 'Send Grids to NDFD...' script
|
||||||
|
# 2 = grids are exported only by the service backup GUI and from GFE via the 'Send
|
||||||
|
#                     Grids to NDFD...' script
|
||||||
|
#
|
||||||
|
# SVCBU_USER Indicates that the site can configure a special user to
|
||||||
|
# run GFE when in service backup
|
||||||
|
# VALID VALUES: 0 = do not use a designated user to run
|
||||||
|
# GFE when in service backup
|
||||||
|
# 1 = use a designated user to run GFE
|
||||||
|
# when in service backup
|
||||||
|
#
|
||||||
|
# SVCBU_USER_ID The user id of the designated user to run GFE when
|
||||||
|
# in service backup
|
||||||
|
#
|
||||||
|
# PRIMARY_SITES (Optional) For dual-domain sites, a comma-separated
|
||||||
|
# list of sites for the export grids cron to run for
|
||||||
|
# instead of the site defined as AW_SITE_IDENTIFIER. If
|
||||||
|
# this setting is empty or not defined, cron will only
|
||||||
|
# export grids for site set as AW_SITE_IDENTIFIER.
|
||||||
|
#
|
||||||
|
#
|
55
deltaScripts/15.1.1/DR4103/migrateSvcbuProperties.sh
Normal file
55
deltaScripts/15.1.1/DR4103/migrateSvcbuProperties.sh
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
#!/bin/bash

# DR #4103: migrate the legacy svcbu.properties file into the localization
# store.  Each known configuration entry in the old site-level file is
# compared against the BASE-level localization file; only differing entries
# are written to a new SITE-level override file, then the old file is removed.

# Determine which directory this script lives in so we can locate
# file_header.txt ...
script_path=$(dirname "$(readlink -f "$0")")
file_header=${script_path}/file_header.txt

echo "DR #4103: Moving svcbu.properties file to localization store..."

# source edex setup.env to get primary site id
source /awips2/edex/bin/setup.env
# Quote the tr bracket expressions so the shell cannot glob-expand them
# against files in the current directory.
site_id=$(echo "${AW_SITE_IDENTIFIER}" | tr '[a-z]' '[A-Z]')

base_file=/awips2/edex/data/utility/edex_static/base/config/gfe/svcbu.properties
old_site_file=/awips2/GFESuite/ServiceBackup/configuration/svcbu.properties
if [[ ! -f ${base_file} ]]
then
    echo "ERROR: Can not find BASE-level svcbu.properties file ${base_file}."
    echo "Exiting!"
    exit 1
fi
if [[ ! -f ${old_site_file} ]]
then
    echo "ERROR: Can not find previous version's svcbu.properties file ${old_site_file}."
    echo "Exiting!"
    exit 1
fi

site_override_contents=""

# The complete set of recognized svcbu.properties configuration keys.
config_entries=( "GFESUITE_HOME" "GFESUITE_BIN" "SVCBU_HOME" "LOCALIZATION_PATH" "IFPS_LOG" "IFPS_DATA" "LOCK_DIR" "SCRIPTS_DIR" "CAVE_LAUNCH_SCRIPT" "SVCBU_HOST" "MSG_SEND_COMMAND" "CDSPORT" "SVCBU_DB" "SVCBU_TRIM_ELEMS" "SVCBU_FAILED_SITE_PORT" "SVCBU_GRIDAREA" "SVCBU_ADDRESSEE" "SVCBU_WMO_HEADER" "SVCBU_USER" "SVCBU_USER_ID" "EXPORT_GRID" "PRIMARY_SITES" )

for entry in "${config_entries[@]}"
do
    base_value=$(grep -E "^${entry}=" "${base_file}")
    site_value=$(grep -E "^${entry}=" "${old_site_file}")
    # Keep only entries whose site-level value differs from the base value.
    if [ "${base_value}" != "${site_value}" ]
    then
        site_override_contents="${site_override_contents}\n${site_value}"
    fi
done

if [[ -n "${site_override_contents}" ]]
then
    new_site_file=/awips2/edex/data/utility/edex_static/site/${site_id}/config/gfe/svcbu.properties

    echo "Writing new site override file ${new_site_file}."
    # The SITE-level directory may not exist yet on a fresh install.
    mkdir -p "$(dirname "${new_site_file}")"
    cat "${file_header}" > "${new_site_file}"
    echo "" >> "${new_site_file}"
    # Quote the expansion so whitespace inside property values survives;
    # -e turns the collected \n separators into real newlines.
    echo -e "${site_override_contents}" >> "${new_site_file}"
fi

rm -f "${old_site_file}"
|
||||||
|
|
23
deltaScripts/15.1.1/DR4260/changeDefaultModel.sh
Normal file
23
deltaScripts/15.1.1/DR4260/changeDefaultModel.sh
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
#!/bin/sh
# sed/changeword
# changes one model to another model for SCANRunSiteConfig.xml files
#
# DR #4260: replace RUC130 with HRRR in every SCANRunSiteConfig.xml under
# the common_static localization tree.

echo "Updating all SCANRunSiteConfig.xml files to use HRRR instead of RUC130."

old=RUC130
new=HRRR

# Let find do the name matching (instead of piping through grep) and read
# the paths line-by-line so file names containing spaces stay intact.
find /awips2/edex/data/utility/common_static -type f -name '*SCANRunSiteConfig.xml*' |
while IFS= read -r f
do
    echo "Processing file: $f"
    # Write the converted copy first and replace the original only if sed
    # succeeded, so a failure cannot destroy the existing configuration.
    if sed "s/$old/$new/g" "$f" > "$f.new"
    then
        mv "$f.new" "$f"
        echo "$f done"
    else
        rm -f "$f.new"
    fi
done
|
23
deltaScripts/15.1.1/DR4319/AddRadarVolumScanNumber.sh
Executable file
23
deltaScripts/15.1.1/DR4319/AddRadarVolumScanNumber.sh
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
#!/bin/bash
# DR #4319: add the volumescannumber column to the radar table and
# backfill existing rows.

PSQL=/awips2/psql/bin/psql

echo "Updating radar table to include volume scan number."

# The DO block makes the ALTER idempotent: rerunning the script when the
# column already exists only raises a NOTICE instead of failing.
SQL="
DO \$\$
BEGIN
ALTER TABLE radar ADD COLUMN volumescannumber integer;
EXCEPTION
WHEN duplicate_column THEN RAISE NOTICE 'column volumescannumber already exists in radar.';
END;
\$\$
"

if ! ${PSQL} -U awips -d metadata -c "${SQL}"
then
    echo "Failed to update radar table."
    exit 1
fi

${PSQL} -U awips -d metadata -c "UPDATE radar SET volumescannumber=0 WHERE volumescannumber IS NULL;"

echo "Done"
|
3
deltaScripts/15.1.1/DR4522/updateActiveTable.sh
Normal file
3
deltaScripts/15.1.1/DR4522/updateActiveTable.sh
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
#!/bin/bash
# DR #4522: apply the activetable schema changes.

# Resolve the SQL file relative to this script (same approach as the other
# 15.1.1 delta scripts) so the update works regardless of the caller's
# working directory.
script_path=$(dirname "$(readlink -f "$0")")

psql -h dx1 -U awips -d metadata -f "${script_path}/updateActiveTable.sql"
|
94
deltaScripts/15.1.1/DR4522/updateActiveTable.sql
Normal file
94
deltaScripts/15.1.1/DR4522/updateActiveTable.sql
Normal file
|
@ -0,0 +1,94 @@
|
||||||
|
-- DR #4522: replace the synthetic "id" primary key on activetable and
-- practice_activetable with the natural key (etn, officeid, phen, sig,
-- ugczone), tightening several varchar column widths along the way.
-- Deduplication must run (and commit) before the new primary key can be
-- added, so the work is split across separate transactions.
BEGIN;
-- first remove any duplicate records
DROP TABLE IF EXISTS t_deleteIds;
CREATE TEMP TABLE t_deleteIds (id int);

-- For each (officeid, phen, sig, etn, ugczone) group, keep only the most
-- recently issued row; every other row's id is collected for deletion.
INSERT INTO t_deleteIds(id) (
SELECT id FROM (
SELECT id,
ROW_NUMBER() OVER(PARTITION BY officeid, phen, sig, etn, ugczone ORDER BY issuetime DESC) AS Row
FROM activetable
) dups
WHERE dups.Row > 1);

DELETE FROM activetable a using t_deleteIds t WHERE a.id = t.id;
COMMIT;

BEGIN;
-- drop the old id column as primary key
ALTER TABLE activetable DROP CONSTRAINT IF EXISTS activetable_pkey;
ALTER TABLE activetable DROP COLUMN IF EXISTS id;
DROP SEQUENCE IF EXISTS activetableseq;

-- set proper length on several columns
ALTER TABLE activetable ALTER COLUMN act TYPE character varying(3);
ALTER TABLE activetable ALTER COLUMN wmoid TYPE character varying(22);
ALTER TABLE activetable ALTER COLUMN vtecstr TYPE character varying(48);
ALTER TABLE activetable ALTER COLUMN productclass TYPE character varying(1);
ALTER TABLE activetable ALTER COLUMN locationid TYPE character varying(5);
ALTER TABLE activetable ALTER COLUMN floodseverity TYPE character varying(1);
ALTER TABLE activetable ALTER COLUMN immediatecause TYPE character varying(2);
ALTER TABLE activetable ALTER COLUMN officeid TYPE character varying(4);
ALTER TABLE activetable ALTER COLUMN phen TYPE character varying(2);
ALTER TABLE activetable ALTER COLUMN sig TYPE character varying(1);
ALTER TABLE activetable ALTER COLUMN ugczone TYPE character varying(6);

-- add new primary key
ALTER TABLE activetable ALTER COLUMN officeid SET NOT NULL;
ALTER TABLE activetable ALTER COLUMN phen SET NOT NULL;
ALTER TABLE activetable ALTER COLUMN sig SET NOT NULL;
ALTER TABLE activetable ALTER COLUMN etn SET NOT NULL;
ALTER TABLE activetable ALTER COLUMN ugczone SET NOT NULL;
ALTER TABLE activetable ADD CONSTRAINT activetable_pkey PRIMARY KEY (etn, officeid, phen, sig, ugczone);
COMMIT;
-- Reclaim space from the dropped column/deleted rows and refresh stats.
VACUUM FULL ANALYZE activetable;


-- now do the same for the practice_activetable
BEGIN;
-- first remove any duplicate records
DROP TABLE IF EXISTS t_deleteIds;
CREATE TEMP TABLE t_deleteIds (id int);

INSERT INTO t_deleteIds(id) (
SELECT id FROM (
SELECT id,
ROW_NUMBER() OVER(PARTITION BY officeid, phen, sig, etn, ugczone ORDER BY issuetime DESC) AS Row
FROM practice_activetable
) dups
WHERE dups.Row > 1);

DELETE FROM practice_activetable a using t_deleteIds t WHERE a.id = t.id;
COMMIT;

BEGIN;
-- drop the old id column as primary key
ALTER TABLE practice_activetable DROP CONSTRAINT IF EXISTS practice_activetable_pkey;
ALTER TABLE practice_activetable DROP COLUMN IF EXISTS id;
DROP SEQUENCE IF EXISTS practice_activetableseq;

-- set proper length on several columns
ALTER TABLE practice_activetable ALTER COLUMN act TYPE character varying(3);
ALTER TABLE practice_activetable ALTER COLUMN wmoid TYPE character varying(22);
ALTER TABLE practice_activetable ALTER COLUMN vtecstr TYPE character varying(48);
ALTER TABLE practice_activetable ALTER COLUMN productclass TYPE character varying(1);
ALTER TABLE practice_activetable ALTER COLUMN locationid TYPE character varying(5);
ALTER TABLE practice_activetable ALTER COLUMN floodseverity TYPE character varying(1);
ALTER TABLE practice_activetable ALTER COLUMN immediatecause TYPE character varying(2);
ALTER TABLE practice_activetable ALTER COLUMN officeid TYPE character varying(4);
ALTER TABLE practice_activetable ALTER COLUMN phen TYPE character varying(2);
ALTER TABLE practice_activetable ALTER COLUMN sig TYPE character varying(1);
ALTER TABLE practice_activetable ALTER COLUMN ugczone TYPE character varying(6);

-- add new primary key
ALTER TABLE practice_activetable ALTER COLUMN officeid SET NOT NULL;
ALTER TABLE practice_activetable ALTER COLUMN phen SET NOT NULL;
ALTER TABLE practice_activetable ALTER COLUMN sig SET NOT NULL;
ALTER TABLE practice_activetable ALTER COLUMN etn SET NOT NULL;
ALTER TABLE practice_activetable ALTER COLUMN ugczone SET NOT NULL;
ALTER TABLE practice_activetable ADD CONSTRAINT practice_activetable_pkey PRIMARY KEY (etn, officeid, phen, sig, ugczone);
COMMIT;
VACUUM FULL ANALYZE practice_activetable;

-- Clean up the scratch table used for deduplication.
DROP TABLE IF EXISTS t_deleteIds;
|
||||||
|
|
Loading…
Add table
Reference in a new issue